/* Profile counter container type.
   Copyright (C) 2017-2023 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_PROFILE_COUNT_H
#define GCC_PROFILE_COUNT_H
/* Quality of the profile count.  Because gengtype does not support enums
   inside of classes, this is in global namespace.  */
enum profile_quality
{
  /* Uninitialized value.  */
  UNINITIALIZED_PROFILE,

  /* Profile is based on static branch prediction heuristics and may
     or may not match reality.  It is local to function and cannot be compared
     inter-procedurally.  Never used by probabilities (they are always local).  */
  GUESSED_LOCAL,

  /* Profile was read by feedback and was 0, we used local heuristics to guess
     better.  This is the case of functions not run in profile feedback.
     Never used by probabilities.  */
  GUESSED_GLOBAL0,

  /* Same as GUESSED_GLOBAL0 but global count is adjusted 0.  */
  GUESSED_GLOBAL0_ADJUSTED,

  /* Profile is based on static branch prediction heuristics.  It may or may
     not reflect the reality but it can be compared interprocedurally
     (for example, we inlined function w/o profile feedback into function
      with feedback and propagated from that).
     Never used by probabilities.  */
  GUESSED,

  /* Profile was determined by autofdo.  */
  AFDO,

  /* Profile was originally based on feedback but it was adjusted
     by code duplicating optimization.  It may not precisely reflect the
     particular code path.  */
  ADJUSTED,

  /* Profile was read from profile feedback or determined by accurate static
     method.  */
  PRECISE
};

extern const char *profile_quality_as_string (enum profile_quality);
extern bool parse_profile_quality (const char *value,
				   profile_quality *quality);
/* The base value for branch probability notes and edge probabilities.  */
#define REG_BR_PROB_BASE  10000

#define RDIV(X,Y) (((X) + (Y) / 2) / (Y))

bool slow_safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res);
/* Compute RES=(a*b + c/2)/c capping and return false if overflow happened.  */

inline bool
safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res)
{
#if (GCC_VERSION >= 5000)
  uint64_t tmp;
  if (!__builtin_mul_overflow (a, b, &tmp)
      && !__builtin_add_overflow (tmp, c / 2, &tmp))
    {
      *res = tmp / c;
      return true;
    }
#else
  if (a < ((uint64_t)1 << 31)
      && b < ((uint64_t)1 << 31)
      && c < ((uint64_t)1 << 31))
    {
      *res = (a * b + (c / 2)) / c;
      return true;
    }
#endif
  return slow_safe_scale_64bit (a, b, c, res);
}
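
/* Usage sketch (illustrative only, not part of the API): scale a 64-bit
   counter by NUM/DEN with rounding to nearest.  On overflow the result is
   capped and false is returned, as described above.  COUNT, NUM and DEN
   are hypothetical caller-provided values:

     uint64_t scaled;
     bool exact = safe_scale_64bit (count, num, den, &scaled);  */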
/* Data type to hold probabilities.  It implements fixed point arithmetic
   with capping so probability is always in range [0,1]; scaling that would
   require values greater than 1 needs to be represented otherwise.

   In addition to the actual value, the quality of the profile is tracked and
   propagated through all operations.  The special value UNINITIALIZED_PROFILE
   is used for probabilities that have not been determined yet (for example
   because of -fno-guess-branch-probability).

   Typically probabilities are derived from profile feedback (via
   probability_in_gcov_type), autoFDO or guessed statically and then propagated
   throughout the compilation.

   Named probabilities are available:
     - never (0 probability)
     - guessed_never
     - very_unlikely (1/2000 probability)
     - unlikely (1/5 probability)
     - even (1/2 probability)
     - likely (4/5 probability)
     - very_likely (1999/2000 probability)
     - guessed_always
     - always (1 probability)

   Named probabilities except for never/always are assumed to be statically
   guessed and thus not necessarily accurate.  The difference between never
   and guessed_never is that the first one should be used only in cases where
   a well behaving program will very likely not execute the "never" path.
   For example, if the path leads to an abort () call or to exception handling.

   Always and guessed_always probabilities are symmetric.

   For legacy code we support conversion to/from REG_BR_PROB_BASE based
   fixpoint integer arithmetic.  Once the code is converted to branch
   probabilities, these conversions will probably go away because they are
   lossy.  */
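
/* Illustrative sketch of typical use (assumes the GCC-internal context in
   which this header is compiled; variable names are only examples):

     profile_probability p = profile_probability::even ();    // 1/2, GUESSED
     profile_probability q = p.invert ();                     // 1 - p
     int note = p.to_reg_br_prob_note ();                     // RTL encoding
     profile_probability r
       = profile_probability::from_reg_br_prob_note (note);   // round trip

   Quality is carried along: p.quality () here is GUESSED, while
   profile_probability::always ().quality () is PRECISE.  */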
class GTY((user)) profile_probability
{
  static const int n_bits = 29;
  /* We can technically use ((uint32_t) 1 << (n_bits - 1)) - 2 but that
     will lead to harder multiplication sequences.  */
  static const uint32_t max_probability = (uint32_t) 1 << (n_bits - 2);
  static const uint32_t uninitialized_probability
	 = ((uint32_t) 1 << (n_bits - 1)) - 1;

  uint32_t m_val : 29;
  enum profile_quality m_quality : 3;

  friend struct profile_count;
public:
  profile_probability (): m_val (uninitialized_probability),
    m_quality (GUESSED)
  {}

  profile_probability (uint32_t val, profile_quality quality):
    m_val (val), m_quality (quality)
  {}
  /* Named probabilities.  */
  static profile_probability never ()
    {
      profile_probability ret;
      ret.m_val = 0;
      ret.m_quality = PRECISE;
      return ret;
    }

  static profile_probability guessed_never ()
    {
      profile_probability ret;
      ret.m_val = 0;
      ret.m_quality = GUESSED;
      return ret;
    }

  static profile_probability very_unlikely ()
    {
      /* Be consistent with PROB_VERY_UNLIKELY in predict.h.  */
      profile_probability r = guessed_always () / 2000;
      r.m_val--;
      return r;
    }

  static profile_probability unlikely ()
    {
      /* Be consistent with PROB_VERY_LIKELY in predict.h.  */
      profile_probability r = guessed_always () / 5;
      r.m_val--;
      return r;
    }

  static profile_probability even ()
    {
      return guessed_always () / 2;
    }

  static profile_probability very_likely ()
    {
      return always () - very_unlikely ();
    }

  static profile_probability likely ()
    {
      return always () - unlikely ();
    }

  /* Return true when value is not zero and can be used for scaling.  */
  bool nonzero_p () const
    {
      return initialized_p () && m_val != 0;
    }
  static profile_probability guessed_always ()
    {
      profile_probability ret;
      ret.m_val = max_probability;
      ret.m_quality = GUESSED;
      return ret;
    }

  static profile_probability always ()
    {
      profile_probability ret;
      ret.m_val = max_probability;
      ret.m_quality = PRECISE;
      return ret;
    }
  /* Probabilities which have not been initialized.  Either because
     initialization did not happen yet or because profile is unknown.  */
  static profile_probability uninitialized ()
    {
      profile_probability c;
      c.m_val = uninitialized_probability;
      c.m_quality = GUESSED;
      return c;
    }

  /* Return true if value has been initialized.  */
  bool initialized_p () const
    {
      return m_val != uninitialized_probability;
    }

  /* Return true if value can be trusted.  */
  bool reliable_p () const
    {
      return m_quality >= ADJUSTED;
    }
  /* Conversion from and to REG_BR_PROB_BASE integer fixpoint arithmetic.
     This is mostly to support legacy code and should go away.  */
  static profile_probability from_reg_br_prob_base (int v)
    {
      profile_probability ret;
      gcc_checking_assert (v >= 0 && v <= REG_BR_PROB_BASE);
      ret.m_val = RDIV (v * (uint64_t) max_probability, REG_BR_PROB_BASE);
      ret.m_quality = GUESSED;
      return ret;
    }

  /* Return THIS with quality set to ADJUSTED.  */
  profile_probability adjusted () const
    {
      profile_probability ret = *this;
      if (!initialized_p ())
	return *this;
      ret.m_quality = ADJUSTED;
      return ret;
    }

  int to_reg_br_prob_base () const
    {
      gcc_checking_assert (initialized_p ());
      return RDIV (m_val * (uint64_t) REG_BR_PROB_BASE, max_probability);
    }
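
  /* Example of the legacy fixpoint conversion (illustrative sketch):
     REG_BR_PROB_BASE is 10000, so

       profile_probability::from_reg_br_prob_base (5000)

     is a GUESSED probability of roughly 1/2, and to_reg_br_prob_base ()
     maps it back to approximately 5000 (the round trip may lose a little
     precision, as noted above).  */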
  /* Conversion to and from RTL representation of profile probabilities.  */
  static profile_probability from_reg_br_prob_note (int v)
    {
      profile_probability ret;
      ret.m_val = ((unsigned int)v) / 8;
      ret.m_quality = (enum profile_quality)(v & 7);
      return ret;
    }

  int to_reg_br_prob_note () const
    {
      gcc_checking_assert (initialized_p ());
      int ret = m_val * 8 + m_quality;
      gcc_checking_assert (from_reg_br_prob_note (ret) == *this);
      return ret;
    }
  /* Return VAL1/VAL2.  */
  static profile_probability probability_in_gcov_type
			 (gcov_type val1, gcov_type val2)
    {
      profile_probability ret;
      gcc_checking_assert (val1 >= 0 && val2 > 0);
      if (val1 > val2)
	ret.m_val = max_probability;
      else
	{
	  uint64_t tmp;
	  safe_scale_64bit (val1, max_probability, val2, &tmp);
	  gcc_checking_assert (tmp <= max_probability);
	  ret.m_val = tmp;
	}
      ret.m_quality = PRECISE;
      return ret;
    }
  /* Basic operations.  */
  bool operator== (const profile_probability &other) const
    {
      return m_val == other.m_val && m_quality == other.m_quality;
    }

  profile_probability operator+ (const profile_probability &other) const
    {
      if (other == never ())
	return *this;
      if (*this == never ())
	return other;
      if (!initialized_p () || !other.initialized_p ())
	return uninitialized ();

      profile_probability ret;
      ret.m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }
  profile_probability &operator+= (const profile_probability &other)
    {
      if (other == never ())
	return *this;
      if (*this == never ())
	{
	  *this = other;
	  return *this;
	}
      if (!initialized_p () || !other.initialized_p ())
	return *this = uninitialized ();
      else
	{
	  m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
	  m_quality = MIN (m_quality, other.m_quality);
	}
      return *this;
    }
  profile_probability operator- (const profile_probability &other) const
    {
      if (*this == never ()
	  || other == never ())
	return *this;
      if (!initialized_p () || !other.initialized_p ())
	return uninitialized ();
      profile_probability ret;
      ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }
  profile_probability &operator-= (const profile_probability &other)
    {
      if (*this == never ()
	  || other == never ())
	return *this;
      if (!initialized_p () || !other.initialized_p ())
	return *this = uninitialized ();
      else
	{
	  m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
	  m_quality = MIN (m_quality, other.m_quality);
	}
      return *this;
    }
  profile_probability operator* (const profile_probability &other) const
    {
      if (*this == never ()
	  || other == never ())
	return never ();
      if (!initialized_p () || !other.initialized_p ())
	return uninitialized ();
      profile_probability ret;
      ret.m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
      ret.m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED);
      return ret;
    }
  profile_probability &operator*= (const profile_probability &other)
    {
      if (*this == never ()
	  || other == never ())
	return *this = never ();
      if (!initialized_p () || !other.initialized_p ())
	return *this = uninitialized ();
      else
	{
	  m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
	  m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED);
	}
      return *this;
    }
  profile_probability operator/ (const profile_probability &other) const
    {
      if (*this == never ())
	return never ();
      if (!initialized_p () || !other.initialized_p ())
	return uninitialized ();
      profile_probability ret;
      /* If we get probability above 1, mark it as unreliable and return 1. */
      if (m_val >= other.m_val)
	{
	  ret.m_val = max_probability;
	  ret.m_quality = MIN (MIN (m_quality, other.m_quality),
			       GUESSED);
	  return ret;
	}
      else if (!m_val)
	ret.m_val = 0;
      else
	{
	  gcc_checking_assert (other.m_val);
	  ret.m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
				 other.m_val),
			   max_probability);
	}
      ret.m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED);
      return ret;
    }
  profile_probability &operator/= (const profile_probability &other)
    {
      if (*this == never ())
	return *this = never ();
      if (!initialized_p () || !other.initialized_p ())
	return *this = uninitialized ();
      else
	{
	  /* If we get probability above 1, mark it as unreliable
	     and return 1.  */
	  if (m_val > other.m_val)
	    {
	      m_val = max_probability;
	      m_quality = MIN (MIN (m_quality, other.m_quality),
			       GUESSED);
	      return *this;
	    }
	  else
	    {
	      gcc_checking_assert (other.m_val);
	      m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
				 other.m_val),
			   max_probability);
	    }
	  m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED);
	}
      return *this;
    }
  /* Split *THIS (ORIG) probability into 2 probabilities, such that
     the returned one (FIRST) is *THIS * CPROB and *THIS is
     adjusted (SECOND) so that FIRST + FIRST.invert () * SECOND
     == ORIG.  This is useful e.g. when splitting a conditional
     branch like:
		if (cond)
		  goto lab; // ORIG probability
     into
		if (cond1)
		  goto lab; // FIRST = ORIG * CPROB probability
		if (cond2)
		  goto lab; // SECOND probability
     such that the overall probability of jumping to lab remains
     the same.  CPROB gives the relative probability between the
     branches.  */
  profile_probability split (const profile_probability &cprob)
    {
      profile_probability ret = *this * cprob;
      /* The following is equivalent to:
	 *this = cprob.invert () * *this / ret.invert ();
	 Avoid scaling when overall outcome is supposed to be always.
	 Without knowing that one is inverse of other, the result would be
	 conservative.  */
      if (!(*this == always ()))
	*this = (*this - ret) / ret.invert ();
      return ret;
    }
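
  /* Worked example (illustrative): if ORIG is 1/2 and CPROB is 1/2, then
     FIRST = ORIG * CPROB = 1/4 and *THIS becomes (1/2 - 1/4) / (3/4) = 1/3,
     so FIRST + FIRST.invert () * SECOND = 1/4 + 3/4 * 1/3 = 1/2 == ORIG.  */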
  /* Apply the probability to gcov counter VAL and return the scaled value.  */
  gcov_type apply (gcov_type val) const
    {
      if (*this == uninitialized ())
	return val / 2;
      return RDIV (val * m_val, max_probability);
    }

  /* Return 1-*THIS.  */
  profile_probability invert () const
    {
      return always() - *this;
    }
  /* Return THIS with quality dropped to GUESSED.  */
  profile_probability guessed () const
    {
      profile_probability ret = *this;
      ret.m_quality = GUESSED;
      return ret;
    }

  /* Return THIS with quality dropped to AFDO.  */
  profile_probability afdo () const
    {
      profile_probability ret = *this;
      ret.m_quality = AFDO;
      return ret;
    }
  /* Return *THIS * NUM / DEN.  */
  profile_probability apply_scale (int64_t num, int64_t den) const
    {
      if (*this == never ())
	return *this;
      if (!initialized_p ())
	return uninitialized ();
      profile_probability ret;
      uint64_t tmp;
      safe_scale_64bit (m_val, num, den, &tmp);
      ret.m_val = MIN (tmp, max_probability);
      ret.m_quality = MIN (m_quality, ADJUSTED);
      return ret;
    }
  /* Return *THIS * NUM / DEN.  */
  profile_probability apply_scale (profile_probability num,
				   profile_probability den) const
    {
      if (*this == never ())
	return *this;
      if (num == never ())
	return num;
      if (!initialized_p () || !num.initialized_p () || !den.initialized_p ())
	return uninitialized ();
      if (num == den)
	return *this;
      gcc_checking_assert (den.m_val);

      profile_probability ret;
      uint64_t val;
      safe_scale_64bit (m_val, num.m_val, den.m_val, &val);
      ret.m_val = MIN (val, max_probability);
      ret.m_quality = MIN (MIN (MIN (m_quality, ADJUSTED),
				num.m_quality), den.m_quality);
      return ret;
    }
  /* Return true when the probability of edge is reliable.

     The profile guessing code is good at predicting branch outcome (i.e.
     taken/not taken), which is predicted right slightly over 75% of the time.
     It is however notoriously poor at predicting the probability itself.
     In general the guessed profile appears a lot flatter (with probabilities
     closer to 50%) than the reality, so it is a bad idea to use it to drive
     optimizations such as those disabling dynamic branch prediction for well
     predictable branches.

     There are two exceptions - edges leading to noreturn edges and edges
     predicted by the number of iterations heuristics are predicted well.
     This predicate should be able to distinguish those, but at the moment
     it simply checks for the noreturn heuristic, which is the only one
     giving probabilities over 99% or below 1%.  In the future we might want
     to propagate reliability information across the CFG if we find this
     information useful in multiple places.  */
  bool probably_reliable_p () const
    {
      if (m_quality >= ADJUSTED)
	return true;
      if (!initialized_p ())
	return false;
      return m_val < max_probability / 100
	     || m_val > max_probability - max_probability / 100;
    }

  /* Return false if profile_probability is bogus.  */
  bool verify () const
    {
      gcc_checking_assert (m_quality != UNINITIALIZED_PROFILE);
      if (m_val == uninitialized_probability)
	return m_quality == GUESSED;
      else if (m_quality < GUESSED)
	return false;
      return m_val <= max_probability;
    }
  /* Comparisons are three-state and conservative.  False is returned if
     the inequality cannot be decided.  */
  bool operator< (const profile_probability &other) const
    {
      return initialized_p () && other.initialized_p () && m_val < other.m_val;
    }

  bool operator> (const profile_probability &other) const
    {
      return initialized_p () && other.initialized_p () && m_val > other.m_val;
    }

  bool operator<= (const profile_probability &other) const
    {
      return initialized_p () && other.initialized_p () && m_val <= other.m_val;
    }

  bool operator>= (const profile_probability &other) const
    {
      return initialized_p () && other.initialized_p () && m_val >= other.m_val;
    }
  profile_probability operator* (int64_t num) const
    {
      return apply_scale (num, 1);
    }

  profile_probability operator*= (int64_t num)
    {
      *this = apply_scale (num, 1);
      return *this;
    }

  profile_probability operator/ (int64_t den) const
    {
      return apply_scale (1, den);
    }

  profile_probability operator/= (int64_t den)
    {
      *this = apply_scale (1, den);
      return *this;
    }
  /* Compute n-th power.  */
  profile_probability pow (int) const;

  /* Compute square root.  */
  profile_probability sqrt () const;

  /* Get the value of the count.  */
  uint32_t value () const { return m_val; }

  /* Get the quality of the count.  */
  enum profile_quality quality () const { return m_quality; }

  /* Output THIS to F.  */
  void dump (FILE *f) const;

  /* Output THIS to BUFFER.  */
  void dump (char *buffer) const;

  /* Print THIS to stderr.  */
  void debug () const;

  /* Return true if THIS is known to differ significantly from OTHER.  */
  bool differs_from_p (profile_probability other) const;

  /* Return true if the difference is greater than 50%.  */
  bool differs_lot_from_p (profile_probability other) const;

  /* COUNT1 times the event happens with *THIS probability, COUNT2 times
     it happens with OTHER probability.  Return the probability that either
     *THIS or OTHER happens.  */
  profile_probability combine_with_count (profile_count count1,
					  profile_probability other,
					  profile_count count2) const;

  /* Return probability as sreal.  */
  sreal to_sreal () const;
  /* LTO streaming support.  */
  static profile_probability stream_in (class lto_input_block *);
  void stream_out (struct output_block *);
  void stream_out (struct lto_output_stream *);
};
/* Main data type to hold profile counters in GCC.  Profile counts originate
   either from profile feedback, static profile estimation or both.  We do not
   perform whole program profile propagation and thus profile estimation
   counters are often local to function, while counters from profile feedback
   (or special cases of profile estimation) can be used inter-procedurally.

   There are 3 basic types of counters:
   1) local counters which are the result of intra-procedural static profile
      estimation.
   2) ipa counters which are the result of profile feedback or a special case
      of static profile estimation (such as in function main).
   3) counters which count as 0 inter-procedurally (because the given function
      was never run in train feedback) but hold a local static profile
      estimate.

   Counters of type 1 and 3 cannot be mixed with counters of a different type
   within an operation (because the whole function should use one type of
   counter), with the exception that global zero mixes in most operations
   where the outcome is well defined.

   To take a local counter and use it inter-procedurally, use the ipa member
   function, which strips information irrelevant at the inter-procedural
   level.

   Counters are 61bit integers representing the number of executions during
   the train run or the normalized frequency within the function.

   As the profile is maintained during the compilation, many adjustments are
   made.  Not all transformations can be made precisely, most importantly
   when code is being duplicated.  It also may happen that part of the CFG has
   profile counts known while others do not - for example when LTO optimizing
   a partly profiled program or when the profile was lost due to COMDAT
   merging.

   For this reason profile_count tracks more information than just an
   unsigned integer and it is also ready for profile mismatches.
   The API of this data type represents operations that are natural
   on profile counts - sum, difference and operations with scales and
   probabilities.  All operations are safe by never getting negative counts
   and they do end up in uninitialized scale if any of the parameters is
   uninitialized.

   All comparisons are three-state and conservative, just as for
   probabilities.  Thus a < b is not equal to !(a >= b).

   The following pre-defined counts are available:

   profile_count::zero ()  for code that is known to execute zero times at
      runtime (this can be detected statically i.e. for paths leading to
      abort ());
   profile_count::one ()  for code that is known to execute once (such as
      the main () function);
   profile_count::uninitialized ()  for unknown execution count.  */
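
/* Illustrative sketch of typical use (assumes the GCC-internal context in
   which this header is compiled; variable names are only examples):

     profile_count c = profile_count::from_gcov_type (1000);     // PRECISE
     profile_count half
       = c.apply_probability (profile_probability::even ());     // ~500, GUESSED
     profile_probability p = half.probability_in (c);            // ~1/2

   Local (static estimate) and IPA counts must not be mixed; use ipa ()
   to obtain a count that is safe to compare across functions.  */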
struct GTY(()) profile_count
{
public:
  /* Use 61 bits to hold basic block counters.  Should be at least
     64bit.  Although a counter cannot be negative, we use a signed
     type to hold various extra stages.  */

  static const int n_bits = 61;
  static const uint64_t max_count = ((uint64_t) 1 << n_bits) - 2;
private:
  static const uint64_t uninitialized_count = ((uint64_t) 1 << n_bits) - 1;

#if defined (__arm__) && (__GNUC__ >= 6 && __GNUC__ <= 8)
  /* Work-around for PR88469.  A bug in the gcc-6/7/8 PCS layout code
     incorrectly detects the alignment of a structure where the only
     64-bit aligned object is a bit-field.  We force the alignment of
     the entire field to mitigate this.  */
#define UINT64_BIT_FIELD_ALIGN __attribute__ ((aligned (8)))
#else
#define UINT64_BIT_FIELD_ALIGN
#endif
  uint64_t UINT64_BIT_FIELD_ALIGN m_val : n_bits;
#undef UINT64_BIT_FIELD_ALIGN
  enum profile_quality m_quality : 3;
public:

  /* Return true if both values can meaningfully appear in a single function
     body.  We have either all counters in function local or global, otherwise
     operations between them are not really defined well.  */
  bool compatible_p (const profile_count other) const
    {
      if (!initialized_p () || !other.initialized_p ())
	return true;
      if (*this == zero ()
	  || other == zero ())
	return true;
      /* Do not allow nonzero global profile together with local guesses
	 that are globally0.  */
      if (ipa ().nonzero_p ()
	  && !(other.ipa () == other))
	return false;
      if (other.ipa ().nonzero_p ()
	  && !(ipa () == *this))
	return false;

      return ipa_p () == other.ipa_p ();
    }
  /* Used for counters which are expected to be never executed.  */
  static profile_count zero ()
    {
      return from_gcov_type (0);
    }

  static profile_count adjusted_zero ()
    {
      profile_count c;
      c.m_val = 0;
      c.m_quality = ADJUSTED;
      return c;
    }

  static profile_count guessed_zero ()
    {
      profile_count c;
      c.m_val = 0;
      c.m_quality = GUESSED;
      return c;
    }

  static profile_count one ()
    {
      return from_gcov_type (1);
    }

  /* Value of counters which have not been initialized.  Either because
     initialization did not happen yet or because profile is unknown.  */
  static profile_count uninitialized ()
    {
      profile_count c;
      c.m_val = uninitialized_count;
      c.m_quality = GUESSED_LOCAL;
      return c;
    }
  /* Conversion to gcov_type is lossy.  */
  gcov_type to_gcov_type () const
    {
      gcc_checking_assert (initialized_p ());
      return m_val;
    }

  /* Return true if value has been initialized.  */
  bool initialized_p () const
    {
      return m_val != uninitialized_count;
    }

  /* Return true if value can be trusted.  */
  bool reliable_p () const
    {
      return m_quality >= ADJUSTED;
    }

  /* Return true if value can be operated inter-procedurally.  */
  bool ipa_p () const
    {
      return !initialized_p () || m_quality >= GUESSED_GLOBAL0;
    }

  /* Return true if quality of profile is precise.  */
  bool precise_p () const
    {
      return m_quality == PRECISE;
    }

  /* Get the value of the count.  */
  uint64_t value () const { return m_val; }

  /* Get the quality of the count.  */
  enum profile_quality quality () const { return m_quality; }
  /* When merging basic blocks, the two different profile counts are unified.
     Return true if this can be done without losing info about the profile.
     The only case we care about here is when the first BB contains something
     that makes it terminate in a way not visible in the CFG.  */
  bool ok_for_merging (profile_count other) const
    {
      if (m_quality < ADJUSTED
	  || other.m_quality < ADJUSTED)
	return true;
      return !(other < *this);
    }

  /* When merging two BBs with different counts, pick the common count that
     looks most representative.  */
  profile_count merge (profile_count other) const
    {
      if (*this == other || !other.initialized_p ()
	  || m_quality > other.m_quality)
	return *this;
      if (other.m_quality > m_quality
	  || other == zero ())
	return other;
      return *this + other;
    }
  /* Basic operations.  */
  bool operator== (const profile_count &other) const
    {
      return m_val == other.m_val && m_quality == other.m_quality;
    }

  profile_count operator+ (const profile_count &other) const
    {
      if (other == zero ())
	return *this;
      if (*this == zero ())
	return other;
      if (!initialized_p () || !other.initialized_p ())
	return uninitialized ();

      profile_count ret;
      gcc_checking_assert (compatible_p (other));
      ret.m_val = m_val + other.m_val;
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }
  profile_count &operator+= (const profile_count &other)
    {
      if (other == zero ())
	return *this;
      if (*this == zero ())
	{
	  *this = other;
	  return *this;
	}
      if (!initialized_p () || !other.initialized_p ())
	return *this = uninitialized ();
      else
	{
	  gcc_checking_assert (compatible_p (other));
	  m_val += other.m_val;
	  m_quality = MIN (m_quality, other.m_quality);
	}
      return *this;
    }
  profile_count operator- (const profile_count &other) const
    {
      if (*this == zero () || other == zero ())
	return *this;
      if (!initialized_p () || !other.initialized_p ())
	return uninitialized ();
      gcc_checking_assert (compatible_p (other));
      profile_count ret;
      ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }
  profile_count &operator-= (const profile_count &other)
    {
      if (*this == zero () || other == zero ())
	return *this;
      if (!initialized_p () || !other.initialized_p ())
	return *this = uninitialized ();
      else
	{
	  gcc_checking_assert (compatible_p (other));
	  m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
	  m_quality = MIN (m_quality, other.m_quality);
	}
      return *this;
    }
  /* Return false if profile_count is bogus.  */
  bool verify () const
    {
      gcc_checking_assert (m_quality != UNINITIALIZED_PROFILE);
      return m_val != uninitialized_count || m_quality == GUESSED_LOCAL;
    }
  /* Comparisons are three-state and conservative.  False is returned if
     the inequality cannot be decided.  */
  bool operator< (const profile_count &other) const
    {
      if (!initialized_p () || !other.initialized_p ())
	return false;
      if (*this == zero ())
	return !(other == zero ());
      if (other == zero ())
	return false;
      gcc_checking_assert (compatible_p (other));
      return m_val < other.m_val;
    }

  bool operator> (const profile_count &other) const
    {
      if (!initialized_p () || !other.initialized_p ())
	return false;
      if (*this == zero ())
	return false;
      if (other == zero ())
	return !(*this == zero ());
      gcc_checking_assert (compatible_p (other));
      return initialized_p () && other.initialized_p () && m_val > other.m_val;
    }
  bool operator< (const gcov_type other) const
    {
      gcc_checking_assert (ipa_p ());
      gcc_checking_assert (other >= 0);
      return ipa ().initialized_p () && ipa ().m_val < (uint64_t) other;
    }

  bool operator> (const gcov_type other) const
    {
      gcc_checking_assert (ipa_p ());
      gcc_checking_assert (other >= 0);
      return ipa ().initialized_p () && ipa ().m_val > (uint64_t) other;
    }
  bool operator<= (const profile_count &other) const
    {
      if (!initialized_p () || !other.initialized_p ())
	return false;
      if (*this == zero ())
	return true;
      if (other == zero ())
	return (*this == zero ());
      gcc_checking_assert (compatible_p (other));
      return m_val <= other.m_val;
    }

  bool operator>= (const profile_count &other) const
    {
      if (!initialized_p () || !other.initialized_p ())
	return false;
      if (other == zero ())
	return true;
      if (*this == zero ())
	return (other == zero ());
      gcc_checking_assert (compatible_p (other));
      return m_val >= other.m_val;
    }
  bool operator<= (const gcov_type other) const
    {
      gcc_checking_assert (ipa_p ());
      gcc_checking_assert (other >= 0);
      return ipa ().initialized_p () && ipa ().m_val <= (uint64_t) other;
    }

  bool operator>= (const gcov_type other) const
    {
      gcc_checking_assert (ipa_p ());
      gcc_checking_assert (other >= 0);
      return ipa ().initialized_p () && ipa ().m_val >= (uint64_t) other;
    }
  profile_count operator* (int64_t num) const
    {
      return apply_scale (num, 1);
    }

  profile_count operator*= (int64_t num)
    {
      *this = apply_scale (num, 1);
      return *this;
    }

  profile_count operator/ (int64_t den) const
    {
      return apply_scale (1, den);
    }

  profile_count operator/= (int64_t den)
    {
      *this = apply_scale (1, den);
      return *this;
    }
  /* Return true when value is not zero and can be used for scaling.
     This is different from *this > 0 because that requires the counter to
     be IPA.  */
  bool nonzero_p () const
    {
      return initialized_p () && m_val != 0;
    }

  /* Make counter forcibly nonzero.  */
  profile_count force_nonzero () const
    {
      if (!initialized_p ())
	return *this;
      profile_count ret = *this;
      if (ret.m_val == 0)
	{
	  ret.m_val = 1;
	  ret.m_quality = MIN (m_quality, ADJUSTED);
	}
      return ret;
    }
  profile_count max (profile_count other) const
    {
      profile_count val = *this;

      /* Always prefer nonzero IPA counts over local counts.  */
      if (ipa ().nonzero_p () || other.ipa ().nonzero_p ())
	{
	  val = ipa ();
	  other = other.ipa ();
	}
      if (!initialized_p ())
	return other;
      if (!other.initialized_p ())
	return *this;
      if (*this == zero ())
	return other;
      if (other == zero ())
	return *this;
      gcc_checking_assert (compatible_p (other));
      if (val.m_val < other.m_val || (m_val == other.m_val
				      && val.m_quality < other.m_quality))
	return other;
      return *this;
    }
  /* PROB is a probability in scale 0...REG_BR_PROB_BASE.  Scale counter
     accordingly.  */
  profile_count apply_probability (int prob) const
    {
      gcc_checking_assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
      if (m_val == 0)
	return *this;
      if (!initialized_p ())
	return uninitialized ();
      profile_count ret;
      ret.m_val = RDIV (m_val * prob, REG_BR_PROB_BASE);
      ret.m_quality = MIN (m_quality, ADJUSTED);
      return ret;
    }
  /* Scale counter according to PROB.  */
  profile_count apply_probability (profile_probability prob) const
    {
      if (*this == zero () || prob == profile_probability::always ())
	return *this;
      if (prob == profile_probability::never ())
	return zero ();
      if (!initialized_p () || !prob.initialized_p ())
	return uninitialized ();
      profile_count ret;
      uint64_t tmp;
      safe_scale_64bit (m_val, prob.m_val, profile_probability::max_probability,
			&tmp);
      ret.m_val = tmp;
      ret.m_quality = MIN (m_quality, prob.m_quality);
      return ret;
    }
  /* Return *THIS * NUM / DEN.  */
  profile_count apply_scale (int64_t num, int64_t den) const
    {
      if (m_val == 0)
	return *this;
      if (!initialized_p ())
	return uninitialized ();
      profile_count ret;
      uint64_t tmp;

      gcc_checking_assert (num >= 0 && den > 0);
      safe_scale_64bit (m_val, num, den, &tmp);
      ret.m_val = MIN (tmp, max_count);
      ret.m_quality = MIN (m_quality, ADJUSTED);
      return ret;
    }
  profile_count apply_scale (profile_count num, profile_count den) const
    {
      if (*this == zero ())
	return *this;
      if (num == zero ())
	return num;
      if (!initialized_p () || !num.initialized_p () || !den.initialized_p ())
	return uninitialized ();
      if (num == den)
	return *this;
      gcc_checking_assert (den.m_val);

      profile_count ret;
      uint64_t val;
      safe_scale_64bit (m_val, num.m_val, den.m_val, &val);
      ret.m_val = MIN (val, max_count);
      ret.m_quality = MIN (MIN (MIN (m_quality, ADJUSTED),
				num.m_quality), den.m_quality);
      /* Be sure that ret is not local if num is global.
	 Also ensure that ret is not global0 when num is global.  */
      if (num.ipa_p ())
	ret.m_quality = MAX (ret.m_quality,
			     num == num.ipa () ? GUESSED : num.m_quality);
      return ret;
    }
  /* Return THIS with quality dropped to GUESSED_LOCAL.  */
  profile_count guessed_local () const
    {
      profile_count ret = *this;
      if (!initialized_p ())
	return *this;
      ret.m_quality = GUESSED_LOCAL;
      return ret;
    }

  /* We know that profile is globally 0 but keep local profile if present.  */
  profile_count global0 () const
    {
      profile_count ret = *this;
      if (!initialized_p ())
	return *this;
      ret.m_quality = GUESSED_GLOBAL0;
      return ret;
    }

  /* We know that profile is globally adjusted 0 but keep local profile
     if present.  */
  profile_count global0adjusted () const
    {
      profile_count ret = *this;
      if (!initialized_p ())
	return *this;
      ret.m_quality = GUESSED_GLOBAL0_ADJUSTED;
      return ret;
    }

  /* Return THIS with quality dropped to GUESSED.  */
  profile_count guessed () const
    {
      profile_count ret = *this;
      ret.m_quality = MIN (ret.m_quality, GUESSED);
      return ret;
    }
  /* Return variant of profile count which is always safe to compare
     across functions.  */
  profile_count ipa () const
    {
      if (m_quality > GUESSED_GLOBAL0_ADJUSTED)
	return *this;
      if (m_quality == GUESSED_GLOBAL0)
	return zero ();
      if (m_quality == GUESSED_GLOBAL0_ADJUSTED)
	return adjusted_zero ();
      return uninitialized ();
    }

  /* Return THIS with quality dropped to AFDO.  */
  profile_count afdo () const
    {
      profile_count ret = *this;
      ret.m_quality = AFDO;
      return ret;
    }
  /* Return probability of event with counter THIS within event with counter
     OVERALL.  */
  profile_probability probability_in (const profile_count overall) const
    {
      if (*this == zero ()
	  && !(overall == zero ()))
	return profile_probability::never ();
      if (!initialized_p () || !overall.initialized_p ()
	  || !overall.m_val)
	return profile_probability::uninitialized ();
      if (*this == overall && m_quality == PRECISE)
	return profile_probability::always ();
      profile_probability ret;
      gcc_checking_assert (compatible_p (overall));

      if (overall.m_val < m_val)
	{
	  ret.m_val = profile_probability::max_probability;
	  ret.m_quality = GUESSED;
	  return ret;
	}
      else
	ret.m_val = RDIV (m_val * profile_probability::max_probability,
			  overall.m_val);
      ret.m_quality = MIN (MAX (MIN (m_quality, overall.m_quality),
				GUESSED), ADJUSTED);
      return ret;
    }
  /* Return true if profile count is very large, so we risk overflows
     with loop transformations.  */
  bool very_large_p () const
    {
      if (!initialized_p ())
	return false;
      return m_val > max_count / 65536;
    }
  int to_frequency (struct function *fun) const;
  int to_cgraph_frequency (profile_count entry_bb_count) const;
  sreal to_sreal_scale (profile_count in, bool *known = NULL) const;

  /* Output THIS to F.  */
  void dump (FILE *f, struct function *fun = NULL) const;

  /* Output THIS to BUFFER.  */
  void dump (char *buffer, struct function *fun = NULL) const;

  /* Print THIS to stderr.  */
  void debug () const;

  /* Return true if THIS is known to differ significantly from OTHER.  */
  bool differs_from_p (profile_count other) const;

  /* We want to scale profile across function boundary from NUM to DEN.
     Take care of the side case when NUM and DEN are zeros of incompatible
     kinds.  */
  static void adjust_for_ipa_scaling (profile_count *num, profile_count *den);

  /* THIS is a count of bb which is known to be executed IPA times.
     Combine this information into bb counter.  This means returning IPA
     if it is nonzero, not changing anything if IPA is uninitialized
     and if IPA is zero, turning THIS into corresponding local profile with
     global0.  */
  profile_count combine_with_ipa_count (profile_count ipa);

  /* Same as combine_with_ipa_count but inside function with count IPA2.  */
  profile_count combine_with_ipa_count_within
		 (profile_count ipa, profile_count ipa2);
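
  /* Illustrative sketch of combine_with_ipa_count (per the comment above):
     if IPA is nonzero it wins, if IPA is uninitialized THIS is kept, and if
     IPA is zero THIS is turned into the corresponding global0 local profile:

       bb_count = bb_count.combine_with_ipa_count (entry_count.ipa ());

     Here BB_COUNT and ENTRY_COUNT are hypothetical caller-provided counts.  */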
  /* The profiling runtime uses gcov_type, which is usually a 64bit integer.
     Conversions back and forth are used to read the coverage and get it
     into internal representation.  */
  static profile_count from_gcov_type (gcov_type v,
				       profile_quality quality = PRECISE);

  /* LTO streaming support.  */
  static profile_count stream_in (class lto_input_block *);
  void stream_out (struct output_block *);
  void stream_out (struct lto_output_stream *);
*);