/* Profile counter container type.
   Copyright (C) 2017-2018 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#ifndef GCC_PROFILE_COUNT_H
#define GCC_PROFILE_COUNT_H
/* Quality of the profile count.  Because gengtype does not support enums
   inside of classes, this is in global namespace.  */
enum profile_quality {
  /* Uninitialized value.  */
  profile_uninitialized,

  /* Profile is based on static branch prediction heuristics and may
     or may not match reality.  It is local to function and cannot be
     compared inter-procedurally.  Never used by probabilities (they are
     always local).  */
  profile_guessed_local,

  /* Profile was read by feedback and was 0, we used local heuristics to guess
     better.  This is the case of functions not run in profile feedback.
     Never used by probabilities.  */
  profile_guessed_global0,

  /* Same as profile_guessed_global0 but the global count is adjusted 0.  */
  profile_guessed_global0adjusted,

  /* Profile is based on static branch prediction heuristics.  It may or may
     not reflect the reality but it can be compared interprocedurally
     (for example, we inlined function w/o profile feedback into function
     with feedback and propagated from that).
     Never used by probabilities.  */
  profile_guessed,

  /* Profile was determined by autofdo.  */
  profile_afdo,

  /* Profile was originally based on feedback but it was adjusted
     by code duplicating optimization.  It may not precisely reflect the
     particular code path.  */
  profile_adjusted,

  /* Profile was read from profile feedback or determined by accurate static
     analysis.  */
  profile_precise
};
/* The base value for branch probability notes and edge probabilities.  */
#define REG_BR_PROB_BASE  10000

/* Rounding division: (X + Y/2) / Y, i.e. X/Y rounded to the nearest
   integer.  */
#define RDIV(X,Y) (((X) + (Y) / 2) / (Y))
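/* For instance, RDIV (3, 4) is 1 and RDIV (1, 4) is 0 (illustration only;
   note that X + Y/2 is evaluated in the arguments' type and can wrap for
   values near the type's maximum).  */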
bool slow_safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res);
/* Compute RES = (A * B + C / 2) / C, capping, and return false if overflow
   happened.  */

inline bool
safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res)
{
#if (GCC_VERSION >= 5000)
  uint64_t tmp;
  if (!__builtin_mul_overflow (a, b, &tmp)
      && !__builtin_add_overflow (tmp, c / 2, &tmp))
    {
      *res = tmp / c;
      return true;
    }
#else
  if (a < ((uint64_t)1 << 31)
      && b < ((uint64_t)1 << 31)
      && c < ((uint64_t)1 << 31))
    {
      *res = (a * b + (c / 2)) / c;
      return true;
    }
#endif
  return slow_safe_scale_64bit (a, b, c, res);
}
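/* A minimal usage sketch of safe_scale_64bit (hypothetical values,
   illustration only):

     uint64_t scaled;
     bool ok = safe_scale_64bit (count, 3, 4, &scaled);

   On success OK is true and SCALED holds COUNT * 3 / 4 rounded to the
   nearest integer, where COUNT stands for any 64-bit counter value.  */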
/* Data type to hold probabilities.  It implements fixed point arithmetics
   with capping so the probability is always in the range [0,1]; scalings
   that would require values greater than 1 need to be represented otherwise.

   In addition to the actual value, the quality of the profile is tracked and
   propagated through all operations.  The special value UNINITIALIZED is used
   for probabilities that have not been determined yet (for example because of
   -fno-guess-branch-probability).

   Typically probabilities are derived from profile feedback (via
   probability_in_gcov_type), autoFDO or guessed statically and then propagated
   through the compilation.

   Named probabilities are available:
     - never           (0 probability)
     - guessed_never
     - very_unlikely   (1/2000 probability)
     - unlikely        (1/5 probability)
     - even            (1/2 probability)
     - likely          (4/5 probability)
     - very_likely     (1999/2000 probability)
     - guessed_always
     - always

   Named probabilities except for never/always are assumed to be statically
   guessed and thus not necessarily accurate.  The difference between never
   and guessed_never is that the first one should be used only in case that
   a well behaving program will very likely not execute the "never" path.
   For example if the path leads to an abort () call or to exception handling.

   Always and guessed_always probabilities are symmetric.

   For legacy code we support conversion to/from REG_BR_PROB_BASE based
   fixpoint integer arithmetics.  Once the code is converted to branch
   probabilities, these conversions will probably go away because they are
   lossy.  */
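/* A minimal usage sketch (hypothetical values, illustration only):

     profile_probability p = profile_probability::even ();
     gcov_type scaled = p.apply (1000);       yields roughly 500
     profile_probability q = p.invert ();     yields roughly 1/2
     int legacy = p.to_reg_br_prob_base ();   yields roughly REG_BR_PROB_BASE / 2

   The arithmetic operators below propagate the quality field; apply () and
   the REG_BR_PROB_BASE conversions are lossy.  */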
class GTY((user)) profile_probability
{
  static const int n_bits = 29;
  /* We can technically use ((uint32_t) 1 << (n_bits - 1)) - 2 but that
     will lead to harder multiplication sequences.  */
  static const uint32_t max_probability = (uint32_t) 1 << (n_bits - 2);
  static const uint32_t uninitialized_probability
	 = ((uint32_t) 1 << (n_bits - 1)) - 1;

  uint32_t m_val : 29;
  enum profile_quality m_quality : 3;

  friend class profile_count;
public:
  /* Named probabilities.  */
  static profile_probability never ()
    {
      profile_probability ret;
      ret.m_val = 0;
      ret.m_quality = profile_precise;
      return ret;
    }

  static profile_probability guessed_never ()
    {
      profile_probability ret;
      ret.m_val = 0;
      ret.m_quality = profile_guessed;
      return ret;
    }

  static profile_probability very_unlikely ()
    {
      /* Be consistent with PROB_VERY_UNLIKELY in predict.h.  */
      profile_probability r
	 = profile_probability::guessed_always ().apply_scale (1, 2000);
      return r;
    }

  static profile_probability unlikely ()
    {
      /* Be consistent with PROB_VERY_LIKELY in predict.h.  */
      profile_probability r
	 = profile_probability::guessed_always ().apply_scale (1, 5);
      return r;
    }

  static profile_probability even ()
    {
      return profile_probability::guessed_always ().apply_scale (1, 2);
    }

  static profile_probability very_likely ()
    {
      return profile_probability::always () - very_unlikely ();
    }

  static profile_probability likely ()
    {
      return profile_probability::always () - unlikely ();
    }

  static profile_probability guessed_always ()
    {
      profile_probability ret;
      ret.m_val = max_probability;
      ret.m_quality = profile_guessed;
      return ret;
    }

  static profile_probability always ()
    {
      profile_probability ret;
      ret.m_val = max_probability;
      ret.m_quality = profile_precise;
      return ret;
    }

  /* Probabilities which have not been initialized.  Either because
     initialization did not happen yet or because profile is unknown.  */
  static profile_probability uninitialized ()
    {
      profile_probability c;
      c.m_val = uninitialized_probability;
      c.m_quality = profile_guessed;
      return c;
    }
  /* Return true if value has been initialized.  */
  bool initialized_p () const
    {
      return m_val != uninitialized_probability;
    }

  /* Return true if value can be trusted.  */
  bool reliable_p () const
    {
      return m_quality >= profile_adjusted;
    }
  /* Conversion from and to REG_BR_PROB_BASE integer fixpoint arithmetics.
     This is mostly to support legacy code and should go away.  */
  static profile_probability from_reg_br_prob_base (int v)
    {
      profile_probability ret;
      gcc_checking_assert (v >= 0 && v <= REG_BR_PROB_BASE);
      ret.m_val = RDIV (v * (uint64_t) max_probability, REG_BR_PROB_BASE);
      ret.m_quality = profile_guessed;
      return ret;
    }

  int to_reg_br_prob_base () const
    {
      gcc_checking_assert (initialized_p ());
      return RDIV (m_val * (uint64_t) REG_BR_PROB_BASE, max_probability);
    }
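  /* For example (illustration only), from_reg_br_prob_base (REG_BR_PROB_BASE / 2)
     yields a one half probability with GUESSED quality, and
     to_reg_br_prob_base () maps it back to REG_BR_PROB_BASE / 2; arbitrary
     values need not round-trip exactly because max_probability is not a
     multiple of REG_BR_PROB_BASE.  */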
  /* Conversion to and from RTL representation of profile probabilities.  */
  static profile_probability from_reg_br_prob_note (int v)
    {
      profile_probability ret;
      ret.m_val = ((unsigned int)v) / 8;
      ret.m_quality = (enum profile_quality)(v & 7);
      return ret;
    }

  int to_reg_br_prob_note () const
    {
      gcc_checking_assert (initialized_p ());
      int ret = m_val * 8 + m_quality;
      gcc_checking_assert (profile_probability::from_reg_br_prob_note (ret)
			   == *this);
      return ret;
    }
  /* Return VAL1/VAL2.  */
  static profile_probability probability_in_gcov_type
				 (gcov_type val1, gcov_type val2)
    {
      profile_probability ret;
      gcc_checking_assert (val1 >= 0 && val2 > 0);
      if (val1 > val2)
	ret.m_val = max_probability;
      else
	{
	  uint64_t tmp;
	  safe_scale_64bit (val1, max_probability, val2, &tmp);
	  gcc_checking_assert (tmp <= max_probability);
	  ret.m_val = tmp;
	}
      ret.m_quality = profile_precise;
      return ret;
    }
  /* Basic operations.  */
  bool operator== (const profile_probability &other) const
    {
      return m_val == other.m_val && m_quality == other.m_quality;
    }
  profile_probability operator+ (const profile_probability &other) const
    {
      if (other == profile_probability::never ())
	return *this;
      if (*this == profile_probability::never ())
	return other;
      if (!initialized_p () || !other.initialized_p ())
	return profile_probability::uninitialized ();

      profile_probability ret;
      ret.m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }

  profile_probability &operator+= (const profile_probability &other)
    {
      if (other == profile_probability::never ())
	return *this;
      if (*this == profile_probability::never ())
	{
	  *this = other;
	  return *this;
	}
      if (!initialized_p () || !other.initialized_p ())
	return *this = profile_probability::uninitialized ();

      m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
      m_quality = MIN (m_quality, other.m_quality);
      return *this;
    }
  profile_probability operator- (const profile_probability &other) const
    {
      if (*this == profile_probability::never ()
	  || other == profile_probability::never ())
	return *this;
      if (!initialized_p () || !other.initialized_p ())
	return profile_probability::uninitialized ();
      profile_probability ret;
      ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }

  profile_probability &operator-= (const profile_probability &other)
    {
      if (*this == profile_probability::never ()
	  || other == profile_probability::never ())
	return *this;
      if (!initialized_p () || !other.initialized_p ())
	return *this = profile_probability::uninitialized ();

      m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
      m_quality = MIN (m_quality, other.m_quality);
      return *this;
    }
  profile_probability operator* (const profile_probability &other) const
    {
      if (*this == profile_probability::never ()
	  || other == profile_probability::never ())
	return profile_probability::never ();
      if (!initialized_p () || !other.initialized_p ())
	return profile_probability::uninitialized ();
      profile_probability ret;
      ret.m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }

  profile_probability &operator*= (const profile_probability &other)
    {
      if (*this == profile_probability::never ()
	  || other == profile_probability::never ())
	return *this = profile_probability::never ();
      if (!initialized_p () || !other.initialized_p ())
	return *this = profile_probability::uninitialized ();

      m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
      m_quality = MIN (m_quality, other.m_quality);
      return *this;
    }
  profile_probability operator/ (const profile_probability &other) const
    {
      if (*this == profile_probability::never ())
	return profile_probability::never ();
      if (!initialized_p () || !other.initialized_p ())
	return profile_probability::uninitialized ();
      profile_probability ret;
      if (m_val >= other.m_val)
	ret.m_val = max_probability;
      else
	{
	  gcc_checking_assert (other.m_val);
	  ret.m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
				 other.m_val),
			   max_probability);
	}
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }

  profile_probability &operator/= (const profile_probability &other)
    {
      if (*this == profile_probability::never ())
	return *this = profile_probability::never ();
      if (!initialized_p () || !other.initialized_p ())
	return *this = profile_probability::uninitialized ();

      if (m_val > other.m_val)
	m_val = max_probability;
      else
	{
	  gcc_checking_assert (other.m_val);
	  m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
			     other.m_val),
		       max_probability);
	}
      m_quality = MIN (m_quality, other.m_quality);
      return *this;
    }
  /* Split *THIS (ORIG) probability into 2 probabilities, such that
     the returned one (FIRST) is *THIS * CPROB and *THIS is
     adjusted (SECOND) so that FIRST + FIRST.invert () * SECOND
     == ORIG.  This is useful e.g. when splitting a conditional branch:

       if (cond)
	 goto lab; // ORIG probability

     into

       if (cond1)
	 goto lab; // FIRST = ORIG * CPROB probability
       if (cond2)
	 goto lab; // SECOND probability

     such that the overall probability of jumping to lab remains
     the same.  CPROB gives the relative probability between the
     branches.  */
  profile_probability split (const profile_probability &cprob)
    {
      profile_probability ret = *this * cprob;
      /* The following is equivalent to:
	 *this = cprob.invert () * *this / ret.invert ();  */
      *this = (*this - ret) / ret.invert ();
      return ret;
    }
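  /* A worked example (illustrative numbers only): with ORIG = 60% and
     CPROB = 1/2, FIRST = 30% and the adjusted *THIS becomes
     (60% - 30%) / 70% ~ 42.9%, so FIRST + FIRST.invert () * SECOND
     = 30% + 70% * 42.9% ~ 60% = ORIG as required.  */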
  /* Apply the probability to a given count VAL.  */
  gcov_type apply (gcov_type val) const
    {
      if (*this == profile_probability::uninitialized ())
	return val / 2;
      return RDIV (val * m_val, max_probability);
    }

  /* Return 1-*THIS.  */
  profile_probability invert () const
    {
      return profile_probability::always() - *this;
    }
  /* Return THIS with quality dropped to GUESSED.  */
  profile_probability guessed () const
    {
      profile_probability ret = *this;
      ret.m_quality = profile_guessed;
      return ret;
    }

  /* Return THIS with quality dropped to AFDO.  */
  profile_probability afdo () const
    {
      profile_probability ret = *this;
      ret.m_quality = profile_afdo;
      return ret;
    }
  /* Combine *THIS with OTHER, weighting them by the frequencies FREQ1
     and FREQ2.  */
  profile_probability combine_with_freq (int freq1, profile_probability other,
					 int freq2) const
    {
      profile_probability ret;

      if (*this == profile_probability::uninitialized ()
	  || other == profile_probability::uninitialized ())
	return profile_probability::uninitialized ();

      gcc_checking_assert (freq1 >= 0 && freq2 >= 0);
      if (!freq1 && !freq2)
	ret.m_val = (m_val + other.m_val) / 2;
      else
	ret.m_val = RDIV (m_val * (uint64_t) freq1
			  + other.m_val * (uint64_t) freq2, freq1 + freq2);
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }
  /* Return *THIS * NUM / DEN.  */
  profile_probability apply_scale (int64_t num, int64_t den) const
    {
      if (*this == profile_probability::never ())
	return *this;
      if (!initialized_p ())
	return profile_probability::uninitialized ();
      profile_probability ret;
      uint64_t tmp;
      safe_scale_64bit (m_val, num, den, &tmp);
      ret.m_val = MIN (tmp, max_probability);
      ret.m_quality = MIN (m_quality, profile_adjusted);
      return ret;
    }
  /* Return true when the probability of edge is reliable.

     The profile guessing code is good at predicting branch outcome (ie.
     taken/not taken), that is predicted right slightly over 75% of the time.
     It is however notoriously poor on predicting the probability itself.
     In general the profile appears a lot flatter (with probabilities closer
     to 50%) than the reality, so it is a bad idea to use it to drive
     optimizations such as those disabling dynamic branch prediction for well
     predictable branches.

     There are two exceptions - edges leading to noreturn edges and edges
     predicted by number of iterations heuristics are predicted well.  This
     predicate should be able to distinguish those, but at the moment it
     simply checks for the noreturn heuristic, which is the only one giving
     a probability over 99% or below 1%.  In the future we might want to
     propagate reliability information across the CFG if we find this
     information useful in multiple places.  */
  bool probably_reliable_p () const
    {
      if (m_quality >= profile_adjusted)
	return true;
      if (!initialized_p ())
	return false;
      return m_val < max_probability / 100
	     || m_val > max_probability - max_probability / 100;
    }
  /* Return false if profile_probability is bogus.  */
  bool verify () const
    {
      gcc_checking_assert (m_quality != profile_uninitialized);
      if (m_val == uninitialized_probability)
	return m_quality == profile_guessed;
      else if (m_quality < profile_guessed)
	return false;
      return m_val <= max_probability;
    }
  /* Comparisons are three-state and conservative.  False is returned if
     the inequality cannot be decided.  */
  bool operator< (const profile_probability &other) const
    {
      return initialized_p () && other.initialized_p () && m_val < other.m_val;
    }

  bool operator> (const profile_probability &other) const
    {
      return initialized_p () && other.initialized_p () && m_val > other.m_val;
    }

  bool operator<= (const profile_probability &other) const
    {
      return initialized_p () && other.initialized_p () && m_val <= other.m_val;
    }

  bool operator>= (const profile_probability &other) const
    {
      return initialized_p () && other.initialized_p () && m_val >= other.m_val;
    }
  /* Output THIS to F.  */
  void dump (FILE *f) const;

  /* Print THIS to stderr.  */
  void debug () const;

  /* Return true if THIS is known to differ significantly from OTHER.  */
  bool differs_from_p (profile_probability other) const;
  /* Return true if the difference is greater than 50%.  */
  bool differs_lot_from_p (profile_probability other) const;

  /* LTO streaming support.  */
  static profile_probability stream_in (struct lto_input_block *);
  void stream_out (struct output_block *);
  void stream_out (struct lto_output_stream *);
};
/* Main data type to hold profile counters in GCC.  Profile counts originate
   either from profile feedback, static profile estimation or both.  We do not
   perform whole program profile propagation and thus profile estimation
   counters are often local to function, while counters from profile feedback
   (or special cases of profile estimation) can be used inter-procedurally.

   There are 3 basic types:
     1) local counters which are result of intra-procedural static profile
	estimation.
     2) ipa counters which are result of profile feedback or special case
	of static profile estimation (such as in function main).
     3) counters which count as 0 inter-procedurally (because given function
	was never run in the train feedback) but they hold a local static
	profile estimate.

   Counters of type 1 and 3 cannot be mixed with counters of a different type
   within an operation (because the whole function should use one type of
   counter), with the exception that global zero counters mix in most
   operations where the outcome is undefined.

   To take a local counter and use it inter-procedurally, use the ipa member
   function which strips information irrelevant at the inter-procedural level.

   Counters are 61bit integers representing number of executions during the
   train run or normalized frequency within the function.

   As the profile is maintained during the compilation, many adjustments are
   made.  Not all transformations can be made precisely, most importantly
   when code is being duplicated.  It also may happen that part of the CFG has
   profile counts known while other parts do not - for example when LTO
   optimizing a partly profiled program or when the profile was lost due to
   COMDAT merging.

   For this reason profile_count tracks more information than
   just an unsigned integer and it is also ready for profile mismatches.
   The API of this data type represents operations that are natural
   on profile counts - sum, difference and operations with scales and
   probabilities.  All operations are safe by never getting negative counts
   and they end up uninitialized if any of the parameters is uninitialized.

   Comparisons are three-state, as is the handling of probabilities.  Thus
   a < b is not equal to !(a >= b).

   The following pre-defined counts are available:

   profile_count::zero ()  for code that is known to execute zero times at
      runtime (this can be detected statically i.e. for paths leading to
      abort ());
   profile_count::one ()  for code that is known to execute once (such as
      the main () function);
   profile_count::uninitialized ()  for unknown execution count.  */
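/* A minimal usage sketch (hypothetical values, illustration only):

     profile_count c = profile_count::from_gcov_type (1000);
     profile_count half = c.apply_scale (1, 2);        roughly 500, quality capped at ADJUSTED
     profile_probability p = half.probability_in (c);  roughly 1/2
     bool run = c.nonzero_p ();                        true

   The member functions below spell out the exact capping and quality
   propagation rules.  */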
class GTY(()) profile_count
{
  /* Use 62bit to hold basic block counters.  Should be at least
     64bit.  Although a counter cannot be negative, we use a signed
     type to hold various extra stages.  */

  static const int n_bits = 61;
  static const uint64_t max_count = ((uint64_t) 1 << n_bits) - 2;
  static const uint64_t uninitialized_count = ((uint64_t) 1 << n_bits) - 1;

  uint64_t m_val : n_bits;
  enum profile_quality m_quality : 3;
  /* Return true if both values can meaningfully appear in a single function
     body.  We have either all counters in a function local or all global;
     otherwise operations between them are not really defined well.  */
  bool compatible_p (const profile_count other) const
    {
      if (!initialized_p () || !other.initialized_p ())
	return true;
      if (*this == profile_count::zero ()
	  || other == profile_count::zero ())
	return true;
      return ipa_p () == other.ipa_p ();
    }
public:
  /* Used for counters which are expected to be never executed.  */
  static profile_count zero ()
    {
      return from_gcov_type (0);
    }

  static profile_count adjusted_zero ()
    {
      profile_count c;
      c.m_val = 0;
      c.m_quality = profile_adjusted;
      return c;
    }

  static profile_count guessed_zero ()
    {
      profile_count c;
      c.m_val = 0;
      c.m_quality = profile_guessed;
      return c;
    }

  static profile_count one ()
    {
      return from_gcov_type (1);
    }
  /* Value of counters which have not been initialized.  Either because
     initialization did not happen yet or because profile is unknown.  */
  static profile_count uninitialized ()
    {
      profile_count c;
      c.m_val = uninitialized_count;
      c.m_quality = profile_guessed_local;
      return c;
    }
  /* Conversion to gcov_type is lossy.  */
  gcov_type to_gcov_type () const
    {
      gcc_checking_assert (initialized_p ());
      return m_val;
    }
  /* Return true if value has been initialized.  */
  bool initialized_p () const
    {
      return m_val != uninitialized_count;
    }

  /* Return true if value can be trusted.  */
  bool reliable_p () const
    {
      return m_quality >= profile_adjusted;
    }

  /* Return true if the value can be used inter-procedurally.  */
  bool ipa_p () const
    {
      return !initialized_p () || m_quality >= profile_guessed_global0;
    }

  /* Return true if quality of profile is precise.  */
  bool precise_p () const
    {
      return m_quality == profile_precise;
    }
  /* When merging basic blocks, the two different profile counts are unified.
     Return true if this can be done without losing info about profile.
     The only case we care about here is when the first BB contains something
     that makes it terminate in a way not visible in the CFG.  */
  bool ok_for_merging (profile_count other) const
    {
      if (m_quality < profile_adjusted
	  || other.m_quality < profile_adjusted)
	return true;
      return !(other < *this);
    }
  /* When merging two BBs with different counts, pick the common count that
     looks most representative.  */
  profile_count merge (profile_count other) const
    {
      if (*this == other || !other.initialized_p ()
	  || m_quality > other.m_quality)
	return *this;
      if (other.m_quality > m_quality
	  || other == profile_count::zero ())
	return other;
      return *this + other;
    }
  /* Basic operations.  */
  bool operator== (const profile_count &other) const
    {
      return m_val == other.m_val && m_quality == other.m_quality;
    }

  profile_count operator+ (const profile_count &other) const
    {
      if (other == profile_count::zero ())
	return *this;
      if (*this == profile_count::zero ())
	return other;
      if (!initialized_p () || !other.initialized_p ())
	return profile_count::uninitialized ();

      profile_count ret;
      gcc_checking_assert (compatible_p (other));
      ret.m_val = m_val + other.m_val;
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }

  profile_count &operator+= (const profile_count &other)
    {
      if (other == profile_count::zero ())
	return *this;
      if (*this == profile_count::zero ())
	{
	  *this = other;
	  return *this;
	}
      if (!initialized_p () || !other.initialized_p ())
	return *this = profile_count::uninitialized ();

      gcc_checking_assert (compatible_p (other));
      m_val += other.m_val;
      m_quality = MIN (m_quality, other.m_quality);
      return *this;
    }
  profile_count operator- (const profile_count &other) const
    {
      if (*this == profile_count::zero () || other == profile_count::zero ())
	return *this;
      if (!initialized_p () || !other.initialized_p ())
	return profile_count::uninitialized ();
      gcc_checking_assert (compatible_p (other));
      profile_count ret;
      ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }

  profile_count &operator-= (const profile_count &other)
    {
      if (*this == profile_count::zero () || other == profile_count::zero ())
	return *this;
      if (!initialized_p () || !other.initialized_p ())
	return *this = profile_count::uninitialized ();

      gcc_checking_assert (compatible_p (other));
      m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
      m_quality = MIN (m_quality, other.m_quality);
      return *this;
    }
  /* Return false if profile_count is bogus.  */
  bool verify () const
    {
      gcc_checking_assert (m_quality != profile_uninitialized);
      return m_val != uninitialized_count || m_quality == profile_guessed_local;
    }
  /* Comparisons are three-state and conservative.  False is returned if
     the inequality cannot be decided.  */
  bool operator< (const profile_count &other) const
    {
      if (!initialized_p () || !other.initialized_p ())
	return false;
      if (*this == profile_count::zero ())
	return !(other == profile_count::zero ());
      if (other == profile_count::zero ())
	return false;
      gcc_checking_assert (compatible_p (other));
      return m_val < other.m_val;
    }

  bool operator> (const profile_count &other) const
    {
      if (!initialized_p () || !other.initialized_p ())
	return false;
      if (*this == profile_count::zero ())
	return false;
      if (other == profile_count::zero ())
	return !(*this == profile_count::zero ());
      gcc_checking_assert (compatible_p (other));
      return initialized_p () && other.initialized_p () && m_val > other.m_val;
    }
  bool operator< (const gcov_type other) const
    {
      gcc_checking_assert (ipa_p ());
      gcc_checking_assert (other >= 0);
      return initialized_p () && m_val < (uint64_t) other;
    }

  bool operator> (const gcov_type other) const
    {
      gcc_checking_assert (ipa_p ());
      gcc_checking_assert (other >= 0);
      return initialized_p () && m_val > (uint64_t) other;
    }
  bool operator<= (const profile_count &other) const
    {
      if (!initialized_p () || !other.initialized_p ())
	return false;
      if (*this == profile_count::zero ())
	return true;
      if (other == profile_count::zero ())
	return (*this == profile_count::zero ());
      gcc_checking_assert (compatible_p (other));
      return m_val <= other.m_val;
    }

  bool operator>= (const profile_count &other) const
    {
      if (!initialized_p () || !other.initialized_p ())
	return false;
      if (other == profile_count::zero ())
	return true;
      if (*this == profile_count::zero ())
	return !(other == profile_count::zero ());
      gcc_checking_assert (compatible_p (other));
      return m_val >= other.m_val;
    }
  bool operator<= (const gcov_type other) const
    {
      gcc_checking_assert (ipa_p ());
      gcc_checking_assert (other >= 0);
      return initialized_p () && m_val <= (uint64_t) other;
    }

  bool operator>= (const gcov_type other) const
    {
      gcc_checking_assert (ipa_p ());
      gcc_checking_assert (other >= 0);
      return initialized_p () && m_val >= (uint64_t) other;
    }
  /* Return true when value is not zero and can be used for scaling.
     This is different from *this > 0 because that requires the counter to
     be IPA.  */
  bool nonzero_p () const
    {
      return initialized_p () && m_val != 0;
    }
  /* Make counter forcibly nonzero.  */
  profile_count force_nonzero () const
    {
      if (!initialized_p ())
	return *this;
      profile_count ret = *this;
      if (ret.m_val == 0)
	ret.m_val = 1;
      return ret;
    }
  profile_count max (profile_count other) const
    {
      if (!initialized_p ())
	return other;
      if (!other.initialized_p ())
	return *this;
      if (*this == profile_count::zero ())
	return other;
      if (other == profile_count::zero ())
	return *this;
      gcc_checking_assert (compatible_p (other));
      if (m_val < other.m_val || (m_val == other.m_val
				  && m_quality < other.m_quality))
	return other;
      return *this;
    }
  /* PROB is a probability in scale 0...REG_BR_PROB_BASE.  Scale counter
     accordingly.  */
  profile_count apply_probability (int prob) const
    {
      gcc_checking_assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
      if (!initialized_p ())
	return profile_count::uninitialized ();
      profile_count ret;
      ret.m_val = RDIV (m_val * prob, REG_BR_PROB_BASE);
      ret.m_quality = MIN (m_quality, profile_adjusted);
      return ret;
    }
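  /* For example (illustration only), a count of 1000 combined with
     PROB == REG_BR_PROB_BASE / 4 yields a count of 250, with the quality
     capped at ADJUSTED.  */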
  /* Scale counter according to PROB.  */
  profile_count apply_probability (profile_probability prob) const
    {
      if (*this == profile_count::zero ())
	return *this;
      if (prob == profile_probability::never ())
	return profile_count::zero ();
      if (!initialized_p ())
	return profile_count::uninitialized ();
      profile_count ret;
      uint64_t tmp;
      safe_scale_64bit (m_val, prob.m_val, profile_probability::max_probability,
			&tmp);
      ret.m_val = tmp;
      ret.m_quality = MIN (m_quality, prob.m_quality);
      return ret;
    }
  /* Return *THIS * NUM / DEN.  */
  profile_count apply_scale (int64_t num, int64_t den) const
    {
      if (!initialized_p ())
	return profile_count::uninitialized ();
      profile_count ret;
      uint64_t tmp;

      gcc_checking_assert (num >= 0 && den > 0);
      safe_scale_64bit (m_val, num, den, &tmp);
      ret.m_val = MIN (tmp, max_count);
      ret.m_quality = MIN (m_quality, profile_adjusted);
      return ret;
    }
  profile_count apply_scale (profile_count num, profile_count den) const
    {
      if (*this == profile_count::zero ())
	return *this;
      if (num == profile_count::zero ())
	return num;
      if (!initialized_p () || !num.initialized_p () || !den.initialized_p ())
	return profile_count::uninitialized ();
      gcc_checking_assert (den.m_val);

      profile_count ret;
      uint64_t val;
      safe_scale_64bit (m_val, num.m_val, den.m_val, &val);
      ret.m_val = MIN (val, max_count);
      ret.m_quality = MIN (MIN (MIN (m_quality, profile_adjusted),
				num.m_quality), den.m_quality);
      if (num.ipa_p () && !ret.ipa_p ())
	ret.m_quality = MIN (num.m_quality, profile_guessed);
      return ret;
    }
  /* Return THIS with quality dropped to GUESSED_LOCAL.  */
  profile_count guessed_local () const
    {
      profile_count ret = *this;
      if (!initialized_p ())
	return *this;
      ret.m_quality = profile_guessed_local;
      return ret;
    }

  /* We know that profile is globally 0 but keep local profile if present.  */
  profile_count global0 () const
    {
      profile_count ret = *this;
      if (!initialized_p ())
	return *this;
      ret.m_quality = profile_guessed_global0;
      return ret;
    }

  /* We know that profile is globally adjusted 0 but keep local profile
     if present.  */
  profile_count global0adjusted () const
    {
      profile_count ret = *this;
      if (!initialized_p ())
	return *this;
      ret.m_quality = profile_guessed_global0adjusted;
      return ret;
    }
  /* Return THIS with quality dropped to GUESSED.  */
  profile_count guessed () const
    {
      profile_count ret = *this;
      ret.m_quality = MIN (ret.m_quality, profile_guessed);
      return ret;
    }
  /* Return variant of profile count which is always safe to compare
     across functions.  */
  profile_count ipa () const
    {
      if (m_quality > profile_guessed_global0adjusted)
	return *this;
      if (m_quality == profile_guessed_global0)
	return profile_count::zero ();
      if (m_quality == profile_guessed_global0adjusted)
	return profile_count::adjusted_zero ();
      return profile_count::uninitialized ();
    }
  /* Return THIS with quality dropped to AFDO.  */
  profile_count afdo () const
    {
      profile_count ret = *this;
      ret.m_quality = profile_afdo;
      return ret;
    }
  /* Return probability of event with counter THIS within event with counter
     OVERALL.  */
  profile_probability probability_in (const profile_count overall) const
    {
      if (*this == profile_count::zero ())
	return profile_probability::never ();
      if (!initialized_p () || !overall.initialized_p ()
	  || !overall.m_val)
	return profile_probability::uninitialized ();
      profile_probability ret;
      gcc_checking_assert (compatible_p (overall));

      if (overall.m_val < m_val)
	ret.m_val = profile_probability::max_probability;
      else
	ret.m_val = RDIV (m_val * profile_probability::max_probability,
			  overall.m_val);
      ret.m_quality = MAX (MIN (m_quality, overall.m_quality), profile_guessed);
      return ret;
    }
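  /* For example (illustration only), a count of 300 within an OVERALL count
     of 1000 yields a probability of roughly 30%; when OVERALL is smaller
     than *THIS the result is capped at 1, and the resulting quality is
     never lower than GUESSED.  */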
  int to_frequency (struct function *fun) const;
  int to_cgraph_frequency (profile_count entry_bb_count) const;
  sreal to_sreal_scale (profile_count in, bool *known = NULL) const;

  /* Output THIS to F.  */
  void dump (FILE *f) const;

  /* Print THIS to stderr.  */
  void debug () const;

  /* Return true if THIS is known to differ significantly from OTHER.  */
  bool differs_from_p (profile_count other) const;
  /* We want to scale profile across function boundary from NUM to DEN.
     Take care of the side case when NUM and DEN are zeros of incompatible
     kinds.  */
  static void adjust_for_ipa_scaling (profile_count *num, profile_count *den);

  /* THIS is a count of bb which is known to be executed IPA times.
     Combine this information into bb counter.  This means returning IPA
     if it is nonzero, not changing anything if IPA is uninitialized
     and if IPA is zero, turning THIS into the corresponding local profile
     with global0.  */
  profile_count combine_with_ipa_count (profile_count ipa);
  /* The profiling runtime uses gcov_type, which is usually a 64bit integer.
     Conversions back and forth are used to read the coverage and get it
     into internal representation.  */
  static profile_count from_gcov_type (gcov_type v);

  /* LTO streaming support.  */
  static profile_count stream_in (struct lto_input_block *);
  void stream_out (struct output_block *);
  void stream_out (struct lto_output_stream *);
};

#endif