1 /* Profile counter container type.
2 Copyright (C) 2017-2019 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #ifndef GCC_PROFILE_COUNT_H
22 #define GCC_PROFILE_COUNT_H
/* Quality of the profile count.  Because gengtype does not support enums
   inside of classes, this is in global namespace.  */
enum profile_quality
{
  /* Uninitialized value.  */
  profile_uninitialized,

  /* Profile is based on static branch prediction heuristics and may
     or may not match reality.  It is local to function and cannot be compared
     inter-procedurally.  Never used by probabilities (they are always
     local).  */
  profile_guessed_local,

  /* Profile was read by feedback and was 0, we used local heuristics to guess
     better.  This is the case of functions not run in profile feedback.
     Never used by probabilities.  */
  profile_guessed_global0,

  /* Same as profile_guessed_global0 but global count is adjusted 0.  */
  profile_guessed_global0adjusted,

  /* Profile is based on static branch prediction heuristics.  It may or may
     not reflect the reality but it can be compared interprocedurally
     (for example, we inlined function w/o profile feedback into function
     with feedback and propagated from that).
     Never used by probabilities.  */
  profile_guessed,

  /* Profile was determined by autofdo.  */
  profile_afdo,

  /* Profile was originally based on feedback but it was adjusted
     by code duplicating optimization.  It may not precisely reflect the
     particular code path.  */
  profile_adjusted,

  /* Profile was read from profile feedback or determined by accurate static
     method.  */
  profile_precise
};
62 extern const char *profile_quality_as_string (enum profile_quality
);
/* The base value for branch probability notes and edge probabilities.  */
#define REG_BR_PROB_BASE  10000

/* Rounded integer division: (X + Y/2) / Y.
   NOTE: Y is evaluated twice, so arguments must be side-effect free.  */
#define RDIV(X,Y) (((X) + (Y) / 2) / (Y))
/* Out-of-line fallback for safe_scale_64bit: compute
   *RES = (A * B + C/2) / C with capping, returning false if overflow
   happened (see the comment on safe_scale_64bit below).  */
bool slow_safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res);
/* Compute RES=(a*b + c/2)/c capping and return false if overflow happened.  */

bool slow_safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res);

inline bool
safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res)
{
#if (GCC_VERSION >= 5000)
  uint64_t tmp;
  /* Fast path: use the compiler's overflow-checking builtins so a*b + c/2
     is detected to wrap without any range pre-checks.  */
  if (!__builtin_mul_overflow (a, b, &tmp)
      && !__builtin_add_overflow (tmp, c / 2, &tmp))
    {
      *res = tmp / c;
      return true;
    }
  /* Overflow: cap the result at the maximal value.  */
  *res = (uint64_t) -1;
  return false;
#else
  /* Without the builtins, prove no overflow by range: if all operands fit
     in 31 bits, a*b + c/2 fits in 64 bits.  */
  if (a < ((uint64_t)1 << 31)
      && b < ((uint64_t)1 << 31)
      && c < ((uint64_t)1 << 31))
    {
      *res = (a * b + (c / 2)) / c;
      return true;
    }
  /* Large operands: defer to the out-of-line slow path.  */
  return slow_safe_scale_64bit (a, b, c, res);
#endif
}
101 /* Data type to hold probabilities. It implements fixed point arithmetics
102 with capping so probability is always in range [0,1] and scaling requiring
103 values greater than 1 needs to be represented otherwise.
105 In addition to actual value the quality of profile is tracked and propagated
106 through all operations. Special value UNINITIALIZED is used for probabilities
107 that have not been determined yet (for example because of
108 -fno-guess-branch-probability)
110 Typically probabilities are derived from profile feedback (via
111 probability_in_gcov_type), autoFDO or guessed statically and then propagated
112 thorough the compilation.
114 Named probabilities are available:
115 - never (0 probability)
117 - very_unlikely (1/2000 probability)
118 - unlikely (1/5 probability)
119 - even (1/2 probability)
120 - likely (4/5 probability)
121 - very_likely (1999/2000 probability)
125 Named probabilities except for never/always are assumed to be statically
126 guessed and thus not necessarily accurate. The difference between never
127 and guessed_never is that the first one should be used only in case that
128 well behaving program will very likely not execute the "never" path.
129 For example if the path is going to abort () call or it exception handling.
131 Always and guessed_always probabilities are symmetric.
133 For legacy code we support conversion to/from REG_BR_PROB_BASE based fixpoint
134 integer arithmetics. Once the code is converted to branch probabilities,
135 these conversions will probably go away because they are lossy.
138 class GTY((user
)) profile_probability
140 static const int n_bits
= 29;
141 /* We can technically use ((uint32_t) 1 << (n_bits - 1)) - 2 but that
142 will lead to harder multiplication sequences. */
143 static const uint32_t max_probability
= (uint32_t) 1 << (n_bits
- 2);
144 static const uint32_t uninitialized_probability
145 = ((uint32_t) 1 << (n_bits
- 1)) - 1;
148 enum profile_quality m_quality
: 3;
150 friend class profile_count
;
153 /* Named probabilities. */
154 static profile_probability
never ()
156 profile_probability ret
;
158 ret
.m_quality
= profile_precise
;
161 static profile_probability
guessed_never ()
163 profile_probability ret
;
165 ret
.m_quality
= profile_guessed
;
168 static profile_probability
very_unlikely ()
170 /* Be consistent with PROB_VERY_UNLIKELY in predict.h. */
171 profile_probability r
172 = profile_probability::guessed_always ().apply_scale (1, 2000);
176 static profile_probability
unlikely ()
178 /* Be consistent with PROB_UNLIKELY in predict.h. */
179 profile_probability r
180 = profile_probability::guessed_always ().apply_scale (1, 5);
184 static profile_probability
even ()
186 return profile_probability::guessed_always ().apply_scale (1, 2);
188 static profile_probability
very_likely ()
190 return profile_probability::always () - very_unlikely ();
192 static profile_probability
likely ()
194 return profile_probability::always () - unlikely ();
196 static profile_probability
guessed_always ()
198 profile_probability ret
;
199 ret
.m_val
= max_probability
;
200 ret
.m_quality
= profile_guessed
;
203 static profile_probability
always ()
205 profile_probability ret
;
206 ret
.m_val
= max_probability
;
207 ret
.m_quality
= profile_precise
;
210 /* Probabilities which has not been initialized. Either because
211 initialization did not happen yet or because profile is unknown. */
212 static profile_probability
uninitialized ()
214 profile_probability c
;
215 c
.m_val
= uninitialized_probability
;
216 c
.m_quality
= profile_guessed
;
221 /* Return true if value has been initialized. */
222 bool initialized_p () const
224 return m_val
!= uninitialized_probability
;
226 /* Return true if value can be trusted. */
227 bool reliable_p () const
229 return m_quality
>= profile_adjusted
;
232 /* Conversion from and to REG_BR_PROB_BASE integer fixpoint arithmetics.
233 this is mostly to support legacy code and should go away. */
234 static profile_probability
from_reg_br_prob_base (int v
)
236 profile_probability ret
;
237 gcc_checking_assert (v
>= 0 && v
<= REG_BR_PROB_BASE
);
238 ret
.m_val
= RDIV (v
* (uint64_t) max_probability
, REG_BR_PROB_BASE
);
239 ret
.m_quality
= profile_guessed
;
242 int to_reg_br_prob_base () const
244 gcc_checking_assert (initialized_p ());
245 return RDIV (m_val
* (uint64_t) REG_BR_PROB_BASE
, max_probability
);
248 /* Conversion to and from RTL representation of profile probabilities. */
249 static profile_probability
from_reg_br_prob_note (int v
)
251 profile_probability ret
;
252 ret
.m_val
= ((unsigned int)v
) / 8;
253 ret
.m_quality
= (enum profile_quality
)(v
& 7);
256 int to_reg_br_prob_note () const
258 gcc_checking_assert (initialized_p ());
259 int ret
= m_val
* 8 + m_quality
;
260 gcc_checking_assert (profile_probability::from_reg_br_prob_note (ret
)
265 /* Return VAL1/VAL2. */
266 static profile_probability probability_in_gcov_type
267 (gcov_type val1
, gcov_type val2
)
269 profile_probability ret
;
270 gcc_checking_assert (val1
>= 0 && val2
> 0);
272 ret
.m_val
= max_probability
;
276 safe_scale_64bit (val1
, max_probability
, val2
, &tmp
);
277 gcc_checking_assert (tmp
<= max_probability
);
280 ret
.m_quality
= profile_precise
;
284 /* Basic operations. */
285 bool operator== (const profile_probability
&other
) const
287 return m_val
== other
.m_val
&& m_quality
== other
.m_quality
;
289 profile_probability
operator+ (const profile_probability
&other
) const
291 if (other
== profile_probability::never ())
293 if (*this == profile_probability::never ())
295 if (!initialized_p () || !other
.initialized_p ())
296 return profile_probability::uninitialized ();
298 profile_probability ret
;
299 ret
.m_val
= MIN ((uint32_t)(m_val
+ other
.m_val
), max_probability
);
300 ret
.m_quality
= MIN (m_quality
, other
.m_quality
);
303 profile_probability
&operator+= (const profile_probability
&other
)
305 if (other
== profile_probability::never ())
307 if (*this == profile_probability::never ())
312 if (!initialized_p () || !other
.initialized_p ())
313 return *this = profile_probability::uninitialized ();
316 m_val
= MIN ((uint32_t)(m_val
+ other
.m_val
), max_probability
);
317 m_quality
= MIN (m_quality
, other
.m_quality
);
321 profile_probability
operator- (const profile_probability
&other
) const
323 if (*this == profile_probability::never ()
324 || other
== profile_probability::never ())
326 if (!initialized_p () || !other
.initialized_p ())
327 return profile_probability::uninitialized ();
328 profile_probability ret
;
329 ret
.m_val
= m_val
>= other
.m_val
? m_val
- other
.m_val
: 0;
330 ret
.m_quality
= MIN (m_quality
, other
.m_quality
);
333 profile_probability
&operator-= (const profile_probability
&other
)
335 if (*this == profile_probability::never ()
336 || other
== profile_probability::never ())
338 if (!initialized_p () || !other
.initialized_p ())
339 return *this = profile_probability::uninitialized ();
342 m_val
= m_val
>= other
.m_val
? m_val
- other
.m_val
: 0;
343 m_quality
= MIN (m_quality
, other
.m_quality
);
347 profile_probability
operator* (const profile_probability
&other
) const
349 if (*this == profile_probability::never ()
350 || other
== profile_probability::never ())
351 return profile_probability::never ();
352 if (!initialized_p () || !other
.initialized_p ())
353 return profile_probability::uninitialized ();
354 profile_probability ret
;
355 ret
.m_val
= RDIV ((uint64_t)m_val
* other
.m_val
, max_probability
);
356 ret
.m_quality
= MIN (MIN (m_quality
, other
.m_quality
), profile_adjusted
);
359 profile_probability
&operator*= (const profile_probability
&other
)
361 if (*this == profile_probability::never ()
362 || other
== profile_probability::never ())
363 return *this = profile_probability::never ();
364 if (!initialized_p () || !other
.initialized_p ())
365 return *this = profile_probability::uninitialized ();
368 m_val
= RDIV ((uint64_t)m_val
* other
.m_val
, max_probability
);
369 m_quality
= MIN (MIN (m_quality
, other
.m_quality
), profile_adjusted
);
373 profile_probability
operator/ (const profile_probability
&other
) const
375 if (*this == profile_probability::never ())
376 return profile_probability::never ();
377 if (!initialized_p () || !other
.initialized_p ())
378 return profile_probability::uninitialized ();
379 profile_probability ret
;
380 /* If we get probability above 1, mark it as unreliable and return 1. */
381 if (m_val
>= other
.m_val
)
383 ret
.m_val
= max_probability
;
384 ret
.m_quality
= MIN (MIN (m_quality
, other
.m_quality
),
392 gcc_checking_assert (other
.m_val
);
393 ret
.m_val
= MIN (RDIV ((uint64_t)m_val
* max_probability
,
397 ret
.m_quality
= MIN (MIN (m_quality
, other
.m_quality
), profile_adjusted
);
400 profile_probability
&operator/= (const profile_probability
&other
)
402 if (*this == profile_probability::never ())
403 return *this = profile_probability::never ();
404 if (!initialized_p () || !other
.initialized_p ())
405 return *this = profile_probability::uninitialized ();
408 /* If we get probability above 1, mark it as unreliable
410 if (m_val
> other
.m_val
)
412 m_val
= max_probability
;
413 m_quality
= MIN (MIN (m_quality
, other
.m_quality
),
421 gcc_checking_assert (other
.m_val
);
422 m_val
= MIN (RDIV ((uint64_t)m_val
* max_probability
,
426 m_quality
= MIN (MIN (m_quality
, other
.m_quality
), profile_adjusted
);
431 /* Split *THIS (ORIG) probability into 2 probabilities, such that
432 the returned one (FIRST) is *THIS * CPROB and *THIS is
433 adjusted (SECOND) so that FIRST + FIRST.invert () * SECOND
434 == ORIG. This is useful e.g. when splitting a conditional
437 goto lab; // ORIG probability
440 goto lab; // FIRST = ORIG * CPROB probability
442 goto lab; // SECOND probability
443 such that the overall probability of jumping to lab remains
444 the same. CPROB gives the relative probability between the
446 profile_probability
split (const profile_probability
&cprob
)
448 profile_probability ret
= *this * cprob
;
449 /* The following is equivalent to:
450 *this = cprob.invert () * *this / ret.invert ();
451 Avoid scaling when overall outcome is supposed to be always.
452 Without knowing that one is the inverse of the other, the result would be
454 if (!(*this == profile_probability::always ()))
455 *this = (*this - ret
) / ret
.invert ();
459 gcov_type
apply (gcov_type val
) const
461 if (*this == profile_probability::uninitialized ())
463 return RDIV (val
* m_val
, max_probability
);
466 /* Return 1-*THIS. */
467 profile_probability
invert () const
469 return profile_probability::always() - *this;
472 /* Return THIS with quality dropped to GUESSED. */
473 profile_probability
guessed () const
475 profile_probability ret
= *this;
476 ret
.m_quality
= profile_guessed
;
480 /* Return THIS with quality dropped to AFDO. */
481 profile_probability
afdo () const
483 profile_probability ret
= *this;
484 ret
.m_quality
= profile_afdo
;
488 /* Return *THIS * NUM / DEN. */
489 profile_probability
apply_scale (int64_t num
, int64_t den
) const
491 if (*this == profile_probability::never ())
493 if (!initialized_p ())
494 return profile_probability::uninitialized ();
495 profile_probability ret
;
497 safe_scale_64bit (m_val
, num
, den
, &tmp
);
498 ret
.m_val
= MIN (tmp
, max_probability
);
499 ret
.m_quality
= MIN (m_quality
, profile_adjusted
);
503 /* Return true when the probability of edge is reliable.
505 The profile guessing code is good at predicting branch outcome (ie.
506 taken/not taken), that is predicted right slightly over 75% of time.
507 It is however notoriously poor on predicting the probability itself.
508 In general the profile appear a lot flatter (with probabilities closer
509 to 50%) than the reality so it is bad idea to use it to drive optimization
510 such as those disabling dynamic branch prediction for well predictable
513 There are two exceptions - edges leading to noreturn edges and edges
514 predicted by number of iterations heuristics are predicted well. This macro
515 should be able to distinguish those, but at the moment it simply checks for
516 the noreturn heuristic, which is the only one giving probability over 99% or below
517 1%. In future we might want to propagate reliability information across the
518 CFG if we find this information useful on multiple places. */
520 bool probably_reliable_p () const
522 if (m_quality
>= profile_adjusted
)
524 if (!initialized_p ())
526 return m_val
< max_probability
/ 100
527 || m_val
> max_probability
- max_probability
/ 100;
530 /* Return false if profile_probability is bogus. */
533 gcc_checking_assert (m_quality
!= profile_uninitialized
);
534 if (m_val
== uninitialized_probability
)
535 return m_quality
== profile_guessed
;
536 else if (m_quality
< profile_guessed
)
538 return m_val
<= max_probability
;
541 /* Comparisons are three-state and conservative. False is returned if
542 the inequality cannot be decided. */
543 bool operator< (const profile_probability
&other
) const
545 return initialized_p () && other
.initialized_p () && m_val
< other
.m_val
;
547 bool operator> (const profile_probability
&other
) const
549 return initialized_p () && other
.initialized_p () && m_val
> other
.m_val
;
552 bool operator<= (const profile_probability
&other
) const
554 return initialized_p () && other
.initialized_p () && m_val
<= other
.m_val
;
556 bool operator>= (const profile_probability
&other
) const
558 return initialized_p () && other
.initialized_p () && m_val
>= other
.m_val
;
561 /* Output THIS to F. */
562 void dump (FILE *f
) const;
564 /* Print THIS to stderr. */
567 /* Return true if THIS is known to differ significantly from OTHER. */
568 bool differs_from_p (profile_probability other
) const;
569 /* Return if difference is greater than 50%. */
570 bool differs_lot_from_p (profile_probability other
) const;
571 /* COUNT1 times event happens with *THIS probability, COUNT2 times OTHER
572 happens with COUNT2 probability. Return the probability that either *THIS or
574 profile_probability
combine_with_count (profile_count count1
,
575 profile_probability other
,
576 profile_count count2
) const;
578 /* LTO streaming support. */
579 static profile_probability
stream_in (struct lto_input_block
*);
580 void stream_out (struct output_block
*);
581 void stream_out (struct lto_output_stream
*);
584 /* Main data type to hold profile counters in GCC. Profile counts originate
585 either from profile feedback, static profile estimation or both. We do not
586 perform whole program profile propagation and thus profile estimation
587 counters are often local to function, while counters from profile feedback
588 (or special cases of profile estimation) can be used inter-procedurally.
590 There are 3 basic types
591 1) local counters which are result of intra-procedural static profile
593 2) ipa counters which are result of profile feedback or special case
594 of static profile estimation (such as in function main).
595 3) counters which count as 0 inter-procedurally (because the given function
596 was never run in train feedback) but they hold local static profile
599 Counters of type 1 and 3 cannot be mixed with counters of different type
600 within operation (because whole function should use one type of counter)
601 with exception that global zero mix in most operations where outcome is
604 To take local counter and use it inter-procedurally use ipa member function
605 which strips information irrelevant at the inter-procedural level.
607 Counters are 61bit integers representing number of executions during the
608 train run or normalized frequency within the function.
610 As the profile is maintained during the compilation, many adjustments are
611 made. Not all transformations can be made precisely, most importantly
612 when code is being duplicated. It also may happen that part of CFG has
613 profile counts known while other do not - for example when LTO optimizing
614 partly profiled program or when profile was lost due to COMDAT merging.
616 For this reason profile_count tracks more information than
617 just unsigned integer and it is also ready for profile mismatches.
618 The API of this data type represent operations that are natural
619 on profile counts - sum, difference and operation with scales and
620 probabilities. All operations are safe by never getting negative counts
621 and they do end up in uninitialized scale if any of the parameters is
624 All comparsions that are three state and handling of probabilities. Thus
625 a < b is not equal to !(a >= b).
627 The following pre-defined counts are available:
629 profile_count::zero () for code that is known to execute zero times at
630 runtime (this can be detected statically i.e. for paths leading to
632 profile_count::one () for code that is known to execute once (such as
634 profile_count::uninitialized () for unknown execution count.
640 class GTY(()) profile_count
643 /* Use 62bit to hold basic block counters. Should be at least
644 64bit. Although a counter cannot be negative, we use a signed
645 type to hold various extra stages. */
647 static const int n_bits
= 61;
648 static const uint64_t max_count
= ((uint64_t) 1 << n_bits
) - 2;
650 static const uint64_t uninitialized_count
= ((uint64_t) 1 << n_bits
) - 1;
652 #if defined (__arm__) && (__GNUC__ >= 6 && __GNUC__ <= 8)
653 /* Work-around for PR88469. A bug in the gcc-6/7/8 PCS layout code
654 incorrectly detects the alignment of a structure where the only
655 64-bit aligned object is a bit-field. We force the alignment of
656 the entire field to mitigate this. */
657 #define UINT64_BIT_FIELD_ALIGN __attribute__ ((aligned(8)))
659 #define UINT64_BIT_FIELD_ALIGN
661 uint64_t UINT64_BIT_FIELD_ALIGN m_val
: n_bits
;
662 #undef UINT64_BIT_FIELD_ALIGN
663 enum profile_quality m_quality
: 3;
665 /* Return true if both values can meaningfully appear in single function
666 body. We have either all counters in function local or global, otherwise
667 operations between them are not really defined well. */
668 bool compatible_p (const profile_count other
) const
670 if (!initialized_p () || !other
.initialized_p ())
672 if (*this == profile_count::zero ()
673 || other
== profile_count::zero ())
675 return ipa_p () == other
.ipa_p ();
679 /* Used for counters which are expected to be never executed. */
680 static profile_count
zero ()
682 return from_gcov_type (0);
684 static profile_count
adjusted_zero ()
688 c
.m_quality
= profile_adjusted
;
691 static profile_count
guessed_zero ()
695 c
.m_quality
= profile_guessed
;
698 static profile_count
one ()
700 return from_gcov_type (1);
702 /* Value of counters which has not been initialized. Either because
703 initialization did not happen yet or because profile is unknown. */
704 static profile_count
uninitialized ()
707 c
.m_val
= uninitialized_count
;
708 c
.m_quality
= profile_guessed_local
;
712 /* Conversion to gcov_type is lossy. */
713 gcov_type
to_gcov_type () const
715 gcc_checking_assert (initialized_p ());
719 /* Return true if value has been initialized. */
720 bool initialized_p () const
722 return m_val
!= uninitialized_count
;
724 /* Return true if value can be trusted. */
725 bool reliable_p () const
727 return m_quality
>= profile_adjusted
;
729 /* Return true if value can be operated on inter-procedurally. */
732 return !initialized_p () || m_quality
>= profile_guessed_global0
;
734 /* Return true if quality of profile is precise. */
735 bool precise_p () const
737 return m_quality
== profile_precise
;
740 /* Get the quality of the count. */
741 enum profile_quality
quality () const { return m_quality
; }
743 /* When merging basic blocks, the two different profile counts are unified.
744 Return true if this can be done without losing info about profile.
745 The only case we care about here is when first BB contains something
746 that makes it terminate in a way not visible in CFG. */
747 bool ok_for_merging (profile_count other
) const
749 if (m_quality
< profile_adjusted
750 || other
.m_quality
< profile_adjusted
)
752 return !(other
< *this);
755 /* When merging two BBs with different counts, pick common count that looks
756 most representative. */
757 profile_count
merge (profile_count other
) const
759 if (*this == other
|| !other
.initialized_p ()
760 || m_quality
> other
.m_quality
)
762 if (other
.m_quality
> m_quality
768 /* Basic operations. */
769 bool operator== (const profile_count
&other
) const
771 return m_val
== other
.m_val
&& m_quality
== other
.m_quality
;
773 profile_count
operator+ (const profile_count
&other
) const
775 if (other
== profile_count::zero ())
777 if (*this == profile_count::zero ())
779 if (!initialized_p () || !other
.initialized_p ())
780 return profile_count::uninitialized ();
783 gcc_checking_assert (compatible_p (other
));
784 ret
.m_val
= m_val
+ other
.m_val
;
785 ret
.m_quality
= MIN (m_quality
, other
.m_quality
);
788 profile_count
&operator+= (const profile_count
&other
)
790 if (other
== profile_count::zero ())
792 if (*this == profile_count::zero ())
797 if (!initialized_p () || !other
.initialized_p ())
798 return *this = profile_count::uninitialized ();
801 gcc_checking_assert (compatible_p (other
));
802 m_val
+= other
.m_val
;
803 m_quality
= MIN (m_quality
, other
.m_quality
);
807 profile_count
operator- (const profile_count
&other
) const
809 if (*this == profile_count::zero () || other
== profile_count::zero ())
811 if (!initialized_p () || !other
.initialized_p ())
812 return profile_count::uninitialized ();
813 gcc_checking_assert (compatible_p (other
));
815 ret
.m_val
= m_val
>= other
.m_val
? m_val
- other
.m_val
: 0;
816 ret
.m_quality
= MIN (m_quality
, other
.m_quality
);
819 profile_count
&operator-= (const profile_count
&other
)
821 if (*this == profile_count::zero () || other
== profile_count::zero ())
823 if (!initialized_p () || !other
.initialized_p ())
824 return *this = profile_count::uninitialized ();
827 gcc_checking_assert (compatible_p (other
));
828 m_val
= m_val
>= other
.m_val
? m_val
- other
.m_val
: 0;
829 m_quality
= MIN (m_quality
, other
.m_quality
);
834 /* Return false if profile_count is bogus. */
837 gcc_checking_assert (m_quality
!= profile_uninitialized
);
838 return m_val
!= uninitialized_count
|| m_quality
== profile_guessed_local
;
841 /* Comparisons are three-state and conservative. False is returned if
842 the inequality cannot be decided. */
843 bool operator< (const profile_count
&other
) const
845 if (!initialized_p () || !other
.initialized_p ())
847 if (*this == profile_count::zero ())
848 return !(other
== profile_count::zero ());
849 if (other
== profile_count::zero ())
851 gcc_checking_assert (compatible_p (other
));
852 return m_val
< other
.m_val
;
854 bool operator> (const profile_count
&other
) const
856 if (!initialized_p () || !other
.initialized_p ())
858 if (*this == profile_count::zero ())
860 if (other
== profile_count::zero ())
861 return !(*this == profile_count::zero ());
862 gcc_checking_assert (compatible_p (other
));
863 return initialized_p () && other
.initialized_p () && m_val
> other
.m_val
;
865 bool operator< (const gcov_type other
) const
867 gcc_checking_assert (ipa_p ());
868 gcc_checking_assert (other
>= 0);
869 return initialized_p () && m_val
< (uint64_t) other
;
871 bool operator> (const gcov_type other
) const
873 gcc_checking_assert (ipa_p ());
874 gcc_checking_assert (other
>= 0);
875 return initialized_p () && m_val
> (uint64_t) other
;
878 bool operator<= (const profile_count
&other
) const
880 if (!initialized_p () || !other
.initialized_p ())
882 if (*this == profile_count::zero ())
884 if (other
== profile_count::zero ())
885 return (*this == profile_count::zero ());
886 gcc_checking_assert (compatible_p (other
));
887 return m_val
<= other
.m_val
;
889 bool operator>= (const profile_count
&other
) const
891 if (!initialized_p () || !other
.initialized_p ())
893 if (other
== profile_count::zero ())
895 if (*this == profile_count::zero ())
896 return (other
== profile_count::zero ());
897 gcc_checking_assert (compatible_p (other
));
898 return m_val
>= other
.m_val
;
900 bool operator<= (const gcov_type other
) const
902 gcc_checking_assert (ipa_p ());
903 gcc_checking_assert (other
>= 0);
904 return initialized_p () && m_val
<= (uint64_t) other
;
906 bool operator>= (const gcov_type other
) const
908 gcc_checking_assert (ipa_p ());
909 gcc_checking_assert (other
>= 0);
910 return initialized_p () && m_val
>= (uint64_t) other
;
912 /* Return true when value is not zero and can be used for scaling.
913 This is different from *this > 0 because that requires counter to
915 bool nonzero_p () const
917 return initialized_p () && m_val
!= 0;
920 /* Make counter forcingly nonzero. */
921 profile_count
force_nonzero () const
923 if (!initialized_p ())
925 profile_count ret
= *this;
929 ret
.m_quality
= MIN (m_quality
, profile_adjusted
);
934 profile_count
max (profile_count other
) const
936 if (!initialized_p ())
938 if (!other
.initialized_p ())
940 if (*this == profile_count::zero ())
942 if (other
== profile_count::zero ())
944 gcc_checking_assert (compatible_p (other
));
945 if (m_val
< other
.m_val
|| (m_val
== other
.m_val
946 && m_quality
< other
.m_quality
))
951 /* PROB is a probability in scale 0...REG_BR_PROB_BASE. Scale counter
953 profile_count
apply_probability (int prob
) const
955 gcc_checking_assert (prob
>= 0 && prob
<= REG_BR_PROB_BASE
);
958 if (!initialized_p ())
959 return profile_count::uninitialized ();
961 ret
.m_val
= RDIV (m_val
* prob
, REG_BR_PROB_BASE
);
962 ret
.m_quality
= MIN (m_quality
, profile_adjusted
);
966 /* Scale counter according to PROB. */
967 profile_count
apply_probability (profile_probability prob
) const
969 if (*this == profile_count::zero ())
971 if (prob
== profile_probability::never ())
972 return profile_count::zero ();
973 if (!initialized_p ())
974 return profile_count::uninitialized ();
977 safe_scale_64bit (m_val
, prob
.m_val
, profile_probability::max_probability
,
980 ret
.m_quality
= MIN (m_quality
, prob
.m_quality
);
983 /* Return *THIS * NUM / DEN. */
984 profile_count
apply_scale (int64_t num
, int64_t den
) const
988 if (!initialized_p ())
989 return profile_count::uninitialized ();
993 gcc_checking_assert (num
>= 0 && den
> 0);
994 safe_scale_64bit (m_val
, num
, den
, &tmp
);
995 ret
.m_val
= MIN (tmp
, max_count
);
996 ret
.m_quality
= MIN (m_quality
, profile_adjusted
);
999 profile_count
apply_scale (profile_count num
, profile_count den
) const
1001 if (*this == profile_count::zero ())
1003 if (num
== profile_count::zero ())
1005 if (!initialized_p () || !num
.initialized_p () || !den
.initialized_p ())
1006 return profile_count::uninitialized ();
1009 gcc_checking_assert (den
.m_val
);
1013 safe_scale_64bit (m_val
, num
.m_val
, den
.m_val
, &val
);
1014 ret
.m_val
= MIN (val
, max_count
);
1015 ret
.m_quality
= MIN (MIN (MIN (m_quality
, profile_adjusted
),
1016 num
.m_quality
), den
.m_quality
);
1017 if (num
.ipa_p () && !ret
.ipa_p ())
1018 ret
.m_quality
= MIN (num
.m_quality
, profile_guessed
);
1022 /* Return THIS with quality dropped to GUESSED_LOCAL. */
1023 profile_count
guessed_local () const
1025 profile_count ret
= *this;
1026 if (!initialized_p ())
1028 ret
.m_quality
= profile_guessed_local
;
1032 /* We know that profile is globally 0 but keep local profile if present. */
1033 profile_count
global0 () const
1035 profile_count ret
= *this;
1036 if (!initialized_p ())
1038 ret
.m_quality
= profile_guessed_global0
;
1042 /* We know that profile is globally adjusted 0 but keep local profile
1044 profile_count
global0adjusted () const
1046 profile_count ret
= *this;
1047 if (!initialized_p ())
1049 ret
.m_quality
= profile_guessed_global0adjusted
;
1053 /* Return THIS with quality dropped to GUESSED. */
1054 profile_count
guessed () const
1056 profile_count ret
= *this;
1057 ret
.m_quality
= MIN (ret
.m_quality
, profile_guessed
);
1061 /* Return a variant of the profile count which is always safe to compare
1062 across functions. */
1063 profile_count
ipa () const
1065 if (m_quality
> profile_guessed_global0adjusted
)
1067 if (m_quality
== profile_guessed_global0
)
1068 return profile_count::zero ();
1069 if (m_quality
== profile_guessed_global0adjusted
)
1070 return profile_count::adjusted_zero ();
1071 return profile_count::uninitialized ();
1074 /* Return THIS with quality dropped to AFDO. */
1075 profile_count
afdo () const
1077 profile_count ret
= *this;
1078 ret
.m_quality
= profile_afdo
;
1082 /* Return probability of event with counter THIS within event with counter
1084 profile_probability
probability_in (const profile_count overall
) const
1086 if (*this == profile_count::zero ()
1087 && !(overall
== profile_count::zero ()))
1088 return profile_probability::never ();
1089 if (!initialized_p () || !overall
.initialized_p ()
1091 return profile_probability::uninitialized ();
1092 if (*this == overall
&& m_quality
== profile_precise
)
1093 return profile_probability::always ();
1094 profile_probability ret
;
1095 gcc_checking_assert (compatible_p (overall
));
1097 if (overall
.m_val
< m_val
)
1099 ret
.m_val
= profile_probability::max_probability
;
1100 ret
.m_quality
= profile_guessed
;
1104 ret
.m_val
= RDIV (m_val
* profile_probability::max_probability
,
1106 ret
.m_quality
= MIN (MAX (MIN (m_quality
, overall
.m_quality
),
1107 profile_guessed
), profile_adjusted
);
1111 int to_frequency (struct function
*fun
) const;
1112 int to_cgraph_frequency (profile_count entry_bb_count
) const;
1113 sreal
to_sreal_scale (profile_count in
, bool *known
= NULL
) const;
1115 /* Output THIS to F. */
1116 void dump (FILE *f
) const;
1118 /* Print THIS to stderr. */
1119 void debug () const;
1121 /* Return true if THIS is known to differ significantly from OTHER. */
1122 bool differs_from_p (profile_count other
) const;
1124 /* We want to scale profile across function boundary from NUM to DEN.
1125 Take care of the side case when NUM and DEN are zeros of incompatible
1127 static void adjust_for_ipa_scaling (profile_count
*num
, profile_count
*den
);
1129 /* THIS is a count of bb which is known to be executed IPA times.
1130 Combine this information into bb counter. This means returning IPA
1131 if it is nonzero, not changing anything if IPA is uninitialized
1132 and if IPA is zero, turning THIS into corresponding local profile with
1134 profile_count
combine_with_ipa_count (profile_count ipa
);
1136 /* The profiling runtime uses gcov_type, which is usually 64bit integer.
1137 Conversions back and forth are used to read the coverage and get it
1138 into internal representation. */
1139 static profile_count
from_gcov_type (gcov_type v
);
1141 /* LTO streaming support. */
1142 static profile_count
stream_in (struct lto_input_block
*);
1143 void stream_out (struct output_block
*);
1144 void stream_out (struct lto_output_stream
*);