// Copyright (C) 2008-2015 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.
//
// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

// ????????????????????????????????????????????????????????????????????
//
// This is a copy of the libstdc++ header, with the trivial modification
// of ignoring the c++config.h include.  If and when the top-level build is
// fixed so that target libraries can be built using the newly built, we can
// delete this file.
//
// ????????????????????????????????????????????????????????????????????

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#undef __always_inline
#define __always_inline __attribute__((always_inline))

// #pragma GCC system_header

// #ifndef __GXX_EXPERIMENTAL_CXX0X__
// # include <bits/c++0x_warning.h>
// #endif

// #include <bits/atomic_base.h>

namespace std // _GLIBCXX_VISIBILITY(default)
{
// _GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;
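
  // Given a success memory order, compute an order that is legal for the
  // failure side of a compare-and-exchange: strip any release component
  // (release -> relaxed, acq_rel -> acquire) and pass the rest through.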
  inline __always_inline memory_order
  __calculate_memory_order(memory_order __m) noexcept
  {
    const bool __cond1 = __m == memory_order_release;
    const bool __cond2 = __m == memory_order_acq_rel;
    memory_order __mo1(__cond1 ? memory_order_relaxed : __m);
    memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
    return __mo2;
  }

  inline __always_inline void
  atomic_thread_fence(memory_order __m) noexcept
  {
    __atomic_thread_fence (__m);
  }

  inline __always_inline void
  atomic_signal_fence(memory_order __m) noexcept
  {
    __atomic_signal_fence (__m);
  }
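
  // Example (illustrative sketch, not part of the header): a release fence
  // paired with an acquire fence lets a relaxed store/load of a flag publish
  // ordinary data written before the fences.
  //
  //   int payload;
  //   std::atomic<bool> ready(false);
  //
  //   void producer()
  //   {
  //     payload = 42;
  //     std::atomic_thread_fence(std::memory_order_release);
  //     ready.store(true, std::memory_order_relaxed);
  //   }
  //
  //   void consumer()
  //   {
  //     while (!ready.load(std::memory_order_relaxed))
  //       ;
  //     std::atomic_thread_fence(std::memory_order_acquire);
  //     // payload is now guaranteed to be 42 here.
  //   }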

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
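
  // Example (illustrative sketch, assuming ptr is a std::atomic<int*>):
  // kill_dependency ends a memory_order_consume dependency chain; the
  // returned value no longer carries a dependency into later uses.
  //
  //   int* p = ptr.load(std::memory_order_consume);
  //   int i = std::kill_dependency(p[0]);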

  /// Lock-free Property
#define ATOMIC_BOOL_LOCK_FREE     __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE     __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE  __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE    __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE      __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE     __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE    __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE  __GCC_ATOMIC_POINTER_LOCK_FREE

  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

  /// atomic_char
  typedef __atomic_base<char>               atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char>        atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char>      atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short>              atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short>     atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int>                atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int>       atomic_uint;

  /// atomic_long
  typedef __atomic_base<long>               atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long>      atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long>          atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long> atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t>            atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t>           atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t>           atomic_char32_t;

  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>   atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>  atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>  atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t> atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>  atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t> atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>  atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t> atomic_uint_least64_t;

  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>    atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>   atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>   atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>  atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>   atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>  atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>   atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>  atomic_uint_fast64_t;

  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>       atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>      atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>         atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>       atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>      atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>      atomic_ptrdiff_t;

#define ATOMIC_VAR_INIT(_VI) { _VI }

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

  /**
   * @brief Base type for atomic_flag.
   *
   * Base type is POD with data, allowing atomic_flag to derive from
   * it and meet the standard layout type requirement. In addition to
   * compatibility with a C interface, this allows different
   * implementations of atomic_flag to use the same atomic operation
   * functions, via a standard conversion to the __atomic_flag_base
   * argument.
   */
  // _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    bool _M_i;
  };

  // _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { false }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }

    __always_inline bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    __always_inline bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    __always_inline void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      // __glibcxx_assert(__m != memory_order_consume);
      // __glibcxx_assert(__m != memory_order_acquire);
      // __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    __always_inline void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      // __glibcxx_assert(__m != memory_order_consume);
      // __glibcxx_assert(__m != memory_order_acquire);
      // __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
  };
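
  // Example (illustrative sketch, not part of the header): atomic_flag is
  // the building block for a minimal spinlock.
  //
  //   std::atomic_flag lock_ = ATOMIC_FLAG_INIT;
  //
  //   void with_lock(void (*fn)())
  //   {
  //     while (lock_.test_and_set(std::memory_order_acquire))
  //       ;  // spin until the previous owner calls clear()
  //     fn();
  //     lock_.clear(std::memory_order_release);
  //   }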

  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      __always_inline void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        // __glibcxx_assert(__m != memory_order_acquire);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        // __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      __always_inline void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        // __glibcxx_assert(__m != memory_order_acquire);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        // __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      __always_inline __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        // __glibcxx_assert(__m != memory_order_release);
        // __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __always_inline __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        // __glibcxx_assert(__m != memory_order_release);
        // __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __always_inline __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      __always_inline __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      __always_inline bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      __always_inline bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      __always_inline bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      __always_inline bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      __always_inline bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      __always_inline bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      __always_inline bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      __always_inline bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      __always_inline __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __always_inline __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __always_inline __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __always_inline __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __always_inline __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __always_inline __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __always_inline __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __always_inline __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __always_inline __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __always_inline __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
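
  // Example (illustrative sketch, not part of the header): the canonical
  // compare_exchange_weak retry loop, raising an atomic_int to the maximum
  // of its current value and v.  On failure, cur is refreshed with the
  // observed value, so the loop simply retries.
  //
  //   void update_max(std::atomic_int& a, int v)
  //   {
  //     int cur = a.load();
  //     while (cur < v && !a.compare_exchange_weak(cur, v))
  //       ;  // cur was reloaded; try again
  //   }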

  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }
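
      // NB: the GCC __atomic builtins do not scale pointer operands by
      // sizeof(_PTp); arithmetic on pointers is performed in bytes.  The
      // increment and fetch operations below therefore step by bytes, not
      // elements, unlike a conforming std::atomic<T*>.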

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }

      __always_inline void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        // __glibcxx_assert(__m != memory_order_acquire);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        // __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      __always_inline void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        // __glibcxx_assert(__m != memory_order_acquire);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        // __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      __always_inline __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        // __glibcxx_assert(__m != memory_order_release);
        // __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __always_inline __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        // __glibcxx_assert(__m != memory_order_release);
        // __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __always_inline __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      __always_inline __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      __always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      __always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      __always_inline __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, __d, __m); }

      __always_inline __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, __d, __m); }

      __always_inline __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, __d, __m); }

      __always_inline __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, __d, __m); }
    };

  /**
   * @addtogroup atomics
   * @{
   */

  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
  struct atomic_bool
  {
  private:
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

    __always_inline void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    __always_inline void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    __always_inline bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    __always_inline bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    __always_inline bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    __always_inline bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    __always_inline bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    __always_inline bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    __always_inline bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    __always_inline bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    __always_inline bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    __always_inline bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    __always_inline bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    __always_inline bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };

  /// 29.4.3, Generic atomic type, primary class template.
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      __always_inline void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      __always_inline void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      __always_inline _Tp
      load(memory_order _m = memory_order_seq_cst) const noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      __always_inline _Tp
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      __always_inline _Tp
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      __always_inline _Tp
      exchange(_Tp __i,
               memory_order _m = memory_order_seq_cst) volatile noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      __always_inline bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      __always_inline bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      __always_inline bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      __always_inline bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      __always_inline bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      __always_inline bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      __always_inline bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }

      __always_inline bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }
    };
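
  // Example (illustrative sketch, not part of the header): the primary
  // template works for any trivially copyable type, via the object-based
  // __atomic_store/__atomic_load/__atomic_compare_exchange builtins.
  //
  //   struct Point { int x, y; };        // trivially copyable
  //   std::atomic<Point> pt(Point{0, 0});
  //
  //   void bump_x()
  //   {
  //     Point old = pt.load();
  //     while (!pt.compare_exchange_weak(old, Point{old.x + 1, old.y}))
  //       ;  // old was refreshed; retry
  //   }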

  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      __always_inline void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      __always_inline void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __always_inline __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __always_inline __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __always_inline __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __always_inline __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }
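
      // NB: __atomic_base<_Tp*> provides no weak compare-and-exchange, so
      // the weak overloads below delegate to the strong form, which also
      // satisfies the weak contract (it never fails spuriously).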

      __always_inline bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      __always_inline bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      __always_inline bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __calculate_memory_order(__m));
      }

      __always_inline bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __calculate_memory_order(__m));
      }

      __always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      __always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      __always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __calculate_memory_order(__m));
      }

      __always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __calculate_memory_order(__m));
      }

      __always_inline __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __always_inline __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __always_inline __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __always_inline __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };

  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool __integral_type;
      typedef atomic_bool __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char __integral_type;
      typedef atomic_char __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char __integral_type;
      typedef atomic_schar __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char __integral_type;
      typedef atomic_uchar __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short __integral_type;
      typedef atomic_short __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short __integral_type;
      typedef atomic_ushort __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : atomic_int
    {
      typedef int __integral_type;
      typedef atomic_int __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int __integral_type;
      typedef atomic_uint __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long __integral_type;
      typedef atomic_long __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long __integral_type;
      typedef atomic_ulong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long __integral_type;
      typedef atomic_llong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long __integral_type;
      typedef atomic_ullong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t __integral_type;
      typedef atomic_wchar_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t __integral_type;
      typedef atomic_char16_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t __integral_type;
      typedef atomic_char32_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  // Function definitions, atomic_flag operations.
  inline __always_inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline __always_inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline __always_inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline __always_inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline __always_inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline __always_inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline __always_inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline __always_inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    __always_inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    __always_inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    __always_inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    __always_inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    __always_inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    __always_inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    __always_inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    __always_inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    __always_inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    __always_inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    __always_inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    __always_inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    __always_inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    __always_inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
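
  // Example (illustrative sketch, not part of the header): the free
  // functions mirror the member functions with a C-compatible calling style.
  //
  //   std::atomic<int> counter(0);
  //
  //   void bump_from_five()
  //   {
  //     std::atomic_store(&counter, 5);
  //     int expected = 5;
  //     if (std::atomic_compare_exchange_strong(&counter, &expected, 6))
  //       { /* counter is now 6 */ }
  //   }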

  // Function templates for atomic_integral operations only, using
  // __atomic_base.  Template argument should be restricted to
  // integral types as specified in the standard, excluding address
  // types.
  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __always_inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
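
  // Example (illustrative sketch, not part of the header): fetch_add returns
  // the value held before the addition, so it can hand out unique indices.
  //
  //   std::atomic<unsigned> next(0);
  //
  //   unsigned claim_slot()
  //   { return std::atomic_fetch_add(&next, 1u); }  // yields 0, 1, 2, ...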

  // Partial specializations for pointers.
  template<typename _ITp>
    __always_inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    __always_inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    __always_inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    __always_inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    __always_inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
                              ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    __always_inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    __always_inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    __always_inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  // @} group atomics

// _GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // _GLIBCXX_ATOMIC