3 // Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
25 // ????????????????????????????????????????????????????????????????????
// This is a copy of the libstdc++ header, with the trivial modification
// of ignoring the c++config.h include.  If and when the top-level build is
// fixed so that target libraries can be built using the newly built
// compiler, we can delete this file.
32 // ????????????????????????????????????????????????????????????????????
34 /** @file include/atomic
35 * This is a Standard C++ Library header.
38 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
39 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
41 #ifndef _GLIBCXX_ATOMIC
42 #define _GLIBCXX_ATOMIC 1
44 // #pragma GCC system_header
46 // #ifndef __GXX_EXPERIMENTAL_CXX0X__
47 // # include <bits/c++0x_warning.h>
50 // #include <bits/atomic_base.h>
52 namespace std // _GLIBCXX_VISIBILITY(default)
54 // _GLIBCXX_BEGIN_NAMESPACE_VERSION
57 * @defgroup atomics Atomics
59 * Components for performing atomic operations.
63 /// Enumeration for memory_order
64 typedef enum memory_order
75 __calculate_memory_order(memory_order __m) noexcept
77 const bool __cond1 = __m == memory_order_release;
78 const bool __cond2 = __m == memory_order_acq_rel;
79 memory_order __mo1(__cond1 ? memory_order_relaxed : __m);
80 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
85 atomic_thread_fence(memory_order __m) noexcept
87 __atomic_thread_fence (__m);
91 atomic_signal_fence(memory_order __m) noexcept
93 __atomic_thread_fence (__m);
97 template<typename _Tp>
99 kill_dependency(_Tp __y) noexcept
105 /// Lock-free Property
108 #define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
109 #define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
110 #define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
111 #define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
112 #define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
113 #define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
114 #define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
115 #define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
116 #define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
117 #define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
119 // Base types for atomics.
120 template<typename _IntTp>
121 struct __atomic_base;
124 typedef __atomic_base<char> atomic_char;
127 typedef __atomic_base<signed char> atomic_schar;
130 typedef __atomic_base<unsigned char> atomic_uchar;
133 typedef __atomic_base<short> atomic_short;
136 typedef __atomic_base<unsigned short> atomic_ushort;
139 typedef __atomic_base<int> atomic_int;
142 typedef __atomic_base<unsigned int> atomic_uint;
145 typedef __atomic_base<long> atomic_long;
148 typedef __atomic_base<unsigned long> atomic_ulong;
151 typedef __atomic_base<long long> atomic_llong;
154 typedef __atomic_base<unsigned long long> atomic_ullong;
157 typedef __atomic_base<wchar_t> atomic_wchar_t;
160 typedef __atomic_base<char16_t> atomic_char16_t;
163 typedef __atomic_base<char32_t> atomic_char32_t;
166 typedef __atomic_base<char32_t> atomic_char32_t;
169 /// atomic_int_least8_t
170 typedef __atomic_base<int_least8_t> atomic_int_least8_t;
172 /// atomic_uint_least8_t
173 typedef __atomic_base<uint_least8_t> atomic_uint_least8_t;
175 /// atomic_int_least16_t
176 typedef __atomic_base<int_least16_t> atomic_int_least16_t;
178 /// atomic_uint_least16_t
179 typedef __atomic_base<uint_least16_t> atomic_uint_least16_t;
181 /// atomic_int_least32_t
182 typedef __atomic_base<int_least32_t> atomic_int_least32_t;
184 /// atomic_uint_least32_t
185 typedef __atomic_base<uint_least32_t> atomic_uint_least32_t;
187 /// atomic_int_least64_t
188 typedef __atomic_base<int_least64_t> atomic_int_least64_t;
190 /// atomic_uint_least64_t
191 typedef __atomic_base<uint_least64_t> atomic_uint_least64_t;
194 /// atomic_int_fast8_t
195 typedef __atomic_base<int_fast8_t> atomic_int_fast8_t;
197 /// atomic_uint_fast8_t
198 typedef __atomic_base<uint_fast8_t> atomic_uint_fast8_t;
200 /// atomic_int_fast16_t
201 typedef __atomic_base<int_fast16_t> atomic_int_fast16_t;
203 /// atomic_uint_fast16_t
204 typedef __atomic_base<uint_fast16_t> atomic_uint_fast16_t;
206 /// atomic_int_fast32_t
207 typedef __atomic_base<int_fast32_t> atomic_int_fast32_t;
209 /// atomic_uint_fast32_t
210 typedef __atomic_base<uint_fast32_t> atomic_uint_fast32_t;
212 /// atomic_int_fast64_t
213 typedef __atomic_base<int_fast64_t> atomic_int_fast64_t;
215 /// atomic_uint_fast64_t
216 typedef __atomic_base<uint_fast64_t> atomic_uint_fast64_t;
220 typedef __atomic_base<intptr_t> atomic_intptr_t;
223 typedef __atomic_base<uintptr_t> atomic_uintptr_t;
226 typedef __atomic_base<size_t> atomic_size_t;
229 typedef __atomic_base<intmax_t> atomic_intmax_t;
232 typedef __atomic_base<uintmax_t> atomic_uintmax_t;
235 typedef __atomic_base<ptrdiff_t> atomic_ptrdiff_t;
238 #define ATOMIC_VAR_INIT(_VI) { _VI }
240 template<typename _Tp>
243 template<typename _Tp>
248 * @brief Base type for atomic_flag.
250 * Base type is POD with data, allowing atomic_flag to derive from
251 * it and meet the standard layout type requirement. In addition to
 * compatibility with a C interface, this allows different
253 * implementations of atomic_flag to use the same atomic operation
254 * functions, via a standard conversion to the __atomic_flag_base
257 // _GLIBCXX_BEGIN_EXTERN_C
259 struct __atomic_flag_base
264 // _GLIBCXX_END_EXTERN_C
266 #define ATOMIC_FLAG_INIT { false }
269 struct atomic_flag : public __atomic_flag_base
271 atomic_flag() noexcept = default;
272 ~atomic_flag() noexcept = default;
273 atomic_flag(const atomic_flag&) = delete;
274 atomic_flag& operator=(const atomic_flag&) = delete;
275 atomic_flag& operator=(const atomic_flag&) volatile = delete;
277 // Conversion to ATOMIC_FLAG_INIT.
278 atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }
281 test_and_set(memory_order __m = memory_order_seq_cst) noexcept
283 return __atomic_test_and_set (&_M_i, __m);
287 test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
289 return __atomic_test_and_set (&_M_i, __m);
293 clear(memory_order __m = memory_order_seq_cst) noexcept
295 // __glibcxx_assert(__m != memory_order_consume);
296 // __glibcxx_assert(__m != memory_order_acquire);
297 // __glibcxx_assert(__m != memory_order_acq_rel);
299 __atomic_clear (&_M_i, __m);
303 clear(memory_order __m = memory_order_seq_cst) volatile noexcept
305 // __glibcxx_assert(__m != memory_order_consume);
306 // __glibcxx_assert(__m != memory_order_acquire);
307 // __glibcxx_assert(__m != memory_order_acq_rel);
309 __atomic_clear (&_M_i, __m);
314 /// Base class for atomic integrals.
316 // For each of the integral types, define atomic_[integral type] struct
320 // atomic_schar signed char
321 // atomic_uchar unsigned char
322 // atomic_short short
323 // atomic_ushort unsigned short
325 // atomic_uint unsigned int
327 // atomic_ulong unsigned long
328 // atomic_llong long long
329 // atomic_ullong unsigned long long
330 // atomic_char16_t char16_t
331 // atomic_char32_t char32_t
332 // atomic_wchar_t wchar_t
334 // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
335 // 8 bytes, since that is what GCC built-in functions for atomic
336 // memory access expect.
337 template<typename _ITp>
341 typedef _ITp __int_type;
346 __atomic_base() noexcept = default;
347 ~__atomic_base() noexcept = default;
348 __atomic_base(const __atomic_base&) = delete;
349 __atomic_base& operator=(const __atomic_base&) = delete;
350 __atomic_base& operator=(const __atomic_base&) volatile = delete;
352 // Requires __int_type convertible to _M_i.
353 constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
355 operator __int_type() const noexcept
358 operator __int_type() const volatile noexcept
362 operator=(__int_type __i) noexcept
369 operator=(__int_type __i) volatile noexcept
376 operator++(int) noexcept
377 { return fetch_add(1); }
380 operator++(int) volatile noexcept
381 { return fetch_add(1); }
384 operator--(int) noexcept
385 { return fetch_sub(1); }
388 operator--(int) volatile noexcept
389 { return fetch_sub(1); }
392 operator++() noexcept
393 { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }
396 operator++() volatile noexcept
397 { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }
400 operator--() noexcept
401 { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
404 operator--() volatile noexcept
405 { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
408 operator+=(__int_type __i) noexcept
409 { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }
412 operator+=(__int_type __i) volatile noexcept
413 { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }
416 operator-=(__int_type __i) noexcept
417 { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
420 operator-=(__int_type __i) volatile noexcept
421 { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
424 operator&=(__int_type __i) noexcept
425 { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }
428 operator&=(__int_type __i) volatile noexcept
429 { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }
432 operator|=(__int_type __i) noexcept
433 { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }
436 operator|=(__int_type __i) volatile noexcept
437 { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }
440 operator^=(__int_type __i) noexcept
441 { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
444 operator^=(__int_type __i) volatile noexcept
445 { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
448 is_lock_free() const noexcept
449 { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }
452 is_lock_free() const volatile noexcept
453 { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }
456 store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
458 // __glibcxx_assert(__m != memory_order_acquire);
459 // __glibcxx_assert(__m != memory_order_acq_rel);
460 // __glibcxx_assert(__m != memory_order_consume);
462 __atomic_store_n(&_M_i, __i, __m);
466 store(__int_type __i,
467 memory_order __m = memory_order_seq_cst) volatile noexcept
469 // __glibcxx_assert(__m != memory_order_acquire);
470 // __glibcxx_assert(__m != memory_order_acq_rel);
471 // __glibcxx_assert(__m != memory_order_consume);
473 __atomic_store_n(&_M_i, __i, __m);
477 load(memory_order __m = memory_order_seq_cst) const noexcept
479 // __glibcxx_assert(__m != memory_order_release);
480 // __glibcxx_assert(__m != memory_order_acq_rel);
482 return __atomic_load_n(&_M_i, __m);
486 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
488 // __glibcxx_assert(__m != memory_order_release);
489 // __glibcxx_assert(__m != memory_order_acq_rel);
491 return __atomic_load_n(&_M_i, __m);
495 exchange(__int_type __i,
496 memory_order __m = memory_order_seq_cst) noexcept
498 return __atomic_exchange_n(&_M_i, __i, __m);
503 exchange(__int_type __i,
504 memory_order __m = memory_order_seq_cst) volatile noexcept
506 return __atomic_exchange_n(&_M_i, __i, __m);
510 compare_exchange_weak(__int_type& __i1, __int_type __i2,
511 memory_order __m1, memory_order __m2) noexcept
513 // __glibcxx_assert(__m2 != memory_order_release);
514 // __glibcxx_assert(__m2 != memory_order_acq_rel);
515 // __glibcxx_assert(__m2 <= __m1);
517 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
521 compare_exchange_weak(__int_type& __i1, __int_type __i2,
523 memory_order __m2) volatile noexcept
525 // __glibcxx_assert(__m2 != memory_order_release);
526 // __glibcxx_assert(__m2 != memory_order_acq_rel);
527 // __glibcxx_assert(__m2 <= __m1);
529 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
533 compare_exchange_weak(__int_type& __i1, __int_type __i2,
534 memory_order __m = memory_order_seq_cst) noexcept
536 return compare_exchange_weak(__i1, __i2, __m,
537 __calculate_memory_order(__m));
541 compare_exchange_weak(__int_type& __i1, __int_type __i2,
542 memory_order __m = memory_order_seq_cst) volatile noexcept
544 return compare_exchange_weak(__i1, __i2, __m,
545 __calculate_memory_order(__m));
549 compare_exchange_strong(__int_type& __i1, __int_type __i2,
550 memory_order __m1, memory_order __m2) noexcept
552 // __glibcxx_assert(__m2 != memory_order_release);
553 // __glibcxx_assert(__m2 != memory_order_acq_rel);
554 // __glibcxx_assert(__m2 <= __m1);
556 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
560 compare_exchange_strong(__int_type& __i1, __int_type __i2,
562 memory_order __m2) volatile noexcept
564 // __glibcxx_assert(__m2 != memory_order_release);
565 // __glibcxx_assert(__m2 != memory_order_acq_rel);
566 // __glibcxx_assert(__m2 <= __m1);
568 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
572 compare_exchange_strong(__int_type& __i1, __int_type __i2,
573 memory_order __m = memory_order_seq_cst) noexcept
575 return compare_exchange_strong(__i1, __i2, __m,
576 __calculate_memory_order(__m));
580 compare_exchange_strong(__int_type& __i1, __int_type __i2,
581 memory_order __m = memory_order_seq_cst) volatile noexcept
583 return compare_exchange_strong(__i1, __i2, __m,
584 __calculate_memory_order(__m));
588 fetch_add(__int_type __i,
589 memory_order __m = memory_order_seq_cst) noexcept
590 { return __atomic_fetch_add(&_M_i, __i, __m); }
593 fetch_add(__int_type __i,
594 memory_order __m = memory_order_seq_cst) volatile noexcept
595 { return __atomic_fetch_add(&_M_i, __i, __m); }
598 fetch_sub(__int_type __i,
599 memory_order __m = memory_order_seq_cst) noexcept
600 { return __atomic_fetch_sub(&_M_i, __i, __m); }
603 fetch_sub(__int_type __i,
604 memory_order __m = memory_order_seq_cst) volatile noexcept
605 { return __atomic_fetch_sub(&_M_i, __i, __m); }
608 fetch_and(__int_type __i,
609 memory_order __m = memory_order_seq_cst) noexcept
610 { return __atomic_fetch_and(&_M_i, __i, __m); }
613 fetch_and(__int_type __i,
614 memory_order __m = memory_order_seq_cst) volatile noexcept
615 { return __atomic_fetch_and(&_M_i, __i, __m); }
618 fetch_or(__int_type __i,
619 memory_order __m = memory_order_seq_cst) noexcept
620 { return __atomic_fetch_or(&_M_i, __i, __m); }
623 fetch_or(__int_type __i,
624 memory_order __m = memory_order_seq_cst) volatile noexcept
625 { return __atomic_fetch_or(&_M_i, __i, __m); }
628 fetch_xor(__int_type __i,
629 memory_order __m = memory_order_seq_cst) noexcept
630 { return __atomic_fetch_xor(&_M_i, __i, __m); }
633 fetch_xor(__int_type __i,
634 memory_order __m = memory_order_seq_cst) volatile noexcept
635 { return __atomic_fetch_xor(&_M_i, __i, __m); }
639 /// Partial specialization for pointer types.
640 template<typename _PTp>
641 struct __atomic_base<_PTp*>
644 typedef _PTp* __pointer_type;
649 __atomic_base() noexcept = default;
650 ~__atomic_base() noexcept = default;
651 __atomic_base(const __atomic_base&) = delete;
652 __atomic_base& operator=(const __atomic_base&) = delete;
653 __atomic_base& operator=(const __atomic_base&) volatile = delete;
655 // Requires __pointer_type convertible to _M_p.
656 constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
658 operator __pointer_type() const noexcept
661 operator __pointer_type() const volatile noexcept
665 operator=(__pointer_type __p) noexcept
672 operator=(__pointer_type __p) volatile noexcept
679 operator++(int) noexcept
680 { return fetch_add(1); }
683 operator++(int) volatile noexcept
684 { return fetch_add(1); }
687 operator--(int) noexcept
688 { return fetch_sub(1); }
691 operator--(int) volatile noexcept
692 { return fetch_sub(1); }
695 operator++() noexcept
696 { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }
699 operator++() volatile noexcept
700 { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }
703 operator--() noexcept
704 { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }
707 operator--() volatile noexcept
708 { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }
711 operator+=(ptrdiff_t __d) noexcept
712 { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }
715 operator+=(ptrdiff_t __d) volatile noexcept
716 { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }
719 operator-=(ptrdiff_t __d) noexcept
720 { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }
723 operator-=(ptrdiff_t __d) volatile noexcept
724 { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }
727 is_lock_free() const noexcept
728 { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }
731 is_lock_free() const volatile noexcept
732 { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }
735 store(__pointer_type __p,
736 memory_order __m = memory_order_seq_cst) noexcept
738 // __glibcxx_assert(__m != memory_order_acquire);
739 // __glibcxx_assert(__m != memory_order_acq_rel);
740 // __glibcxx_assert(__m != memory_order_consume);
742 __atomic_store_n(&_M_p, __p, __m);
746 store(__pointer_type __p,
747 memory_order __m = memory_order_seq_cst) volatile noexcept
749 // __glibcxx_assert(__m != memory_order_acquire);
750 // __glibcxx_assert(__m != memory_order_acq_rel);
751 // __glibcxx_assert(__m != memory_order_consume);
753 __atomic_store_n(&_M_p, __p, __m);
757 load(memory_order __m = memory_order_seq_cst) const noexcept
759 // __glibcxx_assert(__m != memory_order_release);
760 // __glibcxx_assert(__m != memory_order_acq_rel);
762 return __atomic_load_n(&_M_p, __m);
766 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
768 // __glibcxx_assert(__m != memory_order_release);
769 // __glibcxx_assert(__m != memory_order_acq_rel);
771 return __atomic_load_n(&_M_p, __m);
775 exchange(__pointer_type __p,
776 memory_order __m = memory_order_seq_cst) noexcept
778 return __atomic_exchange_n(&_M_p, __p, __m);
783 exchange(__pointer_type __p,
784 memory_order __m = memory_order_seq_cst) volatile noexcept
786 return __atomic_exchange_n(&_M_p, __p, __m);
790 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
792 memory_order __m2) noexcept
794 // __glibcxx_assert(__m2 != memory_order_release);
795 // __glibcxx_assert(__m2 != memory_order_acq_rel);
796 // __glibcxx_assert(__m2 <= __m1);
798 return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
802 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
804 memory_order __m2) volatile noexcept
806 // __glibcxx_assert(__m2 != memory_order_release);
807 // __glibcxx_assert(__m2 != memory_order_acq_rel);
808 // __glibcxx_assert(__m2 <= __m1);
810 return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
814 fetch_add(ptrdiff_t __d,
815 memory_order __m = memory_order_seq_cst) noexcept
816 { return __atomic_fetch_add(&_M_p, __d, __m); }
819 fetch_add(ptrdiff_t __d,
820 memory_order __m = memory_order_seq_cst) volatile noexcept
821 { return __atomic_fetch_add(&_M_p, __d, __m); }
824 fetch_sub(ptrdiff_t __d,
825 memory_order __m = memory_order_seq_cst) noexcept
826 { return __atomic_fetch_sub(&_M_p, __d, __m); }
829 fetch_sub(ptrdiff_t __d,
830 memory_order __m = memory_order_seq_cst) volatile noexcept
831 { return __atomic_fetch_sub(&_M_p, __d, __m); }
836 * @addtogroup atomics
841 // NB: No operators or fetch-operations for this type.
845 __atomic_base<bool> _M_base;
848 atomic_bool() noexcept = default;
849 ~atomic_bool() noexcept = default;
850 atomic_bool(const atomic_bool&) = delete;
851 atomic_bool& operator=(const atomic_bool&) = delete;
852 atomic_bool& operator=(const atomic_bool&) volatile = delete;
854 constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }
857 operator=(bool __i) noexcept
858 { return _M_base.operator=(__i); }
860 operator bool() const noexcept
861 { return _M_base.load(); }
863 operator bool() const volatile noexcept
864 { return _M_base.load(); }
867 is_lock_free() const noexcept { return _M_base.is_lock_free(); }
870 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
873 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
874 { _M_base.store(__i, __m); }
877 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
878 { _M_base.store(__i, __m); }
881 load(memory_order __m = memory_order_seq_cst) const noexcept
882 { return _M_base.load(__m); }
885 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
886 { return _M_base.load(__m); }
889 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
890 { return _M_base.exchange(__i, __m); }
894 memory_order __m = memory_order_seq_cst) volatile noexcept
895 { return _M_base.exchange(__i, __m); }
898 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
899 memory_order __m2) noexcept
900 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
903 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
904 memory_order __m2) volatile noexcept
905 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
908 compare_exchange_weak(bool& __i1, bool __i2,
909 memory_order __m = memory_order_seq_cst) noexcept
910 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
913 compare_exchange_weak(bool& __i1, bool __i2,
914 memory_order __m = memory_order_seq_cst) volatile noexcept
915 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
918 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
919 memory_order __m2) noexcept
920 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
923 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
924 memory_order __m2) volatile noexcept
925 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
928 compare_exchange_strong(bool& __i1, bool __i2,
929 memory_order __m = memory_order_seq_cst) noexcept
930 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
933 compare_exchange_strong(bool& __i1, bool __i2,
934 memory_order __m = memory_order_seq_cst) volatile noexcept
935 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
940 /// 29.4.3, Generic atomic type, primary class template.
941 template<typename _Tp>
948 atomic() noexcept = default;
949 ~atomic() noexcept = default;
950 atomic(const atomic&) = delete;
951 atomic& operator=(const atomic&) = delete;
952 atomic& operator=(const atomic&) volatile = delete;
954 constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
956 operator _Tp() const noexcept
959 operator _Tp() const volatile noexcept
963 operator=(_Tp __i) noexcept
964 { store(__i); return __i; }
967 operator=(_Tp __i) volatile noexcept
968 { store(__i); return __i; }
971 is_lock_free() const noexcept
972 { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }
975 is_lock_free() const volatile noexcept
976 { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }
979 store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
980 { __atomic_store(&_M_i, &__i, _m); }
983 store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
984 { __atomic_store(&_M_i, &__i, _m); }
987 load(memory_order _m = memory_order_seq_cst) const noexcept
990 __atomic_load(&_M_i, &tmp, _m);
995 load(memory_order _m = memory_order_seq_cst) const volatile noexcept
998 __atomic_load(&_M_i, &tmp, _m);
1003 exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
1006 __atomic_exchange(&_M_i, &__i, &tmp, _m);
1012 memory_order _m = memory_order_seq_cst) volatile noexcept
1015 __atomic_exchange(&_M_i, &__i, &tmp, _m);
1020 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
1021 memory_order __f) noexcept
1023 return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
1027 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
1028 memory_order __f) volatile noexcept
1030 return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
1034 compare_exchange_weak(_Tp& __e, _Tp __i,
1035 memory_order __m = memory_order_seq_cst) noexcept
1036 { return compare_exchange_weak(__e, __i, __m, __m); }
1039 compare_exchange_weak(_Tp& __e, _Tp __i,
1040 memory_order __m = memory_order_seq_cst) volatile noexcept
1041 { return compare_exchange_weak(__e, __i, __m, __m); }
1044 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
1045 memory_order __f) noexcept
1047 return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
1051 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
1052 memory_order __f) volatile noexcept
1054 return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
1058 compare_exchange_strong(_Tp& __e, _Tp __i,
1059 memory_order __m = memory_order_seq_cst) noexcept
1060 { return compare_exchange_strong(__e, __i, __m, __m); }
1063 compare_exchange_strong(_Tp& __e, _Tp __i,
1064 memory_order __m = memory_order_seq_cst) volatile noexcept
1065 { return compare_exchange_strong(__e, __i, __m, __m); }
1069 /// Partial specialization for pointer types.
1070 template<typename _Tp>
1073 typedef _Tp* __pointer_type;
1074 typedef __atomic_base<_Tp*> __base_type;
1077 atomic() noexcept = default;
1078 ~atomic() noexcept = default;
1079 atomic(const atomic&) = delete;
1080 atomic& operator=(const atomic&) = delete;
1081 atomic& operator=(const atomic&) volatile = delete;
1083 constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
1085 operator __pointer_type() const noexcept
1086 { return __pointer_type(_M_b); }
1088 operator __pointer_type() const volatile noexcept
1089 { return __pointer_type(_M_b); }
1092 operator=(__pointer_type __p) noexcept
1093 { return _M_b.operator=(__p); }
1096 operator=(__pointer_type __p) volatile noexcept
1097 { return _M_b.operator=(__p); }
1100 operator++(int) noexcept
1104 operator++(int) volatile noexcept
1108 operator--(int) noexcept
1112 operator--(int) volatile noexcept
1116 operator++() noexcept
1120 operator++() volatile noexcept
1124 operator--() noexcept
1128 operator--() volatile noexcept
1132 operator+=(ptrdiff_t __d) noexcept
1133 { return _M_b.operator+=(__d); }
1136 operator+=(ptrdiff_t __d) volatile noexcept
1137 { return _M_b.operator+=(__d); }
1140 operator-=(ptrdiff_t __d) noexcept
1141 { return _M_b.operator-=(__d); }
1144 operator-=(ptrdiff_t __d) volatile noexcept
1145 { return _M_b.operator-=(__d); }
1148 is_lock_free() const noexcept
1149 { return _M_b.is_lock_free(); }
1152 is_lock_free() const volatile noexcept
1153 { return _M_b.is_lock_free(); }
1156 store(__pointer_type __p,
1157 memory_order __m = memory_order_seq_cst) noexcept
1158 { return _M_b.store(__p, __m); }
1161 store(__pointer_type __p,
1162 memory_order __m = memory_order_seq_cst) volatile noexcept
1163 { return _M_b.store(__p, __m); }
1166 load(memory_order __m = memory_order_seq_cst) const noexcept
1167 { return _M_b.load(__m); }
1170 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
1171 { return _M_b.load(__m); }
1174 exchange(__pointer_type __p,
1175 memory_order __m = memory_order_seq_cst) noexcept
1176 { return _M_b.exchange(__p, __m); }
1179 exchange(__pointer_type __p,
1180 memory_order __m = memory_order_seq_cst) volatile noexcept
1181 { return _M_b.exchange(__p, __m); }
1184 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
1185 memory_order __m1, memory_order __m2) noexcept
1186 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
1189 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
1191 memory_order __m2) volatile noexcept
1192 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
1195 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
1196 memory_order __m = memory_order_seq_cst) noexcept
1198 return compare_exchange_weak(__p1, __p2, __m,
1199 __calculate_memory_order(__m));
1203 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
1204 memory_order __m = memory_order_seq_cst) volatile noexcept
1206 return compare_exchange_weak(__p1, __p2, __m,
1207 __calculate_memory_order(__m));
1211 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
1212 memory_order __m1, memory_order __m2) noexcept
1213 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
1216 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
1218 memory_order __m2) volatile noexcept
1219 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
1222 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
1223 memory_order __m = memory_order_seq_cst) noexcept
1225 return _M_b.compare_exchange_strong(__p1, __p2, __m,
1226 __calculate_memory_order(__m));
1230 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
1231 memory_order __m = memory_order_seq_cst) volatile noexcept
1233 return _M_b.compare_exchange_strong(__p1, __p2, __m,
1234 __calculate_memory_order(__m));
1238 fetch_add(ptrdiff_t __d,
1239 memory_order __m = memory_order_seq_cst) noexcept
1240 { return _M_b.fetch_add(__d, __m); }
1243 fetch_add(ptrdiff_t __d,
1244 memory_order __m = memory_order_seq_cst) volatile noexcept
1245 { return _M_b.fetch_add(__d, __m); }
1248 fetch_sub(ptrdiff_t __d,
1249 memory_order __m = memory_order_seq_cst) noexcept
1250 { return _M_b.fetch_sub(__d, __m); }
1253 fetch_sub(ptrdiff_t __d,
1254 memory_order __m = memory_order_seq_cst) volatile noexcept
1255 { return _M_b.fetch_sub(__d, __m); }
1259 /// Explicit specialization for bool.
1261 struct atomic<bool> : public atomic_bool
1263 typedef bool __integral_type;
1264 typedef atomic_bool __base_type;
1266 atomic() noexcept = default;
1267 ~atomic() noexcept = default;
1268 atomic(const atomic&) = delete;
1269 atomic& operator=(const atomic&) = delete;
1270 atomic& operator=(const atomic&) volatile = delete;
1272 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1274 using __base_type::operator __integral_type;
1275 using __base_type::operator=;
1278 /// Explicit specialization for char.
1280 struct atomic<char> : public atomic_char
1282 typedef char __integral_type;
1283 typedef atomic_char __base_type;
1285 atomic() noexcept = default;
1286 ~atomic() noexcept = default;
1287 atomic(const atomic&) = delete;
1288 atomic& operator=(const atomic&) = delete;
1289 atomic& operator=(const atomic&) volatile = delete;
1291 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1293 using __base_type::operator __integral_type;
1294 using __base_type::operator=;
1297 /// Explicit specialization for signed char.
1299 struct atomic<signed char> : public atomic_schar
1301 typedef signed char __integral_type;
1302 typedef atomic_schar __base_type;
1304 atomic() noexcept= default;
1305 ~atomic() noexcept = default;
1306 atomic(const atomic&) = delete;
1307 atomic& operator=(const atomic&) = delete;
1308 atomic& operator=(const atomic&) volatile = delete;
1310 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1312 using __base_type::operator __integral_type;
1313 using __base_type::operator=;
1316 /// Explicit specialization for unsigned char.
1318 struct atomic<unsigned char> : public atomic_uchar
1320 typedef unsigned char __integral_type;
1321 typedef atomic_uchar __base_type;
1323 atomic() noexcept= default;
1324 ~atomic() noexcept = default;
1325 atomic(const atomic&) = delete;
1326 atomic& operator=(const atomic&) = delete;
1327 atomic& operator=(const atomic&) volatile = delete;
1329 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1331 using __base_type::operator __integral_type;
1332 using __base_type::operator=;
1335 /// Explicit specialization for short.
1337 struct atomic<short> : public atomic_short
1339 typedef short __integral_type;
1340 typedef atomic_short __base_type;
1342 atomic() noexcept = default;
1343 ~atomic() noexcept = default;
1344 atomic(const atomic&) = delete;
1345 atomic& operator=(const atomic&) = delete;
1346 atomic& operator=(const atomic&) volatile = delete;
1348 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1350 using __base_type::operator __integral_type;
1351 using __base_type::operator=;
1354 /// Explicit specialization for unsigned short.
1356 struct atomic<unsigned short> : public atomic_ushort
1358 typedef unsigned short __integral_type;
1359 typedef atomic_ushort __base_type;
1361 atomic() noexcept = default;
1362 ~atomic() noexcept = default;
1363 atomic(const atomic&) = delete;
1364 atomic& operator=(const atomic&) = delete;
1365 atomic& operator=(const atomic&) volatile = delete;
1367 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1369 using __base_type::operator __integral_type;
1370 using __base_type::operator=;
1373 /// Explicit specialization for int.
1375 struct atomic<int> : atomic_int
1377 typedef int __integral_type;
1378 typedef atomic_int __base_type;
1380 atomic() noexcept = default;
1381 ~atomic() noexcept = default;
1382 atomic(const atomic&) = delete;
1383 atomic& operator=(const atomic&) = delete;
1384 atomic& operator=(const atomic&) volatile = delete;
1386 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1388 using __base_type::operator __integral_type;
1389 using __base_type::operator=;
1392 /// Explicit specialization for unsigned int.
1394 struct atomic<unsigned int> : public atomic_uint
1396 typedef unsigned int __integral_type;
1397 typedef atomic_uint __base_type;
1399 atomic() noexcept = default;
1400 ~atomic() noexcept = default;
1401 atomic(const atomic&) = delete;
1402 atomic& operator=(const atomic&) = delete;
1403 atomic& operator=(const atomic&) volatile = delete;
1405 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1407 using __base_type::operator __integral_type;
1408 using __base_type::operator=;
1411 /// Explicit specialization for long.
1413 struct atomic<long> : public atomic_long
1415 typedef long __integral_type;
1416 typedef atomic_long __base_type;
1418 atomic() noexcept = default;
1419 ~atomic() noexcept = default;
1420 atomic(const atomic&) = delete;
1421 atomic& operator=(const atomic&) = delete;
1422 atomic& operator=(const atomic&) volatile = delete;
1424 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1426 using __base_type::operator __integral_type;
1427 using __base_type::operator=;
1430 /// Explicit specialization for unsigned long.
1432 struct atomic<unsigned long> : public atomic_ulong
1434 typedef unsigned long __integral_type;
1435 typedef atomic_ulong __base_type;
1437 atomic() noexcept = default;
1438 ~atomic() noexcept = default;
1439 atomic(const atomic&) = delete;
1440 atomic& operator=(const atomic&) = delete;
1441 atomic& operator=(const atomic&) volatile = delete;
1443 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1445 using __base_type::operator __integral_type;
1446 using __base_type::operator=;
1449 /// Explicit specialization for long long.
1451 struct atomic<long long> : public atomic_llong
1453 typedef long long __integral_type;
1454 typedef atomic_llong __base_type;
1456 atomic() noexcept = default;
1457 ~atomic() noexcept = default;
1458 atomic(const atomic&) = delete;
1459 atomic& operator=(const atomic&) = delete;
1460 atomic& operator=(const atomic&) volatile = delete;
1462 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1464 using __base_type::operator __integral_type;
1465 using __base_type::operator=;
1468 /// Explicit specialization for unsigned long long.
1470 struct atomic<unsigned long long> : public atomic_ullong
1472 typedef unsigned long long __integral_type;
1473 typedef atomic_ullong __base_type;
1475 atomic() noexcept = default;
1476 ~atomic() noexcept = default;
1477 atomic(const atomic&) = delete;
1478 atomic& operator=(const atomic&) = delete;
1479 atomic& operator=(const atomic&) volatile = delete;
1481 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1483 using __base_type::operator __integral_type;
1484 using __base_type::operator=;
1487 /// Explicit specialization for wchar_t.
1489 struct atomic<wchar_t> : public atomic_wchar_t
1491 typedef wchar_t __integral_type;
1492 typedef atomic_wchar_t __base_type;
1494 atomic() noexcept = default;
1495 ~atomic() noexcept = default;
1496 atomic(const atomic&) = delete;
1497 atomic& operator=(const atomic&) = delete;
1498 atomic& operator=(const atomic&) volatile = delete;
1500 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1502 using __base_type::operator __integral_type;
1503 using __base_type::operator=;
1506 /// Explicit specialization for char16_t.
1508 struct atomic<char16_t> : public atomic_char16_t
1510 typedef char16_t __integral_type;
1511 typedef atomic_char16_t __base_type;
1513 atomic() noexcept = default;
1514 ~atomic() noexcept = default;
1515 atomic(const atomic&) = delete;
1516 atomic& operator=(const atomic&) = delete;
1517 atomic& operator=(const atomic&) volatile = delete;
1519 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1521 using __base_type::operator __integral_type;
1522 using __base_type::operator=;
1525 /// Explicit specialization for char32_t.
1527 struct atomic<char32_t> : public atomic_char32_t
1529 typedef char32_t __integral_type;
1530 typedef atomic_char32_t __base_type;
1532 atomic() noexcept = default;
1533 ~atomic() noexcept = default;
1534 atomic(const atomic&) = delete;
1535 atomic& operator=(const atomic&) = delete;
1536 atomic& operator=(const atomic&) volatile = delete;
1538 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1540 using __base_type::operator __integral_type;
1541 using __base_type::operator=;
1545 // Function definitions, atomic_flag operations.
1547 atomic_flag_test_and_set_explicit(atomic_flag* __a,
1548 memory_order __m) noexcept
1549 { return __a->test_and_set(__m); }
1552 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1553 memory_order __m) noexcept
1554 { return __a->test_and_set(__m); }
1557 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1558 { __a->clear(__m); }
1561 atomic_flag_clear_explicit(volatile atomic_flag* __a,
1562 memory_order __m) noexcept
1563 { __a->clear(__m); }
1566 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1567 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1570 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1571 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1574 atomic_flag_clear(atomic_flag* __a) noexcept
1575 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1578 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1579 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1582 // Function templates generally applicable to atomic types.
1583 template<typename _ITp>
1585 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1586 { return __a->is_lock_free(); }
1588 template<typename _ITp>
1590 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1591 { return __a->is_lock_free(); }
1593 template<typename _ITp>
1595 atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;
1597 template<typename _ITp>
1599 atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;
1601 template<typename _ITp>
1603 atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
1604 memory_order __m) noexcept
1605 { __a->store(__i, __m); }
1607 template<typename _ITp>
1609 atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
1610 memory_order __m) noexcept
1611 { __a->store(__i, __m); }
1613 template<typename _ITp>
1615 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1616 { return __a->load(__m); }
1618 template<typename _ITp>
1620 atomic_load_explicit(const volatile atomic<_ITp>* __a,
1621 memory_order __m) noexcept
1622 { return __a->load(__m); }
1624 template<typename _ITp>
1626 atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
1627 memory_order __m) noexcept
1628 { return __a->exchange(__i, __m); }
1630 template<typename _ITp>
1632 atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
1633 memory_order __m) noexcept
1634 { return __a->exchange(__i, __m); }
1636 template<typename _ITp>
1638 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1639 _ITp* __i1, _ITp __i2,
1641 memory_order __m2) noexcept
1642 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1644 template<typename _ITp>
1646 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1647 _ITp* __i1, _ITp __i2,
1649 memory_order __m2) noexcept
1650 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1652 template<typename _ITp>
1654 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1655 _ITp* __i1, _ITp __i2,
1657 memory_order __m2) noexcept
1658 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1660 template<typename _ITp>
1662 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1663 _ITp* __i1, _ITp __i2,
1665 memory_order __m2) noexcept
1666 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1669 template<typename _ITp>
1671 atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
1672 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1674 template<typename _ITp>
1676 atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
1677 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1679 template<typename _ITp>
1681 atomic_load(const atomic<_ITp>* __a) noexcept
1682 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1684 template<typename _ITp>
1686 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1687 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1689 template<typename _ITp>
1691 atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
1692 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1694 template<typename _ITp>
1696 atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
1697 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1699 template<typename _ITp>
1701 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1702 _ITp* __i1, _ITp __i2) noexcept
1704 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1705 memory_order_seq_cst,
1706 memory_order_seq_cst);
1709 template<typename _ITp>
1711 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1712 _ITp* __i1, _ITp __i2) noexcept
1714 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1715 memory_order_seq_cst,
1716 memory_order_seq_cst);
1719 template<typename _ITp>
1721 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1722 _ITp* __i1, _ITp __i2) noexcept
1724 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1725 memory_order_seq_cst,
1726 memory_order_seq_cst);
1729 template<typename _ITp>
1731 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1732 _ITp* __i1, _ITp __i2) noexcept
1734 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1735 memory_order_seq_cst,
1736 memory_order_seq_cst);
1739 // Function templates for atomic_integral operations only, using
1740 // __atomic_base. Template argument should be constricted to
1741 // intergral types as specified in the standard, excluding address
1743 template<typename _ITp>
1745 atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1746 memory_order __m) noexcept
1747 { return __a->fetch_add(__i, __m); }
1749 template<typename _ITp>
1751 atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1752 memory_order __m) noexcept
1753 { return __a->fetch_add(__i, __m); }
1755 template<typename _ITp>
1757 atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1758 memory_order __m) noexcept
1759 { return __a->fetch_sub(__i, __m); }
1761 template<typename _ITp>
1763 atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1764 memory_order __m) noexcept
1765 { return __a->fetch_sub(__i, __m); }
1767 template<typename _ITp>
1769 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1770 memory_order __m) noexcept
1771 { return __a->fetch_and(__i, __m); }
1773 template<typename _ITp>
1775 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1776 memory_order __m) noexcept
1777 { return __a->fetch_and(__i, __m); }
1779 template<typename _ITp>
1781 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1782 memory_order __m) noexcept
1783 { return __a->fetch_or(__i, __m); }
1785 template<typename _ITp>
1787 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1788 memory_order __m) noexcept
1789 { return __a->fetch_or(__i, __m); }
1791 template<typename _ITp>
1793 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1794 memory_order __m) noexcept
1795 { return __a->fetch_xor(__i, __m); }
1797 template<typename _ITp>
1799 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1800 memory_order __m) noexcept
1801 { return __a->fetch_xor(__i, __m); }
1803 template<typename _ITp>
1805 atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1806 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1808 template<typename _ITp>
1810 atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1811 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1813 template<typename _ITp>
1815 atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1816 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1818 template<typename _ITp>
1820 atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1821 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1823 template<typename _ITp>
1825 atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1826 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1828 template<typename _ITp>
1830 atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1831 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1833 template<typename _ITp>
1835 atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1836 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1838 template<typename _ITp>
1840 atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1841 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1843 template<typename _ITp>
1845 atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1846 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1848 template<typename _ITp>
1850 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1851 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1854 // Partial specializations for pointers.
1855 template<typename _ITp>
1857 atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1858 memory_order __m) noexcept
1859 { return __a->fetch_add(__d, __m); }
1861 template<typename _ITp>
1863 atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
1864 memory_order __m) noexcept
1865 { return __a->fetch_add(__d, __m); }
1867 template<typename _ITp>
1869 atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1870 { return __a->fetch_add(__d); }
1872 template<typename _ITp>
1874 atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1875 { return __a->fetch_add(__d); }
1877 template<typename _ITp>
1879 atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
1880 ptrdiff_t __d, memory_order __m) noexcept
1881 { return __a->fetch_sub(__d, __m); }
1883 template<typename _ITp>
1885 atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1886 memory_order __m) noexcept
1887 { return __a->fetch_sub(__d, __m); }
1889 template<typename _ITp>
1891 atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1892 { return __a->fetch_sub(__d); }
1894 template<typename _ITp>
1896 atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1897 { return __a->fetch_sub(__d); }
1900 // _GLIBCXX_END_NAMESPACE_VERSION