3 // Copyright (C) 2008-2013 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
25 // ????????????????????????????????????????????????????????????????????
27 // This is a copy of the libstdc++ header, with the trivial modification
28 // of ignoring the c++config.h include. If and when the top-level build is
29 // fixed so that target libraries can be built using the newly built, we can
32 // ????????????????????????????????????????????????????????????????????
34 /** @file include/atomic
35 * This is a Standard C++ Library header.
38 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
39 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
41 #ifndef _GLIBCXX_ATOMIC
42 #define _GLIBCXX_ATOMIC 1
44 #ifndef __always_inline
45 #define __always_inline inline __attribute__((always_inline))
48 // #pragma GCC system_header
50 // #ifndef __GXX_EXPERIMENTAL_CXX0X__
51 // # include <bits/c++0x_warning.h>
54 // #include <bits/atomic_base.h>
56 namespace std // _GLIBCXX_VISIBILITY(default)
58 // _GLIBCXX_BEGIN_NAMESPACE_VERSION
61 * @defgroup atomics Atomics
63 * Components for performing atomic operations.
/// Enumeration for memory_order
typedef enum memory_order
// Given the success memory order of a compare-and-swap, derive a valid
// order for the failure path: release is weakened to relaxed and acq_rel
// to acquire (a failed CAS performs no store, so release semantics are
// meaningless there); all other orders pass through unchanged.
__always_inline memory_order
__calculate_memory_order(memory_order __m) noexcept
const bool __cond1 = __m == memory_order_release;
const bool __cond2 = __m == memory_order_acq_rel;
memory_order __mo1(__cond1 ? memory_order_relaxed : __m);
memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
// Inter-thread memory fence with the given ordering; maps directly to
// the GCC builtin.
atomic_thread_fence(memory_order __m) noexcept
__atomic_thread_fence (__m);
95 atomic_signal_fence(memory_order __m) noexcept
97 __atomic_thread_fence (__m);
// kill_dependency: terminates a memory_order_consume dependency chain;
// the argument is simply returned by value.
template<typename _Tp>
kill_dependency(_Tp __y) noexcept
/// Lock-free Property
// Each macro expands to the compiler-provided constant for the type:
// 0 = never lock-free, 1 = sometimes lock-free, 2 = always lock-free.
#define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
// Base types for atomics.
template<typename _IntTp>
struct __atomic_base;
/// atomic_char
typedef __atomic_base<char> atomic_char;
/// atomic_schar
typedef __atomic_base<signed char> atomic_schar;
/// atomic_uchar
typedef __atomic_base<unsigned char> atomic_uchar;
/// atomic_short
typedef __atomic_base<short> atomic_short;
/// atomic_ushort
typedef __atomic_base<unsigned short> atomic_ushort;
/// atomic_int
typedef __atomic_base<int> atomic_int;
/// atomic_uint
typedef __atomic_base<unsigned int> atomic_uint;
/// atomic_long
typedef __atomic_base<long> atomic_long;
/// atomic_ulong
typedef __atomic_base<unsigned long> atomic_ulong;
/// atomic_llong
typedef __atomic_base<long long> atomic_llong;
/// atomic_ullong
typedef __atomic_base<unsigned long long> atomic_ullong;
/// atomic_wchar_t
typedef __atomic_base<wchar_t> atomic_wchar_t;
/// atomic_char16_t
typedef __atomic_base<char16_t> atomic_char16_t;
167 typedef __atomic_base<char32_t> atomic_char32_t;
170 typedef __atomic_base<char32_t> atomic_char32_t;
/// atomic_int_least8_t
typedef __atomic_base<int_least8_t> atomic_int_least8_t;
/// atomic_uint_least8_t
typedef __atomic_base<uint_least8_t> atomic_uint_least8_t;
/// atomic_int_least16_t
typedef __atomic_base<int_least16_t> atomic_int_least16_t;
/// atomic_uint_least16_t
typedef __atomic_base<uint_least16_t> atomic_uint_least16_t;
/// atomic_int_least32_t
typedef __atomic_base<int_least32_t> atomic_int_least32_t;
/// atomic_uint_least32_t
typedef __atomic_base<uint_least32_t> atomic_uint_least32_t;
/// atomic_int_least64_t
typedef __atomic_base<int_least64_t> atomic_int_least64_t;
/// atomic_uint_least64_t
typedef __atomic_base<uint_least64_t> atomic_uint_least64_t;
/// atomic_int_fast8_t
typedef __atomic_base<int_fast8_t> atomic_int_fast8_t;
/// atomic_uint_fast8_t
typedef __atomic_base<uint_fast8_t> atomic_uint_fast8_t;
/// atomic_int_fast16_t
typedef __atomic_base<int_fast16_t> atomic_int_fast16_t;
/// atomic_uint_fast16_t
typedef __atomic_base<uint_fast16_t> atomic_uint_fast16_t;
/// atomic_int_fast32_t
typedef __atomic_base<int_fast32_t> atomic_int_fast32_t;
/// atomic_uint_fast32_t
typedef __atomic_base<uint_fast32_t> atomic_uint_fast32_t;
/// atomic_int_fast64_t
typedef __atomic_base<int_fast64_t> atomic_int_fast64_t;
/// atomic_uint_fast64_t
typedef __atomic_base<uint_fast64_t> atomic_uint_fast64_t;
/// atomic_intptr_t
typedef __atomic_base<intptr_t> atomic_intptr_t;
/// atomic_uintptr_t
typedef __atomic_base<uintptr_t> atomic_uintptr_t;
/// atomic_size_t
typedef __atomic_base<size_t> atomic_size_t;
/// atomic_intmax_t
typedef __atomic_base<intmax_t> atomic_intmax_t;
/// atomic_uintmax_t
typedef __atomic_base<uintmax_t> atomic_uintmax_t;
/// atomic_ptrdiff_t
typedef __atomic_base<ptrdiff_t> atomic_ptrdiff_t;
// Brace-enclosed initializer suitable for static initialization of an
// atomic variable.
#define ATOMIC_VAR_INIT(_VI) { _VI }
// Forward declarations — presumably of the primary atomic template and
// a related declaration; the declared entities are on elided lines, so
// confirm against the full header.
template<typename _Tp>
template<typename _Tp>
252 * @brief Base type for atomic_flag.
254 * Base type is POD with data, allowing atomic_flag to derive from
255 * it and meet the standard layout type requirement. In addition to
* compatibility with a C interface, this allows different
257 * implementations of atomic_flag to use the same atomic operation
258 * functions, via a standard conversion to the __atomic_flag_base
// _GLIBCXX_BEGIN_EXTERN_C
// POD base holding the flag's data, so atomic_flag can remain a
// standard-layout type (see the comment block above).
struct __atomic_flag_base
// _GLIBCXX_END_EXTERN_C
// Required initializer for atomic_flag objects: the clear (false) state.
#define ATOMIC_FLAG_INIT { false }
// atomic_flag: the basic atomic boolean flag.  Not copyable or
// copy-assignable, like all atomic types.
struct atomic_flag : public __atomic_flag_base
atomic_flag() noexcept = default;
~atomic_flag() noexcept = default;
atomic_flag(const atomic_flag&) = delete;
atomic_flag& operator=(const atomic_flag&) = delete;
atomic_flag& operator=(const atomic_flag&) volatile = delete;
// Conversion to ATOMIC_FLAG_INIT.
atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }
// Atomically set the flag, returning its previous value.
test_and_set(memory_order __m = memory_order_seq_cst) noexcept
return __atomic_test_and_set (&_M_i, __m);
test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
return __atomic_test_and_set (&_M_i, __m);
// Atomically clear the flag.  The commented-out assertions document the
// orderings that are invalid for a store-like operation.
clear(memory_order __m = memory_order_seq_cst) noexcept
// __glibcxx_assert(__m != memory_order_consume);
// __glibcxx_assert(__m != memory_order_acquire);
// __glibcxx_assert(__m != memory_order_acq_rel);
__atomic_clear (&_M_i, __m);
clear(memory_order __m = memory_order_seq_cst) volatile noexcept
// __glibcxx_assert(__m != memory_order_consume);
// __glibcxx_assert(__m != memory_order_acquire);
// __glibcxx_assert(__m != memory_order_acq_rel);
__atomic_clear (&_M_i, __m);
/// Base class for atomic integrals.
// For each of the integral types, define atomic_[integral type] struct
// atomic_schar signed char
// atomic_uchar unsigned char
// atomic_short short
// atomic_ushort unsigned short
// atomic_uint unsigned int
// atomic_ulong unsigned long
// atomic_llong long long
// atomic_ullong unsigned long long
// atomic_char16_t char16_t
// atomic_char32_t char32_t
// atomic_wchar_t wchar_t
// NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
// 8 bytes, since that is what GCC built-in functions for atomic
// memory access expect.
template<typename _ITp>
typedef _ITp __int_type;
// Atomics are neither copyable nor copy-assignable.
__atomic_base() noexcept = default;
~__atomic_base() noexcept = default;
__atomic_base(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) volatile = delete;
// Requires __int_type convertible to _M_i.
constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
// Conversion and compound-assignment operators.  All pre-increment /
// pre-decrement and op= forms go straight to the __atomic builtins with
// memory_order_seq_cst; the post-forms delegate to fetch_add/fetch_sub.
operator __int_type() const noexcept
operator __int_type() const volatile noexcept
operator=(__int_type __i) noexcept
operator=(__int_type __i) volatile noexcept
// Post-increment: returns the old value.
operator++(int) noexcept
{ return fetch_add(1); }
operator++(int) volatile noexcept
{ return fetch_add(1); }
operator--(int) noexcept
{ return fetch_sub(1); }
operator--(int) volatile noexcept
{ return fetch_sub(1); }
// Pre-increment: returns the new value.
operator++() noexcept
{ return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }
operator++() volatile noexcept
{ return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }
operator--() noexcept
{ return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
operator--() volatile noexcept
{ return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
operator+=(__int_type __i) noexcept
{ return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }
operator+=(__int_type __i) volatile noexcept
{ return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }
operator-=(__int_type __i) noexcept
{ return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
operator-=(__int_type __i) volatile noexcept
{ return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
operator&=(__int_type __i) noexcept
{ return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }
operator&=(__int_type __i) volatile noexcept
{ return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }
operator|=(__int_type __i) noexcept
{ return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }
operator|=(__int_type __i) volatile noexcept
{ return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }
operator^=(__int_type __i) noexcept
{ return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
operator^=(__int_type __i) volatile noexcept
{ return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
// Query whether operations on this object are implemented lock-free.
is_lock_free() const noexcept
{ return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }
is_lock_free() const volatile noexcept
{ return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }
// Store with the given ordering; commented-out assertions document the
// orderings invalid for a store.
store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
// __glibcxx_assert(__m != memory_order_acquire);
// __glibcxx_assert(__m != memory_order_acq_rel);
// __glibcxx_assert(__m != memory_order_consume);
__atomic_store_n(&_M_i, __i, __m);
store(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
// __glibcxx_assert(__m != memory_order_acquire);
// __glibcxx_assert(__m != memory_order_acq_rel);
// __glibcxx_assert(__m != memory_order_consume);
__atomic_store_n(&_M_i, __i, __m);
// Load with the given ordering; release/acq_rel are invalid for a load.
__always_inline __int_type
load(memory_order __m = memory_order_seq_cst) const noexcept
// __glibcxx_assert(__m != memory_order_release);
// __glibcxx_assert(__m != memory_order_acq_rel);
return __atomic_load_n(&_M_i, __m);
__always_inline __int_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
// __glibcxx_assert(__m != memory_order_release);
// __glibcxx_assert(__m != memory_order_acq_rel);
return __atomic_load_n(&_M_i, __m);
// Atomically replace the value, returning the previous one.
__always_inline __int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
return __atomic_exchange_n(&_M_i, __i, __m);
__always_inline __int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
return __atomic_exchange_n(&_M_i, __i, __m);
// Compare-and-swap family.  The 4th argument of the builtin selects
// weak (1: may fail spuriously) versus strong (0) semantics.  On
// failure the observed value is written back into __i1.
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) noexcept
// __glibcxx_assert(__m2 != memory_order_release);
// __glibcxx_assert(__m2 != memory_order_acq_rel);
// __glibcxx_assert(__m2 <= __m1);
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m2) volatile noexcept
// __glibcxx_assert(__m2 != memory_order_release);
// __glibcxx_assert(__m2 != memory_order_acq_rel);
// __glibcxx_assert(__m2 <= __m1);
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
// Single-order overloads derive the failure order via
// __calculate_memory_order.
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) noexcept
return compare_exchange_weak(__i1, __i2, __m,
__calculate_memory_order(__m));
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
return compare_exchange_weak(__i1, __i2, __m,
__calculate_memory_order(__m));
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) noexcept
// __glibcxx_assert(__m2 != memory_order_release);
// __glibcxx_assert(__m2 != memory_order_acq_rel);
// __glibcxx_assert(__m2 <= __m1);
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m2) volatile noexcept
// __glibcxx_assert(__m2 != memory_order_release);
// __glibcxx_assert(__m2 != memory_order_acq_rel);
// __glibcxx_assert(__m2 <= __m1);
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) noexcept
return compare_exchange_strong(__i1, __i2, __m,
__calculate_memory_order(__m));
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
return compare_exchange_strong(__i1, __i2, __m,
__calculate_memory_order(__m));
// Fetch-and-op primitives; the __atomic_fetch_* builtins return the
// value the object held BEFORE the operation.
__always_inline __int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_add(&_M_i, __i, __m); }
__always_inline __int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_add(&_M_i, __i, __m); }
__always_inline __int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_sub(&_M_i, __i, __m); }
__always_inline __int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_sub(&_M_i, __i, __m); }
__always_inline __int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_and(&_M_i, __i, __m); }
__always_inline __int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_and(&_M_i, __i, __m); }
__always_inline __int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_or(&_M_i, __i, __m); }
__always_inline __int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_or(&_M_i, __i, __m); }
__always_inline __int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_xor(&_M_i, __i, __m); }
__always_inline __int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_xor(&_M_i, __i, __m); }
/// Partial specialization for pointer types.
template<typename _PTp>
struct __atomic_base<_PTp*>
typedef _PTp* __pointer_type;
// Not copyable or copy-assignable, like all atomic types.
__atomic_base() noexcept = default;
~__atomic_base() noexcept = default;
__atomic_base(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) volatile = delete;
// Requires __pointer_type convertible to _M_p.
constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
// Conversion and arithmetic operators for the pointer specialization.
// NOTE(review): ++/--/+=/-= pass the raw offset straight to the
// __atomic builtins; for a _PTp* operand that looks like BYTE
// arithmetic rather than element arithmetic (newer libstdc++ scales the
// offset by sizeof(_PTp)) — confirm intended behavior before relying on
// these for non-char pointees.
operator __pointer_type() const noexcept
operator __pointer_type() const volatile noexcept
operator=(__pointer_type __p) noexcept
operator=(__pointer_type __p) volatile noexcept
operator++(int) noexcept
{ return fetch_add(1); }
operator++(int) volatile noexcept
{ return fetch_add(1); }
operator--(int) noexcept
{ return fetch_sub(1); }
operator--(int) volatile noexcept
{ return fetch_sub(1); }
operator++() noexcept
{ return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }
operator++() volatile noexcept
{ return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }
operator--() noexcept
{ return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }
operator--() volatile noexcept
{ return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }
operator+=(ptrdiff_t __d) noexcept
{ return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }
operator+=(ptrdiff_t __d) volatile noexcept
{ return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }
operator-=(ptrdiff_t __d) noexcept
{ return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }
operator-=(ptrdiff_t __d) volatile noexcept
{ return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }
// Lock-free query, as in the integral primary template.
is_lock_free() const noexcept
{ return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }
is_lock_free() const volatile noexcept
{ return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }
// Store / load / exchange for the pointer specialization; the
// commented-out assertions document orderings invalid for each
// operation.
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
// __glibcxx_assert(__m != memory_order_acquire);
// __glibcxx_assert(__m != memory_order_acq_rel);
// __glibcxx_assert(__m != memory_order_consume);
__atomic_store_n(&_M_p, __p, __m);
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
// __glibcxx_assert(__m != memory_order_acquire);
// __glibcxx_assert(__m != memory_order_acq_rel);
// __glibcxx_assert(__m != memory_order_consume);
__atomic_store_n(&_M_p, __p, __m);
__always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const noexcept
// __glibcxx_assert(__m != memory_order_release);
// __glibcxx_assert(__m != memory_order_acq_rel);
return __atomic_load_n(&_M_p, __m);
__always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
// __glibcxx_assert(__m != memory_order_release);
// __glibcxx_assert(__m != memory_order_acq_rel);
return __atomic_load_n(&_M_p, __m);
// Atomically replace the stored pointer, returning the previous one.
__always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
return __atomic_exchange_n(&_M_p, __p, __m);
__always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
return __atomic_exchange_n(&_M_p, __p, __m);
// Strong compare-and-swap (builtin weak flag = 0); on failure the
// observed pointer is written back into __p1.
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m2) noexcept
// __glibcxx_assert(__m2 != memory_order_release);
// __glibcxx_assert(__m2 != memory_order_acq_rel);
// __glibcxx_assert(__m2 <= __m1);
return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m2) volatile noexcept
// __glibcxx_assert(__m2 != memory_order_release);
// __glibcxx_assert(__m2 != memory_order_acq_rel);
// __glibcxx_assert(__m2 <= __m1);
return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
// NOTE(review): as with the operators above, __d is passed unscaled to
// the builtin — apparently byte rather than element arithmetic; verify.
__always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_add(&_M_p, __d, __m); }
__always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_add(&_M_p, __d, __m); }
__always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_sub(&_M_p, __d, __m); }
__always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_sub(&_M_p, __d, __m); }
* @addtogroup atomics
// atomic_bool wraps __atomic_base<bool> by containment (not
// inheritance) and forwards every operation to it.
// NB: No operators or fetch-operations for this type.
__atomic_base<bool> _M_base;
atomic_bool() noexcept = default;
~atomic_bool() noexcept = default;
atomic_bool(const atomic_bool&) = delete;
atomic_bool& operator=(const atomic_bool&) = delete;
atomic_bool& operator=(const atomic_bool&) volatile = delete;
constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }
operator=(bool __i) noexcept
{ return _M_base.operator=(__i); }
// Implicit conversion performs a seq_cst load.
operator bool() const noexcept
{ return _M_base.load(); }
operator bool() const volatile noexcept
{ return _M_base.load(); }
is_lock_free() const noexcept { return _M_base.is_lock_free(); }
is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
{ _M_base.store(__i, __m); }
store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
{ _M_base.store(__i, __m); }
load(memory_order __m = memory_order_seq_cst) const noexcept
{ return _M_base.load(__m); }
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{ return _M_base.load(__m); }
exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
{ return _M_base.exchange(__i, __m); }
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.exchange(__i, __m); }
compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
compare_exchange_weak(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m); }
compare_exchange_weak(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m); }
compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
compare_exchange_strong(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m); }
compare_exchange_strong(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m); }
/// 29.4.3, Generic atomic type, primary class template.
// The generic template uses the object (non-_n) __atomic builtins,
// which copy values through pointers — presumably because _Tp need only
// be trivially copyable, not a scalar; confirm against the full header.
template<typename _Tp>
atomic() noexcept = default;
~atomic() noexcept = default;
atomic(const atomic&) = delete;
atomic& operator=(const atomic&) = delete;
atomic& operator=(const atomic&) volatile = delete;
constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
// Implicit conversion performs a seq_cst load.
operator _Tp() const noexcept
operator _Tp() const volatile noexcept
operator=(_Tp __i) noexcept
{ store(__i); return __i; }
operator=(_Tp __i) volatile noexcept
{ store(__i); return __i; }
is_lock_free() const noexcept
{ return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }
is_lock_free() const volatile noexcept
{ return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }
store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
{ __atomic_store(&_M_i, &__i, _m); }
store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
{ __atomic_store(&_M_i, &__i, _m); }
// Loads go through a local temporary because the builtin writes the
// result via pointer.
load(memory_order _m = memory_order_seq_cst) const noexcept
__atomic_load(&_M_i, &tmp, _m);
load(memory_order _m = memory_order_seq_cst) const volatile noexcept
__atomic_load(&_M_i, &tmp, _m);
exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
__atomic_exchange(&_M_i, &__i, &tmp, _m);
memory_order _m = memory_order_seq_cst) volatile noexcept
__atomic_exchange(&_M_i, &__i, &tmp, _m);
// CAS family: 4th builtin argument true = weak, false = strong.
__always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) noexcept
return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
__always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) volatile noexcept
return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
// NB: unlike the integral base, the single-order overloads here reuse
// __m for the failure order rather than calling __calculate_memory_order.
__always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return compare_exchange_weak(__e, __i, __m, __m); }
__always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return compare_exchange_weak(__e, __i, __m, __m); }
__always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) noexcept
return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
__always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) volatile noexcept
return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
__always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return compare_exchange_strong(__e, __i, __m, __m); }
__always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return compare_exchange_strong(__e, __i, __m, __m); }
/// Partial specialization for pointer types.
// Thin forwarding wrapper around __atomic_base<_Tp*> (member _M_b).
template<typename _Tp>
typedef _Tp* __pointer_type;
typedef __atomic_base<_Tp*> __base_type;
atomic() noexcept = default;
~atomic() noexcept = default;
atomic(const atomic&) = delete;
atomic& operator=(const atomic&) = delete;
atomic& operator=(const atomic&) volatile = delete;
constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
operator __pointer_type() const noexcept
{ return __pointer_type(_M_b); }
operator __pointer_type() const volatile noexcept
{ return __pointer_type(_M_b); }
operator=(__pointer_type __p) noexcept
{ return _M_b.operator=(__p); }
operator=(__pointer_type __p) volatile noexcept
{ return _M_b.operator=(__p); }
operator++(int) noexcept
operator++(int) volatile noexcept
operator--(int) noexcept
operator--(int) volatile noexcept
operator++() noexcept
operator++() volatile noexcept
operator--() noexcept
operator--() volatile noexcept
operator+=(ptrdiff_t __d) noexcept
{ return _M_b.operator+=(__d); }
operator+=(ptrdiff_t __d) volatile noexcept
{ return _M_b.operator+=(__d); }
operator-=(ptrdiff_t __d) noexcept
{ return _M_b.operator-=(__d); }
operator-=(ptrdiff_t __d) volatile noexcept
{ return _M_b.operator-=(__d); }
is_lock_free() const noexcept
{ return _M_b.is_lock_free(); }
is_lock_free() const volatile noexcept
{ return _M_b.is_lock_free(); }
__always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.store(__p, __m); }
__always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.store(__p, __m); }
__always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const noexcept
{ return _M_b.load(__m); }
__always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{ return _M_b.load(__m); }
__always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.exchange(__p, __m); }
__always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.exchange(__p, __m); }
// NB: compare_exchange_weak is implemented via the strong CAS of the
// base.  This is conforming: a weak CAS is merely PERMITTED to fail
// spuriously, so strong semantics satisfy the weak contract.
__always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1, memory_order __m2) noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
__always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m2) volatile noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
__always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) noexcept
return compare_exchange_weak(__p1, __p2, __m,
__calculate_memory_order(__m));
__always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) volatile noexcept
return compare_exchange_weak(__p1, __p2, __m,
__calculate_memory_order(__m));
__always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1, memory_order __m2) noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
__always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m2) volatile noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
__always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) noexcept
return _M_b.compare_exchange_strong(__p1, __p2, __m,
__calculate_memory_order(__m));
__always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) volatile noexcept
return _M_b.compare_exchange_strong(__p1, __p2, __m,
__calculate_memory_order(__m));
__always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.fetch_add(__d, __m); }
__always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.fetch_add(__d, __m); }
__always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.fetch_sub(__d, __m); }
__always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.fetch_sub(__d, __m); }
/// Explicit specialization for bool.
// Inherits all behavior from atomic_bool; re-exposes conversion and
// assignment via using-declarations.
struct atomic<bool> : public atomic_bool
typedef bool __integral_type;
typedef atomic_bool __base_type;
atomic() noexcept = default;
~atomic() noexcept = default;
atomic(const atomic&) = delete;
atomic& operator=(const atomic&) = delete;
atomic& operator=(const atomic&) volatile = delete;
constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
using __base_type::operator __integral_type;
using __base_type::operator=;
1280 /// Explicit specialization for char.
1282 struct atomic<char> : public atomic_char
1284 typedef char __integral_type;
1285 typedef atomic_char __base_type;
1287 atomic() noexcept = default;
1288 ~atomic() noexcept = default;
1289 atomic(const atomic&) = delete;
1290 atomic& operator=(const atomic&) = delete;
1291 atomic& operator=(const atomic&) volatile = delete;
1293 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1295 using __base_type::operator __integral_type;
1296 using __base_type::operator=;
1299 /// Explicit specialization for signed char.
1301 struct atomic<signed char> : public atomic_schar
1303 typedef signed char __integral_type;
1304 typedef atomic_schar __base_type;
1306 atomic() noexcept= default;
1307 ~atomic() noexcept = default;
1308 atomic(const atomic&) = delete;
1309 atomic& operator=(const atomic&) = delete;
1310 atomic& operator=(const atomic&) volatile = delete;
1312 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1314 using __base_type::operator __integral_type;
1315 using __base_type::operator=;
1318 /// Explicit specialization for unsigned char.
1320 struct atomic<unsigned char> : public atomic_uchar
1322 typedef unsigned char __integral_type;
1323 typedef atomic_uchar __base_type;
1325 atomic() noexcept= default;
1326 ~atomic() noexcept = default;
1327 atomic(const atomic&) = delete;
1328 atomic& operator=(const atomic&) = delete;
1329 atomic& operator=(const atomic&) volatile = delete;
1331 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1333 using __base_type::operator __integral_type;
1334 using __base_type::operator=;
1337 /// Explicit specialization for short.
1339 struct atomic<short> : public atomic_short
1341 typedef short __integral_type;
1342 typedef atomic_short __base_type;
1344 atomic() noexcept = default;
1345 ~atomic() noexcept = default;
1346 atomic(const atomic&) = delete;
1347 atomic& operator=(const atomic&) = delete;
1348 atomic& operator=(const atomic&) volatile = delete;
1350 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1352 using __base_type::operator __integral_type;
1353 using __base_type::operator=;
1356 /// Explicit specialization for unsigned short.
1358 struct atomic<unsigned short> : public atomic_ushort
1360 typedef unsigned short __integral_type;
1361 typedef atomic_ushort __base_type;
1363 atomic() noexcept = default;
1364 ~atomic() noexcept = default;
1365 atomic(const atomic&) = delete;
1366 atomic& operator=(const atomic&) = delete;
1367 atomic& operator=(const atomic&) volatile = delete;
1369 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1371 using __base_type::operator __integral_type;
1372 using __base_type::operator=;
1375 /// Explicit specialization for int.
1377 struct atomic<int> : atomic_int
1379 typedef int __integral_type;
1380 typedef atomic_int __base_type;
1382 atomic() noexcept = default;
1383 ~atomic() noexcept = default;
1384 atomic(const atomic&) = delete;
1385 atomic& operator=(const atomic&) = delete;
1386 atomic& operator=(const atomic&) volatile = delete;
1388 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1390 using __base_type::operator __integral_type;
1391 using __base_type::operator=;
1394 /// Explicit specialization for unsigned int.
1396 struct atomic<unsigned int> : public atomic_uint
1398 typedef unsigned int __integral_type;
1399 typedef atomic_uint __base_type;
1401 atomic() noexcept = default;
1402 ~atomic() noexcept = default;
1403 atomic(const atomic&) = delete;
1404 atomic& operator=(const atomic&) = delete;
1405 atomic& operator=(const atomic&) volatile = delete;
1407 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1409 using __base_type::operator __integral_type;
1410 using __base_type::operator=;
1413 /// Explicit specialization for long.
1415 struct atomic<long> : public atomic_long
1417 typedef long __integral_type;
1418 typedef atomic_long __base_type;
1420 atomic() noexcept = default;
1421 ~atomic() noexcept = default;
1422 atomic(const atomic&) = delete;
1423 atomic& operator=(const atomic&) = delete;
1424 atomic& operator=(const atomic&) volatile = delete;
1426 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1428 using __base_type::operator __integral_type;
1429 using __base_type::operator=;
1432 /// Explicit specialization for unsigned long.
1434 struct atomic<unsigned long> : public atomic_ulong
1436 typedef unsigned long __integral_type;
1437 typedef atomic_ulong __base_type;
1439 atomic() noexcept = default;
1440 ~atomic() noexcept = default;
1441 atomic(const atomic&) = delete;
1442 atomic& operator=(const atomic&) = delete;
1443 atomic& operator=(const atomic&) volatile = delete;
1445 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1447 using __base_type::operator __integral_type;
1448 using __base_type::operator=;
1451 /// Explicit specialization for long long.
1453 struct atomic<long long> : public atomic_llong
1455 typedef long long __integral_type;
1456 typedef atomic_llong __base_type;
1458 atomic() noexcept = default;
1459 ~atomic() noexcept = default;
1460 atomic(const atomic&) = delete;
1461 atomic& operator=(const atomic&) = delete;
1462 atomic& operator=(const atomic&) volatile = delete;
1464 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1466 using __base_type::operator __integral_type;
1467 using __base_type::operator=;
1470 /// Explicit specialization for unsigned long long.
1472 struct atomic<unsigned long long> : public atomic_ullong
1474 typedef unsigned long long __integral_type;
1475 typedef atomic_ullong __base_type;
1477 atomic() noexcept = default;
1478 ~atomic() noexcept = default;
1479 atomic(const atomic&) = delete;
1480 atomic& operator=(const atomic&) = delete;
1481 atomic& operator=(const atomic&) volatile = delete;
1483 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1485 using __base_type::operator __integral_type;
1486 using __base_type::operator=;
1489 /// Explicit specialization for wchar_t.
1491 struct atomic<wchar_t> : public atomic_wchar_t
1493 typedef wchar_t __integral_type;
1494 typedef atomic_wchar_t __base_type;
1496 atomic() noexcept = default;
1497 ~atomic() noexcept = default;
1498 atomic(const atomic&) = delete;
1499 atomic& operator=(const atomic&) = delete;
1500 atomic& operator=(const atomic&) volatile = delete;
1502 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1504 using __base_type::operator __integral_type;
1505 using __base_type::operator=;
1508 /// Explicit specialization for char16_t.
1510 struct atomic<char16_t> : public atomic_char16_t
1512 typedef char16_t __integral_type;
1513 typedef atomic_char16_t __base_type;
1515 atomic() noexcept = default;
1516 ~atomic() noexcept = default;
1517 atomic(const atomic&) = delete;
1518 atomic& operator=(const atomic&) = delete;
1519 atomic& operator=(const atomic&) volatile = delete;
1521 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1523 using __base_type::operator __integral_type;
1524 using __base_type::operator=;
1527 /// Explicit specialization for char32_t.
1529 struct atomic<char32_t> : public atomic_char32_t
1531 typedef char32_t __integral_type;
1532 typedef atomic_char32_t __base_type;
1534 atomic() noexcept = default;
1535 ~atomic() noexcept = default;
1536 atomic(const atomic&) = delete;
1537 atomic& operator=(const atomic&) = delete;
1538 atomic& operator=(const atomic&) volatile = delete;
1540 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1542 using __base_type::operator __integral_type;
1543 using __base_type::operator=;
1547 // Function definitions, atomic_flag operations.
1548 __always_inline bool
1549 atomic_flag_test_and_set_explicit(atomic_flag* __a,
1550 memory_order __m) noexcept
1551 { return __a->test_and_set(__m); }
1553 __always_inline bool
1554 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1555 memory_order __m) noexcept
1556 { return __a->test_and_set(__m); }
1558 __always_inline void
1559 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1560 { __a->clear(__m); }
1562 __always_inline void
1563 atomic_flag_clear_explicit(volatile atomic_flag* __a,
1564 memory_order __m) noexcept
1565 { __a->clear(__m); }
1567 __always_inline bool
1568 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1569 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1571 __always_inline bool
1572 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1573 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1575 __always_inline void
1576 atomic_flag_clear(atomic_flag* __a) noexcept
1577 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1579 __always_inline void
1580 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1581 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1584 // Function templates generally applicable to atomic types.
1585 template<typename _ITp>
1586 __always_inline bool
1587 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1588 { return __a->is_lock_free(); }
1590 template<typename _ITp>
1591 __always_inline bool
1592 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1593 { return __a->is_lock_free(); }
1595 template<typename _ITp>
1596 __always_inline void
1597 atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;
1599 template<typename _ITp>
1600 __always_inline void
1601 atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;
1603 template<typename _ITp>
1604 __always_inline void
1605 atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
1606 memory_order __m) noexcept
1607 { __a->store(__i, __m); }
1609 template<typename _ITp>
1610 __always_inline void
1611 atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
1612 memory_order __m) noexcept
1613 { __a->store(__i, __m); }
1615 template<typename _ITp>
1616 __always_inline _ITp
1617 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1618 { return __a->load(__m); }
1620 template<typename _ITp>
1621 __always_inline _ITp
1622 atomic_load_explicit(const volatile atomic<_ITp>* __a,
1623 memory_order __m) noexcept
1624 { return __a->load(__m); }
1626 template<typename _ITp>
1627 __always_inline _ITp
1628 atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
1629 memory_order __m) noexcept
1630 { return __a->exchange(__i, __m); }
1632 template<typename _ITp>
1633 __always_inline _ITp
1634 atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
1635 memory_order __m) noexcept
1636 { return __a->exchange(__i, __m); }
1638 template<typename _ITp>
1639 __always_inline bool
1640 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1641 _ITp* __i1, _ITp __i2,
1643 memory_order __m2) noexcept
1644 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1646 template<typename _ITp>
1647 __always_inline bool
1648 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1649 _ITp* __i1, _ITp __i2,
1651 memory_order __m2) noexcept
1652 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1654 template<typename _ITp>
1655 __always_inline bool
1656 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1657 _ITp* __i1, _ITp __i2,
1659 memory_order __m2) noexcept
1660 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1662 template<typename _ITp>
1663 __always_inline bool
1664 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1665 _ITp* __i1, _ITp __i2,
1667 memory_order __m2) noexcept
1668 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1671 template<typename _ITp>
1672 __always_inline void
1673 atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
1674 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1676 template<typename _ITp>
1677 __always_inline void
1678 atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
1679 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1681 template<typename _ITp>
1682 __always_inline _ITp
1683 atomic_load(const atomic<_ITp>* __a) noexcept
1684 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1686 template<typename _ITp>
1687 __always_inline _ITp
1688 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1689 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1691 template<typename _ITp>
1692 __always_inline _ITp
1693 atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
1694 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1696 template<typename _ITp>
1697 __always_inline _ITp
1698 atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
1699 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1701 template<typename _ITp>
1702 __always_inline bool
1703 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1704 _ITp* __i1, _ITp __i2) noexcept
1706 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1707 memory_order_seq_cst,
1708 memory_order_seq_cst);
1711 template<typename _ITp>
1712 __always_inline bool
1713 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1714 _ITp* __i1, _ITp __i2) noexcept
1716 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1717 memory_order_seq_cst,
1718 memory_order_seq_cst);
1721 template<typename _ITp>
1722 __always_inline bool
1723 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1724 _ITp* __i1, _ITp __i2) noexcept
1726 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1727 memory_order_seq_cst,
1728 memory_order_seq_cst);
1731 template<typename _ITp>
1732 __always_inline bool
1733 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1734 _ITp* __i1, _ITp __i2) noexcept
1736 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1737 memory_order_seq_cst,
1738 memory_order_seq_cst);
1741 // Function templates for atomic_integral operations only, using
1742 // __atomic_base. Template argument should be constricted to
1743 // intergral types as specified in the standard, excluding address
1745 template<typename _ITp>
1746 __always_inline _ITp
1747 atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1748 memory_order __m) noexcept
1749 { return __a->fetch_add(__i, __m); }
1751 template<typename _ITp>
1752 __always_inline _ITp
1753 atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1754 memory_order __m) noexcept
1755 { return __a->fetch_add(__i, __m); }
1757 template<typename _ITp>
1758 __always_inline _ITp
1759 atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1760 memory_order __m) noexcept
1761 { return __a->fetch_sub(__i, __m); }
1763 template<typename _ITp>
1764 __always_inline _ITp
1765 atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1766 memory_order __m) noexcept
1767 { return __a->fetch_sub(__i, __m); }
1769 template<typename _ITp>
1770 __always_inline _ITp
1771 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1772 memory_order __m) noexcept
1773 { return __a->fetch_and(__i, __m); }
1775 template<typename _ITp>
1776 __always_inline _ITp
1777 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1778 memory_order __m) noexcept
1779 { return __a->fetch_and(__i, __m); }
1781 template<typename _ITp>
1782 __always_inline _ITp
1783 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1784 memory_order __m) noexcept
1785 { return __a->fetch_or(__i, __m); }
1787 template<typename _ITp>
1788 __always_inline _ITp
1789 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1790 memory_order __m) noexcept
1791 { return __a->fetch_or(__i, __m); }
1793 template<typename _ITp>
1794 __always_inline _ITp
1795 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1796 memory_order __m) noexcept
1797 { return __a->fetch_xor(__i, __m); }
1799 template<typename _ITp>
1800 __always_inline _ITp
1801 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1802 memory_order __m) noexcept
1803 { return __a->fetch_xor(__i, __m); }
1805 template<typename _ITp>
1806 __always_inline _ITp
1807 atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1808 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1810 template<typename _ITp>
1811 __always_inline _ITp
1812 atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1813 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1815 template<typename _ITp>
1816 __always_inline _ITp
1817 atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1818 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1820 template<typename _ITp>
1821 __always_inline _ITp
1822 atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1823 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1825 template<typename _ITp>
1826 __always_inline _ITp
1827 atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1828 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1830 template<typename _ITp>
1831 __always_inline _ITp
1832 atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1833 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1835 template<typename _ITp>
1836 __always_inline _ITp
1837 atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1838 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1840 template<typename _ITp>
1841 __always_inline _ITp
1842 atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1843 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1845 template<typename _ITp>
1846 __always_inline _ITp
1847 atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1848 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1850 template<typename _ITp>
1851 __always_inline _ITp
1852 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1853 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1856 // Partial specializations for pointers.
1857 template<typename _ITp>
1858 __always_inline _ITp*
1859 atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1860 memory_order __m) noexcept
1861 { return __a->fetch_add(__d, __m); }
1863 template<typename _ITp>
1864 __always_inline _ITp*
1865 atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
1866 memory_order __m) noexcept
1867 { return __a->fetch_add(__d, __m); }
1869 template<typename _ITp>
1870 __always_inline _ITp*
1871 atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1872 { return __a->fetch_add(__d); }
1874 template<typename _ITp>
1875 __always_inline _ITp*
1876 atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1877 { return __a->fetch_add(__d); }
1879 template<typename _ITp>
1880 __always_inline _ITp*
1881 atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
1882 ptrdiff_t __d, memory_order __m) noexcept
1883 { return __a->fetch_sub(__d, __m); }
1885 template<typename _ITp>
1886 __always_inline _ITp*
1887 atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1888 memory_order __m) noexcept
1889 { return __a->fetch_sub(__d, __m); }
1891 template<typename _ITp>
1892 __always_inline _ITp*
1893 atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1894 { return __a->fetch_sub(__d); }
1896 template<typename _ITp>
1897 __always_inline _ITp*
1898 atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1899 { return __a->fetch_sub(__d); }
1902 // _GLIBCXX_END_NAMESPACE_VERSION