4 // Free Software Foundation, Inc.
6 // This file is part of the GNU ISO C++ Library. This library is free
7 // software; you can redistribute it and/or modify it under the
8 // terms of the GNU General Public License as published by the
9 // Free Software Foundation; either version 2, or (at your option)
12 // This library is distributed in the hope that it will be useful,
13 // but WITHOUT ANY WARRANTY; without even the implied warranty of
14 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 // GNU General Public License for more details.
17 // You should have received a copy of the GNU General Public License
18 // along with this library; see the file COPYING. If not, write to
19 // the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 // Boston, MA 02110-1301, USA.
22 // As a special exception, you may use this file as part of a free software
23 // library without restriction. Specifically, if other files instantiate
24 // templates or use macros or inline functions from this file, or you compile
25 // this file and link it with other files to produce an executable, this
26 // file does not by itself cause the resulting executable to be covered by
27 // the GNU General Public License. This exception does not however
28 // invalidate any other reasons why the executable file might be covered by
29 // the GNU General Public License.
32 * This is a Standard C++ Library file. You should @c #include this file
33 * in your programs, rather than any of the "*.h" implementation files.
35 * This is the C++ version of the Standard C Library header @c stdatomic.h,
36 * and its contents are (mostly) the same as that header, but are all
37 * contained in the namespace @c std (except for names which are defined
41 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
42 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
44 #ifndef _GLIBCXX_STDATOMIC
45 #define _GLIBCXX_STDATOMIC 1
47 #pragma GCC system_header
49 #ifndef __GXX_EXPERIMENTAL_CXX0X__
50 # include <c++0x_warning.h>
53 #include <stdatomic.h>
56 _GLIBCXX_BEGIN_NAMESPACE(std)
58 // Can either subclass or encapsulate "C" functionality, and here
59 // encapsulating works with C++2003's version of POD and so is
60 // portable across C++2003/200x.
61 // Both end up being sub-optimal in terms of a constructor
62 // initialization list, but oh well.
// atomic_flag: the minimal atomic boolean flag; wraps the C-level
// __atomic_flag_base and forwards to the atomic_flag_* C functions.
// NOTE(review): sampled excerpt — the struct header, return types,
// and several intervening lines are elided here; code left untouched.
67 __atomic_flag_base _M_base;
// Atomically set the flag, returning its previous value.
70 test_and_set(memory_order __x = memory_order_seq_cst) volatile
71 { return atomic_flag_test_and_set_explicit(this, __x); }
// Atomically clear the flag.
74 clear(memory_order __x = memory_order_seq_cst) volatile
75 { atomic_flag_clear_explicit(this, __x); }
// Issue a fence associated with this flag, with the given ordering.
78 fence(memory_order __x) const volatile
79 { atomic_flag_fence(this, __x); }
81 #if _GLIBCXX_USE_STANDARD_LAYOUT
82 // Add in non-trivial default constructor that correctly
83 // initializes member "as if" by ATOMIC_FLAG_INIT.
84 atomic_flag() { _M_base._M_b = false; }
// Copying is disabled: declared but never defined.
87 atomic_flag(const atomic_flag&);
88 atomic_flag& operator=(const atomic_flag&);
92 /// 29.4.2, address types
// atomic_address: atomic operations on a void* value, with pointer
// arithmetic expressed in ptrdiff_t (fetch_add/fetch_sub).
// NOTE(review): sampled excerpt — return types, access specifiers,
// and the closing of the struct are elided; code left untouched.
93 typedef struct atomic_address
95 __atomic_address_base _M_base;
96 is_lock_free() const volatile;
98 is_lock_free() const volatile;
101 store(void*, memory_order = memory_order_seq_cst) volatile;
104 load(memory_order = memory_order_seq_cst) volatile;
107 swap(void*, memory_order = memory_order_seq_cst) volatile;
110 compare_swap(void*&, void*, memory_order, memory_order) volatile;
113 compare_swap(void*&, void*, memory_order = memory_order_seq_cst) volatile;
116 fence(memory_order) const volatile;
119 fetch_add(ptrdiff_t, memory_order = memory_order_seq_cst) volatile;
122 fetch_sub(ptrdiff_t, memory_order = memory_order_seq_cst) volatile;
// Conventional operator forms, expressed in terms of the named ops.
125 operator=(void* __v) volatile
126 { store(__v); return __v; }
129 operator+=(ptrdiff_t __v) volatile
130 { return fetch_add(__v); }
133 operator-=(ptrdiff_t __v) volatile
134 { return fetch_sub(__v); }
// C-compatible free functions given access to the representation
// (presumably declared friend on the elided preceding lines).
137 atomic_store_explicit(volatile atomic_address*, void*, memory_order);
140 atomic_load_explicit(volatile atomic_address*, memory_order);
143 atomic_swap_explicit(volatile atomic_address*, void*, memory_order);
146 atomic_compare_swap_explicit(volatile atomic_address*, void**, void*,
147 memory_order, memory_order);
150 atomic_fence(const volatile atomic_address*, memory_order);
153 atomic_fetch_add_explicit(volatile atomic_address*, ptrdiff_t,
157 atomic_fetch_sub_explicit(volatile atomic_address*, ptrdiff_t,
// Value construction; copy construction/assignment disabled
// (declared, never defined).
162 explicit atomic_address(void* __v)
163 { _M_base._M_i = __v; }
166 atomic_address(const atomic_address&);
167 atomic_address& operator=(const atomic_address &);
171 // 29.4.1 atomic integral types
172 // For each of the integral types, define atomic_[integral type] struct
176 // atomic_schar signed char
177 // atomic_uchar unsigned char
178 // atomic_short short
179 // atomic_ushort unsigned short
181 // atomic_uint unsigned int
183 // atomic_ulong unsigned long
184 // atomic_llong long long
185 // atomic_ullong unsigned long long
186 // atomic_char16_t char16_t
187 // atomic_char32_t char32_t
188 // atomic_wchar_t wchar_t
// atomic_bool: atomic operations on a bool; wraps __atomic_bool_base.
// NOTE(review): sampled excerpt — struct header, return types and
// some lines are elided; code left untouched.
193 __atomic_bool_base _M_base;
196 is_lock_free() const volatile;
199 store(bool, memory_order = memory_order_seq_cst) volatile;
202 load(memory_order = memory_order_seq_cst) volatile;
205 swap(bool, memory_order = memory_order_seq_cst) volatile;
208 compare_swap(bool&, bool, memory_order, memory_order) volatile;
211 compare_swap(bool&, bool, memory_order = memory_order_seq_cst) volatile;
214 fence(memory_order) const volatile;
// Assignment stores and returns the stored value.
217 operator=(bool __v) volatile { store(__v); return __v; }
// C-compatible free functions with access to the representation.
220 atomic_store_explicit(volatile atomic_bool*, bool, memory_order);
223 atomic_load_explicit(volatile atomic_bool*, memory_order);
226 atomic_swap_explicit(volatile atomic_bool*, bool, memory_order);
229 atomic_compare_swap_explicit(volatile atomic_bool*, bool*, bool,
230 memory_order, memory_order);
232 atomic_fence(const volatile atomic_bool*, memory_order);
// Value construction; copying disabled (declared, never defined).
236 explicit atomic_bool(bool __v) { _M_base._M_i = __v; }
239 atomic_bool(const atomic_bool&);
240 atomic_bool& operator=(const atomic_bool&);
// atomic_char: atomic integral operations on char.
// NOTE(review): sampled excerpt — struct header, return types and
// some lines are elided; code left untouched.
246 __atomic_char_base _M_base;
249 is_lock_free() const volatile;
252 store(char, memory_order = memory_order_seq_cst) volatile;
255 load(memory_order = memory_order_seq_cst) volatile;
258 swap(char, memory_order = memory_order_seq_cst) volatile;
261 compare_swap(char&, char, memory_order, memory_order) volatile;
264 compare_swap(char&, char, memory_order = memory_order_seq_cst) volatile;
267 fence(memory_order) const volatile;
270 fetch_add(char, memory_order = memory_order_seq_cst) volatile;
273 fetch_sub(char, memory_order = memory_order_seq_cst) volatile;
276 fetch_and(char, memory_order = memory_order_seq_cst) volatile;
279 fetch_or(char, memory_order = memory_order_seq_cst) volatile;
282 fetch_xor(char, memory_order = memory_order_seq_cst) volatile;
// Operator forms: post-inc/dec return the old value, pre-inc/dec and
// the compound assignments recompute and return the updated value.
285 operator=(char __v) volatile { store(__v); return __v; }
288 operator++(int) volatile { return fetch_add(1); }
291 operator--(int) volatile { return fetch_sub(1); }
294 operator++() volatile { return fetch_add(1) + 1; }
297 operator--() volatile { return fetch_sub(1) - 1; }
300 operator+=(char __v) volatile { return fetch_add(__v) + __v; }
303 operator-=(char __v) volatile { return fetch_sub(__v) - __v; }
306 operator&=(char __v) volatile { return fetch_and(__v) & __v; }
309 operator|=(char __v) volatile { return fetch_or(__v) | __v; }
312 operator^=(char __v) volatile { return fetch_xor(__v) ^ __v; }
// C-compatible free functions with access to the representation.
315 atomic_store_explicit(volatile atomic_char*, char, memory_order);
318 atomic_load_explicit(volatile atomic_char*, memory_order);
321 atomic_swap_explicit(volatile atomic_char*, char, memory_order);
324 atomic_compare_swap_explicit(volatile atomic_char*, char*, char,
325 memory_order, memory_order);
328 atomic_fence(const volatile atomic_char*, memory_order);
331 atomic_fetch_add_explicit(volatile atomic_char*, char, memory_order);
334 atomic_fetch_sub_explicit(volatile atomic_char*, char, memory_order);
337 atomic_fetch_and_explicit(volatile atomic_char*, char, memory_order);
340 atomic_fetch_or_explicit( volatile atomic_char*, char, memory_order);
343 atomic_fetch_xor_explicit(volatile atomic_char*, char, memory_order);
// Value construction; copying disabled (declared, never defined).
347 atomic_char(char __v) { _M_base._M_i = __v; }
350 atomic_char(const atomic_char&);
351 atomic_char& operator=(const atomic_char&);
// atomic_schar: atomic integral operations on signed char.
// NOTE(review): sampled excerpt — struct header, return types and
// some lines are elided; code left untouched.
357 __atomic_schar_base _M_base;
360 is_lock_free() const volatile;
363 store(signed char, memory_order = memory_order_seq_cst) volatile;
366 load(memory_order = memory_order_seq_cst) volatile;
369 swap(signed char, memory_order = memory_order_seq_cst) volatile;
372 compare_swap(signed char&, signed char, memory_order,
373 memory_order) volatile;
376 compare_swap(signed char&, signed char,
377 memory_order = memory_order_seq_cst) volatile;
380 fence(memory_order) const volatile;
383 fetch_add(signed char, memory_order = memory_order_seq_cst) volatile;
386 fetch_sub(signed char, memory_order = memory_order_seq_cst) volatile;
389 fetch_and(signed char, memory_order = memory_order_seq_cst) volatile;
392 fetch_or(signed char, memory_order = memory_order_seq_cst) volatile;
395 fetch_xor(signed char, memory_order = memory_order_seq_cst) volatile;
// Operator forms: post-inc/dec return the old value, pre-inc/dec and
// the compound assignments recompute and return the updated value.
398 operator=(signed char __v) volatile { store(__v); return __v; }
401 operator++(int) volatile { return fetch_add(1); }
404 operator--(int) volatile { return fetch_sub(1); }
407 operator++() volatile { return fetch_add(1) + 1; }
410 operator--() volatile { return fetch_sub(1) - 1; }
413 operator+=(signed char __v) volatile { return fetch_add(__v) + __v; }
416 operator-=(signed char __v) volatile { return fetch_sub(__v) - __v; }
419 operator&=(signed char __v) volatile { return fetch_and(__v) & __v; }
422 operator|=(signed char __v) volatile { return fetch_or(__v) | __v; }
425 operator^=(signed char __v) volatile { return fetch_xor(__v) ^ __v; }
// C-compatible free functions with access to the representation.
428 atomic_store_explicit(volatile atomic_schar*, signed char, memory_order);
431 atomic_load_explicit(volatile atomic_schar*, memory_order);
434 atomic_swap_explicit(volatile atomic_schar*, signed char, memory_order);
437 atomic_compare_swap_explicit(volatile atomic_schar*, signed char*,
438 signed char, memory_order, memory_order);
441 atomic_fence(const volatile atomic_schar*, memory_order);
444 atomic_fetch_add_explicit(volatile atomic_schar*,
445 signed char, memory_order);
448 atomic_fetch_sub_explicit(volatile atomic_schar*, signed char,
452 atomic_fetch_and_explicit(volatile atomic_schar*, signed char,
456 atomic_fetch_or_explicit(volatile atomic_schar*, signed char,
460 atomic_fetch_xor_explicit(volatile atomic_schar*, signed char,
// Value construction; copying disabled (declared, never defined).
465 atomic_schar(signed char __v) { _M_base._M_i = __v; }
468 atomic_schar(const atomic_schar&);
469 atomic_schar& operator=(const atomic_schar&);
// atomic_uchar: atomic integral operations on unsigned char.
// NOTE(review): sampled excerpt — struct header, return types and
// some lines are elided; code left untouched.
475 __atomic_uchar_base _M_base;
478 is_lock_free() const volatile;
481 store(unsigned char, memory_order = memory_order_seq_cst) volatile;
484 load(memory_order = memory_order_seq_cst) volatile;
487 swap(unsigned char, memory_order = memory_order_seq_cst) volatile;
490 compare_swap(unsigned char&, unsigned char, memory_order,
491 memory_order) volatile;
494 compare_swap(unsigned char&, unsigned char,
495 memory_order = memory_order_seq_cst) volatile;
498 fence(memory_order) const volatile;
501 fetch_add(unsigned char, memory_order = memory_order_seq_cst) volatile;
504 fetch_sub(unsigned char, memory_order = memory_order_seq_cst) volatile;
507 fetch_and(unsigned char, memory_order = memory_order_seq_cst) volatile;
510 fetch_or(unsigned char, memory_order = memory_order_seq_cst) volatile;
513 fetch_xor(unsigned char, memory_order = memory_order_seq_cst) volatile;
// Operator forms: post-inc/dec return the old value, pre-inc/dec and
// the compound assignments recompute and return the updated value.
516 operator=(unsigned char __v) volatile { store(__v); return __v; }
519 operator++(int) volatile { return fetch_add(1); }
522 operator--(int) volatile { return fetch_sub(1); }
525 operator++() volatile { return fetch_add(1) + 1; }
528 operator--() volatile { return fetch_sub(1) - 1; }
531 operator+=(unsigned char __v) volatile { return fetch_add(__v) + __v; }
534 operator-=(unsigned char __v) volatile { return fetch_sub(__v) - __v; }
537 operator&=(unsigned char __v) volatile { return fetch_and(__v) & __v; }
540 operator|=(unsigned char __v) volatile { return fetch_or(__v) | __v; }
543 operator^=(unsigned char __v) volatile { return fetch_xor(__v) ^ __v; }
// C-compatible free functions with access to the representation.
546 atomic_store_explicit(volatile atomic_uchar*, unsigned char, memory_order);
549 atomic_load_explicit(volatile atomic_uchar*, memory_order);
552 atomic_swap_explicit(volatile atomic_uchar*, unsigned char, memory_order);
555 atomic_compare_swap_explicit(volatile atomic_uchar*, unsigned char*,
556 unsigned char, memory_order, memory_order);
559 atomic_fence(const volatile atomic_uchar*, memory_order);
562 atomic_fetch_add_explicit(volatile atomic_uchar*, unsigned char,
566 atomic_fetch_sub_explicit(volatile atomic_uchar*, unsigned char,
570 atomic_fetch_and_explicit(volatile atomic_uchar*,
571 unsigned char, memory_order);
574 atomic_fetch_or_explicit( volatile atomic_uchar*, unsigned char,
578 atomic_fetch_xor_explicit(volatile atomic_uchar*, unsigned char,
// Value construction; copying disabled (declared, never defined).
583 atomic_uchar(unsigned char __v) { _M_base._M_i = __v; }
586 atomic_uchar(const atomic_uchar&);
587 atomic_uchar& operator=(const atomic_uchar&);
// atomic_short: atomic integral operations on short.
// NOTE(review): sampled excerpt — struct header, return types and
// some lines are elided; code left untouched.
594 __atomic_short_base _M_base;
597 is_lock_free() const volatile;
600 store(short, memory_order = memory_order_seq_cst) volatile;
603 load(memory_order = memory_order_seq_cst) volatile;
606 swap(short, memory_order = memory_order_seq_cst) volatile;
609 compare_swap(short&, short, memory_order, memory_order) volatile;
612 compare_swap(short&, short, memory_order = memory_order_seq_cst) volatile;
615 fence(memory_order) const volatile;
618 fetch_add(short, memory_order = memory_order_seq_cst) volatile;
621 fetch_sub(short, memory_order = memory_order_seq_cst) volatile;
624 fetch_and(short, memory_order = memory_order_seq_cst) volatile;
627 fetch_or(short, memory_order = memory_order_seq_cst) volatile;
630 fetch_xor(short, memory_order = memory_order_seq_cst) volatile;
// Operator forms: post-inc/dec return the old value, pre-inc/dec and
// the compound assignments recompute and return the updated value.
633 operator=(short __v) volatile { store(__v); return __v; }
636 operator++(int) volatile { return fetch_add(1); }
639 operator--(int) volatile { return fetch_sub(1); }
642 operator++() volatile { return fetch_add(1) + 1; }
645 operator--() volatile { return fetch_sub(1) - 1; }
648 operator+=(short __v) volatile { return fetch_add(__v) + __v; }
651 operator-=(short __v) volatile { return fetch_sub(__v) - __v; }
654 operator&=(short __v) volatile { return fetch_and(__v) & __v; }
657 operator|=(short __v) volatile { return fetch_or(__v) | __v; }
660 operator^=(short __v) volatile { return fetch_xor(__v) ^ __v; }
// C-compatible free functions with access to the representation.
663 atomic_store_explicit(volatile atomic_short*, short, memory_order);
666 atomic_load_explicit(volatile atomic_short*, memory_order);
669 atomic_swap_explicit(volatile atomic_short*, short, memory_order);
672 atomic_compare_swap_explicit(volatile atomic_short*, short*, short,
673 memory_order, memory_order);
676 atomic_fence(const volatile atomic_short*, memory_order);
679 atomic_fetch_add_explicit(volatile atomic_short*, short, memory_order);
682 atomic_fetch_sub_explicit(volatile atomic_short*, short, memory_order);
685 atomic_fetch_and_explicit(volatile atomic_short*, short, memory_order);
688 atomic_fetch_or_explicit( volatile atomic_short*, short, memory_order);
691 atomic_fetch_xor_explicit(volatile atomic_short*, short, memory_order);
// Value construction; copying disabled (declared, never defined).
695 atomic_short(short __v) { _M_base._M_i = __v; }
698 atomic_short(const atomic_short&);
699 atomic_short& operator=(const atomic_short&);
// atomic_ushort: atomic integral operations on unsigned short.
// NOTE(review): sampled excerpt — struct header, return types and
// some lines are elided; code left untouched.
705 __atomic_ushort_base _M_base;
708 is_lock_free() const volatile;
711 store(unsigned short, memory_order = memory_order_seq_cst) volatile;
714 load(memory_order = memory_order_seq_cst) volatile;
717 swap(unsigned short, memory_order = memory_order_seq_cst) volatile;
720 compare_swap(unsigned short&, unsigned short, memory_order,
721 memory_order) volatile;
724 compare_swap(unsigned short&, unsigned short,
725 memory_order = memory_order_seq_cst) volatile;
728 fence(memory_order) const volatile;
731 fetch_add(unsigned short, memory_order = memory_order_seq_cst) volatile;
734 fetch_sub(unsigned short, memory_order = memory_order_seq_cst) volatile;
737 fetch_and(unsigned short, memory_order = memory_order_seq_cst) volatile;
740 fetch_or(unsigned short, memory_order = memory_order_seq_cst) volatile;
743 fetch_xor(unsigned short, memory_order = memory_order_seq_cst) volatile;
// Operator forms: post-inc/dec return the old value, pre-inc/dec and
// the compound assignments recompute and return the updated value.
746 operator=(unsigned short __v) volatile { store(__v); return __v; }
749 operator++(int) volatile { return fetch_add(1); }
752 operator--(int) volatile { return fetch_sub(1); }
755 operator++() volatile { return fetch_add(1) + 1; }
758 operator--() volatile { return fetch_sub(1) - 1; }
761 operator+=(unsigned short __v) volatile { return fetch_add(__v) + __v; }
764 operator-=(unsigned short __v) volatile { return fetch_sub(__v) - __v; }
767 operator&=(unsigned short __v) volatile { return fetch_and(__v) & __v; }
770 operator|=(unsigned short __v) volatile { return fetch_or(__v) | __v; }
773 operator^=(unsigned short __v) volatile { return fetch_xor(__v) ^ __v; }
// C-compatible free functions declared friend so they can reach the
// representation.
776 atomic_store_explicit(volatile atomic_ushort*, unsigned short,
779 friend unsigned short
780 atomic_load_explicit(volatile atomic_ushort*, memory_order);
782 friend unsigned short
783 atomic_swap_explicit(volatile atomic_ushort*, unsigned short, memory_order);
786 atomic_compare_swap_explicit(volatile atomic_ushort*, unsigned short*,
787 unsigned short, memory_order, memory_order);
790 atomic_fence(const volatile atomic_ushort*, memory_order);
792 friend unsigned short
793 atomic_fetch_add_explicit(volatile atomic_ushort*, unsigned short,
796 friend unsigned short
797 atomic_fetch_sub_explicit(volatile atomic_ushort*, unsigned short,
800 friend unsigned short
801 atomic_fetch_and_explicit(volatile atomic_ushort*, unsigned short,
804 friend unsigned short
805 atomic_fetch_or_explicit( volatile atomic_ushort*, unsigned short,
808 friend unsigned short
809 atomic_fetch_xor_explicit(volatile atomic_ushort*, unsigned short,
// Value construction; copying disabled (declared, never defined).
814 atomic_ushort(unsigned short __v) { _M_base._M_i = __v; }
817 atomic_ushort(const atomic_ushort&);
818 atomic_ushort& operator=(const atomic_ushort&);
// atomic_int: atomic integral operations on int.
// NOTE(review): sampled excerpt — struct header, return types and
// some lines are elided; code left untouched.
824 __atomic_int_base _M_base;
827 is_lock_free() const volatile;
830 store(int, memory_order = memory_order_seq_cst) volatile;
833 load(memory_order = memory_order_seq_cst) volatile;
836 swap(int, memory_order = memory_order_seq_cst) volatile;
839 compare_swap(int&, int, memory_order, memory_order) volatile;
842 compare_swap(int&, int, memory_order = memory_order_seq_cst) volatile;
845 fence(memory_order) const volatile;
848 fetch_add(int, memory_order = memory_order_seq_cst) volatile;
851 fetch_sub(int, memory_order = memory_order_seq_cst) volatile;
854 fetch_and(int, memory_order = memory_order_seq_cst) volatile;
857 fetch_or(int, memory_order = memory_order_seq_cst) volatile;
860 fetch_xor(int, memory_order = memory_order_seq_cst) volatile;
// Operator forms: post-inc/dec return the old value, pre-inc/dec and
// the compound assignments recompute and return the updated value.
863 operator=(int __v) volatile { store(__v); return __v; }
866 operator++(int) volatile { return fetch_add(1); }
869 operator--(int) volatile { return fetch_sub(1); }
872 operator++() volatile { return fetch_add(1) + 1; }
875 operator--() volatile { return fetch_sub(1) - 1; }
878 operator+=(int __v) volatile { return fetch_add(__v) + __v; }
881 operator-=(int __v) volatile { return fetch_sub(__v) - __v; }
884 operator&=(int __v) volatile { return fetch_and(__v) & __v; }
887 operator|=(int __v) volatile { return fetch_or(__v) | __v; }
890 operator^=(int __v) volatile { return fetch_xor(__v) ^ __v; }
// C-compatible free functions with access to the representation.
893 atomic_store_explicit(volatile atomic_int*, int, memory_order);
896 atomic_load_explicit(volatile atomic_int*, memory_order);
899 atomic_swap_explicit(volatile atomic_int*, int, memory_order);
902 atomic_compare_swap_explicit(volatile atomic_int*, int*, int,
903 memory_order, memory_order);
906 atomic_fence(const volatile atomic_int*, memory_order);
909 atomic_fetch_add_explicit(volatile atomic_int*, int, memory_order);
912 atomic_fetch_sub_explicit(volatile atomic_int*, int, memory_order);
915 atomic_fetch_and_explicit(volatile atomic_int*, int, memory_order);
918 atomic_fetch_or_explicit( volatile atomic_int*, int, memory_order);
921 atomic_fetch_xor_explicit(volatile atomic_int*, int, memory_order);
// Value construction; copying disabled (declared, never defined).
925 atomic_int(int __v) { _M_base._M_i = __v; }
928 atomic_int(const atomic_int&);
929 atomic_int& operator=(const atomic_int&);
// atomic_uint: atomic integral operations on unsigned int.
// NOTE(review): sampled excerpt — struct header, return types and
// some lines are elided; code left untouched.
935 __atomic_uint_base _M_base;
938 is_lock_free() const volatile;
941 store(unsigned int, memory_order = memory_order_seq_cst) volatile;
944 load(memory_order = memory_order_seq_cst) volatile;
947 swap(unsigned int, memory_order = memory_order_seq_cst) volatile;
950 compare_swap(unsigned int&, unsigned int, memory_order,
951 memory_order) volatile;
954 compare_swap(unsigned int&, unsigned int,
955 memory_order = memory_order_seq_cst) volatile;
958 fence(memory_order) const volatile;
961 fetch_add(unsigned int, memory_order = memory_order_seq_cst) volatile;
964 fetch_sub(unsigned int, memory_order = memory_order_seq_cst) volatile;
967 fetch_and(unsigned int, memory_order = memory_order_seq_cst) volatile;
970 fetch_or(unsigned int, memory_order = memory_order_seq_cst) volatile;
973 fetch_xor(unsigned int, memory_order = memory_order_seq_cst) volatile;
// Operator forms: post-inc/dec return the old value, pre-inc/dec and
// the compound assignments recompute and return the updated value.
976 operator=(unsigned int __v) volatile { store(__v); return __v; }
979 operator++(int) volatile { return fetch_add(1); }
982 operator--(int) volatile { return fetch_sub(1); }
985 operator++() volatile { return fetch_add(1) + 1; }
988 operator--() volatile { return fetch_sub(1) - 1; }
991 operator+=(unsigned int __v) volatile { return fetch_add(__v) + __v; }
994 operator-=(unsigned int __v) volatile { return fetch_sub(__v) - __v; }
997 operator&=(unsigned int __v) volatile { return fetch_and(__v) & __v; }
1000 operator|=(unsigned int __v) volatile { return fetch_or(__v) | __v; }
1003 operator^=(unsigned int __v) volatile { return fetch_xor(__v) ^ __v; }
// C-compatible free functions with access to the representation.
1006 atomic_store_explicit(volatile atomic_uint*, unsigned int, memory_order);
1009 atomic_load_explicit(volatile atomic_uint*, memory_order);
1012 atomic_swap_explicit(volatile atomic_uint*, unsigned int, memory_order);
1015 atomic_compare_swap_explicit(volatile atomic_uint*, unsigned int*,
1016 unsigned int, memory_order, memory_order);
1019 atomic_fence(const volatile atomic_uint*, memory_order);
1022 atomic_fetch_add_explicit(volatile atomic_uint*, unsigned int,
1026 atomic_fetch_sub_explicit(volatile atomic_uint*, unsigned int,
1030 atomic_fetch_and_explicit(volatile atomic_uint*, unsigned int,
1034 atomic_fetch_or_explicit( volatile atomic_uint*, unsigned int,
1038 atomic_fetch_xor_explicit(volatile atomic_uint*, unsigned int,
// Value construction; copying disabled (declared, never defined).
1043 atomic_uint(unsigned int __v) { _M_base._M_i = __v; }
1046 atomic_uint(const atomic_uint&);
1047 atomic_uint& operator=(const atomic_uint&);
// atomic_long: atomic integral operations on long.
// NOTE(review): sampled excerpt — struct header, return types and
// some lines are elided; code left untouched.
1053 __atomic_long_base _M_base;
1056 is_lock_free() const volatile;
1059 store(long, memory_order = memory_order_seq_cst) volatile;
1062 load(memory_order = memory_order_seq_cst) volatile;
1065 swap(long, memory_order = memory_order_seq_cst) volatile;
1068 compare_swap(long&, long, memory_order, memory_order) volatile;
1071 compare_swap(long&, long, memory_order = memory_order_seq_cst) volatile;
1074 fence(memory_order) const volatile;
1077 fetch_add(long, memory_order = memory_order_seq_cst) volatile;
1080 fetch_sub(long, memory_order = memory_order_seq_cst) volatile;
1083 fetch_and(long, memory_order = memory_order_seq_cst) volatile;
1086 fetch_or(long, memory_order = memory_order_seq_cst) volatile;
1089 fetch_xor(long, memory_order = memory_order_seq_cst) volatile;
// Operator forms: post-inc/dec return the old value, pre-inc/dec and
// the compound assignments recompute and return the updated value.
1092 operator=(long __v) volatile { store(__v); return __v; }
1095 operator++(int) volatile { return fetch_add(1); }
1098 operator--(int) volatile { return fetch_sub(1); }
1101 operator++() volatile { return fetch_add(1) + 1; }
1104 operator--() volatile { return fetch_sub(1) - 1; }
1107 operator+=(long __v) volatile { return fetch_add(__v) + __v; }
1110 operator-=(long __v) volatile { return fetch_sub(__v) - __v; }
1113 operator&=(long __v) volatile { return fetch_and(__v) & __v; }
1116 operator|=(long __v) volatile { return fetch_or(__v) | __v; }
1119 operator^=(long __v) volatile { return fetch_xor(__v) ^ __v; }
// C-compatible free functions with access to the representation.
1122 atomic_store_explicit(volatile atomic_long*, long, memory_order);
1125 atomic_load_explicit(volatile atomic_long*, memory_order);
1128 atomic_swap_explicit(volatile atomic_long*, long, memory_order);
1131 atomic_compare_swap_explicit(volatile atomic_long*, long*, long,
1132 memory_order, memory_order);
1135 atomic_fence(const volatile atomic_long*, memory_order);
1138 atomic_fetch_add_explicit(volatile atomic_long*, long, memory_order);
1141 atomic_fetch_sub_explicit(volatile atomic_long*, long, memory_order);
1144 atomic_fetch_and_explicit(volatile atomic_long*, long, memory_order);
1147 atomic_fetch_or_explicit( volatile atomic_long*, long, memory_order);
1150 atomic_fetch_xor_explicit(volatile atomic_long*, long, memory_order);
// Value construction; copying disabled (declared, never defined).
1154 atomic_long(long __v) { _M_base._M_i = __v; }
1157 atomic_long(const atomic_long&);
1158 atomic_long& operator=(const atomic_long&);
// atomic_ulong: atomic integral operations on unsigned long.
// NOTE(review): sampled excerpt — struct header, return types and
// some lines are elided; code left untouched.
1164 __atomic_ulong_base _M_base;
1167 is_lock_free() const volatile;
1170 store(unsigned long, memory_order = memory_order_seq_cst) volatile;
1173 load(memory_order = memory_order_seq_cst) volatile;
1176 swap(unsigned long, memory_order = memory_order_seq_cst) volatile;
1179 compare_swap(unsigned long&, unsigned long, memory_order,
1180 memory_order) volatile;
1183 compare_swap(unsigned long&, unsigned long,
1184 memory_order = memory_order_seq_cst) volatile;
1187 fence(memory_order) const volatile;
1190 fetch_add(unsigned long, memory_order = memory_order_seq_cst) volatile;
1193 fetch_sub(unsigned long, memory_order = memory_order_seq_cst) volatile;
1196 fetch_and(unsigned long, memory_order = memory_order_seq_cst) volatile;
1199 fetch_or(unsigned long, memory_order = memory_order_seq_cst) volatile;
1202 fetch_xor(unsigned long, memory_order = memory_order_seq_cst) volatile;
// Operator forms: post-inc/dec return the old value, pre-inc/dec and
// the compound assignments recompute and return the updated value.
1205 operator=(unsigned long __v) volatile { store(__v); return __v; }
1208 operator++(int) volatile { return fetch_add(1); }
1211 operator--(int) volatile { return fetch_sub(1); }
1214 operator++() volatile { return fetch_add(1) + 1; }
1217 operator--() volatile { return fetch_sub(1) - 1; }
1220 operator+=(unsigned long __v) volatile { return fetch_add(__v) + __v; }
1223 operator-=(unsigned long __v) volatile { return fetch_sub(__v) - __v; }
1226 operator&=(unsigned long __v) volatile { return fetch_and(__v) & __v; }
1229 operator|=(unsigned long __v) volatile { return fetch_or(__v) | __v; }
1232 operator^=(unsigned long __v) volatile { return fetch_xor(__v) ^ __v; }
// C-compatible free functions declared friend so they can reach the
// representation.
1235 atomic_store_explicit(volatile atomic_ulong*, unsigned long, memory_order);
1237 friend unsigned long
1238 atomic_load_explicit(volatile atomic_ulong*, memory_order);
1240 friend unsigned long
1241 atomic_swap_explicit(volatile atomic_ulong*, unsigned long, memory_order);
1244 atomic_compare_swap_explicit(volatile atomic_ulong*, unsigned long*,
1245 unsigned long, memory_order, memory_order);
1248 atomic_fence(const volatile atomic_ulong*, memory_order);
1250 friend unsigned long
1251 atomic_fetch_add_explicit(volatile atomic_ulong*, unsigned long,
1254 friend unsigned long
1255 atomic_fetch_sub_explicit(volatile atomic_ulong*, unsigned long,
1258 friend unsigned long
1259 atomic_fetch_and_explicit(volatile atomic_ulong*, unsigned long,
1261 friend unsigned long
1262 atomic_fetch_or_explicit(volatile atomic_ulong*, unsigned long,
1265 friend unsigned long
1266 atomic_fetch_xor_explicit(volatile atomic_ulong*, unsigned long,
// Value construction; copying disabled (declared, never defined).
1271 atomic_ulong(unsigned long __v) { _M_base._M_i = __v; }
1274 atomic_ulong(const atomic_ulong&);
1275 atomic_ulong& operator=(const atomic_ulong&);
// atomic_llong: atomic integral operations on long long.
// NOTE(review): sampled excerpt — struct header, return types and
// some lines are elided; code left untouched.
1281 __atomic_llong_base _M_base;
1284 is_lock_free() const volatile;
1287 store(long long, memory_order = memory_order_seq_cst) volatile;
1290 load(memory_order = memory_order_seq_cst) volatile;
1293 swap(long long, memory_order = memory_order_seq_cst) volatile;
1296 compare_swap(long long&, long long, memory_order, memory_order) volatile;
1299 compare_swap(long long&, long long,
1300 memory_order = memory_order_seq_cst) volatile;
1303 fence(memory_order) const volatile;
1306 fetch_add(long long, memory_order = memory_order_seq_cst) volatile;
1309 fetch_sub(long long, memory_order = memory_order_seq_cst) volatile;
1312 fetch_and(long long, memory_order = memory_order_seq_cst) volatile;
1315 fetch_or(long long, memory_order = memory_order_seq_cst) volatile;
1318 fetch_xor(long long, memory_order = memory_order_seq_cst) volatile;
// Operator forms: post-inc/dec return the old value, pre-inc/dec and
// the compound assignments recompute and return the updated value.
1321 operator=(long long __v) volatile { store(__v); return __v; }
1324 operator++(int) volatile { return fetch_add(1); }
1327 operator--(int) volatile { return fetch_sub(1); }
1330 operator++() volatile { return fetch_add(1) + 1; }
1333 operator--() volatile { return fetch_sub(1) - 1; }
1336 operator+=(long long __v) volatile { return fetch_add(__v) + __v; }
1339 operator-=(long long __v) volatile { return fetch_sub(__v) - __v; }
1342 operator&=(long long __v) volatile { return fetch_and(__v) & __v; }
1345 operator|=(long long __v) volatile { return fetch_or(__v) | __v; }
1348 operator^=(long long __v) volatile { return fetch_xor(__v) ^ __v; }
// C-compatible free functions with access to the representation.
1351 atomic_store_explicit(volatile atomic_llong*, long long, memory_order);
1354 atomic_load_explicit(volatile atomic_llong*, memory_order);
1357 atomic_swap_explicit(volatile atomic_llong*, long long, memory_order);
1360 atomic_compare_swap_explicit(volatile atomic_llong*, long long*,
1361 long long, memory_order, memory_order);
1364 atomic_fence(const volatile atomic_llong*, memory_order);
1367 atomic_fetch_add_explicit(volatile atomic_llong*, long long, memory_order);
1370 atomic_fetch_sub_explicit(volatile atomic_llong*, long long, memory_order);
1373 atomic_fetch_and_explicit(volatile atomic_llong*, long long, memory_order);
1376 atomic_fetch_or_explicit(volatile atomic_llong*, long long, memory_order);
1379 atomic_fetch_xor_explicit(volatile atomic_llong*, long long, memory_order);
// Value construction; copying disabled (declared, never defined).
1383 atomic_llong(long long __v) { _M_base._M_i = __v; }
1386 atomic_llong(const atomic_llong&);
1387 atomic_llong& operator=(const atomic_llong&);
// atomic_ullong: atomic integral operations on unsigned long long.
// NOTE(review): sampled excerpt — return types, access specifiers and
// the closing of the struct are elided; code left untouched.
1391 struct atomic_ullong
1393 __atomic_ullong_base _M_base;
1396 is_lock_free() const volatile;
1399 store(unsigned long long, memory_order = memory_order_seq_cst) volatile;
1402 load(memory_order = memory_order_seq_cst) volatile;
1405 swap(unsigned long long, memory_order = memory_order_seq_cst) volatile;
1408 compare_swap(unsigned long long&, unsigned long long, memory_order,
1409 memory_order) volatile;
1412 compare_swap(unsigned long long&, unsigned long long,
1413 memory_order = memory_order_seq_cst) volatile;
1416 fence(memory_order) const volatile;
1419 fetch_add(unsigned long long, memory_order = memory_order_seq_cst) volatile;
1422 fetch_sub(unsigned long long, memory_order = memory_order_seq_cst) volatile;
1425 fetch_and(unsigned long long, memory_order = memory_order_seq_cst) volatile;
1428 fetch_or(unsigned long long, memory_order = memory_order_seq_cst) volatile;
1431 fetch_xor(unsigned long long, memory_order = memory_order_seq_cst) volatile;
// Operator forms: post-inc/dec return the old value, pre-inc/dec and
// the compound assignments recompute and return the updated value.
1434 operator=(unsigned long long __v) volatile
1435 { store(__v); return __v; }
1438 operator++(int) volatile
1439 { return fetch_add(1); }
1442 operator--(int) volatile
1443 { return fetch_sub(1); }
1446 operator++() volatile
1447 { return fetch_add(1) + 1; }
1450 operator--() volatile
1451 { return fetch_sub(1) - 1; }
1454 operator+=(unsigned long long __v) volatile
1455 { return fetch_add(__v) + __v; }
1458 operator-=(unsigned long long __v) volatile
1459 { return fetch_sub(__v) - __v; }
1462 operator&=(unsigned long long __v) volatile
1463 { return fetch_and(__v) & __v; }
1466 operator|=(unsigned long long __v) volatile
1467 { return fetch_or(__v) | __v; }
1470 operator^=(unsigned long long __v) volatile
1471 { return fetch_xor(__v) ^ __v; }
// C-compatible free functions declared friend so they can reach the
// representation.
1474 atomic_store_explicit(volatile atomic_ullong*, unsigned long long,
1476 friend unsigned long long
1477 atomic_load_explicit(volatile atomic_ullong*, memory_order);
1479 friend unsigned long long
1480 atomic_swap_explicit(volatile atomic_ullong*, unsigned long long,
1484 atomic_compare_swap_explicit(volatile atomic_ullong*, unsigned long long*,
1485 unsigned long long, memory_order,
1489 atomic_fence(const volatile atomic_ullong*, memory_order);
1491 friend unsigned long long
1492 atomic_fetch_add_explicit(volatile atomic_ullong*, unsigned long long,
1495 friend unsigned long long
1496 atomic_fetch_sub_explicit(volatile atomic_ullong*, unsigned long long,
1499 friend unsigned long long
1500 atomic_fetch_and_explicit(volatile atomic_ullong*, unsigned long long,
1503 friend unsigned long long
1504 atomic_fetch_or_explicit(volatile atomic_ullong*, unsigned long long,
1507 friend unsigned long long
1508 atomic_fetch_xor_explicit(volatile atomic_ullong*, unsigned long long,
// Value construction; copying disabled (declared, never defined).
1513 atomic_ullong(unsigned long long __v) { _M_base._M_i = __v; }
1516 atomic_ullong(const atomic_ullong&);
1517 atomic_ullong& operator=(const atomic_ullong&);
1521 struct atomic_wchar_t
1523 __atomic_wchar_t_base _M_base;
1526 is_lock_free() const volatile;
1529 store(wchar_t, memory_order = memory_order_seq_cst) volatile;
1532 load(memory_order = memory_order_seq_cst) volatile;
1535 swap(wchar_t, memory_order = memory_order_seq_cst) volatile;
1538 compare_swap(wchar_t&, wchar_t, memory_order, memory_order) volatile;
1541 compare_swap(wchar_t&, wchar_t,
1542 memory_order = memory_order_seq_cst) volatile;
1545 fence(memory_order) const volatile;
1548 fetch_add(wchar_t, memory_order = memory_order_seq_cst) volatile;
1551 fetch_sub(wchar_t, memory_order = memory_order_seq_cst) volatile;
1554 fetch_and(wchar_t, memory_order = memory_order_seq_cst) volatile;
1557 fetch_or(wchar_t, memory_order = memory_order_seq_cst) volatile;
1560 fetch_xor(wchar_t, memory_order = memory_order_seq_cst) volatile;
1563 operator=(wchar_t __v) volatile
1564 { store(__v); return __v; }
1567 operator++(int) volatile
1568 { return fetch_add(1); }
1571 operator--(int) volatile
1572 { return fetch_sub(1); }
1575 operator++() volatile
1576 { return fetch_add(1) + 1; }
1579 operator--() volatile
1580 { return fetch_sub(1) - 1; }
1583 operator+=(wchar_t __v) volatile
1584 { return fetch_add(__v) + __v; }
1587 operator-=(wchar_t __v) volatile
1588 { return fetch_sub(__v) - __v; }
1591 operator&=(wchar_t __v) volatile
1592 { return fetch_and(__v) & __v; }
1595 operator|=(wchar_t __v) volatile
1596 { return fetch_or(__v) | __v; }
1599 operator^=(wchar_t __v) volatile
1600 { return fetch_xor(__v) ^ __v; }
1603 atomic_store_explicit(volatile atomic_wchar_t*, wchar_t, memory_order);
1606 atomic_load_explicit(volatile atomic_wchar_t*, memory_order);
1609 atomic_swap_explicit(volatile atomic_wchar_t*, wchar_t, memory_order);
1612 atomic_compare_swap_explicit(volatile atomic_wchar_t*,
1613 wchar_t*, wchar_t, memory_order, memory_order);
1616 atomic_fence(const volatile atomic_wchar_t*, memory_order);
1619 atomic_fetch_add_explicit(volatile atomic_wchar_t*, wchar_t, memory_order);
1622 atomic_fetch_sub_explicit(volatile atomic_wchar_t*, wchar_t, memory_order);
1625 atomic_fetch_and_explicit(volatile atomic_wchar_t*, wchar_t, memory_order);
1628 atomic_fetch_or_explicit(volatile atomic_wchar_t*, wchar_t, memory_order);
1631 atomic_fetch_xor_explicit(volatile atomic_wchar_t*, wchar_t, memory_order);
1633 atomic_wchar_t() { }
1635 atomic_wchar_t(wchar_t __v) { _M_base._M_i = __v; }
1638 atomic_wchar_t(const atomic_wchar_t&);
1639 atomic_wchar_t& operator=(const atomic_wchar_t&);
1644 /// 29.4.3, Generic atomic type, primary class template.
1645 template<typename _Tp>
1649 is_lock_free() const volatile;
1652 store(_Tp, memory_order = memory_order_seq_cst) volatile;
1655 load(memory_order = memory_order_seq_cst) volatile;
1658 swap(_Tp __v, memory_order = memory_order_seq_cst) volatile;
1661 compare_swap(_Tp&, _Tp, memory_order, memory_order) volatile;
1664 compare_swap(_Tp&, _Tp, memory_order = memory_order_seq_cst) volatile;
1667 fence(memory_order) const volatile;
1670 operator=(_Tp __v) volatile { store(__v); return __v; }
1674 explicit atomic(_Tp __v) : __f(__v) { }
1677 atomic(const atomic&);
1678 atomic& operator=(const atomic&);
1683 /// Partial specialization for pointer types.
1684 template<typename _Tp>
1685 struct atomic<_Tp*> : atomic_address
1688 load(memory_order = memory_order_seq_cst) volatile;
1691 swap(_Tp*, memory_order = memory_order_seq_cst) volatile;
1694 compare_swap(_Tp*&, _Tp*, memory_order, memory_order) volatile;
1697 compare_swap(_Tp*&, _Tp*, memory_order = memory_order_seq_cst) volatile;
1700 fetch_add(ptrdiff_t, memory_order = memory_order_seq_cst) volatile;
1703 fetch_sub(ptrdiff_t, memory_order = memory_order_seq_cst) volatile;
1706 operator=(_Tp* __v) volatile { store(__v); return __v; }
1709 operator++(int) volatile { return fetch_add(1); }
1712 operator--(int) volatile { return fetch_sub(1); }
1715 operator++() volatile { return fetch_add(1) + 1; }
1718 operator--() volatile { return fetch_sub(1) - 1; }
1721 operator+=(ptrdiff_t __v) volatile
1722 { return fetch_add(__v) + __v; }
1725 operator-=(ptrdiff_t __v) volatile
1726 { return fetch_sub(__v) - __v; }
1730 explicit atomic(_Tp* __v) : atomic_address(__v) { }
1733 atomic(const atomic&);
1734 atomic& operator=(const atomic&);
1737 /// Explicit specialization for bool.
1739 struct atomic<bool> : atomic_bool
1743 explicit atomic(bool __v) : atomic_bool(__v) { }
1746 operator=(bool __v) volatile { store(__v); return __v; }
1749 atomic(const atomic&);
1750 atomic& operator=(const atomic&);
1753 /// Explicit specialization for void*
1755 struct atomic<void*> : atomic_address
1759 explicit atomic(void* __v) : atomic_address(__v) { }
1762 operator=(void* __v) volatile { store(__v); return __v; }
1765 atomic(const atomic&);
1766 atomic& operator=(const atomic&);
1769 /// Explicit specialization for char.
1771 struct atomic<char> : atomic_char
1775 explicit atomic(char __v) : atomic_char(__v) { }
1778 operator=(char __v) volatile { store(__v); return __v; }
1781 atomic(const atomic&);
1782 atomic& operator=(const atomic&);
1786 /// Explicit specialization for signed char.
1788 struct atomic<signed char> : atomic_schar
1792 explicit atomic(signed char __v) : atomic_schar(__v) { }
1795 operator=(signed char __v) volatile { store(__v); return __v; }
1798 atomic(const atomic&);
1799 atomic& operator=(const atomic&);
1802 /// Explicit specialization for unsigned char.
1804 struct atomic<unsigned char> : atomic_uchar
1808 explicit atomic(unsigned char __v) : atomic_uchar(__v) { }
1811 operator=(unsigned char __v) volatile { store(__v); return __v; }
1814 atomic(const atomic&);
1816 operator=(const atomic&);
1819 /// Explicit specialization for short.
1821 struct atomic<short> : atomic_short
1825 explicit atomic(short __v) : atomic_short(__v) { }
1828 operator=(short __v) volatile { store(__v); return __v; }
1831 atomic(const atomic&);
1832 atomic& operator=(const atomic&);
1835 /// Explicit specialization for unsigned short.
1837 struct atomic<unsigned short> : atomic_ushort
1841 explicit atomic(unsigned short __v) : atomic_ushort(__v) { }
1844 operator=(unsigned short __v) volatile { store(__v); return __v; }
1847 atomic(const atomic&);
1848 atomic& operator=(const atomic&);
1851 /// Explicit specialization for int.
1853 struct atomic<int> : atomic_int
1857 explicit atomic(int __v) : atomic_int(__v) { }
1860 operator=(int __v) volatile { store(__v); return __v; }
1863 atomic(const atomic&);
1864 atomic& operator=(const atomic&);
1867 /// Explicit specialization for unsigned int.
1869 struct atomic<unsigned int> : atomic_uint
1873 explicit atomic(unsigned int __v) : atomic_uint(__v) { }
1876 operator=(unsigned int __v) volatile { store(__v); return __v; }
1879 atomic(const atomic&);
1880 atomic& operator=(const atomic&);
1883 /// Explicit specialization for long.
1885 struct atomic<long> : atomic_long
1889 explicit atomic(long __v) : atomic_long(__v) { }
1892 operator=(long __v) volatile { store(__v); return __v; }
1895 atomic(const atomic&);
1896 atomic& operator=(const atomic&);
1899 /// Explicit specialization for unsigned long.
1901 struct atomic<unsigned long> : atomic_ulong
1905 explicit atomic(unsigned long __v) : atomic_ulong(__v) { }
1908 operator=(unsigned long __v) volatile
1909 { store(__v); return __v; }
1912 atomic(const atomic&);
1913 atomic& operator=(const atomic&);
1916 /// Explicit specialization for long long.
1918 struct atomic<long long> : atomic_llong
1922 explicit atomic(long long __v) : atomic_llong(__v) { }
1925 operator=(long long __v) volatile { store(__v); return __v; }
1928 atomic(const atomic&);
1929 atomic& operator=(const atomic&);
1932 /// Explicit specialization for unsigned long long.
1934 struct atomic<unsigned long long> : atomic_ullong
1938 explicit atomic(unsigned long long __v) : atomic_ullong(__v) { }
1941 operator=(unsigned long long __v) volatile { store(__v); return __v; }
1944 atomic(const atomic&);
1945 atomic& operator=(const atomic&);
1948 /// Explicit specialization for wchar_t.
1950 struct atomic<wchar_t> : atomic_wchar_t
1954 explicit atomic(wchar_t __v) : atomic_wchar_t(__v) { }
1957 operator=(wchar_t __v) volatile { store(__v); return __v; }
1960 atomic(const atomic&);
1961 atomic& operator=(const atomic&);
1965 atomic_is_lock_free(const volatile atomic_bool* __a)
1969 atomic_load_explicit(volatile atomic_bool* __a, memory_order __x)
1970 { return _ATOMIC_LOAD_(__a, __x); }
1973 atomic_load(volatile atomic_bool* __a)
1974 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1977 atomic_store_explicit(volatile atomic_bool* __a, bool __m, memory_order __x)
1978 { _ATOMIC_STORE_(__a, __m, __x); }
1981 atomic_store(volatile atomic_bool* __a, bool __m)
1982 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
1985 atomic_swap_explicit(volatile atomic_bool* __a, bool __m, memory_order __x)
1986 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
1989 atomic_swap(volatile atomic_bool* __a, bool __m)
1990 { return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
1993 atomic_compare_swap_explicit(volatile atomic_bool* __a, bool* __e, bool __m,
1994 memory_order __x, memory_order __y)
1995 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
1998 atomic_compare_swap(volatile atomic_bool* __a, bool* __e, bool __m)
1999 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2000 memory_order_seq_cst); }
2003 atomic_fence(const volatile atomic_bool* __a, memory_order __x)
2004 { _ATOMIC_FENCE_(__a, __x); }
2007 atomic_is_lock_free(const volatile atomic_address* __a)
2011 atomic_load_explicit(volatile atomic_address* __a, memory_order __x)
2012 { return _ATOMIC_LOAD_(__a, __x); }
2015 atomic_load(volatile atomic_address* __a)
2016 { return atomic_load_explicit(__a, memory_order_seq_cst); }
2019 atomic_store_explicit(volatile atomic_address* __a, void* __m,
2021 { _ATOMIC_STORE_(__a, __m, __x); }
2024 atomic_store(volatile atomic_address* __a, void* __m)
2025 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2028 atomic_swap_explicit(volatile atomic_address* __a, void* __m,
2030 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
2033 atomic_swap(volatile atomic_address* __a, void* __m)
2034 { return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2037 atomic_compare_swap_explicit(volatile atomic_address* __a, void** __e,
2038 void* __m, memory_order __x, memory_order __y)
2039 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2042 atomic_compare_swap(volatile atomic_address* __a, void** __e, void* __m)
2043 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2044 memory_order_seq_cst); }
2047 atomic_fence(const volatile atomic_address* __a, memory_order __x)
2048 { _ATOMIC_FENCE_(__a, __x); }
2052 atomic_is_lock_free(const volatile atomic_char* __a)
2056 atomic_load_explicit(volatile atomic_char* __a, memory_order __x)
2057 { return _ATOMIC_LOAD_(__a, __x); }
2060 atomic_load(volatile atomic_char* __a)
2061 { return atomic_load_explicit(__a, memory_order_seq_cst); }
2064 atomic_store_explicit(volatile atomic_char* __a, char __m, memory_order __x)
2065 { _ATOMIC_STORE_(__a, __m, __x); }
2068 atomic_store(volatile atomic_char* __a, char __m)
2069 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2072 atomic_swap_explicit(volatile atomic_char* __a, char __m, memory_order __x)
2073 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
2076 atomic_swap(volatile atomic_char* __a, char __m)
2077 { return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2080 atomic_compare_swap_explicit(volatile atomic_char* __a, char* __e, char __m,
2081 memory_order __x, memory_order __y)
2082 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2085 atomic_compare_swap(volatile atomic_char* __a, char* __e, char __m)
2086 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2087 memory_order_seq_cst); }
2090 atomic_fence(const volatile atomic_char* __a, memory_order __x)
2091 { _ATOMIC_FENCE_(__a, __x); }
2095 atomic_is_lock_free(const volatile atomic_schar* __a)
2099 atomic_load_explicit(volatile atomic_schar* __a, memory_order __x)
2100 { return _ATOMIC_LOAD_(__a, __x); }
2103 atomic_load(volatile atomic_schar* __a)
2104 { return atomic_load_explicit(__a, memory_order_seq_cst); }
2107 atomic_store_explicit(volatile atomic_schar* __a, signed char __m,
2109 { _ATOMIC_STORE_(__a, __m, __x); }
2112 atomic_store(volatile atomic_schar* __a, signed char __m)
2113 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2116 atomic_swap_explicit(volatile atomic_schar* __a, signed char __m,
2118 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
2121 atomic_swap(volatile atomic_schar* __a, signed char __m)
2122 { return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2125 atomic_compare_swap_explicit(volatile atomic_schar* __a, signed char* __e,
2126 signed char __m, memory_order __x,
2128 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2131 atomic_compare_swap(volatile atomic_schar* __a, signed char* __e,
2133 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2134 memory_order_seq_cst); }
2137 atomic_fence(const volatile atomic_schar* __a, memory_order __x)
2138 { _ATOMIC_FENCE_(__a, __x); }
2142 atomic_is_lock_free(const volatile atomic_uchar* __a)
// Atomically loads the value held in *__a, constrained by the caller's
// memory_order __x.  Delegates to the _ATOMIC_LOAD_ implementation macro.
inline unsigned char
atomic_load_explicit(volatile atomic_uchar* __a, memory_order __x)
{ return _ATOMIC_LOAD_(__a, __x); }
// Atomically loads the value held in *__a with the default
// sequentially-consistent ordering.
inline unsigned char
atomic_load(volatile atomic_uchar* __a)
{ return atomic_load_explicit(__a, memory_order_seq_cst); }
2154 atomic_store_explicit(volatile atomic_uchar* __a, unsigned char __m,
2156 { _ATOMIC_STORE_(__a, __m, __x); }
2159 atomic_store(volatile atomic_uchar* __a, unsigned char __m)
2160 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2162 inline unsigned char
2163 atomic_swap_explicit(volatile atomic_uchar* __a, unsigned char __m,
2165 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
// Atomically replaces the value in *__a with __m (sequentially-consistent
// ordering) and returns the value held previously.
inline unsigned char
atomic_swap(volatile atomic_uchar* __a, unsigned char __m)
{ return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2172 atomic_compare_swap_explicit(volatile atomic_uchar* __a, unsigned char* __e,
2173 unsigned char __m, memory_order __x,
2175 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2178 atomic_compare_swap(volatile atomic_uchar* __a, unsigned char* __e,
2180 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2181 memory_order_seq_cst); }
2184 atomic_fence(const volatile atomic_uchar* __a, memory_order __x)
2185 { _ATOMIC_FENCE_(__a, __x); }
2189 atomic_is_lock_free(const volatile atomic_short* __a)
2193 atomic_load_explicit(volatile atomic_short* __a, memory_order __x)
2194 { return _ATOMIC_LOAD_(__a, __x); }
2197 atomic_load(volatile atomic_short* __a)
2198 { return atomic_load_explicit(__a, memory_order_seq_cst); }
2201 atomic_store_explicit(volatile atomic_short* __a, short __m,
2203 { _ATOMIC_STORE_(__a, __m, __x); }
2206 atomic_store(volatile atomic_short* __a, short __m)
2207 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2210 atomic_swap_explicit(volatile atomic_short* __a, short __m, memory_order __x)
2211 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
2214 atomic_swap(volatile atomic_short* __a, short __m)
2215 { return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2218 atomic_compare_swap_explicit(volatile atomic_short* __a, short* __e,
2219 short __m, memory_order __x, memory_order __y)
2220 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2223 atomic_compare_swap(volatile atomic_short* __a, short* __e, short __m)
2224 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2225 memory_order_seq_cst); }
2228 atomic_fence(const volatile atomic_short* __a, memory_order __x)
2229 { _ATOMIC_FENCE_(__a, __x); }
2233 atomic_is_lock_free(const volatile atomic_ushort* __a)
// Atomically loads the value held in *__a, constrained by the caller's
// memory_order __x.  Delegates to the _ATOMIC_LOAD_ implementation macro.
inline unsigned short
atomic_load_explicit(volatile atomic_ushort* __a, memory_order __x)
{ return _ATOMIC_LOAD_(__a, __x); }
// Atomically loads the value held in *__a with the default
// sequentially-consistent ordering.
inline unsigned short
atomic_load(volatile atomic_ushort* __a)
{ return atomic_load_explicit(__a, memory_order_seq_cst); }
2245 atomic_store_explicit(volatile atomic_ushort* __a, unsigned short __m,
2247 { _ATOMIC_STORE_(__a, __m, __x); }
2250 atomic_store(volatile atomic_ushort* __a, unsigned short __m)
2251 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2253 inline unsigned short
2254 atomic_swap_explicit(volatile atomic_ushort* __a, unsigned short __m,
2256 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
// Atomically replaces the value in *__a with __m (sequentially-consistent
// ordering) and returns the value held previously.
inline unsigned short
atomic_swap(volatile atomic_ushort* __a, unsigned short __m)
{ return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2263 atomic_compare_swap_explicit(volatile atomic_ushort* __a,
2264 unsigned short* __e, unsigned short __m,
2265 memory_order __x, memory_order __y)
2266 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2269 atomic_compare_swap(volatile atomic_ushort* __a, unsigned short* __e,
2271 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2272 memory_order_seq_cst); }
2275 atomic_fence(const volatile atomic_ushort* __a, memory_order __x)
2276 { _ATOMIC_FENCE_(__a, __x); }
2280 atomic_is_lock_free(const volatile atomic_int* __a)
2284 atomic_load_explicit(volatile atomic_int* __a, memory_order __x)
2285 { return _ATOMIC_LOAD_(__a, __x); }
2288 atomic_load(volatile atomic_int* __a)
2289 { return atomic_load_explicit(__a, memory_order_seq_cst); }
2292 atomic_store_explicit(volatile atomic_int* __a, int __m, memory_order __x)
2293 { _ATOMIC_STORE_(__a, __m, __x); }
2296 atomic_store(volatile atomic_int* __a, int __m)
2297 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2300 atomic_swap_explicit(volatile atomic_int* __a, int __m, memory_order __x)
2301 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
2304 atomic_swap(volatile atomic_int* __a, int __m)
2305 { return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2308 atomic_compare_swap_explicit(volatile atomic_int* __a, int* __e, int __m,
2309 memory_order __x, memory_order __y)
2310 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2313 atomic_compare_swap(volatile atomic_int* __a, int* __e, int __m)
2314 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2315 memory_order_seq_cst); }
2318 atomic_fence(const volatile atomic_int* __a, memory_order __x)
2319 { _ATOMIC_FENCE_(__a, __x); }
2323 atomic_is_lock_free(const volatile atomic_uint* __a)
2327 atomic_load_explicit(volatile atomic_uint* __a, memory_order __x)
2328 { return _ATOMIC_LOAD_(__a, __x); }
2331 atomic_load(volatile atomic_uint* __a)
2332 { return atomic_load_explicit(__a, memory_order_seq_cst); }
2335 atomic_store_explicit(volatile atomic_uint* __a, unsigned int __m,
2337 { _ATOMIC_STORE_(__a, __m, __x); }
2340 atomic_store(volatile atomic_uint* __a, unsigned int __m)
2341 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2344 atomic_swap_explicit(volatile atomic_uint* __a, unsigned int __m,
2346 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
2349 atomic_swap(volatile atomic_uint* __a, unsigned int __m)
2350 { return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2353 atomic_compare_swap_explicit(volatile atomic_uint* __a, unsigned int* __e,
2354 unsigned int __m, memory_order __x,
2356 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2359 atomic_compare_swap(volatile atomic_uint* __a, unsigned int* __e,
2361 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2362 memory_order_seq_cst); }
2365 atomic_fence(const volatile atomic_uint* __a, memory_order __x)
2366 { _ATOMIC_FENCE_(__a, __x); }
2370 atomic_is_lock_free(const volatile atomic_long* __a)
2374 atomic_load_explicit(volatile atomic_long* __a, memory_order __x)
2375 { return _ATOMIC_LOAD_(__a, __x); }
2378 atomic_load(volatile atomic_long* __a)
2379 { return atomic_load_explicit(__a, memory_order_seq_cst); }
2382 atomic_store_explicit(volatile atomic_long* __a, long __m, memory_order __x)
2383 { _ATOMIC_STORE_(__a, __m, __x); }
2386 atomic_store(volatile atomic_long* __a, long __m)
2387 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2390 atomic_swap_explicit(volatile atomic_long* __a, long __m, memory_order __x)
2391 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
2394 atomic_swap(volatile atomic_long* __a, long __m)
2395 { return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2398 atomic_compare_swap_explicit(volatile atomic_long* __a, long* __e, long __m,
2399 memory_order __x, memory_order __y)
2400 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2403 atomic_compare_swap(volatile atomic_long* __a, long* __e, long __m)
2404 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2405 memory_order_seq_cst); }
2408 atomic_fence(const volatile atomic_long* __a, memory_order __x)
2409 { _ATOMIC_FENCE_(__a, __x); }
2413 atomic_is_lock_free(const volatile atomic_ulong* __a)
// Atomically loads the value held in *__a, constrained by the caller's
// memory_order __x.  Delegates to the _ATOMIC_LOAD_ implementation macro.
inline unsigned long
atomic_load_explicit(volatile atomic_ulong* __a, memory_order __x)
{ return _ATOMIC_LOAD_(__a, __x); }
// Atomically loads the value held in *__a with the default
// sequentially-consistent ordering.
inline unsigned long
atomic_load(volatile atomic_ulong* __a)
{ return atomic_load_explicit(__a, memory_order_seq_cst); }
2425 atomic_store_explicit(volatile atomic_ulong* __a, unsigned long __m,
2427 { _ATOMIC_STORE_(__a, __m, __x); }
2430 atomic_store(volatile atomic_ulong* __a, unsigned long __m)
2431 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2433 inline unsigned long
2434 atomic_swap_explicit(volatile atomic_ulong* __a, unsigned long __m,
2436 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
// Atomically replaces the value in *__a with __m (sequentially-consistent
// ordering) and returns the value held previously.
inline unsigned long
atomic_swap(volatile atomic_ulong* __a, unsigned long __m)
{ return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2443 atomic_compare_swap_explicit(volatile atomic_ulong* __a, unsigned long* __e,
2444 unsigned long __m, memory_order __x,
2446 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2449 atomic_compare_swap(volatile atomic_ulong* __a, unsigned long* __e,
2451 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2452 memory_order_seq_cst); }
2455 atomic_fence(const volatile atomic_ulong* __a, memory_order __x)
2456 { _ATOMIC_FENCE_(__a, __x); }
2460 atomic_is_lock_free(const volatile atomic_llong* __a)
2464 atomic_load_explicit(volatile atomic_llong* __a, memory_order __x)
2465 { return _ATOMIC_LOAD_(__a, __x); }
2468 atomic_load(volatile atomic_llong* __a)
2469 { return atomic_load_explicit(__a, memory_order_seq_cst); }
2472 atomic_store_explicit(volatile atomic_llong* __a, long long __m,
2474 { _ATOMIC_STORE_(__a, __m, __x); }
2477 atomic_store(volatile atomic_llong* __a, long long __m)
2478 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2481 atomic_swap_explicit(volatile atomic_llong* __a, long long __m,
2483 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
2486 atomic_swap(volatile atomic_llong* __a, long long __m)
2487 { return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2490 atomic_compare_swap_explicit(volatile atomic_llong* __a, long long* __e,
2491 long long __m, memory_order __x,
2493 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2496 atomic_compare_swap(volatile atomic_llong* __a, long long* __e,
2498 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2499 memory_order_seq_cst); }
2502 atomic_fence(const volatile atomic_llong* __a, memory_order __x)
2503 { _ATOMIC_FENCE_(__a, __x); }
2507 atomic_is_lock_free(const volatile atomic_ullong* __a)
// Atomically loads the value held in *__a, constrained by the caller's
// memory_order __x.  Delegates to the _ATOMIC_LOAD_ implementation macro.
inline unsigned long long
atomic_load_explicit(volatile atomic_ullong* __a, memory_order __x)
{ return _ATOMIC_LOAD_(__a, __x); }
// Atomically loads the value held in *__a with the default
// sequentially-consistent ordering.
inline unsigned long long
atomic_load(volatile atomic_ullong* __a)
{ return atomic_load_explicit(__a, memory_order_seq_cst); }
2519 atomic_store_explicit(volatile atomic_ullong* __a, unsigned long long __m,
2521 { _ATOMIC_STORE_(__a, __m, __x); }
2524 atomic_store(volatile atomic_ullong* __a, unsigned long long __m)
2525 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2527 inline unsigned long long
2528 atomic_swap_explicit(volatile atomic_ullong* __a, unsigned long long __m,
2530 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
// Atomically replaces the value in *__a with __m (sequentially-consistent
// ordering) and returns the value held previously.
inline unsigned long long
atomic_swap(volatile atomic_ullong* __a, unsigned long long __m)
{ return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2537 atomic_compare_swap_explicit(volatile atomic_ullong* __a,
2538 unsigned long long* __e, unsigned long long __m,
2539 memory_order __x, memory_order __y)
2540 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2543 atomic_compare_swap(volatile atomic_ullong* __a, unsigned long long* __e,
2544 unsigned long long __m)
2545 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2546 memory_order_seq_cst); }
2549 atomic_fence(const volatile atomic_ullong* __a, memory_order __x)
2550 { _ATOMIC_FENCE_(__a, __x); }
2553 atomic_is_lock_free(const volatile atomic_wchar_t* __a)
2557 atomic_load_explicit(volatile atomic_wchar_t* __a, memory_order __x)
2558 { return _ATOMIC_LOAD_(__a, __x); }
2561 atomic_load(volatile atomic_wchar_t* __a)
2562 { return atomic_load_explicit(__a, memory_order_seq_cst); }
2566 atomic_store_explicit(volatile atomic_wchar_t* __a, wchar_t __m,
2568 { _ATOMIC_STORE_(__a, __m, __x); }
2571 atomic_store(volatile atomic_wchar_t* __a, wchar_t __m)
2572 { atomic_store_explicit(__a, __m, memory_order_seq_cst); }
2575 atomic_swap_explicit(volatile atomic_wchar_t* __a, wchar_t __m,
2577 { return _ATOMIC_MODIFY_(__a, =, __m, __x); }
2580 atomic_swap(volatile atomic_wchar_t* __a, wchar_t __m)
2581 { return atomic_swap_explicit(__a, __m, memory_order_seq_cst); }
2584 atomic_compare_swap_explicit(volatile atomic_wchar_t* __a, wchar_t* __e,
2585 wchar_t __m, memory_order __x, memory_order __y)
2586 { return _ATOMIC_CMPSWP_(__a, __e, __m, __x); }
2589 atomic_compare_swap(volatile atomic_wchar_t* __a, wchar_t* __e, wchar_t __m)
2590 { return atomic_compare_swap_explicit(__a, __e, __m, memory_order_seq_cst,
2591 memory_order_seq_cst); }
2594 atomic_fence(const volatile atomic_wchar_t* __a, memory_order __x)
2595 { _ATOMIC_FENCE_(__a, __x); }
2598 atomic_fetch_add_explicit(volatile atomic_address* __a, ptrdiff_t __m,
2601 void* volatile* __p = &((__a)->_M_base._M_i);
2602 volatile atomic_flag* __g = __atomic_flag_for_address(__p);
2603 __atomic_flag_wait_explicit(__g, __x);
2605 *__p = (void*)((char*)(*__p) + __m);
2606 atomic_flag_clear_explicit(__g, __x);
2611 atomic_fetch_add(volatile atomic_address* __a, ptrdiff_t __m)
2612 { return atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
2616 atomic_fetch_sub_explicit(volatile atomic_address* __a, ptrdiff_t __m,
2619 void* volatile* __p = &((__a)->_M_base._M_i);
2620 volatile atomic_flag* __g = __atomic_flag_for_address(__p);
2621 __atomic_flag_wait_explicit(__g, __x);
2623 *__p = (void*)((char*)(*__p) - __m);
2624 atomic_flag_clear_explicit(__g, __x);
2629 atomic_fetch_sub(volatile atomic_address* __a, ptrdiff_t __m)
2630 { return atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
2634 atomic_fetch_add_explicit(volatile atomic_char* __a, char __m,
2636 { return _ATOMIC_MODIFY_(__a, +=, __m, __x); }
2639 atomic_fetch_add(volatile atomic_char* __a, char __m)
2640 { atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
2643 atomic_fetch_sub_explicit(volatile atomic_char* __a, char __m,
2645 { return _ATOMIC_MODIFY_(__a, -=, __m, __x); }
2648 atomic_fetch_sub(volatile atomic_char* __a, char __m)
2649 { atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
2652 atomic_fetch_and_explicit(volatile atomic_char* __a, char __m,
2654 { return _ATOMIC_MODIFY_(__a, &=, __m, __x); }
2657 atomic_fetch_and(volatile atomic_char* __a, char __m)
2658 { atomic_fetch_and_explicit(__a, __m, memory_order_seq_cst); }
2661 atomic_fetch_or_explicit(volatile atomic_char* __a, char __m,
2663 { return _ATOMIC_MODIFY_(__a, |=, __m, __x); }
2666 atomic_fetch_or(volatile atomic_char* __a, char __m)
2667 { atomic_fetch_or_explicit(__a, __m, memory_order_seq_cst); }
2670 atomic_fetch_xor_explicit(volatile atomic_char* __a, char __m,
2672 { return _ATOMIC_MODIFY_(__a, ^=, __m, __x); }
2675 atomic_fetch_xor(volatile atomic_char* __a, char __m)
2676 { atomic_fetch_xor_explicit(__a, __m, memory_order_seq_cst); }
2680 atomic_fetch_add_explicit(volatile atomic_schar* __a, signed char __m,
2682 { return _ATOMIC_MODIFY_(__a, +=, __m, __x); }
2685 atomic_fetch_add(volatile atomic_schar* __a, signed char __m)
2686 { atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
2689 atomic_fetch_sub_explicit(volatile atomic_schar* __a, signed char __m,
2691 { return _ATOMIC_MODIFY_(__a, -=, __m, __x); }
2694 atomic_fetch_sub(volatile atomic_schar* __a, signed char __m)
2695 { atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
2698 atomic_fetch_and_explicit(volatile atomic_schar* __a, signed char __m,
2700 { return _ATOMIC_MODIFY_(__a, &=, __m, __x); }
2703 atomic_fetch_and(volatile atomic_schar* __a, signed char __m)
2704 { atomic_fetch_and_explicit(__a, __m, memory_order_seq_cst); }
2707 atomic_fetch_or_explicit(volatile atomic_schar* __a, signed char __m,
2709 { return _ATOMIC_MODIFY_(__a, |=, __m, __x); }
2712 atomic_fetch_or(volatile atomic_schar* __a, signed char __m)
2713 { atomic_fetch_or_explicit(__a, __m, memory_order_seq_cst); }
2717 atomic_fetch_xor_explicit(volatile atomic_schar* __a, signed char __m,
2719 { return _ATOMIC_MODIFY_(__a, ^=, __m, __x); }
2722 atomic_fetch_xor(volatile atomic_schar* __a, signed char __m)
2723 { atomic_fetch_xor_explicit(__a, __m, memory_order_seq_cst); }
2726 inline unsigned char
2727 atomic_fetch_add_explicit(volatile atomic_uchar* __a, unsigned char __m,
2729 { return _ATOMIC_MODIFY_(__a, +=, __m, __x); }
2731 inline unsigned char
2732 atomic_fetch_add(volatile atomic_uchar* __a, unsigned char __m)
2733 { atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
2735 inline unsigned char
2736 atomic_fetch_sub_explicit(volatile atomic_uchar* __a, unsigned char __m,
2738 { return _ATOMIC_MODIFY_(__a, -=, __m, __x); }
2740 inline unsigned char
2741 atomic_fetch_sub(volatile atomic_uchar* __a, unsigned char __m)
2742 { atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
2745 inline unsigned char
2746 atomic_fetch_and_explicit(volatile atomic_uchar* __a, unsigned char __m,
2748 { return _ATOMIC_MODIFY_(__a, &=, __m, __x); }
2750 inline unsigned char
2751 atomic_fetch_and(volatile atomic_uchar* __a, unsigned char __m)
2752 { atomic_fetch_and_explicit(__a, __m, memory_order_seq_cst); }
2754 inline unsigned char
2755 atomic_fetch_or_explicit(volatile atomic_uchar* __a, unsigned char __m,
2757 { return _ATOMIC_MODIFY_(__a, |=, __m, __x); }
2759 inline unsigned char
2760 atomic_fetch_or(volatile atomic_uchar* __a, unsigned char __m)
2761 { atomic_fetch_or_explicit(__a, __m, memory_order_seq_cst); }
// Atomically XORs __m into *__a, constrained by the caller's memory_order
// __x, and returns the value held previously.  Delegates to the
// _ATOMIC_MODIFY_ implementation macro with the ^= operation.
inline unsigned char
atomic_fetch_xor_explicit(volatile atomic_uchar* __a,
                          unsigned char __m, memory_order __x)
{ return _ATOMIC_MODIFY_(__a, ^=, __m, __x); }
2768 inline unsigned char
2769 atomic_fetch_xor(volatile atomic_uchar* __a, unsigned char __m)
2770 { atomic_fetch_xor_explicit(__a, __m, memory_order_seq_cst); }
2774 atomic_fetch_add_explicit(volatile atomic_short* __a, short __m,
2776 { return _ATOMIC_MODIFY_(__a, +=, __m, __x); }
2779 atomic_fetch_add(volatile atomic_short* __a, short __m)
2780 { atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
2783 atomic_fetch_sub_explicit(volatile atomic_short* __a, short __m,
2785 { return _ATOMIC_MODIFY_(__a, -=, __m, __x); }
2788 atomic_fetch_sub(volatile atomic_short* __a, short __m)
2789 { atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
2792 atomic_fetch_and_explicit(volatile atomic_short* __a, short __m,
2794 { return _ATOMIC_MODIFY_(__a, &=, __m, __x); }
2797 atomic_fetch_and(volatile atomic_short* __a, short __m)
2798 { atomic_fetch_and_explicit(__a, __m, memory_order_seq_cst); }
2801 atomic_fetch_or_explicit(volatile atomic_short* __a, short __m,
2803 { return _ATOMIC_MODIFY_(__a, |=, __m, __x); }
2806 atomic_fetch_or(volatile atomic_short* __a, short __m)
2807 { atomic_fetch_or_explicit(__a, __m, memory_order_seq_cst); }
2810 atomic_fetch_xor_explicit(volatile atomic_short* __a, short __m,
2812 { return _ATOMIC_MODIFY_(__a, ^=, __m, __x); }
2815 atomic_fetch_xor(volatile atomic_short* __a, short __m)
2816 { atomic_fetch_xor_explicit(__a, __m, memory_order_seq_cst); }
2819 inline unsigned short
2820 atomic_fetch_add_explicit(volatile atomic_ushort* __a, unsigned short __m,
2822 { return _ATOMIC_MODIFY_(__a, +=, __m, __x); }
2824 inline unsigned short
2825 atomic_fetch_add(volatile atomic_ushort* __a, unsigned short __m)
2826 { atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
2828 inline unsigned short
2829 atomic_fetch_sub_explicit(volatile atomic_ushort* __a, unsigned short __m,
2831 { return _ATOMIC_MODIFY_(__a, -=, __m, __x); }
2833 inline unsigned short
2834 atomic_fetch_sub(volatile atomic_ushort* __a, unsigned short __m)
2835 { atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
2837 inline unsigned short
2838 atomic_fetch_and_explicit(volatile atomic_ushort* __a, unsigned short __m,
2840 { return _ATOMIC_MODIFY_(__a, &=, __m, __x); }
2842 inline unsigned short
2843 atomic_fetch_and(volatile atomic_ushort* __a, unsigned short __m)
2844 { atomic_fetch_and_explicit(__a, __m, memory_order_seq_cst); }
2846 inline unsigned short
2847 atomic_fetch_or_explicit(volatile atomic_ushort* __a, unsigned short __m,
2849 { return _ATOMIC_MODIFY_(__a, |=, __m, __x); }
2851 inline unsigned short
2852 atomic_fetch_or(volatile atomic_ushort* __a, unsigned short __m)
2853 { atomic_fetch_or_explicit(__a, __m, memory_order_seq_cst); }
2855 inline unsigned short
2856 atomic_fetch_xor_explicit(volatile atomic_ushort* __a, unsigned short __m,
2858 { return _ATOMIC_MODIFY_(__a, ^=, __m, __x); }
2860 inline unsigned short
2861 atomic_fetch_xor(volatile atomic_ushort* __a, unsigned short __m)
2862 { atomic_fetch_xor_explicit(__a, __m, memory_order_seq_cst); }
2866 atomic_fetch_add_explicit(volatile atomic_int* __a, int __m,
2868 { return _ATOMIC_MODIFY_(__a, +=, __m, __x); }
2871 atomic_fetch_add(volatile atomic_int* __a, int __m)
2872 { atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
2875 atomic_fetch_sub_explicit(volatile atomic_int* __a, int __m,
2877 { return _ATOMIC_MODIFY_(__a, -=, __m, __x); }
2880 atomic_fetch_sub(volatile atomic_int* __a, int __m)
2881 { atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
2884 atomic_fetch_and_explicit(volatile atomic_int* __a, int __m,
2886 { return _ATOMIC_MODIFY_(__a, &=, __m, __x); }
2889 atomic_fetch_and(volatile atomic_int* __a, int __m)
2890 { atomic_fetch_and_explicit(__a, __m, memory_order_seq_cst); }
2893 atomic_fetch_or_explicit(volatile atomic_int* __a, int __m,
2895 { return _ATOMIC_MODIFY_(__a, |=, __m, __x); }
2898 atomic_fetch_or(volatile atomic_int* __a, int __m)
2899 { atomic_fetch_or_explicit(__a, __m, memory_order_seq_cst); }
2902 atomic_fetch_xor_explicit(volatile atomic_int* __a, int __m,
2904 { return _ATOMIC_MODIFY_(__a, ^=, __m, __x); }
2907 atomic_fetch_xor(volatile atomic_int* __a, int __m)
2908 { atomic_fetch_xor_explicit(__a, __m, memory_order_seq_cst); }
2912 atomic_fetch_add_explicit(volatile atomic_uint* __a, unsigned int __m,
2914 { return _ATOMIC_MODIFY_(__a, +=, __m, __x); }
2917 atomic_fetch_add(volatile atomic_uint* __a, unsigned int __m)
2918 { atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
2921 atomic_fetch_sub_explicit(volatile atomic_uint* __a, unsigned int __m,
2923 { return _ATOMIC_MODIFY_(__a, -=, __m, __x); }
2926 atomic_fetch_sub(volatile atomic_uint* __a, unsigned int __m)
2927 { atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
2930 atomic_fetch_and_explicit(volatile atomic_uint* __a, unsigned int __m,
2932 { return _ATOMIC_MODIFY_(__a, &=, __m, __x); }
2935 atomic_fetch_and(volatile atomic_uint* __a, unsigned int __m)
2936 { atomic_fetch_and_explicit(__a, __m, memory_order_seq_cst); }
2939 atomic_fetch_or_explicit(volatile atomic_uint* __a, unsigned int __m,
2941 { return _ATOMIC_MODIFY_(__a, |=, __m, __x); }
2944 atomic_fetch_or(volatile atomic_uint* __a, unsigned int __m)
2945 { atomic_fetch_or_explicit(__a, __m, memory_order_seq_cst); }
2948 atomic_fetch_xor_explicit(volatile atomic_uint* __a, unsigned int __m,
2950 { return _ATOMIC_MODIFY_(__a, ^=, __m, __x); }
2953 atomic_fetch_xor(volatile atomic_uint* __a, unsigned int __m)
2954 { atomic_fetch_xor_explicit(__a, __m, memory_order_seq_cst); }
2958 atomic_fetch_add_explicit(volatile atomic_long* __a, long __m,
2960 { return _ATOMIC_MODIFY_(__a, +=, __m, __x); }
2963 atomic_fetch_add(volatile atomic_long* __a, long __m)
2964 { atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
2967 atomic_fetch_sub_explicit(volatile atomic_long* __a, long __m,
2969 { return _ATOMIC_MODIFY_(__a, -=, __m, __x); }
2972 atomic_fetch_sub(volatile atomic_long* __a, long __m)
2973 { atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
2976 atomic_fetch_and_explicit(volatile atomic_long* __a, long __m,
2978 { return _ATOMIC_MODIFY_(__a, &=, __m, __x); }
2981 atomic_fetch_and(volatile atomic_long* __a, long __m)
2982 { atomic_fetch_and_explicit(__a, __m, memory_order_seq_cst); }
2985 atomic_fetch_or_explicit(volatile atomic_long* __a, long __m,
2987 { return _ATOMIC_MODIFY_(__a, |=, __m, __x); }
2990 atomic_fetch_or(volatile atomic_long* __a, long __m)
2991 { atomic_fetch_or_explicit(__a, __m, memory_order_seq_cst); }
2994 atomic_fetch_xor_explicit(volatile atomic_long* __a, long __m,
2996 { return _ATOMIC_MODIFY_(__a, ^=, __m, __x); }
2999 atomic_fetch_xor(volatile atomic_long* __a, long __m)
3000 { atomic_fetch_xor_explicit(__a, __m, memory_order_seq_cst); }
3003 inline unsigned long
3004 atomic_fetch_add_explicit(volatile atomic_ulong* __a, unsigned long __m,
3006 { return _ATOMIC_MODIFY_(__a, +=, __m, __x); }
3008 inline unsigned long
3009 atomic_fetch_add(volatile atomic_ulong* __a, unsigned long __m)
3010 { atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
3012 inline unsigned long
3013 atomic_fetch_sub_explicit(volatile atomic_ulong* __a, unsigned long __m,
3015 { return _ATOMIC_MODIFY_(__a, -=, __m, __x); }
3017 inline unsigned long
3018 atomic_fetch_sub(volatile atomic_ulong* __a, unsigned long __m)
3019 { atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
3021 inline unsigned long
3022 atomic_fetch_and_explicit(volatile atomic_ulong* __a, unsigned long __m,
3024 { return _ATOMIC_MODIFY_(__a, &=, __m, __x); }
3026 inline unsigned long
3027 atomic_fetch_and(volatile atomic_ulong* __a, unsigned long __m)
3028 { atomic_fetch_and_explicit(__a, __m, memory_order_seq_cst); }
3030 inline unsigned long
3031 atomic_fetch_or_explicit(volatile atomic_ulong* __a, unsigned long __m,
3033 { return _ATOMIC_MODIFY_(__a, |=, __m, __x); }
3035 inline unsigned long
3036 atomic_fetch_or(volatile atomic_ulong* __a, unsigned long __m)
3037 { atomic_fetch_or_explicit(__a, __m, memory_order_seq_cst); }
3039 inline unsigned long
3040 atomic_fetch_xor_explicit(volatile atomic_ulong* __a, unsigned long __m,
3042 { return _ATOMIC_MODIFY_(__a, ^=, __m, __x); }
3044 inline unsigned long
3045 atomic_fetch_xor(volatile atomic_ulong* __a, unsigned long __m)
3046 { atomic_fetch_xor_explicit(__a, __m, memory_order_seq_cst); }
3050 atomic_fetch_add_explicit(volatile atomic_llong* __a, long long __m,
3052 { return _ATOMIC_MODIFY_(__a, +=, __m, __x); }
3055 atomic_fetch_add(volatile atomic_llong* __a, long long __m)
3056 { atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
3059 atomic_fetch_sub_explicit(volatile atomic_llong* __a, long long __m,
3061 { return _ATOMIC_MODIFY_(__a, -=, __m, __x); }
3064 atomic_fetch_sub(volatile atomic_llong* __a, long long __m)
3065 { atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
3068 atomic_fetch_and_explicit(volatile atomic_llong* __a,
3069 long long __m, memory_order __x)
3070 { return _ATOMIC_MODIFY_(__a, &=, __m, __x); }
3073 atomic_fetch_and(volatile atomic_llong* __a, long long __m)
3074 { atomic_fetch_and_explicit(__a, __m, memory_order_seq_cst); }
3077 atomic_fetch_or_explicit(volatile atomic_llong* __a,
3078 long long __m, memory_order __x)
3079 { return _ATOMIC_MODIFY_(__a, |=, __m, __x); }
3082 atomic_fetch_or(volatile atomic_llong* __a, long long __m)
3083 { atomic_fetch_or_explicit(__a, __m, memory_order_seq_cst); }
3086 atomic_fetch_xor_explicit(volatile atomic_llong* __a,
3087 long long __m, memory_order __x)
3088 { return _ATOMIC_MODIFY_(__a, ^=, __m, __x); }
3091 atomic_fetch_xor(volatile atomic_llong* __a, long long __m)
3092 { atomic_fetch_xor_explicit(__a, __m, memory_order_seq_cst); }
3095 inline unsigned long long
3096 atomic_fetch_add_explicit(volatile atomic_ullong* __a,
3097 unsigned long long __m, memory_order __x)
3098 { return _ATOMIC_MODIFY_(__a, +=, __m, __x); }
3100 inline unsigned long long
3101 atomic_fetch_add(volatile atomic_ullong* __a, unsigned long long __m)
3102 { atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
3104 inline unsigned long long
3105 atomic_fetch_sub_explicit(volatile atomic_ullong* __a,
3106 unsigned long long __m, memory_order __x)
3107 { return _ATOMIC_MODIFY_(__a, -=, __m, __x); }
3109 inline unsigned long long
3110 atomic_fetch_sub(volatile atomic_ullong* __a, unsigned long long __m)
3111 { atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
3113 inline unsigned long long
3114 atomic_fetch_and_explicit(volatile atomic_ullong* __a,
3115 unsigned long long __m, memory_order __x)
3116 { return _ATOMIC_MODIFY_(__a, &=, __m, __x); }
3118 inline unsigned long long
3119 atomic_fetch_and(volatile atomic_ullong* __a, unsigned long long __m)
3120 { atomic_fetch_and_explicit(__a, __m, memory_order_seq_cst); }
3122 inline unsigned long long
3123 atomic_fetch_or_explicit(volatile atomic_ullong* __a,
3124 unsigned long long __m, memory_order __x)
3125 { return _ATOMIC_MODIFY_(__a, |=, __m, __x); }
3127 inline unsigned long long
3128 atomic_fetch_or(volatile atomic_ullong* __a, unsigned long long __m)
3129 { atomic_fetch_or_explicit(__a, __m, memory_order_seq_cst); }
3131 inline unsigned long long
3132 atomic_fetch_xor_explicit(volatile atomic_ullong* __a,
3133 unsigned long long __m, memory_order __x)
3134 { return _ATOMIC_MODIFY_(__a, ^=, __m, __x); }
3136 inline unsigned long long
3137 atomic_fetch_xor(volatile atomic_ullong* __a, unsigned long long __m)
3138 { atomic_fetch_xor_explicit(__a, __m, memory_order_seq_cst); }
3142 atomic_fetch_add_explicit(volatile atomic_wchar_t* __a, wchar_t __m,
3144 { return _ATOMIC_MODIFY_(__a, +=, __m, __x); }
3147 atomic_fetch_add(volatile atomic_wchar_t* __a, wchar_t __m)
3148 { atomic_fetch_add_explicit(__a, __m, memory_order_seq_cst); }
3151 atomic_fetch_sub_explicit(volatile atomic_wchar_t* __a, wchar_t __m,
3153 { return _ATOMIC_MODIFY_(__a, -=, __m, __x); }
3156 atomic_fetch_sub(volatile atomic_wchar_t* __a, wchar_t __m)
3157 { atomic_fetch_sub_explicit(__a, __m, memory_order_seq_cst); }
3160 atomic_fetch_and_explicit(volatile atomic_wchar_t* __a, wchar_t __m,
3162 { return _ATOMIC_MODIFY_(__a, &=, __m, __x); }
3165 atomic_fetch_and(volatile atomic_wchar_t* __a, wchar_t __m)
3166 { atomic_fetch_and_explicit(__a, __m, memory_order_seq_cst); }
3169 atomic_fetch_or_explicit(volatile atomic_wchar_t* __a, wchar_t __m,
3171 { return _ATOMIC_MODIFY_(__a, |=, __m, __x); }
3174 atomic_fetch_or(volatile atomic_wchar_t* __a, wchar_t __m)
3175 { atomic_fetch_or_explicit(__a, __m, memory_order_seq_cst); }
3178 atomic_fetch_xor_explicit(volatile atomic_wchar_t* __a, wchar_t __m,
3180 { return _ATOMIC_MODIFY_(__a, ^=, __m, __x); }
3183 atomic_fetch_xor(volatile atomic_wchar_t* __a, wchar_t __m)
3184 { atomic_fetch_xor_explicit(__a, __m, memory_order_seq_cst); }
// Out-of-line atomic_bool member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3187 atomic_bool::is_lock_free() const volatile
3191 atomic_bool::store(bool __m, memory_order __x) volatile
3192 { atomic_store_explicit(this, __m, __x); }
3195 atomic_bool::load(memory_order __x) volatile
3196 { return atomic_load_explicit(this, __x); }
3199 atomic_bool::swap(bool __m, memory_order __x) volatile
3200 { return atomic_swap_explicit(this, __m, __x); }
3203 atomic_bool::compare_swap(bool& __e, bool __m, memory_order __x,
3204 memory_order __y) volatile
3205 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3208 atomic_bool::compare_swap(bool& __e, bool __m, memory_order __x) volatile
3210 const bool __cond1 = __x == memory_order_release;
3211 const bool __cond2 = __x == memory_order_acq_rel;
3212 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3213 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3214 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3218 atomic_bool::fence(memory_order __x) const volatile
3219 { return atomic_fence(this, __x); }
// Out-of-line atomic_char member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3223 atomic_char::is_lock_free() const volatile
3227 atomic_char::store(char __m, memory_order __x) volatile
3228 { atomic_store_explicit(this, __m, __x); }
3231 atomic_char::load(memory_order __x) volatile
3232 { return atomic_load_explicit(this, __x); }
3235 atomic_char::swap(char __m, memory_order __x) volatile
3236 { return atomic_swap_explicit(this, __m, __x); }
3239 atomic_char::compare_swap(char& __e, char __m,
3240 memory_order __x, memory_order __y) volatile
3241 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3244 atomic_char::compare_swap(char& __e, char __m, memory_order __x) volatile
3246 const bool __cond1 = __x == memory_order_release;
3247 const bool __cond2 = __x == memory_order_acq_rel;
3248 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3249 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3250 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3254 atomic_char::fence(memory_order __x) const volatile
3255 { return atomic_fence(this, __x); }
// Out-of-line atomic_schar member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3259 atomic_schar::is_lock_free() const volatile
3263 atomic_schar::store(signed char __m, memory_order __x) volatile
3264 { atomic_store_explicit(this, __m, __x); }
3267 atomic_schar::load(memory_order __x) volatile
3268 { return atomic_load_explicit(this, __x); }
3271 atomic_schar::swap(signed char __m, memory_order __x) volatile
3272 { return atomic_swap_explicit(this, __m, __x); }
3275 atomic_schar::compare_swap(signed char& __e, signed char __m,
3276 memory_order __x, memory_order __y) volatile
3277 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3280 atomic_schar::compare_swap(signed char& __e, signed char __m,
3281 memory_order __x) volatile
3283 const bool __cond1 = __x == memory_order_release;
3284 const bool __cond2 = __x == memory_order_acq_rel;
3285 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3286 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3287 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3291 atomic_schar::fence(memory_order __x) const volatile
3292 { return atomic_fence(this, __x); }
// Out-of-line atomic_uchar member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3295 atomic_uchar::is_lock_free() const volatile
3299 atomic_uchar::store(unsigned char __m, memory_order __x) volatile
3300 { atomic_store_explicit(this, __m, __x); }
3302 inline unsigned char
3303 atomic_uchar::load(memory_order __x) volatile
3304 { return atomic_load_explicit(this, __x); }
3306 inline unsigned char
3307 atomic_uchar::swap(unsigned char __m, memory_order __x) volatile
3308 { return atomic_swap_explicit(this, __m, __x); }
3311 atomic_uchar::compare_swap(unsigned char& __e, unsigned char __m,
3312 memory_order __x, memory_order __y) volatile
3313 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3316 atomic_uchar::compare_swap(unsigned char& __e, unsigned char __m,
3317 memory_order __x) volatile
3319 const bool __cond1 = __x == memory_order_release;
3320 const bool __cond2 = __x == memory_order_acq_rel;
3321 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3322 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3323 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3327 atomic_uchar::fence(memory_order __x) const volatile
3328 { return atomic_fence(this, __x); }
// Out-of-line atomic_short member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3332 atomic_short::is_lock_free() const volatile
3336 atomic_short::store(short __m, memory_order __x) volatile
3337 { atomic_store_explicit(this, __m, __x); }
3340 atomic_short::load(memory_order __x) volatile
3341 { return atomic_load_explicit(this, __x); }
3344 atomic_short::swap(short __m, memory_order __x) volatile
3345 { return atomic_swap_explicit(this, __m, __x); }
3348 atomic_short::compare_swap(short& __e, short __m,
3349 memory_order __x, memory_order __y) volatile
3350 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3353 atomic_short::compare_swap(short& __e, short __m, memory_order __x) volatile
3355 const bool __cond1 = __x == memory_order_release;
3356 const bool __cond2 = __x == memory_order_acq_rel;
3357 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3358 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3359 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3363 atomic_short::fence(memory_order __x) const volatile
3364 { return atomic_fence(this, __x); }
// Out-of-line atomic_ushort member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3368 atomic_ushort::is_lock_free() const volatile
3372 atomic_ushort::store(unsigned short __m, memory_order __x) volatile
3373 { atomic_store_explicit(this, __m, __x); }
3375 inline unsigned short
3376 atomic_ushort::load(memory_order __x) volatile
3377 { return atomic_load_explicit(this, __x); }
3379 inline unsigned short
3380 atomic_ushort::swap(unsigned short __m, memory_order __x) volatile
3381 { return atomic_swap_explicit(this, __m, __x); }
3384 atomic_ushort::compare_swap(unsigned short& __e, unsigned short __m,
3385 memory_order __x, memory_order __y) volatile
3386 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3389 atomic_ushort::compare_swap(unsigned short& __e, unsigned short __m,
3390 memory_order __x) volatile
3392 const bool __cond1 = __x == memory_order_release;
3393 const bool __cond2 = __x == memory_order_acq_rel;
3394 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3395 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3396 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3400 atomic_ushort::fence(memory_order __x) const volatile
3401 { return atomic_fence(this, __x); }
// Out-of-line atomic_int member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3405 atomic_int::is_lock_free() const volatile
3409 atomic_int::store(int __m, memory_order __x) volatile
3410 { atomic_store_explicit(this, __m, __x); }
3413 atomic_int::load(memory_order __x) volatile
3414 { return atomic_load_explicit(this, __x); }
3417 atomic_int::swap(int __m, memory_order __x) volatile
3418 { return atomic_swap_explicit(this, __m, __x); }
3421 atomic_int::compare_swap(int& __e, int __m, memory_order __x,
3422 memory_order __y) volatile
3423 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3426 atomic_int::compare_swap(int& __e, int __m, memory_order __x) volatile
3428 const bool __cond1 = __x == memory_order_release;
3429 const bool __cond2 = __x == memory_order_acq_rel;
3430 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3431 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3432 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3436 atomic_int::fence(memory_order __x) const volatile
3437 { return atomic_fence(this, __x); }
// Out-of-line atomic_uint member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3441 atomic_uint::is_lock_free() const volatile
3445 atomic_uint::store(unsigned int __m, memory_order __x) volatile
3446 { atomic_store_explicit(this, __m, __x); }
3449 atomic_uint::load(memory_order __x) volatile
3450 { return atomic_load_explicit(this, __x); }
3453 atomic_uint::swap(unsigned int __m, memory_order __x) volatile
3454 { return atomic_swap_explicit(this, __m, __x); }
3457 atomic_uint::compare_swap(unsigned int& __e, unsigned int __m,
3458 memory_order __x, memory_order __y) volatile
3459 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3462 atomic_uint::compare_swap(unsigned int& __e, unsigned int __m,
3463 memory_order __x) volatile
3465 const bool __cond1 = __x == memory_order_release;
3466 const bool __cond2 = __x == memory_order_acq_rel;
3467 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3468 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3469 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3473 atomic_uint::fence(memory_order __x) const volatile
3474 { return atomic_fence(this, __x); }
// Out-of-line atomic_long member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3478 atomic_long::is_lock_free() const volatile
3482 atomic_long::store(long __m, memory_order __x) volatile
3483 { atomic_store_explicit(this, __m, __x); }
3486 atomic_long::load(memory_order __x) volatile
3487 { return atomic_load_explicit(this, __x); }
3490 atomic_long::swap(long __m, memory_order __x) volatile
3491 { return atomic_swap_explicit(this, __m, __x); }
3494 atomic_long::compare_swap(long& __e, long __m,
3495 memory_order __x, memory_order __y) volatile
3496 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3499 atomic_long::compare_swap(long& __e, long __m, memory_order __x) volatile
3501 const bool __cond1 = __x == memory_order_release;
3502 const bool __cond2 = __x == memory_order_acq_rel;
3503 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3504 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3505 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3509 atomic_long::fence(memory_order __x) const volatile
3510 { return atomic_fence(this, __x); }
// Out-of-line atomic_ulong member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3514 atomic_ulong::is_lock_free() const volatile
3518 atomic_ulong::store(unsigned long __m, memory_order __x) volatile
3519 { atomic_store_explicit(this, __m, __x); }
3521 inline unsigned long
3522 atomic_ulong::load(memory_order __x) volatile
3523 { return atomic_load_explicit(this, __x); }
3525 inline unsigned long
3526 atomic_ulong::swap(unsigned long __m, memory_order __x) volatile
3527 { return atomic_swap_explicit(this, __m, __x); }
3530 atomic_ulong::compare_swap(unsigned long& __e, unsigned long __m,
3531 memory_order __x, memory_order __y) volatile
3532 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3535 atomic_ulong::compare_swap(unsigned long& __e, unsigned long __m,
3536 memory_order __x) volatile
3538 const bool __cond1 = __x == memory_order_release;
3539 const bool __cond2 = __x == memory_order_acq_rel;
3540 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3541 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3542 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3546 atomic_ulong::fence(memory_order __x) const volatile
3547 { return atomic_fence(this, __x); }
// Out-of-line atomic_llong member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3551 atomic_llong::is_lock_free() const volatile
3555 atomic_llong::store(long long __m, memory_order __x) volatile
3556 { atomic_store_explicit(this, __m, __x); }
3559 atomic_llong::load(memory_order __x) volatile
3560 { return atomic_load_explicit(this, __x); }
3563 atomic_llong::swap(long long __m, memory_order __x) volatile
3564 { return atomic_swap_explicit(this, __m, __x); }
3567 atomic_llong::compare_swap(long long& __e, long long __m,
3568 memory_order __x, memory_order __y) volatile
3569 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3572 atomic_llong::compare_swap(long long& __e, long long __m,
3573 memory_order __x) volatile
3575 const bool __cond1 = __x == memory_order_release;
3576 const bool __cond2 = __x == memory_order_acq_rel;
3577 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3578 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3579 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3583 atomic_llong::fence(memory_order __x) const volatile
3584 { return atomic_fence(this, __x); }
// Out-of-line atomic_ullong member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3588 atomic_ullong::is_lock_free() const volatile
3592 atomic_ullong::store(unsigned long long __m, memory_order __x) volatile
3593 { atomic_store_explicit(this, __m, __x); }
3595 inline unsigned long long
3596 atomic_ullong::load(memory_order __x) volatile
3597 { return atomic_load_explicit(this, __x); }
3599 inline unsigned long long
3600 atomic_ullong::swap(unsigned long long __m, memory_order __x) volatile
3601 { return atomic_swap_explicit(this, __m, __x); }
3604 atomic_ullong::compare_swap(unsigned long long& __e, unsigned long long __m,
3605 memory_order __x, memory_order __y) volatile
3606 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3609 atomic_ullong::compare_swap(unsigned long long& __e, unsigned long long __m,
3610 memory_order __x) volatile
3612 const bool __cond1 = __x == memory_order_release;
3613 const bool __cond2 = __x == memory_order_acq_rel;
3614 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3615 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3616 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3620 atomic_ullong::fence(memory_order __x) const volatile
3621 { return atomic_fence(this, __x); }
// Out-of-line atomic_wchar_t member definitions: each forwards to the
// matching free atomic_* function, passing `this`.
// NOTE(review): this listing appears de-duplicated -- return-type
// lines and some braces are elided; code lines left byte-identical.
3625 atomic_wchar_t::is_lock_free() const volatile
3629 atomic_wchar_t::store(wchar_t __m, memory_order __x) volatile
3630 { atomic_store_explicit(this, __m, __x); }
3633 atomic_wchar_t::load(memory_order __x) volatile
3634 { return atomic_load_explicit(this, __x); }
3637 atomic_wchar_t::swap(wchar_t __m, memory_order __x) volatile
3638 { return atomic_swap_explicit(this, __m, __x); }
3641 atomic_wchar_t::compare_swap(wchar_t& __e, wchar_t __m,
3642 memory_order __x, memory_order __y) volatile
3643 { return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }
// Single-order overload: derive the failure order from __x
// (release -> relaxed, acq_rel -> acquire).
3646 atomic_wchar_t::compare_swap(wchar_t& __e, wchar_t __m,
3647 memory_order __x) volatile
3649 const bool __cond1 = __x == memory_order_release;
3650 const bool __cond2 = __x == memory_order_acq_rel;
3651 memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
3652 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
3653 return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);
3657 atomic_wchar_t::fence(memory_order __x) const volatile
3658 { return atomic_fence(this, __x); }
// atomic_address member fetch_add/fetch_sub: forward to the matching
// atomic_fetch_*_explicit free function, passing `this`, and return
// its result.  (Return-type lines elided in this listing.)
3662 atomic_address::fetch_add(ptrdiff_t __m, memory_order __x) volatile
3663 { return atomic_fetch_add_explicit(this, __m, __x); }
3666 atomic_address::fetch_sub(ptrdiff_t __m, memory_order __x) volatile
3667 { return atomic_fetch_sub_explicit(this, __m, __x); }
// atomic_char member fetch_<op> wrappers: forward to the matching
// atomic_fetch_<op>_explicit free function, passing `this`, and
// return its result.  (Return-type lines elided in this listing.)
3671 atomic_char::fetch_add(char __m, memory_order __x) volatile
3672 { return atomic_fetch_add_explicit(this, __m, __x); }
3676 atomic_char::fetch_sub(char __m, memory_order __x) volatile
3677 { return atomic_fetch_sub_explicit(this, __m, __x); }
3681 atomic_char::fetch_and(char __m, memory_order __x) volatile
3682 { return atomic_fetch_and_explicit(this, __m, __x); }
3686 atomic_char::fetch_or(char __m, memory_order __x) volatile
3687 { return atomic_fetch_or_explicit(this, __m, __x); }
3691 atomic_char::fetch_xor(char __m, memory_order __x) volatile
3692 { return atomic_fetch_xor_explicit(this, __m, __x); }
// atomic_schar member fetch_<op> wrappers: forward to the matching
// atomic_fetch_<op>_explicit free function, passing `this`, and
// return its result.  (Return-type lines elided in this listing.)
3696 atomic_schar::fetch_add(signed char __m, memory_order __x) volatile
3697 { return atomic_fetch_add_explicit(this, __m, __x); }
3701 atomic_schar::fetch_sub(signed char __m, memory_order __x) volatile
3702 { return atomic_fetch_sub_explicit(this, __m, __x); }
3706 atomic_schar::fetch_and(signed char __m, memory_order __x) volatile
3707 { return atomic_fetch_and_explicit(this, __m, __x); }
3711 atomic_schar::fetch_or(signed char __m, memory_order __x) volatile
3712 { return atomic_fetch_or_explicit(this, __m, __x); }
3716 atomic_schar::fetch_xor(signed char __m, memory_order __x) volatile
3717 { return atomic_fetch_xor_explicit(this, __m, __x); }
// atomic_uchar member fetch_<op> wrappers: forward to the matching
// atomic_fetch_<op>_explicit free function, passing `this`, and
// return its result.
3720 inline unsigned char
3721 atomic_uchar::fetch_add(unsigned char __m, memory_order __x) volatile
3722 { return atomic_fetch_add_explicit(this, __m, __x); }
3725 inline unsigned char
3726 atomic_uchar::fetch_sub(unsigned char __m, memory_order __x) volatile
3727 { return atomic_fetch_sub_explicit(this, __m, __x); }
3730 inline unsigned char
3731 atomic_uchar::fetch_and(unsigned char __m, memory_order __x) volatile
3732 { return atomic_fetch_and_explicit(this, __m, __x); }
3735 inline unsigned char
3736 atomic_uchar::fetch_or(unsigned char __m, memory_order __x) volatile
3737 { return atomic_fetch_or_explicit(this, __m, __x); }
3740 inline unsigned char
3741 atomic_uchar::fetch_xor(unsigned char __m, memory_order __x) volatile
3742 { return atomic_fetch_xor_explicit(this, __m, __x); }
// atomic_short member fetch_<op> wrappers: forward to the matching
// atomic_fetch_<op>_explicit free function, passing `this`, and
// return its result.  (Return-type lines elided in this listing.)
3746 atomic_short::fetch_add(short __m, memory_order __x) volatile
3747 { return atomic_fetch_add_explicit(this, __m, __x); }
3751 atomic_short::fetch_sub(short __m, memory_order __x) volatile
3752 { return atomic_fetch_sub_explicit(this, __m, __x); }
3756 atomic_short::fetch_and(short __m, memory_order __x) volatile
3757 { return atomic_fetch_and_explicit(this, __m, __x); }
3761 atomic_short::fetch_or(short __m, memory_order __x) volatile
3762 { return atomic_fetch_or_explicit(this, __m, __x); }
3766 atomic_short::fetch_xor(short __m, memory_order __x) volatile
3767 { return atomic_fetch_xor_explicit(this, __m, __x); }
// atomic_ushort member fetch_<op> wrappers: forward to the matching
// atomic_fetch_<op>_explicit free function, passing `this`, and
// return its result.
3770 inline unsigned short
3771 atomic_ushort::fetch_add(unsigned short __m, memory_order __x) volatile
3772 { return atomic_fetch_add_explicit(this, __m, __x); }
3775 inline unsigned short
3776 atomic_ushort::fetch_sub(unsigned short __m, memory_order __x) volatile
3777 { return atomic_fetch_sub_explicit(this, __m, __x); }
3780 inline unsigned short
3781 atomic_ushort::fetch_and(unsigned short __m, memory_order __x) volatile
3782 { return atomic_fetch_and_explicit(this, __m, __x); }
3785 inline unsigned short
3786 atomic_ushort::fetch_or(unsigned short __m, memory_order __x) volatile
3787 { return atomic_fetch_or_explicit(this, __m, __x); }
3790 inline unsigned short
3791 atomic_ushort::fetch_xor(unsigned short __m, memory_order __x) volatile
3792 { return atomic_fetch_xor_explicit(this, __m, __x); }
// atomic_int member fetch_<op> wrappers: forward to the matching
// atomic_fetch_<op>_explicit free function, passing `this`, and
// return its result.  (Return-type lines elided in this listing.)
3796 atomic_int::fetch_add(int __m, memory_order __x) volatile
3797 { return atomic_fetch_add_explicit(this, __m, __x); }
3801 atomic_int::fetch_sub(int __m, memory_order __x) volatile
3802 { return atomic_fetch_sub_explicit(this, __m, __x); }
3806 atomic_int::fetch_and(int __m, memory_order __x) volatile
3807 { return atomic_fetch_and_explicit(this, __m, __x); }
3811 atomic_int::fetch_or(int __m, memory_order __x) volatile
3812 { return atomic_fetch_or_explicit(this, __m, __x); }
3816 atomic_int::fetch_xor(int __m, memory_order __x) volatile
3817 { return atomic_fetch_xor_explicit(this, __m, __x); }
// atomic_uint member fetch_<op> wrappers: forward to the matching
// atomic_fetch_<op>_explicit free function, passing `this`, and
// return its result.  (Return-type lines elided in this listing.)
3821 atomic_uint::fetch_add(unsigned int __m, memory_order __x) volatile
3822 { return atomic_fetch_add_explicit(this, __m, __x); }
3826 atomic_uint::fetch_sub(unsigned int __m, memory_order __x) volatile
3827 { return atomic_fetch_sub_explicit(this, __m, __x); }
3831 atomic_uint::fetch_and(unsigned int __m, memory_order __x) volatile
3832 { return atomic_fetch_and_explicit(this, __m, __x); }
3836 atomic_uint::fetch_or(unsigned int __m, memory_order __x) volatile
3837 { return atomic_fetch_or_explicit(this, __m, __x); }
3841 atomic_uint::fetch_xor(unsigned int __m, memory_order __x) volatile
3842 { return atomic_fetch_xor_explicit(this, __m, __x); }
// Atomic arithmetic/bitwise fetch operations for atomic_long; each
// forwards to the matching atomic_fetch_*_explicit free function and
// returns its result.
// NOTE(review): the `inline long` return-type line of each definition
// appears to be missing from this copy; confirm against the original
// header.
atomic_long::fetch_add(long __m, memory_order __x) volatile
{ return atomic_fetch_add_explicit(this, __m, __x); }

// Atomic subtraction of __m.
atomic_long::fetch_sub(long __m, memory_order __x) volatile
{ return atomic_fetch_sub_explicit(this, __m, __x); }

// Atomic bitwise AND with __m.
atomic_long::fetch_and(long __m, memory_order __x) volatile
{ return atomic_fetch_and_explicit(this, __m, __x); }

// Atomic bitwise OR with __m.
atomic_long::fetch_or(long __m, memory_order __x) volatile
{ return atomic_fetch_or_explicit(this, __m, __x); }

// Atomic bitwise XOR with __m.
atomic_long::fetch_xor(long __m, memory_order __x) volatile
{ return atomic_fetch_xor_explicit(this, __m, __x); }
3870 inline unsigned long
3871 atomic_ulong::fetch_add(unsigned long __m, memory_order __x) volatile
3872 { return atomic_fetch_add_explicit(this, __m, __x); }
3875 inline unsigned long
3876 atomic_ulong::fetch_sub(unsigned long __m, memory_order __x) volatile
3877 { return atomic_fetch_sub_explicit(this, __m, __x); }
3880 inline unsigned long
3881 atomic_ulong::fetch_and(unsigned long __m, memory_order __x) volatile
3882 { return atomic_fetch_and_explicit(this, __m, __x); }
3885 inline unsigned long
3886 atomic_ulong::fetch_or(unsigned long __m, memory_order __x) volatile
3887 { return atomic_fetch_or_explicit(this, __m, __x); }
3890 inline unsigned long
3891 atomic_ulong::fetch_xor(unsigned long __m, memory_order __x) volatile
3892 { return atomic_fetch_xor_explicit(this, __m, __x); }
// Atomic arithmetic/bitwise fetch operations for atomic_llong; each
// forwards to the matching atomic_fetch_*_explicit free function and
// returns its result.
// NOTE(review): the `inline long long` return-type line of each
// definition appears to be missing from this copy; confirm against
// the original header.
atomic_llong::fetch_add(long long __m, memory_order __x) volatile
{ return atomic_fetch_add_explicit(this, __m, __x); }

// Atomic subtraction of __m.
atomic_llong::fetch_sub(long long __m, memory_order __x) volatile
{ return atomic_fetch_sub_explicit(this, __m, __x); }

// Atomic bitwise AND with __m.
atomic_llong::fetch_and(long long __m, memory_order __x) volatile
{ return atomic_fetch_and_explicit(this, __m, __x); }

// Atomic bitwise OR with __m.
atomic_llong::fetch_or(long long __m, memory_order __x) volatile
{ return atomic_fetch_or_explicit(this, __m, __x); }

// Atomic bitwise XOR with __m.
atomic_llong::fetch_xor(long long __m, memory_order __x) volatile
{ return atomic_fetch_xor_explicit(this, __m, __x); }
3920 inline unsigned long long
3921 atomic_ullong::fetch_add(unsigned long long __m, memory_order __x) volatile
3922 { return atomic_fetch_add_explicit(this, __m, __x); }
3925 inline unsigned long long
3926 atomic_ullong::fetch_sub(unsigned long long __m, memory_order __x) volatile
3927 { return atomic_fetch_sub_explicit(this, __m, __x); }
3930 inline unsigned long long
3931 atomic_ullong::fetch_and(unsigned long long __m, memory_order __x) volatile
3932 { return atomic_fetch_and_explicit(this, __m, __x); }
3935 inline unsigned long long
3936 atomic_ullong::fetch_or(unsigned long long __m, memory_order __x) volatile
3937 { return atomic_fetch_or_explicit(this, __m, __x); }
3940 inline unsigned long long
3941 atomic_ullong::fetch_xor(unsigned long long __m, memory_order __x) volatile
3942 { return atomic_fetch_xor_explicit(this, __m, __x); }
// Atomic arithmetic/bitwise fetch operations for atomic_wchar_t; each
// forwards to the matching atomic_fetch_*_explicit free function and
// returns its result.
// NOTE(review): the `inline wchar_t` return-type line of each
// definition appears to be missing from this copy; confirm against
// the original header.
atomic_wchar_t::fetch_add(wchar_t __m, memory_order __x) volatile
{ return atomic_fetch_add_explicit(this, __m, __x); }

// Atomic subtraction of __m.
atomic_wchar_t::fetch_sub(wchar_t __m, memory_order __x) volatile
{ return atomic_fetch_sub_explicit(this, __m, __x); }

// Atomic bitwise AND with __m.
atomic_wchar_t::fetch_and(wchar_t __m, memory_order __x) volatile
{ return atomic_fetch_and_explicit(this, __m, __x); }

// Atomic bitwise OR with __m.
atomic_wchar_t::fetch_or(wchar_t __m, memory_order __x) volatile
{ return atomic_fetch_or_explicit(this, __m, __x); }

// Atomic bitwise XOR with __m.
atomic_wchar_t::fetch_xor(wchar_t __m, memory_order __x) volatile
{ return atomic_fetch_xor_explicit(this, __m, __x); }
// Out-of-class member definitions for atomic_address, the atomic
// void* type.  Each operation forwards to the corresponding free
// atomic_* function.
// NOTE(review): several `inline <type>` return-type lines (and the
// is_lock_free body) appear to be missing from this copy of the file;
// confirm against the original header.
atomic_address::is_lock_free() const volatile

// Atomically stores pointer __m with ordering __x.
atomic_address::store(void* __m, memory_order __x) volatile
{ atomic_store_explicit(this, __m, __x); }

// Atomically loads the stored pointer with ordering __x.
atomic_address::load(memory_order __x) volatile
{ return atomic_load_explicit(this, __x); }

// Atomically exchanges the stored pointer for __m; returns the free
// function's result.
atomic_address::swap(void* __m, memory_order __x) volatile
{ return atomic_swap_explicit(this, __m, __x); }

// Compare-and-swap with explicit success (__x) and failure (__y)
// orderings; the expected value __e is passed by address so the free
// function can update it.
atomic_address::compare_swap(void*& __e, void* __m,
memory_order __x, memory_order __y) volatile
{ return atomic_compare_swap_explicit(this, &__e, __m, __x, __y); }

// Single-order compare-and-swap: derives the failure ordering __mo2
// from __x (release -> relaxed, acq_rel -> acquire, otherwise __x
// itself) and delegates to the explicit free function.
// NOTE(review): the opening/closing braces of this body appear to be
// missing from this copy; confirm against the original header.
atomic_address::compare_swap(void*& __e, void* __m,
memory_order __x) volatile
const bool __cond1 = __x == memory_order_release;
const bool __cond2 = __x == memory_order_acq_rel;
memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
return atomic_compare_swap_explicit(this, &__e, __m, __x, __mo2);

// Object-scoped fence with ordering __x, forwarded to atomic_fence.
atomic_address::fence(memory_order __x) const volatile
{ return atomic_fence(this, __x); }
// Out-of-class member definitions for the primary template
// atomic<_Tp>.
// NOTE(review): the return-type lines and several function bodies
// appear to be missing from this copy of the file, and the bodies
// that are present for store/load/swap/compare_swap are commented-out
// macro invocations -- confirm against the original header.
template<typename _Tp>
atomic<_Tp>::is_lock_free() const volatile

// Store __v with ordering __x; intended implementation is the
// _ATOMIC_STORE_ macro (currently commented out).
template<typename _Tp>
atomic<_Tp>::store(_Tp __v, memory_order __x) volatile
// { _ATOMIC_STORE_(this, __v, __x); }

// Load the stored value with ordering __x via _ATOMIC_LOAD_
// (currently commented out).
template<typename _Tp>
atomic<_Tp>::load(memory_order __x) volatile
// { return _ATOMIC_LOAD_(this, __x); }

// Exchange the stored value for __v via _ATOMIC_MODIFY_ (currently
// commented out).
template<typename _Tp>
atomic<_Tp>::swap(_Tp __v, memory_order __x) volatile
// { return _ATOMIC_MODIFY_(this, =, __v, __x); }

// Compare-and-swap with explicit success (__x) and failure (__y)
// orderings via _ATOMIC_CMPSWP_ (currently commented out; note the
// commented body does not forward __y -- verify when re-enabling).
template<typename _Tp>
atomic<_Tp>::compare_swap(_Tp& __r, _Tp __v, memory_order __x,
memory_order __y) volatile
// { return _ATOMIC_CMPSWP_(this, &__r, __v, __x); }

// Single-order compare-and-swap: derives the failure ordering __mo2
// from __x (release -> relaxed, acq_rel -> acquire, otherwise __x
// itself) and delegates to the two-order overload above.
// NOTE(review): the braces of this body appear to be missing from
// this copy; confirm against the original header.
template<typename _Tp>
atomic<_Tp>::compare_swap(_Tp& __r, _Tp __v, memory_order __x) volatile
const bool __cond1 = __x == memory_order_release;
const bool __cond2 = __x == memory_order_acq_rel;
memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
return compare_swap(__r, __v, __x, __mo2);
// Out-of-class member definitions for the pointer partial
// specialization atomic<_Tp*>, implemented on top of the
// atomic_address base by casting between void* and _Tp*.
// NOTE(review): the `inline _Tp*` / `inline bool` return-type lines
// (and some braces) appear to be missing from this copy of the file;
// confirm against the original header.
template<typename _Tp>
atomic<_Tp*>::load(memory_order __x) volatile
{ return static_cast<_Tp*>(atomic_address::load(__x)); }

// Exchange the stored pointer for __v; result cast back to _Tp*.
template<typename _Tp>
atomic<_Tp*>::swap(_Tp* __v, memory_order __x) volatile
{ return static_cast<_Tp*>(atomic_address::swap(__v, __x)); }

// Two-order compare-and-swap, forwarded to the base-class overload.
// NOTE(review): reinterpret_cast of _Tp*& to void** assumes _Tp* and
// void* share representation -- long-standing practice here, but
// technically unspecified; flagging for awareness.
template<typename _Tp>
atomic<_Tp*>::compare_swap(_Tp*& __r, _Tp* __v, memory_order __x,
memory_order __y) volatile
{ return atomic_address::compare_swap(*reinterpret_cast<void**>(&__r),
static_cast<void*>(__v), __x, __y); }

// Single-order compare-and-swap: derives the failure ordering __mo2
// from __x (release -> relaxed, acq_rel -> acquire, otherwise __x
// itself) and delegates to the two-order overload above.
// NOTE(review): the braces of this body appear to be missing from
// this copy; confirm against the original header.
template<typename _Tp>
atomic<_Tp*>::compare_swap(_Tp*& __r, _Tp* __v, memory_order __x) volatile
const bool __cond1 = __x == memory_order_release;
const bool __cond2 = __x == memory_order_acq_rel;
memory_order __mo1(__cond1 ? memory_order_relaxed : __x);
memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
return compare_swap(__r, __v, __x, __mo2);

// Pointer arithmetic: scales the element count __v by sizeof(_Tp) to
// get a byte offset for the underlying void* fetch-add.
// NOTE(review): sizeof(_Tp) is unsigned, so a negative ptrdiff_t __v
// is converted to unsigned before the multiply -- relies on modular
// wraparound; verify intent.
template<typename _Tp>
atomic<_Tp*>::fetch_add(ptrdiff_t __v, memory_order __x) volatile
void* __p = atomic_fetch_add_explicit(this, sizeof(_Tp) * __v, __x);
return static_cast<_Tp*>(__p);

// As fetch_add, but subtracting sizeof(_Tp) * __v bytes.
template<typename _Tp>
atomic<_Tp*>::fetch_sub(ptrdiff_t __v, memory_order __x) volatile
void* __p = atomic_fetch_sub_explicit(this, sizeof(_Tp) * __v, __x);
return static_cast<_Tp*>(__p);
4096 _GLIBCXX_END_NAMESPACE