3 // Copyright (C) 2008, 2009
4 // Free Software Foundation, Inc.
6 // This file is part of the GNU ISO C++ Library. This library is free
7 // software; you can redistribute it and/or modify it under the
8 // terms of the GNU General Public License as published by the
9 // Free Software Foundation; either version 3, or (at your option)
12 // This library is distributed in the hope that it will be useful,
13 // but WITHOUT ANY WARRANTY; without even the implied warranty of
14 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 // GNU General Public License for more details.
17 // Under Section 7 of GPL version 3, you are granted additional
18 // permissions described in the GCC Runtime Library Exception, version
19 // 3.1, as published by the Free Software Foundation.
21 // You should have received a copy of the GNU General Public License and
22 // a copy of the GCC Runtime Library Exception along with this program;
23 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
24 // <http://www.gnu.org/licenses/>.
27 * This is a Standard C++ Library file. You should @c #include this file
28 * in your programs, rather than any of the "*.h" implementation files.
30 * This is the C++ version of the Standard C Library header @c stdatomic.h,
31 * and its contents are (mostly) the same as that header, but are all
32 * contained in the namespace @c std (except for names which are defined
36 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
37 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
39 #ifndef _GLIBCXX_STDATOMIC
40 #define _GLIBCXX_STDATOMIC 1
42 #pragma GCC system_header
44 #ifndef __GXX_EXPERIMENTAL_CXX0X__
45 # include <c++0x_warning.h>
48 #include <stdatomic.h>
51 _GLIBCXX_BEGIN_NAMESPACE(std)
// kill_dependency: returns its argument unchanged; used to terminate a
// memory_order_consume dependency chain on a value.
// NOTE(review): this listing is elided -- the return type and function
// body are not visible here; verify against the full header.
59 template<typename _Tp>
61 kill_dependency(_Tp __y)
// __calculate_memory_order: derives the memory order for the *failure*
// path of a single-order compare_exchange from the success order __m:
//   release -> relaxed, acq_rel -> acquire, anything else unchanged.
// Rationale: a failed compare-exchange performs no store, so release
// semantics cannot apply on the failure path.
// NOTE(review): the return statement (presumably returning __mo2) is
// not visible in this listing.
68 __calculate_memory_order(memory_order __m)
70 const bool __cond1 = __m == memory_order_release;
71 const bool __cond2 = __m == memory_order_acq_rel;
72 memory_order __mo1(__cond1 ? memory_order_relaxed : __m);
73 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
78 // Three nested namespaces for atomic implementation details.
80 // The nested namespace inlined into std:: is determined by the value
81 // of the _GLIBCXX_ATOMIC_PROPERTY macro and the resulting
82 // ATOMIC_*_LOCK_FREE macros. See file stdatomic.h.
84 // 0 == __atomic0 == Never lock-free
85 // 1 == __atomic1 == Best available, sometimes lock-free
86 // 2 == __atomic2 == Always lock-free
87 #include <bits/atomic_0.h>
88 #include <bits/atomic_2.h>
91 /// 29.4.3, Generic atomic type, primary class template.
// NOTE(review): this listing is elided -- the struct header, the stored
// data member (_M_i, referenced by the constructor below), default
// ctor/dtor, and all member return types are not visible here.
92 template<typename _Tp>
// Atomics are not copyable: both copy operations are deleted.
101 atomic(const atomic&) = delete;
102 atomic& operator=(const atomic&) = delete;
// Converting constructor: initializes the stored value _M_i from __i.
104 atomic(_Tp __i) : _M_i(__i) { }
// Implicit conversion to _Tp; declared only -- presumably implemented
// in terms of load(), TODO confirm against the full header.
106 operator _Tp() const volatile;
// Assignment stores __i (store's default order is seq_cst) and returns
// the assigned value __i rather than a reference to *this.
109 operator=(_Tp __i) volatile { store(__i); return __i; }
112 is_lock_free() const volatile;
// store/load/exchange all default to memory_order_seq_cst.
115 store(_Tp, memory_order = memory_order_seq_cst) volatile;
118 load(memory_order = memory_order_seq_cst) const volatile;
121 exchange(_Tp __i, memory_order = memory_order_seq_cst) volatile;
// Two-order overloads: caller supplies distinct success/failure orders.
124 compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order) volatile;
127 compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order) volatile;
// Single-order overloads: one order for both paths (failure order is
// derived -- cf. __calculate_memory_order in this header).
130 compare_exchange_weak(_Tp&, _Tp,
131 memory_order = memory_order_seq_cst) volatile;
134 compare_exchange_strong(_Tp&, _Tp,
135 memory_order = memory_order_seq_cst) volatile;
139 /// Partial specialization for pointer types.
// Derives from atomic_address (void*-based storage); the typed members
// below shadow the base's and are defined later in this header, where
// they cast to/from void* and scale offsets by sizeof(_Tp).
// NOTE(review): this listing is elided -- braces, default ctor/dtor,
// and member return types are not visible here.
140 template<typename _Tp>
141 struct atomic<_Tp*> : atomic_address
145 atomic(const atomic&) = delete;
146 atomic& operator=(const atomic&) = delete;
// Converting constructor forwards the pointer to the void*-based base.
148 atomic(_Tp* __v) : atomic_address(__v) { }
151 store(_Tp*, memory_order = memory_order_seq_cst) volatile;
154 load(memory_order = memory_order_seq_cst) const volatile;
157 exchange(_Tp*, memory_order = memory_order_seq_cst) volatile;
160 compare_exchange_weak(_Tp*&, _Tp*, memory_order, memory_order) volatile;
163 compare_exchange_strong(_Tp*&, _Tp*, memory_order, memory_order) volatile;
166 compare_exchange_weak(_Tp*&, _Tp*,
167 memory_order = memory_order_seq_cst) volatile;
170 compare_exchange_strong(_Tp*&, _Tp*,
171 memory_order = memory_order_seq_cst) volatile;
// fetch_add/fetch_sub take element counts (ptrdiff_t); the definitions
// below multiply by sizeof(_Tp) to get the byte offset.
174 fetch_add(ptrdiff_t, memory_order = memory_order_seq_cst) volatile;
177 fetch_sub(ptrdiff_t, memory_order = memory_order_seq_cst) volatile;
// Conversion and assignment; bodies elided in this listing.
179 operator _Tp*() const volatile
183 operator=(_Tp* __v) volatile
// Post- and pre-increment/decrement in terms of fetch_add/fetch_sub;
// the pre- forms adjust the fetched (old) value to yield the new one.
190 operator++(int) volatile { return fetch_add(1); }
193 operator--(int) volatile { return fetch_sub(1); }
196 operator++() volatile { return fetch_add(1) + 1; }
199 operator--() volatile { return fetch_sub(1) - 1; }
202 operator+=(ptrdiff_t __d) volatile
203 { return fetch_add(__d) + __d; }
206 operator-=(ptrdiff_t __d) volatile
207 { return fetch_sub(__d) - __d; }
211 /// Explicit specialization for void*
// Thin wrapper over atomic_address: re-exports the base's conversion
// operator and assignment, adds deleted copy operations.
// NOTE(review): listing elided -- template<> marker, braces, and
// default ctor/dtor are not visible here.
213 struct atomic<void*> : public atomic_address
215 typedef void* __integral_type;
216 typedef atomic_address __base_type;
220 atomic(const atomic&) = delete;
221 atomic& operator=(const atomic&) = delete;
223 atomic(__integral_type __i) : __base_type(__i) { }
225 using __base_type::operator __integral_type;
226 using __base_type::operator=;
// Each explicit specialization below follows the same pattern: inherit
// from the corresponding C-compatible atomic_* type, delete the copy
// operations, provide a converting constructor, and re-export the
// base's conversion operator and operator=.
// NOTE(review): listing elided -- template<> markers, braces, and
// default ctor/dtor lines are not visible here.
229 /// Explicit specialization for bool.
231 struct atomic<bool> : public atomic_bool
233 typedef bool __integral_type;
234 typedef atomic_bool __base_type;
238 atomic(const atomic&) = delete;
239 atomic& operator=(const atomic&) = delete;
241 atomic(__integral_type __i) : __base_type(__i) { }
243 using __base_type::operator __integral_type;
244 using __base_type::operator=;
247 /// Explicit specialization for char.
249 struct atomic<char> : public atomic_char
251 typedef char __integral_type;
252 typedef atomic_char __base_type;
256 atomic(const atomic&) = delete;
257 atomic& operator=(const atomic&) = delete;
259 atomic(__integral_type __i) : __base_type(__i) { }
261 using __base_type::operator __integral_type;
262 using __base_type::operator=;
265 /// Explicit specialization for signed char.
267 struct atomic<signed char> : public atomic_schar
269 typedef signed char __integral_type;
270 typedef atomic_schar __base_type;
274 atomic(const atomic&) = delete;
275 atomic& operator=(const atomic&) = delete;
277 atomic(__integral_type __i) : __base_type(__i) { }
279 using __base_type::operator __integral_type;
280 using __base_type::operator=;
283 /// Explicit specialization for unsigned char.
285 struct atomic<unsigned char> : public atomic_uchar
287 typedef unsigned char __integral_type;
288 typedef atomic_uchar __base_type;
292 atomic(const atomic&) = delete;
293 atomic& operator=(const atomic&) = delete;
295 atomic(__integral_type __i) : __base_type(__i) { }
297 using __base_type::operator __integral_type;
298 using __base_type::operator=;
301 /// Explicit specialization for short.
303 struct atomic<short> : public atomic_short
305 typedef short __integral_type;
306 typedef atomic_short __base_type;
310 atomic(const atomic&) = delete;
311 atomic& operator=(const atomic&) = delete;
313 atomic(__integral_type __i) : __base_type(__i) { }
315 using __base_type::operator __integral_type;
316 using __base_type::operator=;
319 /// Explicit specialization for unsigned short.
321 struct atomic<unsigned short> : public atomic_ushort
323 typedef unsigned short __integral_type;
324 typedef atomic_ushort __base_type;
328 atomic(const atomic&) = delete;
329 atomic& operator=(const atomic&) = delete;
331 atomic(__integral_type __i) : __base_type(__i) { }
333 using __base_type::operator __integral_type;
334 using __base_type::operator=;
337 /// Explicit specialization for int.
// NOTE(review): `: atomic_int` omits the `public` keyword used by every
// sibling specialization in this header. Harmless -- public is the
// default for struct inheritance -- but inconsistent; consider adding
// `public` for uniformity when the full header is available.
339 struct atomic<int> : atomic_int
341 typedef int __integral_type;
342 typedef atomic_int __base_type;
346 atomic(const atomic&) = delete;
347 atomic& operator=(const atomic&) = delete;
349 atomic(__integral_type __i) : __base_type(__i) { }
351 using __base_type::operator __integral_type;
352 using __base_type::operator=;
355 /// Explicit specialization for unsigned int.
357 struct atomic<unsigned int> : public atomic_uint
359 typedef unsigned int __integral_type;
360 typedef atomic_uint __base_type;
364 atomic(const atomic&) = delete;
365 atomic& operator=(const atomic&) = delete;
367 atomic(__integral_type __i) : __base_type(__i) { }
369 using __base_type::operator __integral_type;
370 using __base_type::operator=;
// Remaining integral/character specializations; same wrapper pattern
// as above (inherit the C-compatible atomic_* base, delete copies,
// converting ctor, re-export conversion and operator=).
// NOTE(review): listing elided -- template<> markers, braces, and
// default ctor/dtor lines are not visible here.
373 /// Explicit specialization for long.
375 struct atomic<long> : public atomic_long
377 typedef long __integral_type;
378 typedef atomic_long __base_type;
382 atomic(const atomic&) = delete;
383 atomic& operator=(const atomic&) = delete;
385 atomic(__integral_type __i) : __base_type(__i) { }
387 using __base_type::operator __integral_type;
388 using __base_type::operator=;
391 /// Explicit specialization for unsigned long.
393 struct atomic<unsigned long> : public atomic_ulong
395 typedef unsigned long __integral_type;
396 typedef atomic_ulong __base_type;
400 atomic(const atomic&) = delete;
401 atomic& operator=(const atomic&) = delete;
403 atomic(__integral_type __i) : __base_type(__i) { }
405 using __base_type::operator __integral_type;
406 using __base_type::operator=;
409 /// Explicit specialization for long long.
411 struct atomic<long long> : public atomic_llong
413 typedef long long __integral_type;
414 typedef atomic_llong __base_type;
418 atomic(const atomic&) = delete;
419 atomic& operator=(const atomic&) = delete;
421 atomic(__integral_type __i) : __base_type(__i) { }
423 using __base_type::operator __integral_type;
424 using __base_type::operator=;
427 /// Explicit specialization for unsigned long long.
429 struct atomic<unsigned long long> : public atomic_ullong
431 typedef unsigned long long __integral_type;
432 typedef atomic_ullong __base_type;
436 atomic(const atomic&) = delete;
437 atomic& operator=(const atomic&) = delete;
439 atomic(__integral_type __i) : __base_type(__i) { }
441 using __base_type::operator __integral_type;
442 using __base_type::operator=;
445 /// Explicit specialization for wchar_t.
447 struct atomic<wchar_t> : public atomic_wchar_t
449 typedef wchar_t __integral_type;
450 typedef atomic_wchar_t __base_type;
454 atomic(const atomic&) = delete;
455 atomic& operator=(const atomic&) = delete;
457 atomic(__integral_type __i) : __base_type(__i) { }
459 using __base_type::operator __integral_type;
460 using __base_type::operator=;
463 /// Explicit specialization for char16_t.
465 struct atomic<char16_t> : public atomic_char16_t
467 typedef char16_t __integral_type;
468 typedef atomic_char16_t __base_type;
472 atomic(const atomic&) = delete;
473 atomic& operator=(const atomic&) = delete;
475 atomic(__integral_type __i) : __base_type(__i) { }
477 using __base_type::operator __integral_type;
478 using __base_type::operator=;
481 /// Explicit specialization for char32_t.
483 struct atomic<char32_t> : public atomic_char32_t
485 typedef char32_t __integral_type;
486 typedef atomic_char32_t __base_type;
490 atomic(const atomic&) = delete;
491 atomic& operator=(const atomic&) = delete;
493 atomic(__integral_type __i) : __base_type(__i) { }
495 using __base_type::operator __integral_type;
496 using __base_type::operator=;
// Out-of-line definitions for atomic<_Tp*>. Each typed operation
// delegates to the void*-based atomic_address base and casts the
// result back to _Tp*.
// NOTE(review): listing elided -- return-type lines and some braces
// are not visible here.
500 template<typename _Tp>
502 atomic<_Tp*>::load(memory_order __m) const volatile
503 { return static_cast<_Tp*>(atomic_address::load(__m)); }
505 template<typename _Tp>
507 atomic<_Tp*>::exchange(_Tp* __v, memory_order __m) volatile
508 { return static_cast<_Tp*>(atomic_address::exchange(__v, __m)); }
// Two-order CAS: the expected slot __r (a _Tp*&) is reinterpreted as a
// void*& so the base CAS can write the observed value back through it.
// reinterpret_cast of _Tp** to void** relies on all object pointers
// sharing one representation -- true on this implementation's targets,
// but technically aliasing-sensitive; keep as-is.
510 template<typename _Tp>
512 atomic<_Tp*>::compare_exchange_weak(_Tp*& __r, _Tp* __v, memory_order __m1,
513 memory_order __m2) volatile
515 void** __vr = reinterpret_cast<void**>(&__r);
516 void* __vv = static_cast<void*>(__v);
517 return atomic_address::compare_exchange_weak(*__vr, __vv, __m1, __m2);
520 template<typename _Tp>
522 atomic<_Tp*>::compare_exchange_strong(_Tp*& __r, _Tp* __v,
524 memory_order __m2) volatile
526 void** __vr = reinterpret_cast<void**>(&__r);
527 void* __vv = static_cast<void*>(__v);
528 return atomic_address::compare_exchange_strong(*__vr, __vv, __m1, __m2);
// Single-order CAS overloads: failure order is derived from the
// success order via __calculate_memory_order (release->relaxed,
// acq_rel->acquire).
531 template<typename _Tp>
533 atomic<_Tp*>::compare_exchange_weak(_Tp*& __r, _Tp* __v,
534 memory_order __m) volatile
536 return compare_exchange_weak(__r, __v, __m,
537 __calculate_memory_order(__m));
540 template<typename _Tp>
542 atomic<_Tp*>::compare_exchange_strong(_Tp*& __r, _Tp* __v,
543 memory_order __m) volatile
545 return compare_exchange_strong(__r, __v, __m,
546 __calculate_memory_order(__m));
// Pointer arithmetic: the element count __d is scaled by sizeof(_Tp)
// to a byte offset for the address-based fetch; the old address comes
// back as void* and is cast to the typed pointer.
549 template<typename _Tp>
551 atomic<_Tp*>::fetch_add(ptrdiff_t __d, memory_order __m) volatile
553 void* __p = atomic_fetch_add_explicit(this, sizeof(_Tp) * __d, __m);
554 return static_cast<_Tp*>(__p);
557 template<typename _Tp>
559 atomic<_Tp*>::fetch_sub(ptrdiff_t __d, memory_order __m) volatile
561 void* __p = atomic_fetch_sub_explicit(this, sizeof(_Tp) * __d, __m);
562 return static_cast<_Tp*>(__p);
565 // Convenience function definitions, atomic_flag.
// C-style free functions forwarding to the member operations; the
// *_explicit forms take an explicit memory_order, the plain forms use
// the members' seq_cst defaults.
// NOTE(review): listing elided -- the `inline` return-type lines are
// not visible here.
567 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a, memory_order __m)
568 { return __a->test_and_set(__m); }
// NOTE(review): `return` on a presumably-void clear() -- legal only if
// this function also returns void; confirm against the full header.
571 atomic_flag_clear_explicit(volatile atomic_flag* __a, memory_order __m)
572 { return __a->clear(__m); }
575 // Convenience function definitions, atomic_address.
577 atomic_is_lock_free(const volatile atomic_address* __a)
578 { return __a->is_lock_free(); }
// NOTE(review): atomic_store's body (presumably { __a->store(__v); })
// is not visible in this listing.
581 atomic_store(volatile atomic_address* __a, void* __v)
585 atomic_store_explicit(volatile atomic_address* __a, void* __v,
587 { __a->store(__v, __m); }
590 atomic_load(const volatile atomic_address* __a)
591 { return __a->load(); }
594 atomic_load_explicit(const volatile atomic_address* __a, memory_order __m)
595 { return __a->load(__m); }
598 atomic_exchange(volatile atomic_address* __a, void* __v)
599 { return __a->exchange(__v); }
602 atomic_exchange_explicit(volatile atomic_address* __a, void* __v,
604 { return __a->exchange(__v, __m); }
// Non-explicit CAS forms pass seq_cst for both success and failure.
// __v1 is the expected-value slot, updated through the pointer.
607 atomic_compare_exchange_weak(volatile atomic_address* __a,
608 void** __v1, void* __v2)
610 return __a->compare_exchange_weak(*__v1, __v2, memory_order_seq_cst,
611 memory_order_seq_cst);
615 atomic_compare_exchange_strong(volatile atomic_address* __a,
616 void** __v1, void* __v2)
618 return __a->compare_exchange_strong(*__v1, __v2, memory_order_seq_cst,
619 memory_order_seq_cst);
623 atomic_compare_exchange_weak_explicit(volatile atomic_address* __a,
624 void** __v1, void* __v2,
625 memory_order __m1, memory_order __m2)
626 { return __a->compare_exchange_weak(*__v1, __v2, __m1, __m2); }
629 atomic_compare_exchange_strong_explicit(volatile atomic_address* __a,
630 void** __v1, void* __v2,
631 memory_order __m1, memory_order __m2)
632 { return __a->compare_exchange_strong(*__v1, __v2, __m1, __m2); }
// Address arithmetic takes a byte offset (ptrdiff_t) directly.
635 atomic_fetch_add_explicit(volatile atomic_address* __a, ptrdiff_t __d,
637 { return __a->fetch_add(__d, __m); }
640 atomic_fetch_add(volatile atomic_address* __a, ptrdiff_t __d)
641 { return __a->fetch_add(__d); }
644 atomic_fetch_sub_explicit(volatile atomic_address* __a, ptrdiff_t __d,
646 { return __a->fetch_sub(__d, __m); }
649 atomic_fetch_sub(volatile atomic_address* __a, ptrdiff_t __d)
650 { return __a->fetch_sub(__d); }
653 // Convenience function definitions, atomic_bool.
// Same forwarding pattern as the atomic_address group above, for bool.
// NOTE(review): listing elided -- the `inline` return-type lines are
// not visible, and atomic_store's body (presumably { __a->store(__i); })
// is missing from this listing.
655 atomic_is_lock_free(const volatile atomic_bool* __a)
656 { return __a->is_lock_free(); }
659 atomic_store(volatile atomic_bool* __a, bool __i)
663 atomic_store_explicit(volatile atomic_bool* __a, bool __i, memory_order __m)
664 { __a->store(__i, __m); }
667 atomic_load(const volatile atomic_bool* __a)
668 { return __a->load(); }
671 atomic_load_explicit(const volatile atomic_bool* __a, memory_order __m)
672 { return __a->load(__m); }
675 atomic_exchange(volatile atomic_bool* __a, bool __i)
676 { return __a->exchange(__i); }
679 atomic_exchange_explicit(volatile atomic_bool* __a, bool __i,
681 { return __a->exchange(__i, __m); }
// Non-explicit CAS forms use seq_cst for both orders; __i1 is the
// expected-value slot, updated through the pointer.
684 atomic_compare_exchange_weak(volatile atomic_bool* __a, bool* __i1, bool __i2)
686 return __a->compare_exchange_weak(*__i1, __i2, memory_order_seq_cst,
687 memory_order_seq_cst);
691 atomic_compare_exchange_strong(volatile atomic_bool* __a,
692 bool* __i1, bool __i2)
694 return __a->compare_exchange_strong(*__i1, __i2, memory_order_seq_cst,
695 memory_order_seq_cst);
699 atomic_compare_exchange_weak_explicit(volatile atomic_bool* __a, bool* __i1,
700 bool __i2, memory_order __m1,
702 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
705 atomic_compare_exchange_strong_explicit(volatile atomic_bool* __a,
706 bool* __i1, bool __i2,
707 memory_order __m1, memory_order __m2)
708 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
712 // Free standing functions. Template argument should be constrained
713 // to integral types as specified in the standard.
// The *_explicit templates forward an explicit memory_order to the
// member operation on __atomic_base<_ITp>; the non-explicit templates
// below them forward to the *_explicit form with seq_cst.
// NOTE(review): listing elided -- return-type lines are not visible.
714 template<typename _ITp>
716 atomic_store_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
718 { __a->store(__i, __m); }
720 template<typename _ITp>
722 atomic_load_explicit(const volatile __atomic_base<_ITp>* __a,
724 { return __a->load(__m); }
726 template<typename _ITp>
728 atomic_exchange_explicit(volatile __atomic_base<_ITp>* __a,
729 _ITp __i, memory_order __m)
730 { return __a->exchange(__i, __m); }
// __i1 points at the expected-value slot, updated through the pointer.
732 template<typename _ITp>
734 atomic_compare_exchange_weak_explicit(volatile __atomic_base<_ITp>* __a,
735 _ITp* __i1, _ITp __i2,
736 memory_order __m1, memory_order __m2)
737 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
739 template<typename _ITp>
741 atomic_compare_exchange_strong_explicit(volatile __atomic_base<_ITp>* __a,
742 _ITp* __i1, _ITp __i2,
745 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
// Read-modify-write forwards: add, sub, and, or, xor.
747 template<typename _ITp>
749 atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
751 { return __a->fetch_add(__i, __m); }
753 template<typename _ITp>
755 atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
757 { return __a->fetch_sub(__i, __m); }
759 template<typename _ITp>
761 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
763 { return __a->fetch_and(__i, __m); }
765 template<typename _ITp>
767 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
769 { return __a->fetch_or(__i, __m); }
771 template<typename _ITp>
773 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
775 { return __a->fetch_xor(__i, __m); }
777 template<typename _ITp>
779 atomic_is_lock_free(const volatile __atomic_base<_ITp>* __a)
780 { return __a->is_lock_free(); }
// Non-explicit forms: forward to *_explicit with memory_order_seq_cst.
782 template<typename _ITp>
784 atomic_store(volatile __atomic_base<_ITp>* __a, _ITp __i)
785 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
787 template<typename _ITp>
789 atomic_load(const volatile __atomic_base<_ITp>* __a)
790 { return atomic_load_explicit(__a, memory_order_seq_cst); }
792 template<typename _ITp>
794 atomic_exchange(volatile __atomic_base<_ITp>* __a, _ITp __i)
795 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
797 template<typename _ITp>
799 atomic_compare_exchange_weak(volatile __atomic_base<_ITp>* __a,
800 _ITp* __i1, _ITp __i2)
802 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
803 memory_order_seq_cst,
804 memory_order_seq_cst);
807 template<typename _ITp>
809 atomic_compare_exchange_strong(volatile __atomic_base<_ITp>* __a,
810 _ITp* __i1, _ITp __i2)
812 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
813 memory_order_seq_cst,
814 memory_order_seq_cst);
817 template<typename _ITp>
819 atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i)
820 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
822 template<typename _ITp>
824 atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i)
825 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
827 template<typename _ITp>
829 atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i)
830 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
832 template<typename _ITp>
834 atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i)
835 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
837 template<typename _ITp>
839 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i)
840 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
844 _GLIBCXX_END_NAMESPACE