1 //===-- sanitizer_atomic_msvc.h ---------------------------------*- C++ -*-===//
3 // This file is distributed under the University of Illinois Open Source
4 // License. See LICENSE.TXT for details.
6 //===----------------------------------------------------------------------===//
8 // This file is a part of ThreadSanitizer/AddressSanitizer runtime.
9 // Not intended for direct inclusion. Include sanitizer_atomic.h.
11 //===----------------------------------------------------------------------===//
13 #ifndef SANITIZER_ATOMIC_MSVC_H
14 #define SANITIZER_ATOMIC_MSVC_H
16 extern "C" void _ReadWriteBarrier();
17 #pragma intrinsic(_ReadWriteBarrier)
18 extern "C" void _mm_mfence();
19 #pragma intrinsic(_mm_mfence)
20 extern "C" void _mm_pause();
21 #pragma intrinsic(_mm_pause)
22 extern "C" char _InterlockedExchange8( // NOLINT
23 char volatile *Addend
, char Value
); // NOLINT
24 #pragma intrinsic(_InterlockedExchange8)
25 extern "C" short _InterlockedExchange16( // NOLINT
26 short volatile *Addend
, short Value
); // NOLINT
27 #pragma intrinsic(_InterlockedExchange16)
28 extern "C" long _InterlockedExchange( // NOLINT
29 long volatile *Addend
, long Value
); // NOLINT
30 #pragma intrinsic(_InterlockedExchange)
31 extern "C" long _InterlockedExchangeAdd( // NOLINT
32 long volatile * Addend
, long Value
); // NOLINT
33 #pragma intrinsic(_InterlockedExchangeAdd)
34 extern "C" short _InterlockedCompareExchange16( // NOLINT
35 short volatile *Destination
, // NOLINT
36 short Exchange
, short Comparand
); // NOLINT
37 #pragma intrinsic(_InterlockedCompareExchange16)
39 long long _InterlockedCompareExchange64( // NOLINT
40 long long volatile *Destination
, // NOLINT
41 long long Exchange
, long long Comparand
); // NOLINT
42 #pragma intrinsic(_InterlockedCompareExchange64)
43 extern "C" void *_InterlockedCompareExchangePointer(
44 void *volatile *Destination
,
45 void *Exchange
, void *Comparand
);
46 #pragma intrinsic(_InterlockedCompareExchangePointer)
48 long __cdecl
_InterlockedCompareExchange( // NOLINT
49 long volatile *Destination
, // NOLINT
50 long Exchange
, long Comparand
); // NOLINT
51 #pragma intrinsic(_InterlockedCompareExchange)
54 extern "C" long long _InterlockedExchangeAdd64( // NOLINT
55 long long volatile * Addend
, long long Value
); // NOLINT
56 #pragma intrinsic(_InterlockedExchangeAdd64)
59 namespace __sanitizer
{
61 INLINE
void atomic_signal_fence(memory_order
) {
65 INLINE
void atomic_thread_fence(memory_order
) {
69 INLINE
void proc_yield(int cnt
) {
70 for (int i
= 0; i
< cnt
; i
++)
75 INLINE typename
T::Type
atomic_load(
76 const volatile T
*a
, memory_order mo
) {
77 DCHECK(mo
& (memory_order_relaxed
| memory_order_consume
78 | memory_order_acquire
| memory_order_seq_cst
));
79 DCHECK(!((uptr
)a
% sizeof(*a
)));
81 // FIXME(dvyukov): 64-bit load is not atomic on 32-bits.
82 if (mo
== memory_order_relaxed
) {
85 atomic_signal_fence(memory_order_seq_cst
);
87 atomic_signal_fence(memory_order_seq_cst
);
93 INLINE
void atomic_store(volatile T
*a
, typename
T::Type v
, memory_order mo
) {
94 DCHECK(mo
& (memory_order_relaxed
| memory_order_release
95 | memory_order_seq_cst
));
96 DCHECK(!((uptr
)a
% sizeof(*a
)));
97 // FIXME(dvyukov): 64-bit store is not atomic on 32-bits.
98 if (mo
== memory_order_relaxed
) {
101 atomic_signal_fence(memory_order_seq_cst
);
103 atomic_signal_fence(memory_order_seq_cst
);
105 if (mo
== memory_order_seq_cst
)
106 atomic_thread_fence(memory_order_seq_cst
);
109 INLINE u32
atomic_fetch_add(volatile atomic_uint32_t
*a
,
110 u32 v
, memory_order mo
) {
112 DCHECK(!((uptr
)a
% sizeof(*a
)));
113 return (u32
)_InterlockedExchangeAdd(
114 (volatile long*)&a
->val_dont_use
, (long)v
); // NOLINT
117 INLINE uptr
atomic_fetch_add(volatile atomic_uintptr_t
*a
,
118 uptr v
, memory_order mo
) {
120 DCHECK(!((uptr
)a
% sizeof(*a
)));
122 return (uptr
)_InterlockedExchangeAdd64(
123 (volatile long long*)&a
->val_dont_use
, (long long)v
); // NOLINT
125 return (uptr
)_InterlockedExchangeAdd(
126 (volatile long*)&a
->val_dont_use
, (long)v
); // NOLINT
130 INLINE u32
atomic_fetch_sub(volatile atomic_uint32_t
*a
,
131 u32 v
, memory_order mo
) {
133 DCHECK(!((uptr
)a
% sizeof(*a
)));
134 return (u32
)_InterlockedExchangeAdd(
135 (volatile long*)&a
->val_dont_use
, -(long)v
); // NOLINT
138 INLINE uptr
atomic_fetch_sub(volatile atomic_uintptr_t
*a
,
139 uptr v
, memory_order mo
) {
141 DCHECK(!((uptr
)a
% sizeof(*a
)));
143 return (uptr
)_InterlockedExchangeAdd64(
144 (volatile long long*)&a
->val_dont_use
, -(long long)v
); // NOLINT
146 return (uptr
)_InterlockedExchangeAdd(
147 (volatile long*)&a
->val_dont_use
, -(long)v
); // NOLINT
151 INLINE u8
atomic_exchange(volatile atomic_uint8_t
*a
,
152 u8 v
, memory_order mo
) {
154 DCHECK(!((uptr
)a
% sizeof(*a
)));
155 return (u8
)_InterlockedExchange8((volatile char*)&a
->val_dont_use
, v
);
158 INLINE u16
atomic_exchange(volatile atomic_uint16_t
*a
,
159 u16 v
, memory_order mo
) {
161 DCHECK(!((uptr
)a
% sizeof(*a
)));
162 return (u16
)_InterlockedExchange16((volatile short*)&a
->val_dont_use
, v
);
165 INLINE u32
atomic_exchange(volatile atomic_uint32_t
*a
,
166 u32 v
, memory_order mo
) {
168 DCHECK(!((uptr
)a
% sizeof(*a
)));
169 return (u32
)_InterlockedExchange((volatile long*)&a
->val_dont_use
, v
);
174 INLINE
bool atomic_compare_exchange_strong(volatile atomic_uint8_t
*a
,
179 DCHECK(!((uptr
)a
% sizeof(*a
)));
186 lock cmpxchg
[ecx
], dl
197 INLINE
bool atomic_compare_exchange_strong(volatile atomic_uintptr_t
*a
,
202 uptr prev
= (uptr
)_InterlockedCompareExchangePointer(
203 (void*volatile*)&a
->val_dont_use
, (void*)xchg
, (void*)cmpv
);
210 INLINE
bool atomic_compare_exchange_strong(volatile atomic_uint16_t
*a
,
215 u16 prev
= (u16
)_InterlockedCompareExchange16(
216 (volatile short*)&a
->val_dont_use
, (short)xchg
, (short)cmpv
);
223 INLINE
bool atomic_compare_exchange_strong(volatile atomic_uint32_t
*a
,
228 u32 prev
= (u32
)_InterlockedCompareExchange(
229 (volatile long*)&a
->val_dont_use
, (long)xchg
, (long)cmpv
);
236 INLINE
bool atomic_compare_exchange_strong(volatile atomic_uint64_t
*a
,
241 u64 prev
= (u64
)_InterlockedCompareExchange64(
242 (volatile long long*)&a
->val_dont_use
, (long long)xchg
, (long long)cmpv
);
250 INLINE
bool atomic_compare_exchange_weak(volatile T
*a
,
251 typename
T::Type
*cmp
,
252 typename
T::Type xchg
,
254 return atomic_compare_exchange_strong(a
, cmp
, xchg
, mo
);
257 } // namespace __sanitizer
#endif  // SANITIZER_ATOMIC_MSVC_H