1 //===-- sanitizer_atomic_msvc.h ---------------------------------*- C++ -*-===//
3 // This file is distributed under the University of Illinois Open Source
4 // License. See LICENSE.TXT for details.
6 //===----------------------------------------------------------------------===//
8 // This file is a part of ThreadSanitizer/AddressSanitizer runtime.
9 // Not intended for direct inclusion. Include sanitizer_atomic.h.
11 //===----------------------------------------------------------------------===//
13 #ifndef SANITIZER_ATOMIC_MSVC_H
14 #define SANITIZER_ATOMIC_MSVC_H
// MSVC compiler intrinsics used by the atomic operations below.
// Declaring them extern "C" and marking them with #pragma intrinsic
// makes the compiler emit them inline instead of as library calls.
extern "C" void _ReadWriteBarrier();
#pragma intrinsic(_ReadWriteBarrier)
extern "C" void _mm_mfence();
#pragma intrinsic(_mm_mfence)
extern "C" void _mm_pause();
#pragma intrinsic(_mm_pause)
extern "C" long _InterlockedExchangeAdd(  // NOLINT
    long volatile *Addend, long Value);   // NOLINT
#pragma intrinsic(_InterlockedExchangeAdd)
27 extern "C" void *_InterlockedCompareExchangePointer(
28 void *volatile *Destination
,
29 void *Exchange
, void *Comparand
);
30 #pragma intrinsic(_InterlockedCompareExchangePointer)
32 // There's no _InterlockedCompareExchangePointer intrinsic on x86,
33 // so call _InterlockedCompareExchange instead.
35 long __cdecl
_InterlockedCompareExchange( // NOLINT
36 long volatile *Destination
, // NOLINT
37 long Exchange
, long Comparand
); // NOLINT
38 #pragma intrinsic(_InterlockedCompareExchange)
40 inline static void *_InterlockedCompareExchangePointer(
41 void *volatile *Destination
,
42 void *Exchange
, void *Comparand
) {
43 return reinterpret_cast<void*>(
44 _InterlockedCompareExchange(
45 reinterpret_cast<long volatile*>(Destination
), // NOLINT
46 reinterpret_cast<long>(Exchange
), // NOLINT
47 reinterpret_cast<long>(Comparand
))); // NOLINT
51 namespace __sanitizer
{
53 INLINE
void atomic_signal_fence(memory_order
) {
57 INLINE
void atomic_thread_fence(memory_order
) {
61 INLINE
void proc_yield(int cnt
) {
62 for (int i
= 0; i
< cnt
; i
++)
67 INLINE typename
T::Type
atomic_load(
68 const volatile T
*a
, memory_order mo
) {
69 DCHECK(mo
& (memory_order_relaxed
| memory_order_consume
70 | memory_order_acquire
| memory_order_seq_cst
));
71 DCHECK(!((uptr
)a
% sizeof(*a
)));
73 // FIXME(dvyukov): 64-bit load is not atomic on 32-bits.
74 if (mo
== memory_order_relaxed
) {
77 atomic_signal_fence(memory_order_seq_cst
);
79 atomic_signal_fence(memory_order_seq_cst
);
85 INLINE
void atomic_store(volatile T
*a
, typename
T::Type v
, memory_order mo
) {
86 DCHECK(mo
& (memory_order_relaxed
| memory_order_release
87 | memory_order_seq_cst
));
88 DCHECK(!((uptr
)a
% sizeof(*a
)));
89 // FIXME(dvyukov): 64-bit store is not atomic on 32-bits.
90 if (mo
== memory_order_relaxed
) {
93 atomic_signal_fence(memory_order_seq_cst
);
95 atomic_signal_fence(memory_order_seq_cst
);
97 if (mo
== memory_order_seq_cst
)
98 atomic_thread_fence(memory_order_seq_cst
);
101 INLINE u32
atomic_fetch_add(volatile atomic_uint32_t
*a
,
102 u32 v
, memory_order mo
) {
104 DCHECK(!((uptr
)a
% sizeof(*a
)));
105 return (u32
)_InterlockedExchangeAdd(
106 (volatile long*)&a
->val_dont_use
, (long)v
); // NOLINT
109 INLINE u8
atomic_exchange(volatile atomic_uint8_t
*a
,
110 u8 v
, memory_order mo
) {
112 DCHECK(!((uptr
)a
% sizeof(*a
)));
116 xchg
[eax
], cl
// NOLINT
122 INLINE u16
atomic_exchange(volatile atomic_uint16_t
*a
,
123 u16 v
, memory_order mo
) {
125 DCHECK(!((uptr
)a
% sizeof(*a
)));
129 xchg
[eax
], cx
// NOLINT
135 INLINE
bool atomic_compare_exchange_strong(volatile atomic_uintptr_t
*a
,
140 uptr prev
= (uptr
)_InterlockedCompareExchangePointer(
141 (void*volatile*)&a
->val_dont_use
, (void*)xchg
, (void*)cmpv
);
149 INLINE
bool atomic_compare_exchange_weak(volatile T
*a
,
150 typename
T::Type
*cmp
,
151 typename
T::Type xchg
,
153 return atomic_compare_exchange_strong(a
, cmp
, xchg
, mo
);
156 } // namespace __sanitizer
#endif  // SANITIZER_ATOMIC_MSVC_H