Remove assert in get_def_bb_for_const
[official-gcc.git] / libsanitizer / sanitizer_common / sanitizer_atomic_msvc.h
blob4ac3b90769f25e8de10c3f40567a00591ef57a7a
1 //===-- sanitizer_atomic_msvc.h ---------------------------------*- C++ -*-===//
2 //
3 // This file is distributed under the University of Illinois Open Source
4 // License. See LICENSE.TXT for details.
5 //
6 //===----------------------------------------------------------------------===//
7 //
8 // This file is a part of ThreadSanitizer/AddressSanitizer runtime.
9 // Not intended for direct inclusion. Include sanitizer_atomic.h.
11 //===----------------------------------------------------------------------===//
13 #ifndef SANITIZER_ATOMIC_MSVC_H
14 #define SANITIZER_ATOMIC_MSVC_H
16 extern "C" void _ReadWriteBarrier();
17 #pragma intrinsic(_ReadWriteBarrier)
18 extern "C" void _mm_mfence();
19 #pragma intrinsic(_mm_mfence)
20 extern "C" void _mm_pause();
21 #pragma intrinsic(_mm_pause)
22 extern "C" char _InterlockedExchange8( // NOLINT
23 char volatile *Addend, char Value); // NOLINT
24 #pragma intrinsic(_InterlockedExchange8)
25 extern "C" short _InterlockedExchange16( // NOLINT
26 short volatile *Addend, short Value); // NOLINT
27 #pragma intrinsic(_InterlockedExchange16)
28 extern "C" long _InterlockedExchange( // NOLINT
29 long volatile *Addend, long Value); // NOLINT
30 #pragma intrinsic(_InterlockedExchange)
31 extern "C" long _InterlockedExchangeAdd( // NOLINT
32 long volatile * Addend, long Value); // NOLINT
33 #pragma intrinsic(_InterlockedExchangeAdd)
34 extern "C" short _InterlockedCompareExchange16( // NOLINT
35 short volatile *Destination, // NOLINT
36 short Exchange, short Comparand); // NOLINT
37 #pragma intrinsic(_InterlockedCompareExchange16)
38 extern "C"
39 long long _InterlockedCompareExchange64( // NOLINT
40 long long volatile *Destination, // NOLINT
41 long long Exchange, long long Comparand); // NOLINT
42 #pragma intrinsic(_InterlockedCompareExchange64)
43 extern "C" void *_InterlockedCompareExchangePointer(
44 void *volatile *Destination,
45 void *Exchange, void *Comparand);
46 #pragma intrinsic(_InterlockedCompareExchangePointer)
47 extern "C"
48 long __cdecl _InterlockedCompareExchange( // NOLINT
49 long volatile *Destination, // NOLINT
50 long Exchange, long Comparand); // NOLINT
51 #pragma intrinsic(_InterlockedCompareExchange)
53 #ifdef _WIN64
54 extern "C" long long _InterlockedExchangeAdd64( // NOLINT
55 long long volatile * Addend, long long Value); // NOLINT
56 #pragma intrinsic(_InterlockedExchangeAdd64)
57 #endif
59 namespace __sanitizer {
// Compiler-only barrier: prevents the compiler from moving memory accesses
// across this point. _ReadWriteBarrier emits no CPU fence instruction.
INLINE void atomic_signal_fence(memory_order) {
  _ReadWriteBarrier();
}
// Full hardware memory fence (MFENCE). The requested order is ignored;
// the strongest fence is always emitted.
INLINE void atomic_thread_fence(memory_order) {
  _mm_mfence();
}
69 INLINE void proc_yield(int cnt) {
70 for (int i = 0; i < cnt; i++)
71 _mm_pause();
// Loads the value of *a with the given memory order. Only
// relaxed/consume/acquire/seq_cst are legal orders for a load.
template<typename T>
INLINE typename T::Type atomic_load(
    const volatile T *a, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_consume
      | memory_order_acquire | memory_order_seq_cst));
  // The address must be naturally aligned for the plain load to be atomic.
  DCHECK(!((uptr)a % sizeof(*a)));
  typename T::Type v;
  // FIXME(dvyukov): 64-bit load is not atomic on 32-bits.
  if (mo == memory_order_relaxed) {
    v = a->val_dont_use;
  } else {
    // Compiler-only fences around the load: stop the compiler from
    // reordering surrounding accesses past it. No hardware fence is
    // emitted — this relies on x86's strong load ordering.
    atomic_signal_fence(memory_order_seq_cst);
    v = a->val_dont_use;
    atomic_signal_fence(memory_order_seq_cst);
  }
  return v;
}
// Stores v into *a with the given memory order. Only
// relaxed/release/seq_cst are legal orders for a store.
template<typename T>
INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_release
      | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  // FIXME(dvyukov): 64-bit store is not atomic on 32-bits.
  if (mo == memory_order_relaxed) {
    a->val_dont_use = v;
  } else {
    // Compiler-only fences: forbid compiler reordering around the store.
    atomic_signal_fence(memory_order_seq_cst);
    a->val_dont_use = v;
    atomic_signal_fence(memory_order_seq_cst);
  }
  // seq_cst additionally needs a full hardware fence after the store.
  if (mo == memory_order_seq_cst)
    atomic_thread_fence(memory_order_seq_cst);
}
109 INLINE u32 atomic_fetch_add(volatile atomic_uint32_t *a,
110 u32 v, memory_order mo) {
111 (void)mo;
112 DCHECK(!((uptr)a % sizeof(*a)));
113 return (u32)_InterlockedExchangeAdd(
114 (volatile long*)&a->val_dont_use, (long)v); // NOLINT
117 INLINE uptr atomic_fetch_add(volatile atomic_uintptr_t *a,
118 uptr v, memory_order mo) {
119 (void)mo;
120 DCHECK(!((uptr)a % sizeof(*a)));
121 #ifdef _WIN64
122 return (uptr)_InterlockedExchangeAdd64(
123 (volatile long long*)&a->val_dont_use, (long long)v); // NOLINT
124 #else
125 return (uptr)_InterlockedExchangeAdd(
126 (volatile long*)&a->val_dont_use, (long)v); // NOLINT
127 #endif
130 INLINE u32 atomic_fetch_sub(volatile atomic_uint32_t *a,
131 u32 v, memory_order mo) {
132 (void)mo;
133 DCHECK(!((uptr)a % sizeof(*a)));
134 return (u32)_InterlockedExchangeAdd(
135 (volatile long*)&a->val_dont_use, -(long)v); // NOLINT
138 INLINE uptr atomic_fetch_sub(volatile atomic_uintptr_t *a,
139 uptr v, memory_order mo) {
140 (void)mo;
141 DCHECK(!((uptr)a % sizeof(*a)));
142 #ifdef _WIN64
143 return (uptr)_InterlockedExchangeAdd64(
144 (volatile long long*)&a->val_dont_use, -(long long)v); // NOLINT
145 #else
146 return (uptr)_InterlockedExchangeAdd(
147 (volatile long*)&a->val_dont_use, -(long)v); // NOLINT
148 #endif
151 INLINE u8 atomic_exchange(volatile atomic_uint8_t *a,
152 u8 v, memory_order mo) {
153 (void)mo;
154 DCHECK(!((uptr)a % sizeof(*a)));
155 return (u8)_InterlockedExchange8((volatile char*)&a->val_dont_use, v);
158 INLINE u16 atomic_exchange(volatile atomic_uint16_t *a,
159 u16 v, memory_order mo) {
160 (void)mo;
161 DCHECK(!((uptr)a % sizeof(*a)));
162 return (u16)_InterlockedExchange16((volatile short*)&a->val_dont_use, v);
165 INLINE u32 atomic_exchange(volatile atomic_uint32_t *a,
166 u32 v, memory_order mo) {
167 (void)mo;
168 DCHECK(!((uptr)a % sizeof(*a)));
169 return (u32)_InterlockedExchange((volatile long*)&a->val_dont_use, v);
#ifndef _WIN64

// 8-bit compare-and-swap for 32-bit builds, hand-written with MSVC inline
// asm (presumably because an _InterlockedCompareExchange8 intrinsic was
// not available in the targeted toolchains — TODO confirm).
// Semantics: if *a == *cmp, store xchgv into *a and return true; otherwise
// copy the observed value into *cmp and return false.
// CMPXCHG compares AL with [ecx]; on match it stores DL into [ecx],
// otherwise it loads the current memory value into AL.
INLINE bool atomic_compare_exchange_strong(volatile atomic_uint8_t *a,
                                           u8 *cmp,
                                           u8 xchgv,
                                           memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  u8 cmpv = *cmp;
  u8 prev;
  __asm {
    mov al, cmpv
    mov ecx, a
    mov dl, xchgv
    lock cmpxchg [ecx], dl
    mov prev, al
  }
  if (prev == cmpv)
    return true;
  *cmp = prev;
  return false;
}
#endif
197 INLINE bool atomic_compare_exchange_strong(volatile atomic_uintptr_t *a,
198 uptr *cmp,
199 uptr xchg,
200 memory_order mo) {
201 uptr cmpv = *cmp;
202 uptr prev = (uptr)_InterlockedCompareExchangePointer(
203 (void*volatile*)&a->val_dont_use, (void*)xchg, (void*)cmpv);
204 if (prev == cmpv)
205 return true;
206 *cmp = prev;
207 return false;
210 INLINE bool atomic_compare_exchange_strong(volatile atomic_uint16_t *a,
211 u16 *cmp,
212 u16 xchg,
213 memory_order mo) {
214 u16 cmpv = *cmp;
215 u16 prev = (u16)_InterlockedCompareExchange16(
216 (volatile short*)&a->val_dont_use, (short)xchg, (short)cmpv);
217 if (prev == cmpv)
218 return true;
219 *cmp = prev;
220 return false;
223 INLINE bool atomic_compare_exchange_strong(volatile atomic_uint32_t *a,
224 u32 *cmp,
225 u32 xchg,
226 memory_order mo) {
227 u32 cmpv = *cmp;
228 u32 prev = (u32)_InterlockedCompareExchange(
229 (volatile long*)&a->val_dont_use, (long)xchg, (long)cmpv);
230 if (prev == cmpv)
231 return true;
232 *cmp = prev;
233 return false;
236 INLINE bool atomic_compare_exchange_strong(volatile atomic_uint64_t *a,
237 u64 *cmp,
238 u64 xchg,
239 memory_order mo) {
240 u64 cmpv = *cmp;
241 u64 prev = (u64)_InterlockedCompareExchange64(
242 (volatile long long*)&a->val_dont_use, (long long)xchg, (long long)cmpv);
243 if (prev == cmpv)
244 return true;
245 *cmp = prev;
246 return false;
// Weak CAS is permitted to fail spuriously; this implementation simply
// forwards to the strong version, which is always legal (strong implies
// weak semantics).
template<typename T>
INLINE bool atomic_compare_exchange_weak(volatile T *a,
                                         typename T::Type *cmp,
                                         typename T::Type xchg,
                                         memory_order mo) {
  return atomic_compare_exchange_strong(a, cmp, xchg, mo);
}
257 } // namespace __sanitizer
#endif  // SANITIZER_ATOMIC_MSVC_H