//===-- sanitizer_atomic_clang.h --------------------------------*- C++ -*-===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Not intended for direct inclusion. Include sanitizer_atomic.h.
//
//===----------------------------------------------------------------------===//

#ifndef SANITIZER_ATOMIC_CLANG_H
#define SANITIZER_ATOMIC_CLANG_H

#if defined(__i386__) || defined(__x86_64__)
# include "sanitizer_atomic_clang_x86.h"
#else
# include "sanitizer_atomic_clang_other.h"
#endif
namespace __sanitizer {
// We would like to just use compiler builtin atomic operations
// for loads and stores, but they are mostly broken in clang:
// - they lead to vastly inefficient code generation
//   (http://llvm.org/bugs/show_bug.cgi?id=17281)
// - 64-bit atomic operations are not implemented on x86_32
//   (http://llvm.org/bugs/show_bug.cgi?id=15034)
// - they are not implemented on ARM
//   error: undefined reference to '__atomic_load_4'

// See http://www.cl.cam.ac.uk/~pes20/cpp/cpp0xmappings.html
// for mappings of the memory model to different processors.
INLINE void atomic_signal_fence(memory_order) {
  // Compiler-only barrier: prevents the compiler from reordering memory
  // accesses across this point, emits no machine instruction.
  __asm__ __volatile__("" ::: "memory");
}

INLINE void atomic_thread_fence(memory_order) {
  // Full hardware memory barrier.
  __sync_synchronize();
}
template<typename T>
INLINE typename T::Type atomic_fetch_add(volatile T *a,
    typename T::Type v, memory_order mo) {
  // __sync_fetch_and_add is a full barrier, so the requested order is unused.
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return __sync_fetch_and_add(&a->val_dont_use, v);
}
template<typename T>
INLINE typename T::Type atomic_fetch_sub(volatile T *a,
    typename T::Type v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  // Implemented as a fetch_add of the negated value.
  return __sync_fetch_and_add(&a->val_dont_use, -v);
}
template<typename T>
INLINE typename T::Type atomic_exchange(volatile T *a,
    typename T::Type v, memory_order mo) {
  DCHECK(!((uptr)a % sizeof(*a)));
  // __sync_lock_test_and_set is only an acquire barrier, so issue a full
  // barrier before the exchange for release/acq_rel/seq_cst orderings.
  if (mo & (memory_order_release | memory_order_acq_rel
      | memory_order_seq_cst))
    __sync_synchronize();
  v = __sync_lock_test_and_set(&a->val_dont_use, v);
  // And a trailing full barrier for sequential consistency.
  if (mo == memory_order_seq_cst)
    __sync_synchronize();
  return v;
}
template<typename T>
INLINE bool atomic_compare_exchange_strong(volatile T *a,
                                           typename T::Type *cmp,
                                           typename T::Type xchg,
                                           memory_order mo) {
  typedef typename T::Type Type;
  Type cmpv = *cmp;
  Type prev = __sync_val_compare_and_swap(&a->val_dont_use, cmpv, xchg);
  if (prev == cmpv)
    return true;
  // The CAS failed: report the value actually observed back to the caller.
  *cmp = prev;
  return false;
}
template<typename T>
INLINE bool atomic_compare_exchange_weak(volatile T *a,
                                         typename T::Type *cmp,
                                         typename T::Type xchg,
                                         memory_order mo) {
  // The __sync compare-and-swap builtin does not fail spuriously, so the
  // weak form simply forwards to the strong one.
  return atomic_compare_exchange_strong(a, cmp, xchg, mo);
}
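
// Usage sketch (illustrative only): a minimal one-shot flag built on the CAS
// operation above. It assumes atomic_uint32_t and u32 as provided via
// sanitizer_atomic.h / sanitizer_internal_defs.h, which is how this header is
// normally reached.
//
//   static atomic_uint32_t init_flag;
//
//   void InitOnce() {
//     u32 expected = 0;
//     // Only the thread that flips the flag 0 -> 1 does the initialization.
//     if (atomic_compare_exchange_strong(&init_flag, &expected, 1u,
//                                        memory_order_acq_rel)) {
//       // ... one-time setup ...
//     }
//   }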
}  // namespace __sanitizer

#endif  // SANITIZER_ATOMIC_CLANG_H