//===-- tsan_interface_atomic.h ---------------------------------*- C++ -*-===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// Public interface header for TSan atomics.
//===----------------------------------------------------------------------===//
#ifndef TSAN_INTERFACE_ATOMIC_H
#define TSAN_INTERFACE_ATOMIC_H

#ifdef __cplusplus
extern "C" {
#endif

typedef char     __tsan_atomic8;
typedef short    __tsan_atomic16;  // NOLINT
typedef int      __tsan_atomic32;
typedef long     __tsan_atomic64;  // NOLINT
#if defined(__SIZEOF_INT128__) \
    || (__clang_major__ * 100 + __clang_minor__ >= 302)
__extension__ typedef __int128 __tsan_atomic128;
# define __TSAN_HAS_INT128 1
#else
# define __TSAN_HAS_INT128 0
#endif

// Part of ABI, do not change.
// http://llvm.org/viewvc/llvm-project/libcxx/trunk/include/atomic?view=markup
typedef enum {
  __tsan_memory_order_relaxed,
  __tsan_memory_order_consume,
  __tsan_memory_order_acquire,
  __tsan_memory_order_release,
  __tsan_memory_order_acq_rel,
  __tsan_memory_order_seq_cst
} __tsan_memory_order;

__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a,
    __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a,
    __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a,
    __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a,
    __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128 *a,
    __tsan_memory_order mo);
#endif

void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v,
    __tsan_memory_order mo);
void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v,
    __tsan_memory_order mo);
void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v,
    __tsan_memory_order mo);
void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v,
    __tsan_memory_order mo);
#if __TSAN_HAS_INT128
void __tsan_atomic128_store(volatile __tsan_atomic128 *a, __tsan_atomic128 v,
    __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_sub(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_sub(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_sub(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_sub(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_sub(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#endif

int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#endif

__tsan_atomic8 __tsan_atomic8_compare_exchange_val(
    volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic16 __tsan_atomic16_compare_exchange_val(
    volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic32 __tsan_atomic32_compare_exchange_val(
    volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic64 __tsan_atomic64_compare_exchange_val(
    volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_compare_exchange_val(
    volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
#endif

void __tsan_atomic_thread_fence(__tsan_memory_order mo);
void __tsan_atomic_signal_fence(__tsan_memory_order mo);

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // TSAN_INTERFACE_ATOMIC_H