Daily bump.
[official-gcc.git] / libsanitizer / tsan / tsan_interface_java.cc
blob7f451690946e9d554abd00a0cea5a5db0dc6ffb8
1 //===-- tsan_interface_java.cc --------------------------------------------===//
2 //
3 // This file is distributed under the University of Illinois Open Source
4 // License. See LICENSE.TXT for details.
5 //
6 //===----------------------------------------------------------------------===//
7 //
8 // This file is a part of ThreadSanitizer (TSan), a race detector.
9 //
10 //===----------------------------------------------------------------------===//
12 #include "tsan_interface_java.h"
13 #include "tsan_rtl.h"
14 #include "tsan_mutex.h"
15 #include "sanitizer_common/sanitizer_internal_defs.h"
16 #include "sanitizer_common/sanitizer_common.h"
17 #include "sanitizer_common/sanitizer_placement_new.h"
18 #include "sanitizer_common/sanitizer_stacktrace.h"
19 #include "sanitizer_common/sanitizer_procmaps.h"
21 using namespace __tsan; // NOLINT
23 namespace __tsan {
25 const uptr kHeapShadow = 0x300000000000ull;
26 const uptr kHeapAlignment = 8;
28 struct BlockDesc {
29 bool begin;
30 Mutex mtx;
31 SyncVar *head;
33 BlockDesc()
34 : mtx(MutexTypeJavaMBlock, StatMtxJavaMBlock)
35 , head() {
36 CHECK_EQ(begin, false);
37 begin = true;
40 ~BlockDesc() {
41 CHECK_EQ(begin, true);
42 begin = false;
43 ThreadState *thr = cur_thread();
44 SyncVar *s = head;
45 while (s) {
46 SyncVar *s1 = s->next;
47 StatInc(thr, StatSyncDestroyed);
48 s->mtx.Lock();
49 s->mtx.Unlock();
50 thr->mset.Remove(s->GetId());
51 DestroyAndFree(s);
52 s = s1;
57 struct JavaContext {
58 const uptr heap_begin;
59 const uptr heap_size;
60 BlockDesc *heap_shadow;
62 JavaContext(jptr heap_begin, jptr heap_size)
63 : heap_begin(heap_begin)
64 , heap_size(heap_size) {
65 uptr size = heap_size / kHeapAlignment * sizeof(BlockDesc);
66 heap_shadow = (BlockDesc*)MmapFixedNoReserve(kHeapShadow, size);
67 if ((uptr)heap_shadow != kHeapShadow) {
68 Printf("ThreadSanitizer: failed to mmap Java heap shadow\n");
69 Die();
74 class ScopedJavaFunc {
75 public:
76 ScopedJavaFunc(ThreadState *thr, uptr pc)
77 : thr_(thr) {
78 Initialize(thr_);
79 FuncEntry(thr, pc);
82 ~ScopedJavaFunc() {
83 FuncExit(thr_);
84 // FIXME(dvyukov): process pending signals.
87 private:
88 ThreadState *thr_;
91 static u64 jctx_buf[sizeof(JavaContext) / sizeof(u64) + 1];
92 static JavaContext *jctx;
94 static BlockDesc *getblock(uptr addr) {
95 uptr i = (addr - jctx->heap_begin) / kHeapAlignment;
96 return &jctx->heap_shadow[i];
99 static uptr USED getmem(BlockDesc *b) {
100 uptr i = b - jctx->heap_shadow;
101 uptr p = jctx->heap_begin + i * kHeapAlignment;
102 CHECK_GE(p, jctx->heap_begin);
103 CHECK_LT(p, jctx->heap_begin + jctx->heap_size);
104 return p;
107 static BlockDesc *getblockbegin(uptr addr) {
108 for (BlockDesc *b = getblock(addr);; b--) {
109 CHECK_GE(b, jctx->heap_shadow);
110 if (b->begin)
111 return b;
113 return 0;
116 SyncVar* GetJavaSync(ThreadState *thr, uptr pc, uptr addr,
117 bool write_lock, bool create) {
118 if (jctx == 0 || addr < jctx->heap_begin
119 || addr >= jctx->heap_begin + jctx->heap_size)
120 return 0;
121 BlockDesc *b = getblockbegin(addr);
122 DPrintf("#%d: GetJavaSync %p->%p\n", thr->tid, addr, b);
123 Lock l(&b->mtx);
124 SyncVar *s = b->head;
125 for (; s; s = s->next) {
126 if (s->addr == addr) {
127 DPrintf("#%d: found existing sync for %p\n", thr->tid, addr);
128 break;
131 if (s == 0 && create) {
132 DPrintf("#%d: creating new sync for %p\n", thr->tid, addr);
133 s = ctx->synctab.Create(thr, pc, addr);
134 s->next = b->head;
135 b->head = s;
137 if (s) {
138 if (write_lock)
139 s->mtx.Lock();
140 else
141 s->mtx.ReadLock();
143 return s;
146 SyncVar* GetAndRemoveJavaSync(ThreadState *thr, uptr pc, uptr addr) {
147 // We do not destroy Java mutexes other than in __tsan_java_free().
148 return 0;
151 } // namespace __tsan
// Common prologue for all __tsan_java_* entry points: fetches the current
// thread state and PCs, and installs the RAII ScopedJavaFunc guard.
// Declares 'thr' and 'pc' for use by the function body.
#define SCOPED_JAVA_FUNC(func) \
  ThreadState *thr = cur_thread(); \
  const uptr caller_pc = GET_CALLER_PC(); \
  const uptr pc = __sanitizer::StackTrace::GetCurrentPc(); \
  (void)pc; \
  ScopedJavaFunc scoped(thr, caller_pc); \
/**/
161 void __tsan_java_init(jptr heap_begin, jptr heap_size) {
162 SCOPED_JAVA_FUNC(__tsan_java_init);
163 DPrintf("#%d: java_init(%p, %p)\n", thr->tid, heap_begin, heap_size);
164 CHECK_EQ(jctx, 0);
165 CHECK_GT(heap_begin, 0);
166 CHECK_GT(heap_size, 0);
167 CHECK_EQ(heap_begin % kHeapAlignment, 0);
168 CHECK_EQ(heap_size % kHeapAlignment, 0);
169 CHECK_LT(heap_begin, heap_begin + heap_size);
170 jctx = new(jctx_buf) JavaContext(heap_begin, heap_size);
173 int __tsan_java_fini() {
174 SCOPED_JAVA_FUNC(__tsan_java_fini);
175 DPrintf("#%d: java_fini()\n", thr->tid);
176 CHECK_NE(jctx, 0);
177 // FIXME(dvyukov): this does not call atexit() callbacks.
178 int status = Finalize(thr);
179 DPrintf("#%d: java_fini() = %d\n", thr->tid, status);
180 return status;
183 void __tsan_java_alloc(jptr ptr, jptr size) {
184 SCOPED_JAVA_FUNC(__tsan_java_alloc);
185 DPrintf("#%d: java_alloc(%p, %p)\n", thr->tid, ptr, size);
186 CHECK_NE(jctx, 0);
187 CHECK_NE(size, 0);
188 CHECK_EQ(ptr % kHeapAlignment, 0);
189 CHECK_EQ(size % kHeapAlignment, 0);
190 CHECK_GE(ptr, jctx->heap_begin);
191 CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);
193 BlockDesc *b = getblock(ptr);
194 new(b) BlockDesc();
197 void __tsan_java_free(jptr ptr, jptr size) {
198 SCOPED_JAVA_FUNC(__tsan_java_free);
199 DPrintf("#%d: java_free(%p, %p)\n", thr->tid, ptr, size);
200 CHECK_NE(jctx, 0);
201 CHECK_NE(size, 0);
202 CHECK_EQ(ptr % kHeapAlignment, 0);
203 CHECK_EQ(size % kHeapAlignment, 0);
204 CHECK_GE(ptr, jctx->heap_begin);
205 CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);
207 BlockDesc *beg = getblock(ptr);
208 BlockDesc *end = getblock(ptr + size);
209 for (BlockDesc *b = beg; b != end; b++) {
210 if (b->begin)
211 b->~BlockDesc();
215 void __tsan_java_move(jptr src, jptr dst, jptr size) {
216 SCOPED_JAVA_FUNC(__tsan_java_move);
217 DPrintf("#%d: java_move(%p, %p, %p)\n", thr->tid, src, dst, size);
218 CHECK_NE(jctx, 0);
219 CHECK_NE(size, 0);
220 CHECK_EQ(src % kHeapAlignment, 0);
221 CHECK_EQ(dst % kHeapAlignment, 0);
222 CHECK_EQ(size % kHeapAlignment, 0);
223 CHECK_GE(src, jctx->heap_begin);
224 CHECK_LE(src + size, jctx->heap_begin + jctx->heap_size);
225 CHECK_GE(dst, jctx->heap_begin);
226 CHECK_LE(dst + size, jctx->heap_begin + jctx->heap_size);
227 CHECK(dst >= src + size || src >= dst + size);
229 // Assuming it's not running concurrently with threads that do
230 // memory accesses and mutex operations (stop-the-world phase).
231 { // NOLINT
232 BlockDesc *s = getblock(src);
233 BlockDesc *d = getblock(dst);
234 BlockDesc *send = getblock(src + size);
235 for (; s != send; s++, d++) {
236 CHECK_EQ(d->begin, false);
237 if (s->begin) {
238 DPrintf("#%d: moving block %p->%p\n", thr->tid, getmem(s), getmem(d));
239 new(d) BlockDesc;
240 d->head = s->head;
241 for (SyncVar *sync = d->head; sync; sync = sync->next) {
242 uptr newaddr = sync->addr - src + dst;
243 DPrintf("#%d: moving sync %p->%p\n", thr->tid, sync->addr, newaddr);
244 sync->addr = newaddr;
246 s->head = 0;
247 s->~BlockDesc();
252 { // NOLINT
253 u64 *s = (u64*)MemToShadow(src);
254 u64 *d = (u64*)MemToShadow(dst);
255 u64 *send = (u64*)MemToShadow(src + size);
256 for (; s != send; s++, d++) {
257 *d = *s;
258 *s = 0;
263 void __tsan_java_mutex_lock(jptr addr) {
264 SCOPED_JAVA_FUNC(__tsan_java_mutex_lock);
265 DPrintf("#%d: java_mutex_lock(%p)\n", thr->tid, addr);
266 CHECK_NE(jctx, 0);
267 CHECK_GE(addr, jctx->heap_begin);
268 CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
270 MutexCreate(thr, pc, addr, true, true, true);
271 MutexLock(thr, pc, addr);
274 void __tsan_java_mutex_unlock(jptr addr) {
275 SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock);
276 DPrintf("#%d: java_mutex_unlock(%p)\n", thr->tid, addr);
277 CHECK_NE(jctx, 0);
278 CHECK_GE(addr, jctx->heap_begin);
279 CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
281 MutexUnlock(thr, pc, addr);
284 void __tsan_java_mutex_read_lock(jptr addr) {
285 SCOPED_JAVA_FUNC(__tsan_java_mutex_read_lock);
286 DPrintf("#%d: java_mutex_read_lock(%p)\n", thr->tid, addr);
287 CHECK_NE(jctx, 0);
288 CHECK_GE(addr, jctx->heap_begin);
289 CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
291 MutexCreate(thr, pc, addr, true, true, true);
292 MutexReadLock(thr, pc, addr);
295 void __tsan_java_mutex_read_unlock(jptr addr) {
296 SCOPED_JAVA_FUNC(__tsan_java_mutex_read_unlock);
297 DPrintf("#%d: java_mutex_read_unlock(%p)\n", thr->tid, addr);
298 CHECK_NE(jctx, 0);
299 CHECK_GE(addr, jctx->heap_begin);
300 CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
302 MutexReadUnlock(thr, pc, addr);
305 void __tsan_java_mutex_lock_rec(jptr addr, int rec) {
306 SCOPED_JAVA_FUNC(__tsan_java_mutex_lock_rec);
307 DPrintf("#%d: java_mutex_lock_rec(%p, %d)\n", thr->tid, addr, rec);
308 CHECK_NE(jctx, 0);
309 CHECK_GE(addr, jctx->heap_begin);
310 CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
311 CHECK_GT(rec, 0);
313 MutexCreate(thr, pc, addr, true, true, true);
314 MutexLock(thr, pc, addr, rec);
317 int __tsan_java_mutex_unlock_rec(jptr addr) {
318 SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock_rec);
319 DPrintf("#%d: java_mutex_unlock_rec(%p)\n", thr->tid, addr);
320 CHECK_NE(jctx, 0);
321 CHECK_GE(addr, jctx->heap_begin);
322 CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
324 return MutexUnlock(thr, pc, addr, true);