1 //===-- tsan_sync.cc ------------------------------------------------------===//
3 // This file is distributed under the University of Illinois Open Source
4 // License. See LICENSE.TXT for details.
6 //===----------------------------------------------------------------------===//
8 // This file is a part of ThreadSanitizer (TSan), a race detector.
10 //===----------------------------------------------------------------------===//
11 #include "sanitizer_common/sanitizer_placement_new.h"
12 #include "tsan_sync.h"
14 #include "tsan_mman.h"
// Forward declaration; the definition lives elsewhere in the TSan runtime.
// Presumably initializes deadlock-detector state for a sync object — confirm
// against the defining translation unit.
void DDMutexInit(ThreadState *thr, uptr pc, SyncVar *s);
// SyncVar constructor: initializes the per-sync-object mutex and marks the
// owner thread as invalid (no owner yet).
// NOTE(review): this excerpt is truncated — the remainder of the initializer
// list (and the constructor body, if any) is missing from the visible source.
SyncVar::SyncVar(uptr addr, u64 uid)
  : mtx(MutexTypeSyncVar, StatMtxSyncVar)
  , owner_tid(kInvalidTid)
// NOTE(review): truncated fragment — this appears to be part of the
// SyncTab constructor's initializer list (its table-wide mutex); the
// signature line and any remaining initializers/body are missing from
// this excerpt.
  : mtx(MutexTypeSyncTab, StatMtxSyncTab)
// Destructor fragment: iterates over every hash-table partition and pops
// the head SyncVar off that partition's singly-linked list.
// NOTE(review): truncated — the enclosing destructor signature, the inner
// loop condition, and the code that destroys/frees `tmp` are missing from
// this excerpt.
  for (int i = 0; i < kPartCount; i++) {
    SyncVar *tmp = tab_[i].val;
    tab_[i].val = tmp->next;
52 SyncVar
* SyncTab::GetOrCreateAndLock(ThreadState
*thr
, uptr pc
,
53 uptr addr
, bool write_lock
) {
54 return GetAndLock(thr
, pc
, addr
, write_lock
, true);
57 SyncVar
* SyncTab::GetIfExistsAndLock(uptr addr
, bool write_lock
) {
58 return GetAndLock(0, 0, addr
, write_lock
, false);
61 SyncVar
* SyncTab::Create(ThreadState
*thr
, uptr pc
, uptr addr
) {
62 StatInc(thr
, StatSyncCreated
);
63 void *mem
= internal_alloc(MBlockSync
, sizeof(SyncVar
));
64 const u64 uid
= atomic_fetch_add(&uid_gen_
, 1, memory_order_relaxed
);
65 SyncVar
*res
= new(mem
) SyncVar(addr
, uid
);
66 res
->creation_stack_id
= 0;
67 if (!kGoMode
) // Go does not use them
68 res
->creation_stack_id
= CurrentStackId(thr
, pc
);
69 if (flags()->detect_deadlocks
)
70 DDMutexInit(thr
, pc
, res
);
// Core lookup for a sync object. The visible code tries, in order: the Java
// sync machinery, the per-MBlock list for heap addresses owned by the
// primary allocator, and finally this address's hash-table partition;
// Create() is invoked when no existing entry is found.
// NOTE(review): heavily truncated — early returns, lock acquisition on the
// partition, the hit-handling code inside each search loop, and the function
// tail are all missing from this excerpt.
SyncVar* SyncTab::GetAndLock(ThreadState *thr, uptr pc,
                             uptr addr, bool write_lock, bool create) {
  SyncVar *res = GetJavaSync(thr, pc, addr, write_lock, create);
  // Here we ask only PrimaryAllocator, because
  // SecondaryAllocator::PointerIsMine() is slow and we have fallback on
  // the hashmap anyway.
  if (PrimaryAllocator::PointerIsMine((void*)addr)) {
    MBlock *b = user_mblock(thr, (void*)addr);
    // Lock guards the block's SyncVar list while we walk it.
    MBlock::ScopedLock l(b);
    for (res = b->ListHead(); res; res = res->next) {
      if (res->addr == addr)
    // (truncated: list-hit handling missing above)
    res = Create(thr, pc, addr);
  // Hash-table path: select the partition for this address.
  Part *p = &tab_[PartIdx(addr)];
  for (SyncVar *res = p->val; res; res = res->next) {
    if (res->addr == addr) {
  // (truncated: second pass over the same partition list below — presumably
  // under a stronger lock; confirm against the full source)
  SyncVar *res = p->val;
  for (; res; res = res->next) {
    if (res->addr == addr)
  // (truncated: hit handling missing above)
  res = Create(thr, pc, addr);
// Unlinks and returns the SyncVar for `addr` so the caller can destroy it.
// The visible code mirrors GetAndLock's search order: Java sync, the MBlock
// list for primary-allocator addresses, then the hash-table partition; it
// walks each list with a `prev` pointer-to-pointer for unlinking and bumps
// StatSyncDestroyed on removal.
// NOTE(review): heavily truncated — loop headers, the actual unlink/return
// statements, the `is_linker_init` handling (which appears to early-out
// rather than remove — confirm), and the function tail are missing.
SyncVar* SyncTab::GetAndRemove(ThreadState *thr, uptr pc, uptr addr) {
  SyncVar *res = GetAndRemoveJavaSync(thr, pc, addr);
  if (PrimaryAllocator::PointerIsMine((void*)addr)) {
    MBlock *b = user_mblock(thr, (void*)addr);
    MBlock::ScopedLock l(b);
    if (res->addr == addr) {
      if (res->is_linker_init)
    // (truncated: head-of-list handling above)
    SyncVar **prev = &res->next;
    if (res->addr == addr) {
      if (res->is_linker_init)
    // (truncated: interior-node unlink above)
    StatInc(thr, StatSyncDestroyed);
  // Hash-table path.
  Part *p = &tab_[PartIdx(addr)];
  SyncVar **prev = &p->val;
  if (res->addr == addr) {
    if (res->is_linker_init)
  // (truncated: unlink via *prev above)
  StatInc(thr, StatSyncDestroyed);
213 int SyncTab::PartIdx(uptr addr
) {
214 return (addr
>> 3) % kPartCount
;
// NOTE(review): the bodies (and initializer lists) of the following
// StackTrace members are missing from this excerpt; only their signatures
// survived. The (uptr*, uptr) constructor presumably adopts an external
// buffer of `cnt` slots — confirm against the full source.
StackTrace::StackTrace()
StackTrace::StackTrace(uptr *buf, uptr cnt)
StackTrace::~StackTrace() {
void StackTrace::Reset() {
// Copies `cnt` program counters into freshly-allocated storage owned by
// this StackTrace (`s_`).
// NOTE(review): truncated — the lines between the signature and the
// allocation (presumably a Reset()/empty-input check), any branch around
// the alloc, and the function tail are missing from this excerpt.
void StackTrace::Init(const uptr *pcs, uptr cnt) {
  s_ = (uptr*)internal_alloc(MBlockStackTrace, cnt * sizeof(s_[0]));
  internal_memcpy(s_, pcs, cnt * sizeof(s_[0]));
// Snapshots the calling thread's shadow stack into this StackTrace,
// optionally reserving one extra slot for `toppc` (the `!!toppc` terms add
// 1 exactly when a top pc was supplied).
// NOTE(review): truncated — the early-return for an empty stack, the else
// branches setting `start`, the append of `toppc` itself, and the function
// tail are missing from this excerpt. `start` is declared in a missing line.
void StackTrace::ObtainCurrent(ThreadState *thr, uptr toppc) {
  // Current shadow-stack depth.
  n_ = thr->shadow_stack_pos - thr->shadow_stack;
  if (n_ + !!toppc == 0)
  // If the stack exceeds the preallocated capacity c_, keep only the
  // innermost frames.
  if (n_ + !!toppc > c_) {
    start = n_ - c_ + !!toppc;
  // Cap potentially huge stacks.
  if (n_ + !!toppc > kTraceStackSize) {
    start = n_ - kTraceStackSize + !!toppc;
    n_ = kTraceStackSize - !!toppc;
  s_ = (uptr*)internal_alloc(MBlockStackTrace,
                             (n_ + !!toppc) * sizeof(s_[0]));
  // Copy the selected window of the shadow stack.
  for (uptr i = 0; i < n_; i++)
    s_[i] = thr->shadow_stack[start + i];
// Deep-copies another StackTrace's pcs into this one via Init().
// NOTE(review): truncated — one line between the signature and the Init
// call (presumably a Reset()) and the closing brace are missing from this
// excerpt.
void StackTrace::CopyFrom(const StackTrace &other) {
  Init(other.Begin(), other.Size());
// NOTE(review): the bodies of these const accessors are missing from this
// excerpt; only their signatures survived. From the names and Init()'s use
// of s_/n_, they presumably report emptiness, element count, the i-th pc,
// and a pointer to the pc array — confirm against the full source.
bool StackTrace::IsEmpty() const {
uptr StackTrace::Size() const {
uptr StackTrace::Get(uptr i) const {
const uptr *StackTrace::Begin() const {
309 } // namespace __tsan