//===-- tsan_mman.cc ------------------------------------------------------===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
11 #include "sanitizer_common/sanitizer_allocator_interface.h"
12 #include "sanitizer_common/sanitizer_common.h"
13 #include "sanitizer_common/sanitizer_placement_new.h"
14 #include "tsan_mman.h"
16 #include "tsan_report.h"
17 #include "tsan_flags.h"

// May be overridden by front-end.
extern "C" void WEAK __sanitizer_malloc_hook(void *ptr, uptr size) {
  (void)ptr;
  (void)size;
}

extern "C" void WEAK __sanitizer_free_hook(void *ptr) {
  (void)ptr;
}
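
// Note: both hooks are declared WEAK so that a front-end or the application
// can provide stronger definitions; invoke_malloc_hook()/invoke_free_hook()
// below call them from the user malloc/free paths.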

namespace __tsan {

struct MapUnmapCallback {
  void OnMap(uptr p, uptr size) const { }
  void OnUnmap(uptr p, uptr size) const {
    // We are about to unmap a chunk of user memory.
    // Mark the corresponding shadow memory as not needed.
    DontNeedShadowFor(p, size);
  }
};
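
// Note: MapUnmapCallback is the callback type the sanitizer allocator invokes
// around its own map/unmap operations; wiring it into the Allocator typedef
// (defined in the TSan headers, not in this file) lets TSan release shadow
// memory for user pages as soon as the allocator unmaps them.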

static char allocator_placeholder[sizeof(Allocator)] ALIGNED(64);
Allocator *allocator() {
  return reinterpret_cast<Allocator*>(&allocator_placeholder);
}
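
// Note: the allocator object lives in raw, cache-line-aligned static storage,
// so no global constructor runs for it; InitializeAllocator() below is
// expected to be called once during TSan initialization.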
void InitializeAllocator() {
  allocator()->Init();
}

void AllocatorThreadStart(ThreadState *thr) {
  allocator()->InitCache(&thr->alloc_cache);
  internal_allocator()->InitCache(&thr->internal_alloc_cache);
}

void AllocatorThreadFinish(ThreadState *thr) {
  allocator()->DestroyCache(&thr->alloc_cache);
  internal_allocator()->DestroyCache(&thr->internal_alloc_cache);
}
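
// Note: each thread keeps a private cache for both the user-facing allocator
// and the runtime-internal allocator; the caches are created when a thread
// starts, destroyed when it finishes, and can be drained for long-idle threads
// via __tsan_on_thread_idle() at the bottom of this file.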

void AllocatorPrintStats() {
  allocator()->PrintStats();
}

static void SignalUnsafeCall(ThreadState *thr, uptr pc) {
  if (atomic_load(&thr->in_signal_handler, memory_order_relaxed) == 0 ||
      !flags()->report_signal_unsafe)
    return;
  VarSizeStackTrace stack;
  ObtainCurrentStack(thr, pc, &stack);
  ThreadRegistryLock l(ctx->thread_registry);
  ScopedReport rep(ReportTypeSignalUnsafe);
  if (!IsFiredSuppression(ctx, rep, stack)) {
    rep.AddStack(stack, true);
    OutputReport(thr, rep);
  }
}
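
// Note: SignalUnsafeCall() emits a ReportTypeSignalUnsafe report when
// malloc/free is reached while thr->in_signal_handler is set, gated by the
// report_signal_unsafe flag; user_alloc()/user_free() below invoke it when
// their 'signal' argument is true.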

void *user_alloc(ThreadState *thr, uptr pc, uptr sz, uptr align, bool signal) {
  if ((sz >= (1ull << 40)) || (align >= (1ull << 40)))
    return AllocatorReturnNull();
  void *p = allocator()->Allocate(&thr->alloc_cache, sz, align);
  if (p == 0)
    return 0;
  if (ctx && ctx->initialized)
    OnUserAlloc(thr, pc, (uptr)p, sz, true);
  if (signal)
    SignalUnsafeCall(thr, pc);
  return p;
}
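
// Note: the (1ull << 40) guards reject absurd (1 TiB) sizes and alignments
// before they reach the allocator, and the 'signal' flag lets internal callers
// skip the signal-unsafety check. The malloc/calloc interceptors (in other
// TSan source files, not shown here) are the typical entry point here.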

void user_free(ThreadState *thr, uptr pc, void *p, bool signal) {
  if (ctx && ctx->initialized)
    OnUserFree(thr, pc, (uptr)p, true);
  allocator()->Deallocate(&thr->alloc_cache, p);
  if (signal)
    SignalUnsafeCall(thr, pc);
}

void OnUserAlloc(ThreadState *thr, uptr pc, uptr p, uptr sz, bool write) {
  DPrintf("#%d: alloc(%zu) = %p\n", thr->tid, sz, p);
  ctx->metamap.AllocBlock(thr, pc, p, sz);
  if (write && thr->ignore_reads_and_writes == 0)
    MemoryRangeImitateWrite(thr, pc, (uptr)p, sz);
  else
    MemoryResetRange(thr, pc, (uptr)p, sz);
}
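
// Note: OnUserAlloc() registers the block in the metamap (size plus the
// allocating thread and pc); imitating a write over the fresh block makes
// later unsynchronized accesses to it racy in the usual way, while callers
// that ignore reads/writes just get the shadow range reset.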

void OnUserFree(ThreadState *thr, uptr pc, uptr p, bool write) {
  CHECK_NE(p, (void*)0);
  uptr sz = ctx->metamap.FreeBlock(thr, pc, p);
  DPrintf("#%d: free(%p, %zu)\n", thr->tid, p, sz);
  if (write && thr->ignore_reads_and_writes == 0)
    MemoryRangeFreed(thr, pc, (uptr)p, sz);
}
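
// Note: FreeBlock() returns the size of the block being released, which is
// what MemoryRangeFreed() needs to mark the whole range as freed in shadow so
// that later accesses are reported as racing with the free.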

void *user_realloc(ThreadState *thr, uptr pc, void *p, uptr sz) {
  void *p2 = 0;
  // FIXME: Handle "shrinking" more efficiently,
  // it seems that some software actually does this.
  if (sz) {
    p2 = user_alloc(thr, pc, sz);
    if (p2 == 0)
      return 0;
    if (p) {
      uptr oldsz = user_alloc_usable_size(p);
      internal_memcpy(p2, p, min(oldsz, sz));
    }
  }
  if (p)
    user_free(thr, pc, p);
  return p2;
}
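
// Note: user_realloc() is implemented as allocate-copy-free: the copy length
// is min(old usable size, new size), and the old block is freed even for
// shrinking requests, which is what the FIXME above refers to.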

uptr user_alloc_usable_size(const void *p) {
  if (p == 0)
    return 0;
  MBlock *b = ctx->metamap.GetBlock((uptr)p);
  return b ? b->siz : 0;
}

void invoke_malloc_hook(void *ptr, uptr size) {
  ThreadState *thr = cur_thread();
  if (ctx == 0 || !ctx->initialized || thr->ignore_interceptors)
    return;
  __sanitizer_malloc_hook(ptr, size);
}

void invoke_free_hook(void *ptr) {
  ThreadState *thr = cur_thread();
  if (ctx == 0 || !ctx->initialized || thr->ignore_interceptors)
    return;
  __sanitizer_free_hook(ptr);
}

void *internal_alloc(MBlockType typ, uptr sz) {
  ThreadState *thr = cur_thread();
  if (thr->nomalloc) {
    thr->nomalloc = 0;  // CHECK calls internal_malloc().
    CHECK(0);
  }
  return InternalAlloc(sz, &thr->internal_alloc_cache);
}
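
// Note: internal_alloc()/internal_free() use a separate allocator (and a
// separate per-thread cache) for the runtime's own data structures, so runtime
// bookkeeping never touches the user heap; the nomalloc flag turns accidental
// internal allocation on forbidden paths into a CHECK failure.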

void internal_free(void *p) {
  ThreadState *thr = cur_thread();
  if (thr->nomalloc) {
    thr->nomalloc = 0;  // CHECK calls internal_malloc().
    CHECK(0);
  }
  InternalFree(p, &thr->internal_alloc_cache);
}

}  // namespace __tsan

using namespace __tsan;

extern "C" {
uptr __sanitizer_get_current_allocated_bytes() {
  uptr stats[AllocatorStatCount];
  allocator()->GetStats(stats);
  return stats[AllocatorStatAllocated];
}

uptr __sanitizer_get_heap_size() {
  uptr stats[AllocatorStatCount];
  allocator()->GetStats(stats);
  return stats[AllocatorStatMapped];
}

uptr __sanitizer_get_free_bytes() {
  return 1;
}

uptr __sanitizer_get_unmapped_bytes() {
  return 1;
}

uptr __sanitizer_get_estimated_allocated_size(uptr size) {
  return size;
}

int __sanitizer_get_ownership(const void *p) {
  return allocator()->GetBlockBegin(p) != 0;
}

uptr __sanitizer_get_allocated_size(const void *p) {
  return user_alloc_usable_size(p);
}
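
// __tsan_on_thread_idle() below is a hint entry point: when a thread has been
// idle for a while, its per-thread allocator caches are returned to the
// central allocators and the metamap is given a chance to reclaim memory.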
void __tsan_on_thread_idle() {
  ThreadState *thr = cur_thread();
  allocator()->SwallowCache(&thr->alloc_cache);
  internal_allocator()->SwallowCache(&thr->internal_alloc_cache);
  ctx->metamap.OnThreadIdle(thr);
}
}  // extern "C"