//===-- tsan_mman.cc ------------------------------------------------------===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator_interface.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_placement_new.h"
#include "tsan_mman.h"
#include "tsan_rtl.h"
#include "tsan_report.h"
#include "tsan_flags.h"

// May be overridden by front-end.
extern "C" void WEAK __sanitizer_malloc_hook(void *ptr, uptr size) {
  (void)ptr;
  (void)size;
}

extern "C" void WEAK __sanitizer_free_hook(void *ptr) {
  (void)ptr;
}
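
// A minimal sketch of overriding these hooks (illustrative, not part of this
// file): a front-end links a strong definition, which takes precedence over
// the WEAK defaults above:
//
//   extern "C" void __sanitizer_malloc_hook(void *ptr, uptr size) {
//     // e.g. record (ptr, size) in the embedder's own bookkeeping
//   }
//   extern "C" void __sanitizer_free_hook(void *ptr) {
//     // e.g. forget ptr again
//   }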

namespace __tsan {

struct MapUnmapCallback {
  void OnMap(uptr p, uptr size) const { }
  void OnUnmap(uptr p, uptr size) const {
    // We are about to unmap a chunk of user memory.
    // Mark the corresponding shadow memory as not needed.
    DontNeedShadowFor(p, size);
  }
};
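
// Releasing shadow eagerly matters because shadow memory is several times
// larger than the application range it describes; on Linux, DontNeedShadowFor
// presumably ends in an madvise(MADV_DONTNEED)-style release (an assumption
// about its implementation, which lives outside this file).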

static char allocator_placeholder[sizeof(Allocator)] ALIGNED(64);
Allocator *allocator() {
  return reinterpret_cast<Allocator*>(&allocator_placeholder);
}

void InitializeAllocator() {
  allocator()->Init();
}

void AllocatorThreadStart(ThreadState *thr) {
  allocator()->InitCache(&thr->alloc_cache);
  internal_allocator()->InitCache(&thr->internal_alloc_cache);
}

void AllocatorThreadFinish(ThreadState *thr) {
  allocator()->DestroyCache(&thr->alloc_cache);
  internal_allocator()->DestroyCache(&thr->internal_alloc_cache);
}

void AllocatorPrintStats() {
  allocator()->PrintStats();
}

static void SignalUnsafeCall(ThreadState *thr, uptr pc) {
  if (atomic_load(&thr->in_signal_handler, memory_order_relaxed) == 0 ||
      !flags()->report_signal_unsafe)
    return;
  StackTrace stack;
  stack.ObtainCurrent(thr, pc);
  ThreadRegistryLock l(ctx->thread_registry);
  ScopedReport rep(ReportTypeSignalUnsafe);
  if (!IsFiredSuppression(ctx, rep, stack)) {
    rep.AddStack(&stack, true);
    OutputReport(thr, rep);
  }
}
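
// The situation this reports looks like the following (illustrative):
//
//   void handler(int sig) {
//     char *p = (char *)malloc(1);  // malloc is not async-signal-safe
//     free(p);
//   }
//
// When report_signal_unsafe is set and the thread is currently inside a
// signal handler, any allocation or deallocation path produces this report.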

void *user_alloc(ThreadState *thr, uptr pc, uptr sz, uptr align) {
  if ((sz >= (1ull << 40)) || (align >= (1ull << 40)))
    return AllocatorReturnNull();
  void *p = allocator()->Allocate(&thr->alloc_cache, sz, align);
  if (p == 0)
    return 0;
  if (ctx && ctx->initialized)
    OnUserAlloc(thr, pc, (uptr)p, sz, true);
  SignalUnsafeCall(thr, pc);
  return p;
}

void user_free(ThreadState *thr, uptr pc, void *p) {
  if (ctx && ctx->initialized)
    OnUserFree(thr, pc, (uptr)p, true);
  allocator()->Deallocate(&thr->alloc_cache, p);
  SignalUnsafeCall(thr, pc);
}

void OnUserAlloc(ThreadState *thr, uptr pc, uptr p, uptr sz, bool write) {
  DPrintf("#%d: alloc(%zu) = %p\n", thr->tid, sz, p);
  ctx->metamap.AllocBlock(thr, pc, p, sz);
  if (write && thr->ignore_reads_and_writes == 0)
    MemoryRangeImitateWrite(thr, pc, (uptr)p, sz);
  else
    MemoryResetRange(thr, pc, (uptr)p, sz);
}

void OnUserFree(ThreadState *thr, uptr pc, uptr p, bool write) {
  CHECK_NE(p, (void*)0);
  uptr sz = ctx->metamap.FreeBlock(thr, pc, p);
  DPrintf("#%d: free(%p, %zu)\n", thr->tid, p, sz);
  if (write && thr->ignore_reads_and_writes == 0)
    MemoryRangeFreed(thr, pc, (uptr)p, sz);
}

void *user_realloc(ThreadState *thr, uptr pc, void *p, uptr sz) {
  void *p2 = 0;
  // FIXME: Handle "shrinking" more efficiently,
  // it seems that some software actually does this.
  if (sz) {
    p2 = user_alloc(thr, pc, sz);
    if (p2 == 0)
      return 0;
    if (p) {
      uptr oldsz = user_alloc_usable_size(p);
      internal_memcpy(p2, p, min(oldsz, sz));
    }
  }
  if (p)
    user_free(thr, pc, p);
  return p2;
}
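
// Note that realloc is implemented as allocate-copy-free rather than as an
// in-place resize: a successful grow always moves the block, and shrinking
// reallocates too, which is what the FIXME above refers to.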

uptr user_alloc_usable_size(const void *p) {
  if (p == 0)
    return 0;
  MBlock *b = ctx->metamap.GetBlock((uptr)p);
  return b ? b->siz : 0;
}

void invoke_malloc_hook(void *ptr, uptr size) {
  ThreadState *thr = cur_thread();
  if (ctx == 0 || !ctx->initialized || thr->ignore_interceptors)
    return;
  __sanitizer_malloc_hook(ptr, size);
}

void invoke_free_hook(void *ptr) {
  ThreadState *thr = cur_thread();
  if (ctx == 0 || !ctx->initialized || thr->ignore_interceptors)
    return;
  __sanitizer_free_hook(ptr);
}

void *internal_alloc(MBlockType typ, uptr sz) {
  ThreadState *thr = cur_thread();
  if (thr->nomalloc) {
    thr->nomalloc = 0;  // CHECK calls internal_malloc().
    CHECK(0);
  }
  return InternalAlloc(sz, &thr->internal_alloc_cache);
}

void internal_free(void *p) {
  ThreadState *thr = cur_thread();
  if (thr->nomalloc) {
    thr->nomalloc = 0;  // CHECK calls internal_malloc().
    CHECK(0);
  }
  InternalFree(p, &thr->internal_alloc_cache);
}
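
// A note on the nomalloc guard above: when a thread is inside a region that
// must not allocate, reaching internal_alloc/internal_free is a bug. The flag
// is cleared *before* CHECK(0) fires because the CHECK failure path itself
// allocates via internal_malloc and would otherwise re-enter the guard.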

}  // namespace __tsan

using namespace __tsan;

extern "C" {
uptr __sanitizer_get_current_allocated_bytes() {
  uptr stats[AllocatorStatCount];
  allocator()->GetStats(stats);
  return stats[AllocatorStatAllocated];
}

uptr __sanitizer_get_heap_size() {
  uptr stats[AllocatorStatCount];
  allocator()->GetStats(stats);
  return stats[AllocatorStatMapped];
}
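
// A user program can poll these counters; a usage sketch, assuming the
// declarations come from compiler-rt's public header
// <sanitizer/allocator_interface.h>:
//
//   size_t live   = __sanitizer_get_current_allocated_bytes();
//   size_t mapped = __sanitizer_get_heap_size();
//   printf("live: %zu of %zu mapped bytes\n", live, mapped);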

uptr __sanitizer_get_free_bytes() {
  return 1;
}

uptr __sanitizer_get_unmapped_bytes() {
  return 1;
}

uptr __sanitizer_get_estimated_allocated_size(uptr size) {
  return size;
}

int __sanitizer_get_ownership(const void *p) {
  return allocator()->GetBlockBegin(p) != 0;
}

uptr __sanitizer_get_allocated_size(const void *p) {
  return user_alloc_usable_size(p);
}

void __tsan_on_thread_idle() {
  ThreadState *thr = cur_thread();
  allocator()->SwallowCache(&thr->alloc_cache);
  internal_allocator()->SwallowCache(&thr->internal_alloc_cache);
  ctx->metamap.OnThreadIdle(thr);
}
}  // extern "C"
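
// A thread-pool runtime that parks workers for long stretches can call
// __tsan_on_thread_idle() before blocking to return the thread's allocator
// caches to the common pool. A usage sketch with hypothetical helpers
// TryGetTask/WaitForTask:
//
//   extern "C" void __tsan_on_thread_idle();
//
//   void WorkerLoop() {
//     for (;;) {
//       Task *t = TryGetTask();
//       if (!t) {
//         __tsan_on_thread_idle();  // about to block with no queued work
//         t = WaitForTask();
//       }
//       t->Run();
//     }
//   }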