tsan: move verbosity flag to CommonFlags
lib/asan/asan_fake_stack.cc
//===-- asan_fake_stack.cc ------------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// FakeStack is used to detect use-after-return bugs.
//===----------------------------------------------------------------------===//
14 #include "asan_allocator.h"
15 #include "asan_poisoning.h"
16 #include "asan_thread.h"
18 namespace __asan {
static const u64 kMagic1 = kAsanStackAfterReturnMagic;
static const u64 kMagic2 = (kMagic1 << 8) | kMagic1;
static const u64 kMagic4 = (kMagic2 << 16) | kMagic2;
static const u64 kMagic8 = (kMagic4 << 32) | kMagic4;
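
// For example, with kAsanStackAfterReturnMagic == 0xf5 these constants
// replicate the magic byte across wider words: kMagic2 == 0xf5f5,
// kMagic4 == 0xf5f5f5f5, kMagic8 == 0xf5f5f5f5f5f5f5f5.  A single u64 store
// of kMagic8 then paints 8 shadow bytes, i.e. 64 application bytes at
// SHADOW_SCALE=3.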
// For small size classes inline PoisonShadow for better performance.
ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
  CHECK_EQ(SHADOW_SCALE, 3);  // This code expects SHADOW_SCALE=3.
  u64 *shadow = reinterpret_cast<u64*>(MemToShadow(ptr));
  if (class_id <= 6) {
    for (uptr i = 0; i < (1U << class_id); i++)
      shadow[i] = magic;
  } else {
    // The size class is too big, it's cheaper to poison only size bytes.
    PoisonShadow(ptr, size, static_cast<u8>(magic));
  }
}
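
// Why the loop bound above is (1 << class_id): a frame in size class
// class_id spans 64 << class_id bytes (assuming kMinStackFrameSizeLog == 6
// as in asan_fake_stack.h).  With SHADOW_SCALE=3 that is
// (64 << class_id) / 8 == 8 << class_id shadow bytes, i.e. exactly
// (1 << class_id) u64 words.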
FakeStack *FakeStack::Create(uptr stack_size_log) {
  static uptr kMinStackSizeLog = 16;
  static uptr kMaxStackSizeLog = FIRST_32_SECOND_64(24, 28);
  if (stack_size_log < kMinStackSizeLog)
    stack_size_log = kMinStackSizeLog;
  if (stack_size_log > kMaxStackSizeLog)
    stack_size_log = kMaxStackSizeLog;
  FakeStack *res = reinterpret_cast<FakeStack *>(
      MmapOrDie(RequiredSize(stack_size_log), "FakeStack"));
  res->stack_size_log_ = stack_size_log;
  if (common_flags()->verbosity) {
    u8 *p = reinterpret_cast<u8 *>(res);
    Report("T%d: FakeStack created: %p -- %p stack_size_log: %zd \n",
           GetCurrentTidOrInvalid(), p,
           p + FakeStack::RequiredSize(stack_size_log), stack_size_log);
  }
  return res;
}
void FakeStack::Destroy() {
  PoisonAll(0);
  UnmapOrDie(this, RequiredSize(stack_size_log_));
}
void FakeStack::PoisonAll(u8 magic) {
  PoisonShadow(reinterpret_cast<uptr>(this), RequiredSize(stack_size_log()),
               magic);
}
ALWAYS_INLINE USED
FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
                               uptr real_stack) {
  CHECK_LT(class_id, kNumberOfSizeClasses);
  if (needs_gc_)
    GC(real_stack);
  uptr &hint_position = hint_position_[class_id];
  const int num_iter = NumberOfFrames(stack_size_log, class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  for (int i = 0; i < num_iter; i++) {
    uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
    // This part is tricky. On one hand, checking and setting flags[pos]
    // should be atomic to ensure async-signal safety. But on the other hand,
    // if the signal arrives between checking and setting flags[pos], the
    // signal handler's fake stack will start from a different hint_position
    // and so will not touch this particular byte. So, it is safe to do this
    // with regular non-atomic load and store (at least I was not able to make
    // this code crash).
    if (flags[pos]) continue;
    flags[pos] = 1;
    FakeFrame *res = reinterpret_cast<FakeFrame *>(
        GetFrame(stack_size_log, class_id, pos));
    res->real_stack = real_stack;
    *SavedFlagPtr(reinterpret_cast<uptr>(res), class_id) = &flags[pos];
    return res;
  }
  return 0;  // We are out of fake stack.
}
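
// Note on SavedFlagPtr: storing &flags[pos] inside the frame lets
// FakeStack::Deallocate() (see asan_fake_stack.h) release the frame in O(1)
// by clearing that saved flag, without having to locate the owning
// FakeStack or recompute the frame's position.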
uptr FakeStack::AddrIsInFakeStack(uptr ptr) {
  uptr stack_size_log = this->stack_size_log();
  uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
  uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
  if (ptr < beg || ptr >= end) return 0;
  uptr class_id = (ptr - beg) >> stack_size_log;
  uptr base = beg + (class_id << stack_size_log);
  CHECK_LE(base, ptr);
  CHECK_LT(ptr, base + (1UL << stack_size_log));
  uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
  return base + pos * BytesInSizeClass(class_id);
}
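
// Worked example (assuming kMinStackFrameSizeLog == 6): with
// stack_size_log == 16 each size class owns a 2^16-byte region starting at
// beg.  For ptr == beg + 65536 + 300 we get class_id == 1 (128-byte frames),
// base == beg + 65536, pos == 300 >> 7 == 2, and the function returns
// base + 2 * 128, i.e. the start of the frame containing ptr.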
void FakeStack::HandleNoReturn() {
  needs_gc_ = true;
}
// When a throw, longjmp, or similar event happens, OnFree() is not called
// and as a result we may leak one or more fake frames. The good news is
// that we are notified about all such events by HandleNoReturn().
// If we recently had such a no-return event we need to collect garbage
// frames. We do it based on their 'real_stack' values -- everything that is
// lower than the current real_stack is garbage.
NOINLINE void FakeStack::GC(uptr real_stack) {
  uptr collected = 0;
  for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    u8 *flags = GetFlags(stack_size_log(), class_id);
    for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
         i++) {
      if (flags[i] == 0) continue;  // not allocated.
      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log(), class_id, i));
      if (ff->real_stack < real_stack) {
        flags[i] = 0;
        collected++;
      }
    }
  }
  needs_gc_ = false;
}
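
// Example scenario: if g() exits via longjmp() rather than a normal return,
// its fake frame is never freed, but HandleNoReturn() sets needs_gc_.
// Because the real stack grows down, g()'s real_stack is lower than that of
// any frame still live, so the next Allocate() on this thread runs GC() and
// reclaims it.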
void FakeStack::ForEachFakeFrame(RangeIteratorCallback callback, void *arg) {
  for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    u8 *flags = GetFlags(stack_size_log(), class_id);
    for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
         i++) {
      if (flags[i] == 0) continue;  // not allocated.
      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log(), class_id, i));
      uptr begin = reinterpret_cast<uptr>(ff);
      callback(begin, begin + FakeStack::BytesInSizeClass(class_id), arg);
    }
  }
}
#if SANITIZER_LINUX && !SANITIZER_ANDROID
static THREADLOCAL FakeStack *fake_stack_tls;

FakeStack *GetTLSFakeStack() {
  return fake_stack_tls;
}
void SetTLSFakeStack(FakeStack *fs) {
  fake_stack_tls = fs;
}
#else
FakeStack *GetTLSFakeStack() { return 0; }
void SetTLSFakeStack(FakeStack *fs) { }
#endif  // SANITIZER_LINUX && !SANITIZER_ANDROID
static FakeStack *GetFakeStack() {
  AsanThread *t = GetCurrentThread();
  if (!t) return 0;
  return t->fake_stack();
}

static FakeStack *GetFakeStackFast() {
  if (FakeStack *fs = GetTLSFakeStack())
    return fs;
  if (!__asan_option_detect_stack_use_after_return)
    return 0;
  return GetFakeStack();
}
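
// Fast-path ordering: the TLS cache is checked first so that threads which
// already have a fake stack skip both the runtime flag test and the slower
// GetCurrentThread() lookup; the flag is consulted only on the cold path.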
ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size, uptr real_stack) {
  FakeStack *fs = GetFakeStackFast();
  if (!fs) return real_stack;
  FakeFrame *ff = fs->Allocate(fs->stack_size_log(), class_id, real_stack);
  if (!ff)
    return real_stack;  // Out of fake stack, return the real one.
  uptr ptr = reinterpret_cast<uptr>(ff);
  SetShadow(ptr, size, class_id, 0);
  return ptr;
}
ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size, uptr real_stack) {
  if (ptr == real_stack)
    return;
  FakeStack::Deallocate(ptr, class_id);
  SetShadow(ptr, size, class_id, kMagic8);
}
}  // namespace __asan

// ---------------------- Interface ---------------- {{{1
#define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id)                      \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                               \
  __asan_stack_malloc_##class_id(uptr size, uptr real_stack) {                \
    return __asan::OnMalloc(class_id, size, real_stack);                      \
  }                                                                           \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id( \
      uptr ptr, uptr size, uptr real_stack) {                                 \
    __asan::OnFree(ptr, class_id, size, real_stack);                          \
  }
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(0)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(1)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(2)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(3)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(4)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(5)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(6)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(7)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(8)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(9)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(10)
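
// A rough sketch (not literal compiler output) of how instrumented code uses
// this interface for a function whose frame falls in size class 0 (64 bytes):
//
//   void foo() {
//     char frame[64];                                      // real frame
//     uptr fake = __asan_stack_malloc_0(64, (uptr)frame);  // prologue
//     // ... locals live at 'fake' (or at 'frame' if that was returned) ...
//     __asan_stack_free_0(fake, 64, (uptr)frame);          // epilogue
//   }
//
// OnFree() tolerates fake == frame, so the epilogue call is unconditional.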