libsanitizer/tsan/tsan_platform.h
1 //===-- tsan_platform.h -----------------------------------------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file is a part of ThreadSanitizer (TSan), a race detector.
11 // Platform-specific code.
12 //===----------------------------------------------------------------------===//
14 #ifndef TSAN_PLATFORM_H
15 #define TSAN_PLATFORM_H
17 #if !defined(__LP64__) && !defined(_WIN64)
18 # error "Only 64-bit is supported"
19 #endif
21 #include "tsan_defs.h"
22 #include "tsan_trace.h"
24 namespace __tsan {
26 #if !SANITIZER_GO
28 #if defined(__x86_64__)
30 C/C++ on linux/x86_64 and freebsd/x86_64
31 0000 0000 1000 - 0080 0000 0000: main binary and/or MAP_32BIT mappings (512GB)
32 0080 0000 0000 - 0100 0000 0000: -
33 0100 0000 0000 - 2000 0000 0000: shadow
34 2000 0000 0000 - 3000 0000 0000: -
35 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
36 4000 0000 0000 - 5500 0000 0000: -
37 5500 0000 0000 - 5680 0000 0000: pie binaries without ASLR or on 4.1+ kernels
38 5680 0000 0000 - 6000 0000 0000: -
39 6000 0000 0000 - 6200 0000 0000: traces
40 6200 0000 0000 - 7b00 0000 0000: -
41 7b00 0000 0000 - 7c00 0000 0000: heap
42 7c00 0000 0000 - 7e80 0000 0000: -
43 7e80 0000 0000 - 8000 0000 0000: modules and main thread stack
45 C/C++ on netbsd/amd64 can reuse the same mapping:
46 * The address space starts from 0x1000 (option with 0x0) and ends with
47 0x7f7ffffff000.
48 * LoAppMem-kHeapMemEnd can be reused as it is.
49 * No VDSO support.
50 * No MidAppMem region.
51 * No additional HeapMem region.
52 * HiAppMem contains the stack, loader, shared libraries and heap.
53 * Stack on NetBSD/amd64 has prereserved 128MB.
54 * Heap grows downwards (top-down).
55 * ASLR must be disabled per-process or globally.
58 struct Mapping {
59 static const uptr kMetaShadowBeg = 0x300000000000ull;
60 static const uptr kMetaShadowEnd = 0x340000000000ull;
61 static const uptr kTraceMemBeg = 0x600000000000ull;
62 static const uptr kTraceMemEnd = 0x620000000000ull;
63 static const uptr kShadowBeg = 0x010000000000ull;
64 static const uptr kShadowEnd = 0x200000000000ull;
65 static const uptr kHeapMemBeg = 0x7b0000000000ull;
66 static const uptr kHeapMemEnd = 0x7c0000000000ull;
67 static const uptr kLoAppMemBeg = 0x000000001000ull;
68 static const uptr kLoAppMemEnd = 0x008000000000ull;
69 static const uptr kMidAppMemBeg = 0x550000000000ull;
70 static const uptr kMidAppMemEnd = 0x568000000000ull;
71 static const uptr kHiAppMemBeg = 0x7e8000000000ull;
72 static const uptr kHiAppMemEnd = 0x800000000000ull;
73 static const uptr kAppMemMsk = 0x780000000000ull;
74 static const uptr kAppMemXor = 0x040000000000ull;
75 static const uptr kVdsoBeg = 0xf000000000000000ull;
78 #define TSAN_MID_APP_RANGE 1
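// A minimal worked example of the app-to-shadow transform for the mapping
// above (a sketch; it assumes kShadowCell == 8 and kShadowCnt == 4 as in
// tsan_defs.h, and the heap address is hypothetical, chosen for illustration):
//   uptr x = 0x7b0000001230;                           // app (heap) address
//   x & ~(Mapping::kAppMemMsk | (kShadowCell - 1));    // == 0x030000001230
//   0x030000001230ull ^ Mapping::kAppMemXor;           // == 0x070000001230
//   0x070000001230ull * kShadowCnt;                    // == 0x01c0000048c0
// The result lies inside [kShadowBeg, kShadowEnd), i.e. in the shadow region.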
79 #elif defined(__mips64)
81 C/C++ on linux/mips64 (40-bit VMA)
82 0000 0000 00 - 0100 0000 00: - (4 GB)
83 0100 0000 00 - 0200 0000 00: main binary (4 GB)
84 0200 0000 00 - 2000 0000 00: - (120 GB)
85 2000 0000 00 - 4000 0000 00: shadow (128 GB)
86 4000 0000 00 - 5000 0000 00: metainfo (memory blocks and sync objects) (64 GB)
87 5000 0000 00 - aa00 0000 00: - (360 GB)
88 aa00 0000 00 - ab00 0000 00: main binary (PIE) (4 GB)
89 ab00 0000 00 - b000 0000 00: - (20 GB)
90 b000 0000 00 - b200 0000 00: traces (8 GB)
91 b200 0000 00 - fe00 0000 00: - (304 GB)
92 fe00 0000 00 - ff00 0000 00: heap (4 GB)
93 ff00 0000 00 - ff80 0000 00: - (2 GB)
94 ff80 0000 00 - ffff ffff ff: modules and main thread stack (<2 GB)
96 struct Mapping {
97 static const uptr kMetaShadowBeg = 0x4000000000ull;
98 static const uptr kMetaShadowEnd = 0x5000000000ull;
99 static const uptr kTraceMemBeg = 0xb000000000ull;
100 static const uptr kTraceMemEnd = 0xb200000000ull;
101 static const uptr kShadowBeg = 0x2000000000ull;
102 static const uptr kShadowEnd = 0x4000000000ull;
103 static const uptr kHeapMemBeg = 0xfe00000000ull;
104 static const uptr kHeapMemEnd = 0xff00000000ull;
105 static const uptr kLoAppMemBeg = 0x0100000000ull;
106 static const uptr kLoAppMemEnd = 0x0200000000ull;
107 static const uptr kMidAppMemBeg = 0xaa00000000ull;
108 static const uptr kMidAppMemEnd = 0xab00000000ull;
109 static const uptr kHiAppMemBeg = 0xff80000000ull;
110 static const uptr kHiAppMemEnd = 0xffffffffffull;
111 static const uptr kAppMemMsk = 0xf800000000ull;
112 static const uptr kAppMemXor = 0x0800000000ull;
113 static const uptr kVdsoBeg = 0xfffff00000ull;
116 #define TSAN_MID_APP_RANGE 1
117 #elif defined(__aarch64__) && defined(__APPLE__)
119 C/C++ on Darwin/iOS/ARM64 (36-bit VMA, 64 GB VM)
120 0000 0000 00 - 0100 0000 00: - (4 GB)
121 0100 0000 00 - 0200 0000 00: main binary, modules, thread stacks (4 GB)
122 0200 0000 00 - 0300 0000 00: heap (4 GB)
123 0300 0000 00 - 0400 0000 00: - (4 GB)
124 0400 0000 00 - 0c00 0000 00: shadow memory (32 GB)
125 0c00 0000 00 - 0d00 0000 00: - (4 GB)
126 0d00 0000 00 - 0e00 0000 00: metainfo (4 GB)
127 0e00 0000 00 - 0f00 0000 00: - (4 GB)
128 0f00 0000 00 - 0fc0 0000 00: traces (3 GB)
129 0fc0 0000 00 - 1000 0000 00: -
131 struct Mapping {
132 static const uptr kLoAppMemBeg = 0x0100000000ull;
133 static const uptr kLoAppMemEnd = 0x0200000000ull;
134 static const uptr kHeapMemBeg = 0x0200000000ull;
135 static const uptr kHeapMemEnd = 0x0300000000ull;
136 static const uptr kShadowBeg = 0x0400000000ull;
137 static const uptr kShadowEnd = 0x0c00000000ull;
138 static const uptr kMetaShadowBeg = 0x0d00000000ull;
139 static const uptr kMetaShadowEnd = 0x0e00000000ull;
140 static const uptr kTraceMemBeg = 0x0f00000000ull;
141 static const uptr kTraceMemEnd = 0x0fc0000000ull;
142 static const uptr kHiAppMemBeg = 0x0fc0000000ull;
143 static const uptr kHiAppMemEnd = 0x0fc0000000ull;
144 static const uptr kAppMemMsk = 0x0ull;
145 static const uptr kAppMemXor = 0x0ull;
146 static const uptr kVdsoBeg = 0x7000000000000000ull;
149 #elif defined(__aarch64__)
150 // AArch64 supports multiple VMA sizes, which leads to multiple address
151 // transformation functions.  To support these multiple VMA transformations
152 // and mappings, the TSAN runtime for AArch64 reads an external value
153 // (vmaSize) to select which mapping to use.  Although slower, this makes
154 // the same instrumented binary run on multiple kernels.
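// vmaSize itself is only declared in this header (see "extern uptr vmaSize"
// below); a sketch of how it can be detected at startup, assuming the usual
// sanitizer helpers (illustrative, not necessarily the exact implementation):
//   uptr frame = (uptr)__builtin_frame_address(0);
//   vmaSize = MostSignificantSetBitIndex(frame) + 1;  // e.g. 39, 42 or 48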
157 C/C++ on linux/aarch64 (39-bit VMA)
158 0000 0010 00 - 0100 0000 00: main binary
159 0100 0000 00 - 0800 0000 00: -
160 0800 0000 00 - 2000 0000 00: shadow memory
161 2000 0000 00 - 3100 0000 00: -
162 3100 0000 00 - 3400 0000 00: metainfo
163 3400 0000 00 - 5500 0000 00: -
164 5500 0000 00 - 5600 0000 00: main binary (PIE)
165 5600 0000 00 - 6000 0000 00: -
166 6000 0000 00 - 6200 0000 00: traces
167 6200 0000 00 - 7c00 0000 00: -
168 7c00 0000 00 - 7d00 0000 00: heap
169 7d00 0000 00 - 7fff ffff ff: modules and main thread stack
171 struct Mapping39 {
172 static const uptr kLoAppMemBeg = 0x0000001000ull;
173 static const uptr kLoAppMemEnd = 0x0100000000ull;
174 static const uptr kShadowBeg = 0x0800000000ull;
175 static const uptr kShadowEnd = 0x2000000000ull;
176 static const uptr kMetaShadowBeg = 0x3100000000ull;
177 static const uptr kMetaShadowEnd = 0x3400000000ull;
178 static const uptr kMidAppMemBeg = 0x5500000000ull;
179 static const uptr kMidAppMemEnd = 0x5600000000ull;
180 static const uptr kTraceMemBeg = 0x6000000000ull;
181 static const uptr kTraceMemEnd = 0x6200000000ull;
182 static const uptr kHeapMemBeg = 0x7c00000000ull;
183 static const uptr kHeapMemEnd = 0x7d00000000ull;
184 static const uptr kHiAppMemBeg = 0x7e00000000ull;
185 static const uptr kHiAppMemEnd = 0x7fffffffffull;
186 static const uptr kAppMemMsk = 0x7800000000ull;
187 static const uptr kAppMemXor = 0x0200000000ull;
188 static const uptr kVdsoBeg = 0x7f00000000ull;
192 C/C++ on linux/aarch64 (42-bit VMA)
193 00000 0010 00 - 01000 0000 00: main binary
194 01000 0000 00 - 10000 0000 00: -
195 10000 0000 00 - 20000 0000 00: shadow memory
196 20000 0000 00 - 26000 0000 00: -
197 26000 0000 00 - 28000 0000 00: metainfo
198 28000 0000 00 - 2aa00 0000 00: -
199 2aa00 0000 00 - 2ab00 0000 00: main binary (PIE)
200 2ab00 0000 00 - 36200 0000 00: -
201 36200 0000 00 - 36400 0000 00: traces
202 36400 0000 00 - 3e000 0000 00: -
203 3e000 0000 00 - 3f000 0000 00: heap
204 3f000 0000 00 - 3ffff ffff ff: modules and main thread stack
206 struct Mapping42 {
207 static const uptr kLoAppMemBeg = 0x00000001000ull;
208 static const uptr kLoAppMemEnd = 0x01000000000ull;
209 static const uptr kShadowBeg = 0x10000000000ull;
210 static const uptr kShadowEnd = 0x20000000000ull;
211 static const uptr kMetaShadowBeg = 0x26000000000ull;
212 static const uptr kMetaShadowEnd = 0x28000000000ull;
213 static const uptr kMidAppMemBeg = 0x2aa00000000ull;
214 static const uptr kMidAppMemEnd = 0x2ab00000000ull;
215 static const uptr kTraceMemBeg = 0x36200000000ull;
216 static const uptr kTraceMemEnd = 0x36400000000ull;
217 static const uptr kHeapMemBeg = 0x3e000000000ull;
218 static const uptr kHeapMemEnd = 0x3f000000000ull;
219 static const uptr kHiAppMemBeg = 0x3f000000000ull;
220 static const uptr kHiAppMemEnd = 0x3ffffffffffull;
221 static const uptr kAppMemMsk = 0x3c000000000ull;
222 static const uptr kAppMemXor = 0x04000000000ull;
223 static const uptr kVdsoBeg = 0x37f00000000ull;
226 struct Mapping48 {
227 static const uptr kLoAppMemBeg = 0x0000000001000ull;
228 static const uptr kLoAppMemEnd = 0x0000200000000ull;
229 static const uptr kShadowBeg = 0x0002000000000ull;
230 static const uptr kShadowEnd = 0x0004000000000ull;
231 static const uptr kMetaShadowBeg = 0x0005000000000ull;
232 static const uptr kMetaShadowEnd = 0x0006000000000ull;
233 static const uptr kMidAppMemBeg = 0x0aaaa00000000ull;
234 static const uptr kMidAppMemEnd = 0x0aaaf00000000ull;
235 static const uptr kTraceMemBeg = 0x0f06000000000ull;
236 static const uptr kTraceMemEnd = 0x0f06200000000ull;
237 static const uptr kHeapMemBeg = 0x0ffff00000000ull;
238 static const uptr kHeapMemEnd = 0x0ffff00000000ull;
239 static const uptr kHiAppMemBeg = 0x0ffff00000000ull;
240 static const uptr kHiAppMemEnd = 0x1000000000000ull;
241 static const uptr kAppMemMsk = 0x0fff800000000ull;
242 static const uptr kAppMemXor = 0x0000800000000ull;
243 static const uptr kVdsoBeg = 0xffff000000000ull;
246 // Indicates that the memory regions are determined at run time rather than at compile time.
247 #define TSAN_RUNTIME_VMA 1
248 // Indicates that the mapping defines a mid-range memory segment.
249 #define TSAN_MID_APP_RANGE 1
250 #elif defined(__powerpc64__)
251 // PPC64 supports multiple VMA sizes, which leads to multiple address
252 // transformation functions.  To support these multiple VMA transformations
253 // and mappings, the TSAN runtime for PPC64 reads an external value
254 // (vmaSize) to select which mapping to use.  Although slower, this makes
255 // the same instrumented binary run on multiple kernels.
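// Before any of the mappings below are used, the runtime is expected to
// verify that vmaSize is one of the supported values; a sketch of such a
// check (illustrative only, using the generic sanitizer Printf/Die helpers):
//   if (vmaSize != 44 && vmaSize != 46 && vmaSize != 47) {
//     Printf("FATAL: ThreadSanitizer: unsupported VMA range: %d\n", (int)vmaSize);
//     Die();
//   }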
258 C/C++ on linux/powerpc64 (44-bit VMA)
259 0000 0000 0100 - 0001 0000 0000: main binary
260 0001 0000 0000 - 0001 0000 0000: -
261 0001 0000 0000 - 0b00 0000 0000: shadow
262 0b00 0000 0000 - 0b00 0000 0000: -
263 0b00 0000 0000 - 0d00 0000 0000: metainfo (memory blocks and sync objects)
264 0d00 0000 0000 - 0d00 0000 0000: -
265 0d00 0000 0000 - 0f00 0000 0000: traces
266 0f00 0000 0000 - 0f00 0000 0000: -
267 0f00 0000 0000 - 0f50 0000 0000: heap
268 0f50 0000 0000 - 0f60 0000 0000: -
269 0f60 0000 0000 - 1000 0000 0000: modules and main thread stack
271 struct Mapping44 {
272 static const uptr kMetaShadowBeg = 0x0b0000000000ull;
273 static const uptr kMetaShadowEnd = 0x0d0000000000ull;
274 static const uptr kTraceMemBeg = 0x0d0000000000ull;
275 static const uptr kTraceMemEnd = 0x0f0000000000ull;
276 static const uptr kShadowBeg = 0x000100000000ull;
277 static const uptr kShadowEnd = 0x0b0000000000ull;
278 static const uptr kLoAppMemBeg = 0x000000000100ull;
279 static const uptr kLoAppMemEnd = 0x000100000000ull;
280 static const uptr kHeapMemBeg = 0x0f0000000000ull;
281 static const uptr kHeapMemEnd = 0x0f5000000000ull;
282 static const uptr kHiAppMemBeg = 0x0f6000000000ull;
283 static const uptr kHiAppMemEnd = 0x100000000000ull; // 44 bits
284 static const uptr kAppMemMsk = 0x0f0000000000ull;
285 static const uptr kAppMemXor = 0x002100000000ull;
286 static const uptr kVdsoBeg = 0x3c0000000000000ull;
290 C/C++ on linux/powerpc64 (46-bit VMA)
291 0000 0000 1000 - 0100 0000 0000: main binary
292 0100 0000 0000 - 0100 0000 0000: -
293 0100 0000 0000 - 1000 0000 0000: shadow
294 1000 0000 0000 - 1000 0000 0000: -
295 1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
296 2000 0000 0000 - 2000 0000 0000: -
297 2000 0000 0000 - 2200 0000 0000: traces
298 2200 0000 0000 - 3d00 0000 0000: -
299 3d00 0000 0000 - 3e00 0000 0000: heap
300 3e00 0000 0000 - 3e80 0000 0000: -
301 3e80 0000 0000 - 4000 0000 0000: modules and main thread stack
303 struct Mapping46 {
304 static const uptr kMetaShadowBeg = 0x100000000000ull;
305 static const uptr kMetaShadowEnd = 0x200000000000ull;
306 static const uptr kTraceMemBeg = 0x200000000000ull;
307 static const uptr kTraceMemEnd = 0x220000000000ull;
308 static const uptr kShadowBeg = 0x010000000000ull;
309 static const uptr kShadowEnd = 0x100000000000ull;
310 static const uptr kHeapMemBeg = 0x3d0000000000ull;
311 static const uptr kHeapMemEnd = 0x3e0000000000ull;
312 static const uptr kLoAppMemBeg = 0x000000001000ull;
313 static const uptr kLoAppMemEnd = 0x010000000000ull;
314 static const uptr kHiAppMemBeg = 0x3e8000000000ull;
315 static const uptr kHiAppMemEnd = 0x400000000000ull; // 46 bits
316 static const uptr kAppMemMsk = 0x3c0000000000ull;
317 static const uptr kAppMemXor = 0x020000000000ull;
318 static const uptr kVdsoBeg = 0x7800000000000000ull;
322 C/C++ on linux/powerpc64 (47-bit VMA)
323 0000 0000 1000 - 0100 0000 0000: main binary
324 0100 0000 0000 - 0100 0000 0000: -
325 0100 0000 0000 - 1000 0000 0000: shadow
326 1000 0000 0000 - 1000 0000 0000: -
327 1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
328 2000 0000 0000 - 2000 0000 0000: -
329 2000 0000 0000 - 2200 0000 0000: traces
330 2200 0000 0000 - 7d00 0000 0000: -
331 7d00 0000 0000 - 7e00 0000 0000: heap
332 7e00 0000 0000 - 7e80 0000 0000: -
333 7e80 0000 0000 - 8000 0000 0000: modules and main thread stack
335 struct Mapping47 {
336 static const uptr kMetaShadowBeg = 0x100000000000ull;
337 static const uptr kMetaShadowEnd = 0x200000000000ull;
338 static const uptr kTraceMemBeg = 0x200000000000ull;
339 static const uptr kTraceMemEnd = 0x220000000000ull;
340 static const uptr kShadowBeg = 0x010000000000ull;
341 static const uptr kShadowEnd = 0x100000000000ull;
342 static const uptr kHeapMemBeg = 0x7d0000000000ull;
343 static const uptr kHeapMemEnd = 0x7e0000000000ull;
344 static const uptr kLoAppMemBeg = 0x000000001000ull;
345 static const uptr kLoAppMemEnd = 0x010000000000ull;
346 static const uptr kHiAppMemBeg = 0x7e8000000000ull;
347 static const uptr kHiAppMemEnd = 0x800000000000ull; // 47 bits
348 static const uptr kAppMemMsk = 0x7c0000000000ull;
349 static const uptr kAppMemXor = 0x020000000000ull;
350 static const uptr kVdsoBeg = 0x7800000000000000ull;
353 // Indicates that the memory regions are determined at run time rather than at compile time.
354 #define TSAN_RUNTIME_VMA 1
355 #endif
357 #elif SANITIZER_GO && !SANITIZER_WINDOWS && defined(__x86_64__)
359 /* Go on linux, darwin and freebsd on x86_64
360 0000 0000 1000 - 0000 1000 0000: executable
361 0000 1000 0000 - 00c0 0000 0000: -
362 00c0 0000 0000 - 00e0 0000 0000: heap
363 00e0 0000 0000 - 2000 0000 0000: -
364 2000 0000 0000 - 2380 0000 0000: shadow
365 2380 0000 0000 - 3000 0000 0000: -
366 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
367 4000 0000 0000 - 6000 0000 0000: -
368 6000 0000 0000 - 6200 0000 0000: traces
369 6200 0000 0000 - 8000 0000 0000: -
372 struct Mapping {
373 static const uptr kMetaShadowBeg = 0x300000000000ull;
374 static const uptr kMetaShadowEnd = 0x400000000000ull;
375 static const uptr kTraceMemBeg = 0x600000000000ull;
376 static const uptr kTraceMemEnd = 0x620000000000ull;
377 static const uptr kShadowBeg = 0x200000000000ull;
378 static const uptr kShadowEnd = 0x238000000000ull;
379 static const uptr kAppMemBeg = 0x000000001000ull;
380 static const uptr kAppMemEnd = 0x00e000000000ull;
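// A worked example of the Go flavour of MemToShadow (defined later in this
// header), sketched under the assumption kShadowCell == 8 and kShadowCnt == 4
// (the heap address is hypothetical):
//   uptr x = 0x00c000000120;                    // app (heap) address
//   (x & ~(kShadowCell - 1)) * kShadowCnt;      // == 0x030000000480
//   0x030000000480ull | Mapping::kShadowBeg;    // == 0x230000000480
// The result lies inside [kShadowBeg, kShadowEnd) above.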
383 #elif SANITIZER_GO && SANITIZER_WINDOWS
385 /* Go on windows
386 0000 0000 1000 - 0000 1000 0000: executable
387 0000 1000 0000 - 00c0 0000 0000: -
388 00c0 0000 0000 - 00e0 0000 0000: heap
389 00e0 0000 0000 - 0100 0000 0000: -
390 0100 0000 0000 - 0500 0000 0000: shadow
391 0500 0000 0000 - 0560 0000 0000: -
392 0560 0000 0000 - 0760 0000 0000: traces
393 0760 0000 0000 - 07d0 0000 0000: metainfo (memory blocks and sync objects)
394 07d0 0000 0000 - 8000 0000 0000: -
397 struct Mapping {
398 static const uptr kMetaShadowBeg = 0x076000000000ull;
399 static const uptr kMetaShadowEnd = 0x07d000000000ull;
400 static const uptr kTraceMemBeg = 0x056000000000ull;
401 static const uptr kTraceMemEnd = 0x076000000000ull;
402 static const uptr kShadowBeg = 0x010000000000ull;
403 static const uptr kShadowEnd = 0x050000000000ull;
404 static const uptr kAppMemBeg = 0x000000001000ull;
405 static const uptr kAppMemEnd = 0x00e000000000ull;
408 #elif SANITIZER_GO && defined(__powerpc64__)
410 /* Only Mapping46 and Mapping47 are currently supported for powerpc64 on Go. */
412 /* Go on linux/powerpc64 (46-bit VMA)
413 0000 0000 1000 - 0000 1000 0000: executable
414 0000 1000 0000 - 00c0 0000 0000: -
415 00c0 0000 0000 - 00e0 0000 0000: heap
416 00e0 0000 0000 - 2000 0000 0000: -
417 2000 0000 0000 - 2380 0000 0000: shadow
418 2380 0000 0000 - 2400 0000 0000: -
419 2400 0000 0000 - 3400 0000 0000: metainfo (memory blocks and sync objects)
420 3400 0000 0000 - 3600 0000 0000: -
421 3600 0000 0000 - 3800 0000 0000: traces
422 3800 0000 0000 - 4000 0000 0000: -
425 struct Mapping46 {
426 static const uptr kMetaShadowBeg = 0x240000000000ull;
427 static const uptr kMetaShadowEnd = 0x340000000000ull;
428 static const uptr kTraceMemBeg = 0x360000000000ull;
429 static const uptr kTraceMemEnd = 0x380000000000ull;
430 static const uptr kShadowBeg = 0x200000000000ull;
431 static const uptr kShadowEnd = 0x238000000000ull;
432 static const uptr kAppMemBeg = 0x000000001000ull;
433 static const uptr kAppMemEnd = 0x00e000000000ull;
436 /* Go on linux/powerpc64 (47-bit VMA)
437 0000 0000 1000 - 0000 1000 0000: executable
438 0000 1000 0000 - 00c0 0000 0000: -
439 00c0 0000 0000 - 00e0 0000 0000: heap
440 00e0 0000 0000 - 2000 0000 0000: -
441 2000 0000 0000 - 3000 0000 0000: shadow
442 3000 0000 0000 - 3000 0000 0000: -
443 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
444 4000 0000 0000 - 6000 0000 0000: -
445 6000 0000 0000 - 6200 0000 0000: traces
446 6200 0000 0000 - 8000 0000 0000: -
449 struct Mapping47 {
450 static const uptr kMetaShadowBeg = 0x300000000000ull;
451 static const uptr kMetaShadowEnd = 0x400000000000ull;
452 static const uptr kTraceMemBeg = 0x600000000000ull;
453 static const uptr kTraceMemEnd = 0x620000000000ull;
454 static const uptr kShadowBeg = 0x200000000000ull;
455 static const uptr kShadowEnd = 0x300000000000ull;
456 static const uptr kAppMemBeg = 0x000000001000ull;
457 static const uptr kAppMemEnd = 0x00e000000000ull;
460 #elif SANITIZER_GO && defined(__aarch64__)
462 /* Go on linux/aarch64 (48-bit VMA)
463 0000 0000 1000 - 0000 1000 0000: executable
464 0000 1000 0000 - 00c0 0000 0000: -
465 00c0 0000 0000 - 00e0 0000 0000: heap
466 00e0 0000 0000 - 2000 0000 0000: -
467 2000 0000 0000 - 3000 0000 0000: shadow
468 3000 0000 0000 - 3000 0000 0000: -
469 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
470 4000 0000 0000 - 6000 0000 0000: -
471 6000 0000 0000 - 6200 0000 0000: traces
472 6200 0000 0000 - 8000 0000 0000: -
475 struct Mapping {
476 static const uptr kMetaShadowBeg = 0x300000000000ull;
477 static const uptr kMetaShadowEnd = 0x400000000000ull;
478 static const uptr kTraceMemBeg = 0x600000000000ull;
479 static const uptr kTraceMemEnd = 0x620000000000ull;
480 static const uptr kShadowBeg = 0x200000000000ull;
481 static const uptr kShadowEnd = 0x300000000000ull;
482 static const uptr kAppMemBeg = 0x000000001000ull;
483 static const uptr kAppMemEnd = 0x00e000000000ull;
486 // Indicates that the memory regions are determined at run time rather than at compile time.
487 #define TSAN_RUNTIME_VMA 1
489 #else
490 # error "Unknown platform"
491 #endif
494 #ifdef TSAN_RUNTIME_VMA
495 extern uptr vmaSize;
496 #endif
499 enum MappingType {
500 MAPPING_LO_APP_BEG,
501 MAPPING_LO_APP_END,
502 MAPPING_HI_APP_BEG,
503 MAPPING_HI_APP_END,
504 #ifdef TSAN_MID_APP_RANGE
505 MAPPING_MID_APP_BEG,
506 MAPPING_MID_APP_END,
507 #endif
508 MAPPING_HEAP_BEG,
509 MAPPING_HEAP_END,
510 MAPPING_APP_BEG,
511 MAPPING_APP_END,
512 MAPPING_SHADOW_BEG,
513 MAPPING_SHADOW_END,
514 MAPPING_META_SHADOW_BEG,
515 MAPPING_META_SHADOW_END,
516 MAPPING_TRACE_BEG,
517 MAPPING_TRACE_END,
518 MAPPING_VDSO_BEG,
521 template<typename Mapping, int Type>
522 uptr MappingImpl(void) {
523 switch (Type) {
524 #if !SANITIZER_GO
525 case MAPPING_LO_APP_BEG: return Mapping::kLoAppMemBeg;
526 case MAPPING_LO_APP_END: return Mapping::kLoAppMemEnd;
527 # ifdef TSAN_MID_APP_RANGE
528 case MAPPING_MID_APP_BEG: return Mapping::kMidAppMemBeg;
529 case MAPPING_MID_APP_END: return Mapping::kMidAppMemEnd;
530 # endif
531 case MAPPING_HI_APP_BEG: return Mapping::kHiAppMemBeg;
532 case MAPPING_HI_APP_END: return Mapping::kHiAppMemEnd;
533 case MAPPING_HEAP_BEG: return Mapping::kHeapMemBeg;
534 case MAPPING_HEAP_END: return Mapping::kHeapMemEnd;
535 case MAPPING_VDSO_BEG: return Mapping::kVdsoBeg;
536 #else
537 case MAPPING_APP_BEG: return Mapping::kAppMemBeg;
538 case MAPPING_APP_END: return Mapping::kAppMemEnd;
539 #endif
540 case MAPPING_SHADOW_BEG: return Mapping::kShadowBeg;
541 case MAPPING_SHADOW_END: return Mapping::kShadowEnd;
542 case MAPPING_META_SHADOW_BEG: return Mapping::kMetaShadowBeg;
543 case MAPPING_META_SHADOW_END: return Mapping::kMetaShadowEnd;
544 case MAPPING_TRACE_BEG: return Mapping::kTraceMemBeg;
545 case MAPPING_TRACE_END: return Mapping::kTraceMemEnd;
549 template<int Type>
550 uptr MappingArchImpl(void) {
551 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
552 switch (vmaSize) {
553 case 39: return MappingImpl<Mapping39, Type>();
554 case 42: return MappingImpl<Mapping42, Type>();
555 case 48: return MappingImpl<Mapping48, Type>();
557 DCHECK(0);
558 return 0;
559 #elif defined(__powerpc64__)
560 switch (vmaSize) {
561 #if !SANITIZER_GO
562 case 44: return MappingImpl<Mapping44, Type>();
563 #endif
564 case 46: return MappingImpl<Mapping46, Type>();
565 case 47: return MappingImpl<Mapping47, Type>();
567 DCHECK(0);
568 return 0;
569 #else
570 return MappingImpl<Mapping, Type>();
571 #endif
574 #if !SANITIZER_GO
575 ALWAYS_INLINE
576 uptr LoAppMemBeg(void) {
577 return MappingArchImpl<MAPPING_LO_APP_BEG>();
579 ALWAYS_INLINE
580 uptr LoAppMemEnd(void) {
581 return MappingArchImpl<MAPPING_LO_APP_END>();
584 #ifdef TSAN_MID_APP_RANGE
585 ALWAYS_INLINE
586 uptr MidAppMemBeg(void) {
587 return MappingArchImpl<MAPPING_MID_APP_BEG>();
589 ALWAYS_INLINE
590 uptr MidAppMemEnd(void) {
591 return MappingArchImpl<MAPPING_MID_APP_END>();
593 #endif
595 ALWAYS_INLINE
596 uptr HeapMemBeg(void) {
597 return MappingArchImpl<MAPPING_HEAP_BEG>();
599 ALWAYS_INLINE
600 uptr HeapMemEnd(void) {
601 return MappingArchImpl<MAPPING_HEAP_END>();
604 ALWAYS_INLINE
605 uptr HiAppMemBeg(void) {
606 return MappingArchImpl<MAPPING_HI_APP_BEG>();
608 ALWAYS_INLINE
609 uptr HiAppMemEnd(void) {
610 return MappingArchImpl<MAPPING_HI_APP_END>();
613 ALWAYS_INLINE
614 uptr VdsoBeg(void) {
615 return MappingArchImpl<MAPPING_VDSO_BEG>();
618 #else
620 ALWAYS_INLINE
621 uptr AppMemBeg(void) {
622 return MappingArchImpl<MAPPING_APP_BEG>();
624 ALWAYS_INLINE
625 uptr AppMemEnd(void) {
626 return MappingArchImpl<MAPPING_APP_END>();
629 #endif
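// Usage sketch: the accessors above can be combined into simple classifiers;
// e.g. in the !SANITIZER_GO case a (hypothetical) helper could look like:
//   ALWAYS_INLINE bool IsHeapMemExample(uptr p) {
//     return p >= HeapMemBeg() && p < HeapMemEnd();
//   }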
631 static inline
632 bool GetUserRegion(int i, uptr *start, uptr *end) {
633 switch (i) {
634 default:
635 return false;
636 #if !SANITIZER_GO
637 case 0:
638 *start = LoAppMemBeg();
639 *end = LoAppMemEnd();
640 return true;
641 case 1:
642 *start = HiAppMemBeg();
643 *end = HiAppMemEnd();
644 return true;
645 case 2:
646 *start = HeapMemBeg();
647 *end = HeapMemEnd();
648 return true;
649 # ifdef TSAN_MID_APP_RANGE
650 case 3:
651 *start = MidAppMemBeg();
652 *end = MidAppMemEnd();
653 return true;
654 # endif
655 #else
656 case 0:
657 *start = AppMemBeg();
658 *end = AppMemEnd();
659 return true;
660 #endif
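// Usage sketch: GetUserRegion enumerates the application address ranges;
// callers loop over increasing indices until it returns false, e.g.:
//   for (int i = 0; ; i++) {
//     uptr beg, end;
//     if (!GetUserRegion(i, &beg, &end))
//       break;
//     // ... operate on the user region [beg, end) ...
//   }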
664 ALWAYS_INLINE
665 uptr ShadowBeg(void) {
666 return MappingArchImpl<MAPPING_SHADOW_BEG>();
668 ALWAYS_INLINE
669 uptr ShadowEnd(void) {
670 return MappingArchImpl<MAPPING_SHADOW_END>();
673 ALWAYS_INLINE
674 uptr MetaShadowBeg(void) {
675 return MappingArchImpl<MAPPING_META_SHADOW_BEG>();
677 ALWAYS_INLINE
678 uptr MetaShadowEnd(void) {
679 return MappingArchImpl<MAPPING_META_SHADOW_END>();
682 ALWAYS_INLINE
683 uptr TraceMemBeg(void) {
684 return MappingArchImpl<MAPPING_TRACE_BEG>();
686 ALWAYS_INLINE
687 uptr TraceMemEnd(void) {
688 return MappingArchImpl<MAPPING_TRACE_END>();
692 template<typename Mapping>
693 bool IsAppMemImpl(uptr mem) {
694 #if !SANITIZER_GO
695 return (mem >= Mapping::kHeapMemBeg && mem < Mapping::kHeapMemEnd) ||
696 # ifdef TSAN_MID_APP_RANGE
697 (mem >= Mapping::kMidAppMemBeg && mem < Mapping::kMidAppMemEnd) ||
698 # endif
699 (mem >= Mapping::kLoAppMemBeg && mem < Mapping::kLoAppMemEnd) ||
700 (mem >= Mapping::kHiAppMemBeg && mem < Mapping::kHiAppMemEnd);
701 #else
702 return mem >= Mapping::kAppMemBeg && mem < Mapping::kAppMemEnd;
703 #endif
706 ALWAYS_INLINE
707 bool IsAppMem(uptr mem) {
708 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
709 switch (vmaSize) {
710 case 39: return IsAppMemImpl<Mapping39>(mem);
711 case 42: return IsAppMemImpl<Mapping42>(mem);
712 case 48: return IsAppMemImpl<Mapping48>(mem);
714 DCHECK(0);
715 return false;
716 #elif defined(__powerpc64__)
717 switch (vmaSize) {
718 #if !SANITIZER_GO
719 case 44: return IsAppMemImpl<Mapping44>(mem);
720 #endif
721 case 46: return IsAppMemImpl<Mapping46>(mem);
722 case 47: return IsAppMemImpl<Mapping47>(mem);
724 DCHECK(0);
725 return false;
726 #else
727 return IsAppMemImpl<Mapping>(mem);
728 #endif
732 template<typename Mapping>
733 bool IsShadowMemImpl(uptr mem) {
734 return mem >= Mapping::kShadowBeg && mem <= Mapping::kShadowEnd;
737 ALWAYS_INLINE
738 bool IsShadowMem(uptr mem) {
739 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
740 switch (vmaSize) {
741 case 39: return IsShadowMemImpl<Mapping39>(mem);
742 case 42: return IsShadowMemImpl<Mapping42>(mem);
743 case 48: return IsShadowMemImpl<Mapping48>(mem);
745 DCHECK(0);
746 return false;
747 #elif defined(__powerpc64__)
748 switch (vmaSize) {
749 #if !SANITIZER_GO
750 case 44: return IsShadowMemImpl<Mapping44>(mem);
751 #endif
752 case 46: return IsShadowMemImpl<Mapping46>(mem);
753 case 47: return IsShadowMemImpl<Mapping47>(mem);
755 DCHECK(0);
756 return false;
757 #else
758 return IsShadowMemImpl<Mapping>(mem);
759 #endif
763 template<typename Mapping>
764 bool IsMetaMemImpl(uptr mem) {
765 return mem >= Mapping::kMetaShadowBeg && mem <= Mapping::kMetaShadowEnd;
768 ALWAYS_INLINE
769 bool IsMetaMem(uptr mem) {
770 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
771 switch (vmaSize) {
772 case 39: return IsMetaMemImpl<Mapping39>(mem);
773 case 42: return IsMetaMemImpl<Mapping42>(mem);
774 case 48: return IsMetaMemImpl<Mapping48>(mem);
776 DCHECK(0);
777 return false;
778 #elif defined(__powerpc64__)
779 switch (vmaSize) {
780 #if !SANITIZER_GO
781 case 44: return IsMetaMemImpl<Mapping44>(mem);
782 #endif
783 case 46: return IsMetaMemImpl<Mapping46>(mem);
784 case 47: return IsMetaMemImpl<Mapping47>(mem);
786 DCHECK(0);
787 return false;
788 #else
789 return IsMetaMemImpl<Mapping>(mem);
790 #endif
794 template<typename Mapping>
795 uptr MemToShadowImpl(uptr x) {
796 DCHECK(IsAppMem(x));
797 #if !SANITIZER_GO
798 return (((x) & ~(Mapping::kAppMemMsk | (kShadowCell - 1)))
799 ^ Mapping::kAppMemXor) * kShadowCnt;
800 #else
801 # ifndef SANITIZER_WINDOWS
802 return ((x & ~(kShadowCell - 1)) * kShadowCnt) | Mapping::kShadowBeg;
803 # else
804 return ((x & ~(kShadowCell - 1)) * kShadowCnt) + Mapping::kShadowBeg;
805 # endif
806 #endif
809 ALWAYS_INLINE
810 uptr MemToShadow(uptr x) {
811 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
812 switch (vmaSize) {
813 case 39: return MemToShadowImpl<Mapping39>(x);
814 case 42: return MemToShadowImpl<Mapping42>(x);
815 case 48: return MemToShadowImpl<Mapping48>(x);
817 DCHECK(0);
818 return 0;
819 #elif defined(__powerpc64__)
820 switch (vmaSize) {
821 #if !SANITIZER_GO
822 case 44: return MemToShadowImpl<Mapping44>(x);
823 #endif
824 case 46: return MemToShadowImpl<Mapping46>(x);
825 case 47: return MemToShadowImpl<Mapping47>(x);
827 DCHECK(0);
828 return 0;
829 #else
830 return MemToShadowImpl<Mapping>(x);
831 #endif
835 template<typename Mapping>
836 u32 *MemToMetaImpl(uptr x) {
837 DCHECK(IsAppMem(x));
838 #if !SANITIZER_GO
839 return (u32*)(((((x) & ~(Mapping::kAppMemMsk | (kMetaShadowCell - 1)))) /
840 kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
841 #else
842 # ifndef SANITIZER_WINDOWS
843 return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
844 kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
845 # else
846 return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
847 kMetaShadowCell * kMetaShadowSize) + Mapping::kMetaShadowBeg);
848 # endif
849 #endif
852 ALWAYS_INLINE
853 u32 *MemToMeta(uptr x) {
854 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
855 switch (vmaSize) {
856 case 39: return MemToMetaImpl<Mapping39>(x);
857 case 42: return MemToMetaImpl<Mapping42>(x);
858 case 48: return MemToMetaImpl<Mapping48>(x);
860 DCHECK(0);
861 return 0;
862 #elif defined(__powerpc64__)
863 switch (vmaSize) {
864 #if !SANITIZER_GO
865 case 44: return MemToMetaImpl<Mapping44>(x);
866 #endif
867 case 46: return MemToMetaImpl<Mapping46>(x);
868 case 47: return MemToMetaImpl<Mapping47>(x);
870 DCHECK(0);
871 return 0;
872 #else
873 return MemToMetaImpl<Mapping>(x);
874 #endif
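// A worked example of MemToMeta for the x86_64 mapping above, assuming
// kMetaShadowCell == 8 and kMetaShadowSize == 4 as in tsan_defs.h (the heap
// address is hypothetical):
//   uptr x = 0x7b0000001230;
//   x & ~(Mapping::kAppMemMsk | (kMetaShadowCell - 1));     // == 0x030000001230
//   0x030000001230ull / kMetaShadowCell * kMetaShadowSize;  // == 0x018000000918
//   0x018000000918ull | Mapping::kMetaShadowBeg;            // == 0x318000000918
// The result lies inside [kMetaShadowBeg, kMetaShadowEnd).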
878 template<typename Mapping>
879 uptr ShadowToMemImpl(uptr s) {
880 DCHECK(IsShadowMem(s));
881 #if !SANITIZER_GO
882 // The shadow mapping is non-linear and we've lost some bits, so we don't have
883 // an easy way to restore the original app address. But the mapping is a
884 // bijection, so we try to restore the address as belonging to low/mid/high
885 // range consecutively and see if shadow->app->shadow mapping gives us the
886 // same address.
887 uptr p = (s / kShadowCnt) ^ Mapping::kAppMemXor;
888 if (p >= Mapping::kLoAppMemBeg && p < Mapping::kLoAppMemEnd &&
889 MemToShadow(p) == s)
890 return p;
891 # ifdef TSAN_MID_APP_RANGE
892 p = ((s / kShadowCnt) ^ Mapping::kAppMemXor) +
893 (Mapping::kMidAppMemBeg & Mapping::kAppMemMsk);
894 if (p >= Mapping::kMidAppMemBeg && p < Mapping::kMidAppMemEnd &&
895 MemToShadow(p) == s)
896 return p;
897 # endif
898 return ((s / kShadowCnt) ^ Mapping::kAppMemXor) | Mapping::kAppMemMsk;
899 #else // #if !SANITIZER_GO
900 # ifndef SANITIZER_WINDOWS
901 return (s & ~Mapping::kShadowBeg) / kShadowCnt;
902 # else
903 return (s - Mapping::kShadowBeg) / kShadowCnt;
904 # endif // SANITIZER_WINDOWS
905 #endif
908 ALWAYS_INLINE
909 uptr ShadowToMem(uptr s) {
910 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
911 switch (vmaSize) {
912 case 39: return ShadowToMemImpl<Mapping39>(s);
913 case 42: return ShadowToMemImpl<Mapping42>(s);
914 case 48: return ShadowToMemImpl<Mapping48>(s);
916 DCHECK(0);
917 return 0;
918 #elif defined(__powerpc64__)
919 switch (vmaSize) {
920 #if !SANITIZER_GO
921 case 44: return ShadowToMemImpl<Mapping44>(s);
922 #endif
923 case 46: return ShadowToMemImpl<Mapping46>(s);
924 case 47: return ShadowToMemImpl<Mapping47>(s);
926 DCHECK(0);
927 return 0;
928 #else
929 return ShadowToMemImpl<Mapping>(s);
930 #endif
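// Round-trip sketch: since the app-to-shadow mapping is a bijection on the
// application ranges (modulo the low kShadowCell - 1 bits), the following
// property is expected to hold for any valid app address p, assuming
// kShadowCell == 8:
//   DCHECK_EQ(ShadowToMem(MemToShadow(p)), p & ~(kShadowCell - 1));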
935 // The additional page is to catch shadow stack overflow as a paging fault.
936 // Windows wants 64K alignment for mmaps.
937 const uptr kTotalTraceSize = (kTraceSize * sizeof(Event) + sizeof(Trace)
938 + (64 << 10) + (64 << 10) - 1) & ~((64 << 10) - 1);
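// The "+ (64 << 10) - 1) & ~((64 << 10) - 1)" tail above is the usual
// round-up-to-a-multiple idiom (here, to the 64K Windows allocation
// granularity); the same pattern as a standalone sketch:
//   uptr RoundUpTo64K(uptr n) { return (n + (64 << 10) - 1) & ~((64 << 10) - 1); }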
940 template<typename Mapping>
941 uptr GetThreadTraceImpl(int tid) {
942 uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize;
943 DCHECK_LT(p, Mapping::kTraceMemEnd);
944 return p;
947 ALWAYS_INLINE
948 uptr GetThreadTrace(int tid) {
949 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
950 switch (vmaSize) {
951 case 39: return GetThreadTraceImpl<Mapping39>(tid);
952 case 42: return GetThreadTraceImpl<Mapping42>(tid);
953 case 48: return GetThreadTraceImpl<Mapping48>(tid);
955 DCHECK(0);
956 return 0;
957 #elif defined(__powerpc64__)
958 switch (vmaSize) {
959 #if !SANITIZER_GO
960 case 44: return GetThreadTraceImpl<Mapping44>(tid);
961 #endif
962 case 46: return GetThreadTraceImpl<Mapping46>(tid);
963 case 47: return GetThreadTraceImpl<Mapping47>(tid);
965 DCHECK(0);
966 return 0;
967 #else
968 return GetThreadTraceImpl<Mapping>(tid);
969 #endif
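// Derived bound (illustrative): the trace region can hold at most
//   (Mapping::kTraceMemEnd - Mapping::kTraceMemBeg) / kTotalTraceSize
// per-thread trace slots; the DCHECK_LT in GetThreadTraceImpl enforces the
// same limit one thread at a time.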
973 template<typename Mapping>
974 uptr GetThreadTraceHeaderImpl(int tid) {
975 uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize
976 + kTraceSize * sizeof(Event);
977 DCHECK_LT(p, Mapping::kTraceMemEnd);
978 return p;
981 ALWAYS_INLINE
982 uptr GetThreadTraceHeader(int tid) {
983 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
984 switch (vmaSize) {
985 case 39: return GetThreadTraceHeaderImpl<Mapping39>(tid);
986 case 42: return GetThreadTraceHeaderImpl<Mapping42>(tid);
987 case 48: return GetThreadTraceHeaderImpl<Mapping48>(tid);
989 DCHECK(0);
990 return 0;
991 #elif defined(__powerpc64__)
992 switch (vmaSize) {
993 #if !SANITIZER_GO
994 case 44: return GetThreadTraceHeaderImpl<Mapping44>(tid);
995 #endif
996 case 46: return GetThreadTraceHeaderImpl<Mapping46>(tid);
997 case 47: return GetThreadTraceHeaderImpl<Mapping47>(tid);
999 DCHECK(0);
1000 return 0;
1001 #else
1002 return GetThreadTraceHeaderImpl<Mapping>(tid);
1003 #endif
1006 void InitializePlatform();
1007 void InitializePlatformEarly();
1008 void CheckAndProtect();
1009 void InitializeShadowMemoryPlatform();
1010 void FlushShadowMemory();
1011 void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive);
1012 int ExtractResolvFDs(void *state, int *fds, int nfd);
1013 int ExtractRecvmsgFDs(void *msg, int *fds, int nfd);
1014 uptr ExtractLongJmpSp(uptr *env);
1015 void ImitateTlsWrite(ThreadState *thr, uptr tls_addr, uptr tls_size);
1017 int call_pthread_cancel_with_cleanup(int(*fn)(void *c, void *m,
1018 void *abstime), void *c, void *m, void *abstime,
1019 void(*cleanup)(void *arg), void *arg);
1021 void DestroyThreadState();
1023 } // namespace __tsan
1025 #endif // TSAN_PLATFORM_H