//===-- tsan_platform.h -----------------------------------------*- C++ -*-===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// Platform-specific code.
//===----------------------------------------------------------------------===//
13 #ifndef TSAN_PLATFORM_H
14 #define TSAN_PLATFORM_H
16 #if !defined(__LP64__) && !defined(_WIN64)
17 # error "Only 64-bit is supported"
18 #endif
20 #include "tsan_defs.h"
21 #include "tsan_trace.h"
23 namespace __tsan {
25 #if !SANITIZER_GO
27 #if defined(__x86_64__)
29 C/C++ on linux/x86_64 and freebsd/x86_64
30 0000 0000 1000 - 0080 0000 0000: main binary and/or MAP_32BIT mappings (512GB)
31 0040 0000 0000 - 0100 0000 0000: -
32 0100 0000 0000 - 2000 0000 0000: shadow
33 2000 0000 0000 - 3000 0000 0000: -
34 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
35 4000 0000 0000 - 5500 0000 0000: -
36 5500 0000 0000 - 5680 0000 0000: pie binaries without ASLR or on 4.1+ kernels
37 5680 0000 0000 - 6000 0000 0000: -
38 6000 0000 0000 - 6200 0000 0000: traces
39 6200 0000 0000 - 7d00 0000 0000: -
40 7b00 0000 0000 - 7c00 0000 0000: heap
41 7c00 0000 0000 - 7e80 0000 0000: -
42 7e80 0000 0000 - 8000 0000 0000: modules and main thread stack
44 struct Mapping {
45 static const uptr kMetaShadowBeg = 0x300000000000ull;
46 static const uptr kMetaShadowEnd = 0x340000000000ull;
47 static const uptr kTraceMemBeg = 0x600000000000ull;
48 static const uptr kTraceMemEnd = 0x620000000000ull;
49 static const uptr kShadowBeg = 0x010000000000ull;
50 static const uptr kShadowEnd = 0x200000000000ull;
51 static const uptr kHeapMemBeg = 0x7b0000000000ull;
52 static const uptr kHeapMemEnd = 0x7c0000000000ull;
53 static const uptr kLoAppMemBeg = 0x000000001000ull;
54 static const uptr kLoAppMemEnd = 0x008000000000ull;
55 static const uptr kMidAppMemBeg = 0x550000000000ull;
56 static const uptr kMidAppMemEnd = 0x568000000000ull;
57 static const uptr kHiAppMemBeg = 0x7e8000000000ull;
58 static const uptr kHiAppMemEnd = 0x800000000000ull;
59 static const uptr kAppMemMsk = 0x780000000000ull;
60 static const uptr kAppMemXor = 0x040000000000ull;
61 static const uptr kVdsoBeg = 0xf000000000000000ull;
64 #define TSAN_MID_APP_RANGE 1
65 #elif defined(__mips64)
67 C/C++ on linux/mips64
68 0100 0000 00 - 0200 0000 00: main binary
69 0200 0000 00 - 1400 0000 00: -
70 1400 0000 00 - 2400 0000 00: shadow
71 2400 0000 00 - 3000 0000 00: -
72 3000 0000 00 - 4000 0000 00: metainfo (memory blocks and sync objects)
73 4000 0000 00 - 6000 0000 00: -
74 6000 0000 00 - 6200 0000 00: traces
75 6200 0000 00 - fe00 0000 00: -
76 fe00 0000 00 - ff00 0000 00: heap
77 ff00 0000 00 - ff80 0000 00: -
78 ff80 0000 00 - ffff ffff ff: modules and main thread stack
80 struct Mapping {
81 static const uptr kMetaShadowBeg = 0x4000000000ull;
82 static const uptr kMetaShadowEnd = 0x5000000000ull;
83 static const uptr kTraceMemBeg = 0xb000000000ull;
84 static const uptr kTraceMemEnd = 0xb200000000ull;
85 static const uptr kShadowBeg = 0x2400000000ull;
86 static const uptr kShadowEnd = 0x4000000000ull;
87 static const uptr kHeapMemBeg = 0xfe00000000ull;
88 static const uptr kHeapMemEnd = 0xff00000000ull;
89 static const uptr kLoAppMemBeg = 0x0100000000ull;
90 static const uptr kLoAppMemEnd = 0x0200000000ull;
91 static const uptr kMidAppMemBeg = 0xaa00000000ull;
92 static const uptr kMidAppMemEnd = 0xab00000000ull;
93 static const uptr kHiAppMemBeg = 0xff80000000ull;
94 static const uptr kHiAppMemEnd = 0xffffffffffull;
95 static const uptr kAppMemMsk = 0xf800000000ull;
96 static const uptr kAppMemXor = 0x0800000000ull;
97 static const uptr kVdsoBeg = 0xfffff00000ull;
100 #define TSAN_MID_APP_RANGE 1
101 #elif defined(__aarch64__) && defined(__APPLE__)
103 C/C++ on Darwin/iOS/ARM64 (36-bit VMA, 64 GB VM)
104 0000 0000 00 - 0100 0000 00: - (4 GB)
105 0100 0000 00 - 0200 0000 00: main binary, modules, thread stacks (4 GB)
106 0200 0000 00 - 0300 0000 00: heap (4 GB)
107 0300 0000 00 - 0400 0000 00: - (4 GB)
108 0400 0000 00 - 0c00 0000 00: shadow memory (32 GB)
109 0c00 0000 00 - 0d00 0000 00: - (4 GB)
110 0d00 0000 00 - 0e00 0000 00: metainfo (4 GB)
111 0e00 0000 00 - 0f00 0000 00: - (4 GB)
112 0f00 0000 00 - 1000 0000 00: traces (4 GB)
114 struct Mapping {
115 static const uptr kLoAppMemBeg = 0x0100000000ull;
116 static const uptr kLoAppMemEnd = 0x0200000000ull;
117 static const uptr kHeapMemBeg = 0x0200000000ull;
118 static const uptr kHeapMemEnd = 0x0300000000ull;
119 static const uptr kShadowBeg = 0x0400000000ull;
120 static const uptr kShadowEnd = 0x0c00000000ull;
121 static const uptr kMetaShadowBeg = 0x0d00000000ull;
122 static const uptr kMetaShadowEnd = 0x0e00000000ull;
123 static const uptr kTraceMemBeg = 0x0f00000000ull;
124 static const uptr kTraceMemEnd = 0x1000000000ull;
125 static const uptr kHiAppMemBeg = 0x1000000000ull;
126 static const uptr kHiAppMemEnd = 0x1000000000ull;
127 static const uptr kAppMemMsk = 0x0ull;
128 static const uptr kAppMemXor = 0x0ull;
129 static const uptr kVdsoBeg = 0x7000000000000000ull;
132 #elif defined(__aarch64__)
133 // AArch64 supports multiple VMA which leads to multiple address transformation
134 // functions. To support these multiple VMAS transformations and mappings TSAN
135 // runtime for AArch64 uses an external memory read (vmaSize) to select which
136 // mapping to use. Although slower, it make a same instrumented binary run on
137 // multiple kernels.
140 C/C++ on linux/aarch64 (39-bit VMA)
141 0000 0010 00 - 0100 0000 00: main binary
142 0100 0000 00 - 0800 0000 00: -
143 0800 0000 00 - 2000 0000 00: shadow memory
144 2000 0000 00 - 3100 0000 00: -
145 3100 0000 00 - 3400 0000 00: metainfo
146 3400 0000 00 - 5500 0000 00: -
147 5500 0000 00 - 5600 0000 00: main binary (PIE)
148 5600 0000 00 - 6000 0000 00: -
149 6000 0000 00 - 6200 0000 00: traces
150 6200 0000 00 - 7d00 0000 00: -
151 7c00 0000 00 - 7d00 0000 00: heap
152 7d00 0000 00 - 7fff ffff ff: modules and main thread stack
154 struct Mapping39 {
155 static const uptr kLoAppMemBeg = 0x0000001000ull;
156 static const uptr kLoAppMemEnd = 0x0100000000ull;
157 static const uptr kShadowBeg = 0x0800000000ull;
158 static const uptr kShadowEnd = 0x2000000000ull;
159 static const uptr kMetaShadowBeg = 0x3100000000ull;
160 static const uptr kMetaShadowEnd = 0x3400000000ull;
161 static const uptr kMidAppMemBeg = 0x5500000000ull;
162 static const uptr kMidAppMemEnd = 0x5600000000ull;
163 static const uptr kTraceMemBeg = 0x6000000000ull;
164 static const uptr kTraceMemEnd = 0x6200000000ull;
165 static const uptr kHeapMemBeg = 0x7c00000000ull;
166 static const uptr kHeapMemEnd = 0x7d00000000ull;
167 static const uptr kHiAppMemBeg = 0x7e00000000ull;
168 static const uptr kHiAppMemEnd = 0x7fffffffffull;
169 static const uptr kAppMemMsk = 0x7800000000ull;
170 static const uptr kAppMemXor = 0x0200000000ull;
171 static const uptr kVdsoBeg = 0x7f00000000ull;
175 C/C++ on linux/aarch64 (42-bit VMA)
176 00000 0010 00 - 01000 0000 00: main binary
177 01000 0000 00 - 10000 0000 00: -
178 10000 0000 00 - 20000 0000 00: shadow memory
179 20000 0000 00 - 26000 0000 00: -
180 26000 0000 00 - 28000 0000 00: metainfo
181 28000 0000 00 - 2aa00 0000 00: -
182 2aa00 0000 00 - 2ab00 0000 00: main binary (PIE)
183 2ab00 0000 00 - 36200 0000 00: -
184 36200 0000 00 - 36240 0000 00: traces
185 36240 0000 00 - 3e000 0000 00: -
186 3e000 0000 00 - 3f000 0000 00: heap
187 3f000 0000 00 - 3ffff ffff ff: modules and main thread stack
189 struct Mapping42 {
190 static const uptr kLoAppMemBeg = 0x00000001000ull;
191 static const uptr kLoAppMemEnd = 0x01000000000ull;
192 static const uptr kShadowBeg = 0x10000000000ull;
193 static const uptr kShadowEnd = 0x20000000000ull;
194 static const uptr kMetaShadowBeg = 0x26000000000ull;
195 static const uptr kMetaShadowEnd = 0x28000000000ull;
196 static const uptr kMidAppMemBeg = 0x2aa00000000ull;
197 static const uptr kMidAppMemEnd = 0x2ab00000000ull;
198 static const uptr kTraceMemBeg = 0x36200000000ull;
199 static const uptr kTraceMemEnd = 0x36400000000ull;
200 static const uptr kHeapMemBeg = 0x3e000000000ull;
201 static const uptr kHeapMemEnd = 0x3f000000000ull;
202 static const uptr kHiAppMemBeg = 0x3f000000000ull;
203 static const uptr kHiAppMemEnd = 0x3ffffffffffull;
204 static const uptr kAppMemMsk = 0x3c000000000ull;
205 static const uptr kAppMemXor = 0x04000000000ull;
206 static const uptr kVdsoBeg = 0x37f00000000ull;
209 struct Mapping48 {
210 static const uptr kLoAppMemBeg = 0x0000000001000ull;
211 static const uptr kLoAppMemEnd = 0x0000200000000ull;
212 static const uptr kShadowBeg = 0x0002000000000ull;
213 static const uptr kShadowEnd = 0x0004000000000ull;
214 static const uptr kMetaShadowBeg = 0x0005000000000ull;
215 static const uptr kMetaShadowEnd = 0x0006000000000ull;
216 static const uptr kMidAppMemBeg = 0x0aaaa00000000ull;
217 static const uptr kMidAppMemEnd = 0x0aaaf00000000ull;
218 static const uptr kTraceMemBeg = 0x0f06000000000ull;
219 static const uptr kTraceMemEnd = 0x0f06200000000ull;
220 static const uptr kHeapMemBeg = 0x0ffff00000000ull;
221 static const uptr kHeapMemEnd = 0x0ffff00000000ull;
222 static const uptr kHiAppMemBeg = 0x0ffff00000000ull;
223 static const uptr kHiAppMemEnd = 0x1000000000000ull;
224 static const uptr kAppMemMsk = 0x0fff800000000ull;
225 static const uptr kAppMemXor = 0x0000800000000ull;
226 static const uptr kVdsoBeg = 0xffff000000000ull;
229 // Indicates the runtime will define the memory regions at runtime.
230 #define TSAN_RUNTIME_VMA 1
231 // Indicates that mapping defines a mid range memory segment.
232 #define TSAN_MID_APP_RANGE 1
233 #elif defined(__powerpc64__)
234 // PPC64 supports multiple VMA which leads to multiple address transformation
235 // functions. To support these multiple VMAS transformations and mappings TSAN
236 // runtime for PPC64 uses an external memory read (vmaSize) to select which
237 // mapping to use. Although slower, it make a same instrumented binary run on
238 // multiple kernels.
241 C/C++ on linux/powerpc64 (44-bit VMA)
242 0000 0000 0100 - 0001 0000 0000: main binary
243 0001 0000 0000 - 0001 0000 0000: -
244 0001 0000 0000 - 0b00 0000 0000: shadow
245 0b00 0000 0000 - 0b00 0000 0000: -
246 0b00 0000 0000 - 0d00 0000 0000: metainfo (memory blocks and sync objects)
247 0d00 0000 0000 - 0d00 0000 0000: -
248 0d00 0000 0000 - 0f00 0000 0000: traces
249 0f00 0000 0000 - 0f00 0000 0000: -
250 0f00 0000 0000 - 0f50 0000 0000: heap
251 0f50 0000 0000 - 0f60 0000 0000: -
252 0f60 0000 0000 - 1000 0000 0000: modules and main thread stack
254 struct Mapping44 {
255 static const uptr kMetaShadowBeg = 0x0b0000000000ull;
256 static const uptr kMetaShadowEnd = 0x0d0000000000ull;
257 static const uptr kTraceMemBeg = 0x0d0000000000ull;
258 static const uptr kTraceMemEnd = 0x0f0000000000ull;
259 static const uptr kShadowBeg = 0x000100000000ull;
260 static const uptr kShadowEnd = 0x0b0000000000ull;
261 static const uptr kLoAppMemBeg = 0x000000000100ull;
262 static const uptr kLoAppMemEnd = 0x000100000000ull;
263 static const uptr kHeapMemBeg = 0x0f0000000000ull;
264 static const uptr kHeapMemEnd = 0x0f5000000000ull;
265 static const uptr kHiAppMemBeg = 0x0f6000000000ull;
266 static const uptr kHiAppMemEnd = 0x100000000000ull; // 44 bits
267 static const uptr kAppMemMsk = 0x0f0000000000ull;
268 static const uptr kAppMemXor = 0x002100000000ull;
269 static const uptr kVdsoBeg = 0x3c0000000000000ull;
273 C/C++ on linux/powerpc64 (46-bit VMA)
274 0000 0000 1000 - 0100 0000 0000: main binary
275 0100 0000 0000 - 0200 0000 0000: -
276 0100 0000 0000 - 1000 0000 0000: shadow
277 1000 0000 0000 - 1000 0000 0000: -
278 1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
279 2000 0000 0000 - 2000 0000 0000: -
280 2000 0000 0000 - 2200 0000 0000: traces
281 2200 0000 0000 - 3d00 0000 0000: -
282 3d00 0000 0000 - 3e00 0000 0000: heap
283 3e00 0000 0000 - 3e80 0000 0000: -
284 3e80 0000 0000 - 4000 0000 0000: modules and main thread stack
286 struct Mapping46 {
287 static const uptr kMetaShadowBeg = 0x100000000000ull;
288 static const uptr kMetaShadowEnd = 0x200000000000ull;
289 static const uptr kTraceMemBeg = 0x200000000000ull;
290 static const uptr kTraceMemEnd = 0x220000000000ull;
291 static const uptr kShadowBeg = 0x010000000000ull;
292 static const uptr kShadowEnd = 0x100000000000ull;
293 static const uptr kHeapMemBeg = 0x3d0000000000ull;
294 static const uptr kHeapMemEnd = 0x3e0000000000ull;
295 static const uptr kLoAppMemBeg = 0x000000001000ull;
296 static const uptr kLoAppMemEnd = 0x010000000000ull;
297 static const uptr kHiAppMemBeg = 0x3e8000000000ull;
298 static const uptr kHiAppMemEnd = 0x400000000000ull; // 46 bits
299 static const uptr kAppMemMsk = 0x3c0000000000ull;
300 static const uptr kAppMemXor = 0x020000000000ull;
301 static const uptr kVdsoBeg = 0x7800000000000000ull;
304 // Indicates the runtime will define the memory regions at runtime.
305 #define TSAN_RUNTIME_VMA 1
306 #endif
308 #elif SANITIZER_GO && !SANITIZER_WINDOWS
310 /* Go on linux, darwin and freebsd
311 0000 0000 1000 - 0000 1000 0000: executable
312 0000 1000 0000 - 00c0 0000 0000: -
313 00c0 0000 0000 - 00e0 0000 0000: heap
314 00e0 0000 0000 - 2000 0000 0000: -
315 2000 0000 0000 - 2380 0000 0000: shadow
316 2380 0000 0000 - 3000 0000 0000: -
317 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
318 4000 0000 0000 - 6000 0000 0000: -
319 6000 0000 0000 - 6200 0000 0000: traces
320 6200 0000 0000 - 8000 0000 0000: -
323 struct Mapping {
324 static const uptr kMetaShadowBeg = 0x300000000000ull;
325 static const uptr kMetaShadowEnd = 0x400000000000ull;
326 static const uptr kTraceMemBeg = 0x600000000000ull;
327 static const uptr kTraceMemEnd = 0x620000000000ull;
328 static const uptr kShadowBeg = 0x200000000000ull;
329 static const uptr kShadowEnd = 0x238000000000ull;
330 static const uptr kAppMemBeg = 0x000000001000ull;
331 static const uptr kAppMemEnd = 0x00e000000000ull;
334 #elif SANITIZER_GO && SANITIZER_WINDOWS
336 /* Go on windows
337 0000 0000 1000 - 0000 1000 0000: executable
338 0000 1000 0000 - 00f8 0000 0000: -
339 00c0 0000 0000 - 00e0 0000 0000: heap
340 00e0 0000 0000 - 0100 0000 0000: -
341 0100 0000 0000 - 0500 0000 0000: shadow
342 0500 0000 0000 - 0560 0000 0000: -
343 0560 0000 0000 - 0760 0000 0000: traces
344 0760 0000 0000 - 07d0 0000 0000: metainfo (memory blocks and sync objects)
345 07d0 0000 0000 - 8000 0000 0000: -
348 struct Mapping {
349 static const uptr kMetaShadowBeg = 0x076000000000ull;
350 static const uptr kMetaShadowEnd = 0x07d000000000ull;
351 static const uptr kTraceMemBeg = 0x056000000000ull;
352 static const uptr kTraceMemEnd = 0x076000000000ull;
353 static const uptr kShadowBeg = 0x010000000000ull;
354 static const uptr kShadowEnd = 0x050000000000ull;
355 static const uptr kAppMemBeg = 0x000000001000ull;
356 static const uptr kAppMemEnd = 0x00e000000000ull;
359 #else
360 # error "Unknown platform"
361 #endif
#ifdef TSAN_RUNTIME_VMA
// VMA size detected at startup; selects Mapping39/42/44/46/48 at runtime.
extern uptr vmaSize;
#endif
// Selector for MappingImpl/MappingArchImpl: which Mapping constant to fetch.
enum MappingType {
  MAPPING_LO_APP_BEG,
  MAPPING_LO_APP_END,
  MAPPING_HI_APP_BEG,
  MAPPING_HI_APP_END,
#ifdef TSAN_MID_APP_RANGE
  MAPPING_MID_APP_BEG,
  MAPPING_MID_APP_END,
#endif
  MAPPING_HEAP_BEG,
  MAPPING_HEAP_END,
  MAPPING_APP_BEG,
  MAPPING_APP_END,
  MAPPING_SHADOW_BEG,
  MAPPING_SHADOW_END,
  MAPPING_META_SHADOW_BEG,
  MAPPING_META_SHADOW_END,
  MAPPING_TRACE_BEG,
  MAPPING_TRACE_END,
  MAPPING_VDSO_BEG,
};
391 template<typename Mapping, int Type>
392 uptr MappingImpl(void) {
393 switch (Type) {
394 #if !SANITIZER_GO
395 case MAPPING_LO_APP_BEG: return Mapping::kLoAppMemBeg;
396 case MAPPING_LO_APP_END: return Mapping::kLoAppMemEnd;
397 # ifdef TSAN_MID_APP_RANGE
398 case MAPPING_MID_APP_BEG: return Mapping::kMidAppMemBeg;
399 case MAPPING_MID_APP_END: return Mapping::kMidAppMemEnd;
400 # endif
401 case MAPPING_HI_APP_BEG: return Mapping::kHiAppMemBeg;
402 case MAPPING_HI_APP_END: return Mapping::kHiAppMemEnd;
403 case MAPPING_HEAP_BEG: return Mapping::kHeapMemBeg;
404 case MAPPING_HEAP_END: return Mapping::kHeapMemEnd;
405 case MAPPING_VDSO_BEG: return Mapping::kVdsoBeg;
406 #else
407 case MAPPING_APP_BEG: return Mapping::kAppMemBeg;
408 case MAPPING_APP_END: return Mapping::kAppMemEnd;
409 #endif
410 case MAPPING_SHADOW_BEG: return Mapping::kShadowBeg;
411 case MAPPING_SHADOW_END: return Mapping::kShadowEnd;
412 case MAPPING_META_SHADOW_BEG: return Mapping::kMetaShadowBeg;
413 case MAPPING_META_SHADOW_END: return Mapping::kMetaShadowEnd;
414 case MAPPING_TRACE_BEG: return Mapping::kTraceMemBeg;
415 case MAPPING_TRACE_END: return Mapping::kTraceMemEnd;
419 template<int Type>
420 uptr MappingArchImpl(void) {
421 #if defined(__aarch64__) && !defined(__APPLE__)
422 switch (vmaSize) {
423 case 39: return MappingImpl<Mapping39, Type>();
424 case 42: return MappingImpl<Mapping42, Type>();
425 case 48: return MappingImpl<Mapping48, Type>();
427 DCHECK(0);
428 return 0;
429 #elif defined(__powerpc64__)
430 if (vmaSize == 44)
431 return MappingImpl<Mapping44, Type>();
432 else
433 return MappingImpl<Mapping46, Type>();
434 DCHECK(0);
435 #else
436 return MappingImpl<Mapping, Type>();
437 #endif
440 #if !SANITIZER_GO
441 ALWAYS_INLINE
442 uptr LoAppMemBeg(void) {
443 return MappingArchImpl<MAPPING_LO_APP_BEG>();
445 ALWAYS_INLINE
446 uptr LoAppMemEnd(void) {
447 return MappingArchImpl<MAPPING_LO_APP_END>();
450 #ifdef TSAN_MID_APP_RANGE
451 ALWAYS_INLINE
452 uptr MidAppMemBeg(void) {
453 return MappingArchImpl<MAPPING_MID_APP_BEG>();
455 ALWAYS_INLINE
456 uptr MidAppMemEnd(void) {
457 return MappingArchImpl<MAPPING_MID_APP_END>();
459 #endif
461 ALWAYS_INLINE
462 uptr HeapMemBeg(void) {
463 return MappingArchImpl<MAPPING_HEAP_BEG>();
465 ALWAYS_INLINE
466 uptr HeapMemEnd(void) {
467 return MappingArchImpl<MAPPING_HEAP_END>();
470 ALWAYS_INLINE
471 uptr HiAppMemBeg(void) {
472 return MappingArchImpl<MAPPING_HI_APP_BEG>();
474 ALWAYS_INLINE
475 uptr HiAppMemEnd(void) {
476 return MappingArchImpl<MAPPING_HI_APP_END>();
479 ALWAYS_INLINE
480 uptr VdsoBeg(void) {
481 return MappingArchImpl<MAPPING_VDSO_BEG>();
484 #else
486 ALWAYS_INLINE
487 uptr AppMemBeg(void) {
488 return MappingArchImpl<MAPPING_APP_BEG>();
490 ALWAYS_INLINE
491 uptr AppMemEnd(void) {
492 return MappingArchImpl<MAPPING_APP_END>();
495 #endif
497 static inline
498 bool GetUserRegion(int i, uptr *start, uptr *end) {
499 switch (i) {
500 default:
501 return false;
502 #if !SANITIZER_GO
503 case 0:
504 *start = LoAppMemBeg();
505 *end = LoAppMemEnd();
506 return true;
507 case 1:
508 *start = HiAppMemBeg();
509 *end = HiAppMemEnd();
510 return true;
511 case 2:
512 *start = HeapMemBeg();
513 *end = HeapMemEnd();
514 return true;
515 # ifdef TSAN_MID_APP_RANGE
516 case 3:
517 *start = MidAppMemBeg();
518 *end = MidAppMemEnd();
519 return true;
520 # endif
521 #else
522 case 0:
523 *start = AppMemBeg();
524 *end = AppMemEnd();
525 return true;
526 #endif
530 ALWAYS_INLINE
531 uptr ShadowBeg(void) {
532 return MappingArchImpl<MAPPING_SHADOW_BEG>();
534 ALWAYS_INLINE
535 uptr ShadowEnd(void) {
536 return MappingArchImpl<MAPPING_SHADOW_END>();
539 ALWAYS_INLINE
540 uptr MetaShadowBeg(void) {
541 return MappingArchImpl<MAPPING_META_SHADOW_BEG>();
543 ALWAYS_INLINE
544 uptr MetaShadowEnd(void) {
545 return MappingArchImpl<MAPPING_META_SHADOW_END>();
548 ALWAYS_INLINE
549 uptr TraceMemBeg(void) {
550 return MappingArchImpl<MAPPING_TRACE_BEG>();
552 ALWAYS_INLINE
553 uptr TraceMemEnd(void) {
554 return MappingArchImpl<MAPPING_TRACE_END>();
558 template<typename Mapping>
559 bool IsAppMemImpl(uptr mem) {
560 #if !SANITIZER_GO
561 return (mem >= Mapping::kHeapMemBeg && mem < Mapping::kHeapMemEnd) ||
562 # ifdef TSAN_MID_APP_RANGE
563 (mem >= Mapping::kMidAppMemBeg && mem < Mapping::kMidAppMemEnd) ||
564 # endif
565 (mem >= Mapping::kLoAppMemBeg && mem < Mapping::kLoAppMemEnd) ||
566 (mem >= Mapping::kHiAppMemBeg && mem < Mapping::kHiAppMemEnd);
567 #else
568 return mem >= Mapping::kAppMemBeg && mem < Mapping::kAppMemEnd;
569 #endif
572 ALWAYS_INLINE
573 bool IsAppMem(uptr mem) {
574 #if defined(__aarch64__) && !defined(__APPLE__)
575 switch (vmaSize) {
576 case 39: return IsAppMemImpl<Mapping39>(mem);
577 case 42: return IsAppMemImpl<Mapping42>(mem);
578 case 48: return IsAppMemImpl<Mapping48>(mem);
580 DCHECK(0);
581 return false;
582 #elif defined(__powerpc64__)
583 if (vmaSize == 44)
584 return IsAppMemImpl<Mapping44>(mem);
585 else
586 return IsAppMemImpl<Mapping46>(mem);
587 DCHECK(0);
588 #else
589 return IsAppMemImpl<Mapping>(mem);
590 #endif
594 template<typename Mapping>
595 bool IsShadowMemImpl(uptr mem) {
596 return mem >= Mapping::kShadowBeg && mem <= Mapping::kShadowEnd;
599 ALWAYS_INLINE
600 bool IsShadowMem(uptr mem) {
601 #if defined(__aarch64__) && !defined(__APPLE__)
602 switch (vmaSize) {
603 case 39: return IsShadowMemImpl<Mapping39>(mem);
604 case 42: return IsShadowMemImpl<Mapping42>(mem);
605 case 48: return IsShadowMemImpl<Mapping48>(mem);
607 DCHECK(0);
608 return false;
609 #elif defined(__powerpc64__)
610 if (vmaSize == 44)
611 return IsShadowMemImpl<Mapping44>(mem);
612 else
613 return IsShadowMemImpl<Mapping46>(mem);
614 DCHECK(0);
615 #else
616 return IsShadowMemImpl<Mapping>(mem);
617 #endif
621 template<typename Mapping>
622 bool IsMetaMemImpl(uptr mem) {
623 return mem >= Mapping::kMetaShadowBeg && mem <= Mapping::kMetaShadowEnd;
626 ALWAYS_INLINE
627 bool IsMetaMem(uptr mem) {
628 #if defined(__aarch64__) && !defined(__APPLE__)
629 switch (vmaSize) {
630 case 39: return IsMetaMemImpl<Mapping39>(mem);
631 case 42: return IsMetaMemImpl<Mapping42>(mem);
632 case 48: return IsMetaMemImpl<Mapping48>(mem);
634 DCHECK(0);
635 return false;
636 #elif defined(__powerpc64__)
637 if (vmaSize == 44)
638 return IsMetaMemImpl<Mapping44>(mem);
639 else
640 return IsMetaMemImpl<Mapping46>(mem);
641 DCHECK(0);
642 #else
643 return IsMetaMemImpl<Mapping>(mem);
644 #endif
648 template<typename Mapping>
649 uptr MemToShadowImpl(uptr x) {
650 DCHECK(IsAppMem(x));
651 #if !SANITIZER_GO
652 return (((x) & ~(Mapping::kAppMemMsk | (kShadowCell - 1)))
653 ^ Mapping::kAppMemXor) * kShadowCnt;
654 #else
655 # ifndef SANITIZER_WINDOWS
656 return ((x & ~(kShadowCell - 1)) * kShadowCnt) | Mapping::kShadowBeg;
657 # else
658 return ((x & ~(kShadowCell - 1)) * kShadowCnt) + Mapping::kShadowBeg;
659 # endif
660 #endif
663 ALWAYS_INLINE
664 uptr MemToShadow(uptr x) {
665 #if defined(__aarch64__) && !defined(__APPLE__)
666 switch (vmaSize) {
667 case 39: return MemToShadowImpl<Mapping39>(x);
668 case 42: return MemToShadowImpl<Mapping42>(x);
669 case 48: return MemToShadowImpl<Mapping48>(x);
671 DCHECK(0);
672 return 0;
673 #elif defined(__powerpc64__)
674 if (vmaSize == 44)
675 return MemToShadowImpl<Mapping44>(x);
676 else
677 return MemToShadowImpl<Mapping46>(x);
678 DCHECK(0);
679 #else
680 return MemToShadowImpl<Mapping>(x);
681 #endif
685 template<typename Mapping>
686 u32 *MemToMetaImpl(uptr x) {
687 DCHECK(IsAppMem(x));
688 #if !SANITIZER_GO
689 return (u32*)(((((x) & ~(Mapping::kAppMemMsk | (kMetaShadowCell - 1)))) /
690 kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
691 #else
692 # ifndef SANITIZER_WINDOWS
693 return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
694 kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
695 # else
696 return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
697 kMetaShadowCell * kMetaShadowSize) + Mapping::kMetaShadowBeg);
698 # endif
699 #endif
702 ALWAYS_INLINE
703 u32 *MemToMeta(uptr x) {
704 #if defined(__aarch64__) && !defined(__APPLE__)
705 switch (vmaSize) {
706 case 39: return MemToMetaImpl<Mapping39>(x);
707 case 42: return MemToMetaImpl<Mapping42>(x);
708 case 48: return MemToMetaImpl<Mapping48>(x);
710 DCHECK(0);
711 return 0;
712 #elif defined(__powerpc64__)
713 if (vmaSize == 44)
714 return MemToMetaImpl<Mapping44>(x);
715 else
716 return MemToMetaImpl<Mapping46>(x);
717 DCHECK(0);
718 #else
719 return MemToMetaImpl<Mapping>(x);
720 #endif
724 template<typename Mapping>
725 uptr ShadowToMemImpl(uptr s) {
726 DCHECK(IsShadowMem(s));
727 #if !SANITIZER_GO
728 // The shadow mapping is non-linear and we've lost some bits, so we don't have
729 // an easy way to restore the original app address. But the mapping is a
730 // bijection, so we try to restore the address as belonging to low/mid/high
731 // range consecutively and see if shadow->app->shadow mapping gives us the
732 // same address.
733 uptr p = (s / kShadowCnt) ^ Mapping::kAppMemXor;
734 if (p >= Mapping::kLoAppMemBeg && p < Mapping::kLoAppMemEnd &&
735 MemToShadow(p) == s)
736 return p;
737 # ifdef TSAN_MID_APP_RANGE
738 p = ((s / kShadowCnt) ^ Mapping::kAppMemXor) +
739 (Mapping::kMidAppMemBeg & Mapping::kAppMemMsk);
740 if (p >= Mapping::kMidAppMemBeg && p < Mapping::kMidAppMemEnd &&
741 MemToShadow(p) == s)
742 return p;
743 # endif
744 return ((s / kShadowCnt) ^ Mapping::kAppMemXor) | Mapping::kAppMemMsk;
745 #else // #if !SANITIZER_GO
746 # ifndef SANITIZER_WINDOWS
747 return (s & ~Mapping::kShadowBeg) / kShadowCnt;
748 # else
749 return (s - Mapping::kShadowBeg) / kShadowCnt;
750 # endif // SANITIZER_WINDOWS
751 #endif
754 ALWAYS_INLINE
755 uptr ShadowToMem(uptr s) {
756 #if defined(__aarch64__) && !defined(__APPLE__)
757 switch (vmaSize) {
758 case 39: return ShadowToMemImpl<Mapping39>(s);
759 case 42: return ShadowToMemImpl<Mapping42>(s);
760 case 48: return ShadowToMemImpl<Mapping48>(s);
762 DCHECK(0);
763 return 0;
764 #elif defined(__powerpc64__)
765 if (vmaSize == 44)
766 return ShadowToMemImpl<Mapping44>(s);
767 else
768 return ShadowToMemImpl<Mapping46>(s);
769 DCHECK(0);
770 #else
771 return ShadowToMemImpl<Mapping>(s);
772 #endif
777 // The additional page is to catch shadow stack overflow as paging fault.
778 // Windows wants 64K alignment for mmaps.
779 const uptr kTotalTraceSize = (kTraceSize * sizeof(Event) + sizeof(Trace)
780 + (64 << 10) + (64 << 10) - 1) & ~((64 << 10) - 1);
782 template<typename Mapping>
783 uptr GetThreadTraceImpl(int tid) {
784 uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize;
785 DCHECK_LT(p, Mapping::kTraceMemEnd);
786 return p;
789 ALWAYS_INLINE
790 uptr GetThreadTrace(int tid) {
791 #if defined(__aarch64__) && !defined(__APPLE__)
792 switch (vmaSize) {
793 case 39: return GetThreadTraceImpl<Mapping39>(tid);
794 case 42: return GetThreadTraceImpl<Mapping42>(tid);
795 case 48: return GetThreadTraceImpl<Mapping48>(tid);
797 DCHECK(0);
798 return 0;
799 #elif defined(__powerpc64__)
800 if (vmaSize == 44)
801 return GetThreadTraceImpl<Mapping44>(tid);
802 else
803 return GetThreadTraceImpl<Mapping46>(tid);
804 DCHECK(0);
805 #else
806 return GetThreadTraceImpl<Mapping>(tid);
807 #endif
811 template<typename Mapping>
812 uptr GetThreadTraceHeaderImpl(int tid) {
813 uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize
814 + kTraceSize * sizeof(Event);
815 DCHECK_LT(p, Mapping::kTraceMemEnd);
816 return p;
819 ALWAYS_INLINE
820 uptr GetThreadTraceHeader(int tid) {
821 #if defined(__aarch64__) && !defined(__APPLE__)
822 switch (vmaSize) {
823 case 39: return GetThreadTraceHeaderImpl<Mapping39>(tid);
824 case 42: return GetThreadTraceHeaderImpl<Mapping42>(tid);
825 case 48: return GetThreadTraceHeaderImpl<Mapping48>(tid);
827 DCHECK(0);
828 return 0;
829 #elif defined(__powerpc64__)
830 if (vmaSize == 44)
831 return GetThreadTraceHeaderImpl<Mapping44>(tid);
832 else
833 return GetThreadTraceHeaderImpl<Mapping46>(tid);
834 DCHECK(0);
835 #else
836 return GetThreadTraceHeaderImpl<Mapping>(tid);
837 #endif
840 void InitializePlatform();
841 void InitializePlatformEarly();
842 void CheckAndProtect();
843 void InitializeShadowMemoryPlatform();
844 void FlushShadowMemory();
845 void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive);
846 int ExtractResolvFDs(void *state, int *fds, int nfd);
847 int ExtractRecvmsgFDs(void *msg, int *fds, int nfd);
848 void ImitateTlsWrite(ThreadState *thr, uptr tls_addr, uptr tls_size);
850 int call_pthread_cancel_with_cleanup(int(*fn)(void *c, void *m,
851 void *abstime), void *c, void *m, void *abstime,
852 void(*cleanup)(void *arg), void *arg);
854 void DestroyThreadState();
856 } // namespace __tsan
858 #endif // TSAN_PLATFORM_H