//===-- tsan_platform.h -----------------------------------------*- C++ -*-===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// Platform-specific code.
//===----------------------------------------------------------------------===//

#ifndef TSAN_PLATFORM_H
#define TSAN_PLATFORM_H

#if !defined(__LP64__) && !defined(_WIN64)
# error "Only 64-bit is supported"
#endif

#include "tsan_defs.h"
#include "tsan_trace.h"

namespace __tsan {

#if !SANITIZER_GO

#if defined(__x86_64__)
/*
C/C++ on linux/x86_64 and freebsd/x86_64
0000 0000 1000 - 0080 0000 0000: main binary and/or MAP_32BIT mappings (512GB)
0080 0000 0000 - 0100 0000 0000: -
0100 0000 0000 - 2000 0000 0000: shadow
2000 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 5500 0000 0000: -
5500 0000 0000 - 5680 0000 0000: pie binaries without ASLR or on 4.1+ kernels
5680 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 7b00 0000 0000: -
7b00 0000 0000 - 7c00 0000 0000: heap
7c00 0000 0000 - 7e80 0000 0000: -
7e80 0000 0000 - 8000 0000 0000: modules and main thread stack
*/
struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x340000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x200000000000ull;
  static const uptr kHeapMemBeg    = 0x7b0000000000ull;
  static const uptr kHeapMemEnd    = 0x7c0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x008000000000ull;
  static const uptr kMidAppMemBeg  = 0x550000000000ull;
  static const uptr kMidAppMemEnd  = 0x568000000000ull;
  static const uptr kHiAppMemBeg   = 0x7e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x800000000000ull;
  static const uptr kAppMemMsk     = 0x780000000000ull;
  static const uptr kAppMemXor     = 0x040000000000ull;
  static const uptr kVdsoBeg       = 0xf000000000000000ull;
};
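
// kAppMemMsk/kAppMemXor fold the several app ranges above (lo/mid/hi/heap)
// into the single shadow range: MemToShadowImpl() below strips the high
// "range selector" bits with kAppMemMsk and relocates the result with
// kAppMemXor before scaling it into [kShadowBeg, kShadowEnd).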

#define TSAN_MID_APP_RANGE 1
#elif defined(__mips64)
/*
C/C++ on linux/mips64
0100 0000 00 - 0200 0000 00: main binary
0200 0000 00 - 2400 0000 00: -
2400 0000 00 - 4000 0000 00: shadow
4000 0000 00 - 5000 0000 00: metainfo (memory blocks and sync objects)
5000 0000 00 - aa00 0000 00: -
aa00 0000 00 - ab00 0000 00: main binary (PIE)
ab00 0000 00 - b000 0000 00: -
b000 0000 00 - b200 0000 00: traces
b200 0000 00 - fe00 0000 00: -
fe00 0000 00 - ff00 0000 00: heap
ff00 0000 00 - ff80 0000 00: -
ff80 0000 00 - ffff ffff ff: modules and main thread stack
*/
struct Mapping {
  static const uptr kMetaShadowBeg = 0x4000000000ull;
  static const uptr kMetaShadowEnd = 0x5000000000ull;
  static const uptr kTraceMemBeg   = 0xb000000000ull;
  static const uptr kTraceMemEnd   = 0xb200000000ull;
  static const uptr kShadowBeg     = 0x2400000000ull;
  static const uptr kShadowEnd     = 0x4000000000ull;
  static const uptr kHeapMemBeg    = 0xfe00000000ull;
  static const uptr kHeapMemEnd    = 0xff00000000ull;
  static const uptr kLoAppMemBeg   = 0x0100000000ull;
  static const uptr kLoAppMemEnd   = 0x0200000000ull;
  static const uptr kMidAppMemBeg  = 0xaa00000000ull;
  static const uptr kMidAppMemEnd  = 0xab00000000ull;
  static const uptr kHiAppMemBeg   = 0xff80000000ull;
  static const uptr kHiAppMemEnd   = 0xffffffffffull;
  static const uptr kAppMemMsk     = 0xf800000000ull;
  static const uptr kAppMemXor     = 0x0800000000ull;
  static const uptr kVdsoBeg       = 0xfffff00000ull;
};

#define TSAN_MID_APP_RANGE 1
#elif defined(__aarch64__)
// AArch64 supports multiple VMA sizes, which leads to multiple address
// transformation functions.  To support these VMA transformations and
// mappings, the TSan runtime for AArch64 reads an external variable (vmaSize)
// to select which mapping to use.  Although slower, this lets the same
// instrumented binary run on kernels with different VMA configurations.
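
// vmaSize is defined by the runtime (see InitializePlatformEarly(), declared
// at the bottom of this header) from the size of the virtual address space
// detected at startup; the address-translation helpers below consult it to
// pick one of Mapping39/Mapping42/Mapping48.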

/*
C/C++ on linux/aarch64 (39-bit VMA)
0000 0010 00 - 0100 0000 00: main binary
0100 0000 00 - 0800 0000 00: -
0800 0000 00 - 2000 0000 00: shadow memory
2000 0000 00 - 3100 0000 00: -
3100 0000 00 - 3400 0000 00: metainfo
3400 0000 00 - 5500 0000 00: -
5500 0000 00 - 5600 0000 00: main binary (PIE)
5600 0000 00 - 6000 0000 00: -
6000 0000 00 - 6200 0000 00: traces
6200 0000 00 - 7c00 0000 00: -
7c00 0000 00 - 7d00 0000 00: heap
7d00 0000 00 - 7fff ffff ff: modules and main thread stack
*/
struct Mapping39 {
  static const uptr kLoAppMemBeg   = 0x0000001000ull;
  static const uptr kLoAppMemEnd   = 0x0100000000ull;
  static const uptr kShadowBeg     = 0x0800000000ull;
  static const uptr kShadowEnd     = 0x2000000000ull;
  static const uptr kMetaShadowBeg = 0x3100000000ull;
  static const uptr kMetaShadowEnd = 0x3400000000ull;
  static const uptr kMidAppMemBeg  = 0x5500000000ull;
  static const uptr kMidAppMemEnd  = 0x5600000000ull;
  static const uptr kTraceMemBeg   = 0x6000000000ull;
  static const uptr kTraceMemEnd   = 0x6200000000ull;
  static const uptr kHeapMemBeg    = 0x7c00000000ull;
  static const uptr kHeapMemEnd    = 0x7d00000000ull;
  static const uptr kHiAppMemBeg   = 0x7e00000000ull;
  static const uptr kHiAppMemEnd   = 0x7fffffffffull;
  static const uptr kAppMemMsk     = 0x7800000000ull;
  static const uptr kAppMemXor     = 0x0200000000ull;
  static const uptr kVdsoBeg       = 0x7f00000000ull;
};

/*
C/C++ on linux/aarch64 (42-bit VMA)
00000 0010 00 - 01000 0000 00: main binary
01000 0000 00 - 10000 0000 00: -
10000 0000 00 - 20000 0000 00: shadow memory
20000 0000 00 - 26000 0000 00: -
26000 0000 00 - 28000 0000 00: metainfo
28000 0000 00 - 2aa00 0000 00: -
2aa00 0000 00 - 2ab00 0000 00: main binary (PIE)
2ab00 0000 00 - 36200 0000 00: -
36200 0000 00 - 36400 0000 00: traces
36400 0000 00 - 3e000 0000 00: -
3e000 0000 00 - 3f000 0000 00: heap
3f000 0000 00 - 3ffff ffff ff: modules and main thread stack
*/
struct Mapping42 {
  static const uptr kLoAppMemBeg   = 0x00000001000ull;
  static const uptr kLoAppMemEnd   = 0x01000000000ull;
  static const uptr kShadowBeg     = 0x10000000000ull;
  static const uptr kShadowEnd     = 0x20000000000ull;
  static const uptr kMetaShadowBeg = 0x26000000000ull;
  static const uptr kMetaShadowEnd = 0x28000000000ull;
  static const uptr kMidAppMemBeg  = 0x2aa00000000ull;
  static const uptr kMidAppMemEnd  = 0x2ab00000000ull;
  static const uptr kTraceMemBeg   = 0x36200000000ull;
  static const uptr kTraceMemEnd   = 0x36400000000ull;
  static const uptr kHeapMemBeg    = 0x3e000000000ull;
  static const uptr kHeapMemEnd    = 0x3f000000000ull;
  static const uptr kHiAppMemBeg   = 0x3f000000000ull;
  static const uptr kHiAppMemEnd   = 0x3ffffffffffull;
  static const uptr kAppMemMsk     = 0x3c000000000ull;
  static const uptr kAppMemXor     = 0x04000000000ull;
  static const uptr kVdsoBeg       = 0x37f00000000ull;
};

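/*
C/C++ on linux/aarch64 (48-bit VMA); same scheme as the 39- and 42-bit
mappings above, with the exact region boundaries given by the constants below.
*/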
struct Mapping48 {
  static const uptr kLoAppMemBeg   = 0x0000000001000ull;
  static const uptr kLoAppMemEnd   = 0x0000200000000ull;
  static const uptr kShadowBeg     = 0x0002000000000ull;
  static const uptr kShadowEnd     = 0x0004000000000ull;
  static const uptr kMetaShadowBeg = 0x0005000000000ull;
  static const uptr kMetaShadowEnd = 0x0006000000000ull;
  static const uptr kMidAppMemBeg  = 0x0aaaa00000000ull;
  static const uptr kMidAppMemEnd  = 0x0aaaf00000000ull;
  static const uptr kTraceMemBeg   = 0x0f06000000000ull;
  static const uptr kTraceMemEnd   = 0x0f06200000000ull;
  static const uptr kHeapMemBeg    = 0x0ffff00000000ull;
  static const uptr kHeapMemEnd    = 0x0ffff00000000ull;
  static const uptr kHiAppMemBeg   = 0x0ffff00000000ull;
  static const uptr kHiAppMemEnd   = 0x1000000000000ull;
  static const uptr kAppMemMsk     = 0x0fff800000000ull;
  static const uptr kAppMemXor     = 0x0000800000000ull;
  static const uptr kVdsoBeg       = 0xffff000000000ull;
};

// Indicates that the memory regions are selected at run time (based on vmaSize).
#define TSAN_RUNTIME_VMA 1
// Indicates that the mapping defines a mid-range app memory segment.
#define TSAN_MID_APP_RANGE 1
#elif defined(__powerpc64__)
// PPC64 supports multiple VMA sizes, which leads to multiple address
// transformation functions.  To support these VMA transformations and
// mappings, the TSan runtime for PPC64 reads an external variable (vmaSize)
// to select which mapping to use.  Although slower, this lets the same
// instrumented binary run on kernels with different VMA configurations.
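
// Note that, unlike the aarch64 mappings, the powerpc64 mappings below define
// no mid-range app segment, so TSAN_MID_APP_RANGE is not set for this target.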

/*
C/C++ on linux/powerpc64 (44-bit VMA)
0000 0000 0100 - 0001 0000 0000: main binary
0001 0000 0000 - 0b00 0000 0000: shadow
0b00 0000 0000 - 0d00 0000 0000: metainfo (memory blocks and sync objects)
0d00 0000 0000 - 0f00 0000 0000: traces
0f00 0000 0000 - 0f50 0000 0000: heap
0f50 0000 0000 - 0f60 0000 0000: -
0f60 0000 0000 - 1000 0000 0000: modules and main thread stack
*/
struct Mapping44 {
  static const uptr kMetaShadowBeg = 0x0b0000000000ull;
  static const uptr kMetaShadowEnd = 0x0d0000000000ull;
  static const uptr kTraceMemBeg   = 0x0d0000000000ull;
  static const uptr kTraceMemEnd   = 0x0f0000000000ull;
  static const uptr kShadowBeg     = 0x000100000000ull;
  static const uptr kShadowEnd     = 0x0b0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000000100ull;
  static const uptr kLoAppMemEnd   = 0x000100000000ull;
  static const uptr kHeapMemBeg    = 0x0f0000000000ull;
  static const uptr kHeapMemEnd    = 0x0f5000000000ull;
  static const uptr kHiAppMemBeg   = 0x0f6000000000ull;
  static const uptr kHiAppMemEnd   = 0x100000000000ull; // 44 bits
  static const uptr kAppMemMsk     = 0x0f0000000000ull;
  static const uptr kAppMemXor     = 0x002100000000ull;
  static const uptr kVdsoBeg       = 0x3c0000000000000ull;
};

/*
C/C++ on linux/powerpc64 (46-bit VMA)
0000 0000 1000 - 0100 0000 0000: main binary
0100 0000 0000 - 1000 0000 0000: shadow
1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
2000 0000 0000 - 2200 0000 0000: traces
2200 0000 0000 - 3d00 0000 0000: -
3d00 0000 0000 - 3e00 0000 0000: heap
3e00 0000 0000 - 3e80 0000 0000: -
3e80 0000 0000 - 4000 0000 0000: modules and main thread stack
*/
struct Mapping46 {
  static const uptr kMetaShadowBeg = 0x100000000000ull;
  static const uptr kMetaShadowEnd = 0x200000000000ull;
  static const uptr kTraceMemBeg   = 0x200000000000ull;
  static const uptr kTraceMemEnd   = 0x220000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x100000000000ull;
  static const uptr kHeapMemBeg    = 0x3d0000000000ull;
  static const uptr kHeapMemEnd    = 0x3e0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x010000000000ull;
  static const uptr kHiAppMemBeg   = 0x3e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x400000000000ull; // 46 bits
  static const uptr kAppMemMsk     = 0x3c0000000000ull;
  static const uptr kAppMemXor     = 0x020000000000ull;
  static const uptr kVdsoBeg       = 0x7800000000000000ull;
};

// Indicates that the memory regions are selected at run time (based on vmaSize).
#define TSAN_RUNTIME_VMA 1
#endif

#elif SANITIZER_GO && !SANITIZER_WINDOWS

/* Go on linux, darwin and freebsd
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2380 0000 0000: shadow
2380 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 8000 0000 0000: -
*/

struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x238000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};
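
// Go uses a single contiguous app range, so the shadow mapping is purely
// linear (no kAppMemMsk/kAppMemXor here).  For example, assuming the usual
// kShadowCell == 8 and kShadowCnt == 4 from tsan_defs.h, the heap start
// 0x00c000000000 maps to (0x00c000000000 & ~7) * 4 | kShadowBeg ==
// 0x230000000000, inside [kShadowBeg, kShadowEnd); see MemToShadowImpl().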

#elif SANITIZER_GO && SANITIZER_WINDOWS

/* Go on windows
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 0100 0000 0000: -
0100 0000 0000 - 0500 0000 0000: shadow
0500 0000 0000 - 0560 0000 0000: -
0560 0000 0000 - 0760 0000 0000: traces
0760 0000 0000 - 07d0 0000 0000: metainfo (memory blocks and sync objects)
07d0 0000 0000 - 8000 0000 0000: -
*/

struct Mapping {
  static const uptr kMetaShadowBeg = 0x076000000000ull;
  static const uptr kMetaShadowEnd = 0x07d000000000ull;
  static const uptr kTraceMemBeg   = 0x056000000000ull;
  static const uptr kTraceMemEnd   = 0x076000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x050000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};
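
// With kShadowCnt == 4 (tsan_defs.h), the scaled app address (x & ~7) * 4 can
// reach 0x038000000000 and thus already carry the kShadowBeg bit
// (0x010000000000), so the Windows branch of MemToShadowImpl() adds kShadowBeg
// instead of or-ing it in, keeping the mapping reversible in ShadowToMemImpl().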

#else
# error "Unknown platform"
#endif


#ifdef TSAN_RUNTIME_VMA
extern uptr vmaSize;
#endif


enum MappingType {
  MAPPING_LO_APP_BEG,
  MAPPING_LO_APP_END,
  MAPPING_HI_APP_BEG,
  MAPPING_HI_APP_END,
#ifdef TSAN_MID_APP_RANGE
  MAPPING_MID_APP_BEG,
  MAPPING_MID_APP_END,
#endif
  MAPPING_HEAP_BEG,
  MAPPING_HEAP_END,
  MAPPING_APP_BEG,
  MAPPING_APP_END,
  MAPPING_SHADOW_BEG,
  MAPPING_SHADOW_END,
  MAPPING_META_SHADOW_BEG,
  MAPPING_META_SHADOW_END,
  MAPPING_TRACE_BEG,
  MAPPING_TRACE_END,
  MAPPING_VDSO_BEG,
};

template<typename Mapping, int Type>
uptr MappingImpl(void) {
  switch (Type) {
#if !SANITIZER_GO
    case MAPPING_LO_APP_BEG: return Mapping::kLoAppMemBeg;
    case MAPPING_LO_APP_END: return Mapping::kLoAppMemEnd;
# ifdef TSAN_MID_APP_RANGE
    case MAPPING_MID_APP_BEG: return Mapping::kMidAppMemBeg;
    case MAPPING_MID_APP_END: return Mapping::kMidAppMemEnd;
# endif
    case MAPPING_HI_APP_BEG: return Mapping::kHiAppMemBeg;
    case MAPPING_HI_APP_END: return Mapping::kHiAppMemEnd;
    case MAPPING_HEAP_BEG: return Mapping::kHeapMemBeg;
    case MAPPING_HEAP_END: return Mapping::kHeapMemEnd;
    case MAPPING_VDSO_BEG: return Mapping::kVdsoBeg;
#else
    case MAPPING_APP_BEG: return Mapping::kAppMemBeg;
    case MAPPING_APP_END: return Mapping::kAppMemEnd;
#endif
    case MAPPING_SHADOW_BEG: return Mapping::kShadowBeg;
    case MAPPING_SHADOW_END: return Mapping::kShadowEnd;
    case MAPPING_META_SHADOW_BEG: return Mapping::kMetaShadowBeg;
    case MAPPING_META_SHADOW_END: return Mapping::kMetaShadowEnd;
    case MAPPING_TRACE_BEG: return Mapping::kTraceMemBeg;
    case MAPPING_TRACE_END: return Mapping::kTraceMemEnd;
  }
}
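
// Since Type is a template parameter, the switch above is resolved at compile
// time, so each accessor below reduces to a compile-time constant (or, on
// aarch64/powerpc64, to a small run-time branch on vmaSize).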

template<int Type>
uptr MappingArchImpl(void) {
#ifdef __aarch64__
  switch (vmaSize) {
    case 39: return MappingImpl<Mapping39, Type>();
    case 42: return MappingImpl<Mapping42, Type>();
    case 48: return MappingImpl<Mapping48, Type>();
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  if (vmaSize == 44)
    return MappingImpl<Mapping44, Type>();
  else
    return MappingImpl<Mapping46, Type>();
  DCHECK(0);
#else
  return MappingImpl<Mapping, Type>();
#endif
}

#if !SANITIZER_GO
ALWAYS_INLINE
uptr LoAppMemBeg(void) {
  return MappingArchImpl<MAPPING_LO_APP_BEG>();
}
ALWAYS_INLINE
uptr LoAppMemEnd(void) {
  return MappingArchImpl<MAPPING_LO_APP_END>();
}

#ifdef TSAN_MID_APP_RANGE
ALWAYS_INLINE
uptr MidAppMemBeg(void) {
  return MappingArchImpl<MAPPING_MID_APP_BEG>();
}
ALWAYS_INLINE
uptr MidAppMemEnd(void) {
  return MappingArchImpl<MAPPING_MID_APP_END>();
}
#endif

ALWAYS_INLINE
uptr HeapMemBeg(void) {
  return MappingArchImpl<MAPPING_HEAP_BEG>();
}
ALWAYS_INLINE
uptr HeapMemEnd(void) {
  return MappingArchImpl<MAPPING_HEAP_END>();
}

ALWAYS_INLINE
uptr HiAppMemBeg(void) {
  return MappingArchImpl<MAPPING_HI_APP_BEG>();
}
ALWAYS_INLINE
uptr HiAppMemEnd(void) {
  return MappingArchImpl<MAPPING_HI_APP_END>();
}

ALWAYS_INLINE
uptr VdsoBeg(void) {
  return MappingArchImpl<MAPPING_VDSO_BEG>();
}

#else

ALWAYS_INLINE
uptr AppMemBeg(void) {
  return MappingArchImpl<MAPPING_APP_BEG>();
}
ALWAYS_INLINE
uptr AppMemEnd(void) {
  return MappingArchImpl<MAPPING_APP_END>();
}

#endif

static inline
bool GetUserRegion(int i, uptr *start, uptr *end) {
  switch (i) {
  default:
    return false;
#if !SANITIZER_GO
  case 0:
    *start = LoAppMemBeg();
    *end = LoAppMemEnd();
    return true;
  case 1:
    *start = HiAppMemBeg();
    *end = HiAppMemEnd();
    return true;
  case 2:
    *start = HeapMemBeg();
    *end = HeapMemEnd();
    return true;
# ifdef TSAN_MID_APP_RANGE
  case 3:
    *start = MidAppMemBeg();
    *end = MidAppMemEnd();
    return true;
# endif
#else
  case 0:
    *start = AppMemBeg();
    *end = AppMemEnd();
    return true;
#endif
  }
}
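
// A typical caller enumerates the user regions until the function returns
// false, e.g. (hypothetical sketch):
//
//   uptr beg, end;
//   for (int i = 0; GetUserRegion(i, &beg, &end); i++) {
//     // process app range [beg, end) ...
//   }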

ALWAYS_INLINE
uptr ShadowBeg(void) {
  return MappingArchImpl<MAPPING_SHADOW_BEG>();
}
ALWAYS_INLINE
uptr ShadowEnd(void) {
  return MappingArchImpl<MAPPING_SHADOW_END>();
}

ALWAYS_INLINE
uptr MetaShadowBeg(void) {
  return MappingArchImpl<MAPPING_META_SHADOW_BEG>();
}
ALWAYS_INLINE
uptr MetaShadowEnd(void) {
  return MappingArchImpl<MAPPING_META_SHADOW_END>();
}

ALWAYS_INLINE
uptr TraceMemBeg(void) {
  return MappingArchImpl<MAPPING_TRACE_BEG>();
}
ALWAYS_INLINE
uptr TraceMemEnd(void) {
  return MappingArchImpl<MAPPING_TRACE_END>();
}


template<typename Mapping>
bool IsAppMemImpl(uptr mem) {
#if !SANITIZER_GO
  return (mem >= Mapping::kHeapMemBeg && mem < Mapping::kHeapMemEnd) ||
# ifdef TSAN_MID_APP_RANGE
         (mem >= Mapping::kMidAppMemBeg && mem < Mapping::kMidAppMemEnd) ||
# endif
         (mem >= Mapping::kLoAppMemBeg && mem < Mapping::kLoAppMemEnd) ||
         (mem >= Mapping::kHiAppMemBeg && mem < Mapping::kHiAppMemEnd);
#else
  return mem >= Mapping::kAppMemBeg && mem < Mapping::kAppMemEnd;
#endif
}

ALWAYS_INLINE
bool IsAppMem(uptr mem) {
#ifdef __aarch64__
  switch (vmaSize) {
    case 39: return IsAppMemImpl<Mapping39>(mem);
    case 42: return IsAppMemImpl<Mapping42>(mem);
    case 48: return IsAppMemImpl<Mapping48>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  if (vmaSize == 44)
    return IsAppMemImpl<Mapping44>(mem);
  else
    return IsAppMemImpl<Mapping46>(mem);
  DCHECK(0);
#else
  return IsAppMemImpl<Mapping>(mem);
#endif
}


template<typename Mapping>
bool IsShadowMemImpl(uptr mem) {
  return mem >= Mapping::kShadowBeg && mem <= Mapping::kShadowEnd;
}

ALWAYS_INLINE
bool IsShadowMem(uptr mem) {
#ifdef __aarch64__
  switch (vmaSize) {
    case 39: return IsShadowMemImpl<Mapping39>(mem);
    case 42: return IsShadowMemImpl<Mapping42>(mem);
    case 48: return IsShadowMemImpl<Mapping48>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  if (vmaSize == 44)
    return IsShadowMemImpl<Mapping44>(mem);
  else
    return IsShadowMemImpl<Mapping46>(mem);
  DCHECK(0);
#else
  return IsShadowMemImpl<Mapping>(mem);
#endif
}


template<typename Mapping>
bool IsMetaMemImpl(uptr mem) {
  return mem >= Mapping::kMetaShadowBeg && mem <= Mapping::kMetaShadowEnd;
}

ALWAYS_INLINE
bool IsMetaMem(uptr mem) {
#ifdef __aarch64__
  switch (vmaSize) {
    case 39: return IsMetaMemImpl<Mapping39>(mem);
    case 42: return IsMetaMemImpl<Mapping42>(mem);
    case 48: return IsMetaMemImpl<Mapping48>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  if (vmaSize == 44)
    return IsMetaMemImpl<Mapping44>(mem);
  else
    return IsMetaMemImpl<Mapping46>(mem);
  DCHECK(0);
#else
  return IsMetaMemImpl<Mapping>(mem);
#endif
}


template<typename Mapping>
uptr MemToShadowImpl(uptr x) {
  DCHECK(IsAppMem(x));
#if !SANITIZER_GO
  return (((x) & ~(Mapping::kAppMemMsk | (kShadowCell - 1)))
      ^ Mapping::kAppMemXor) * kShadowCnt;
#else
# ifndef SANITIZER_WINDOWS
  return ((x & ~(kShadowCell - 1)) * kShadowCnt) | Mapping::kShadowBeg;
# else
  return ((x & ~(kShadowCell - 1)) * kShadowCnt) + Mapping::kShadowBeg;
# endif
#endif
}
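
// Worked example for the C/C++ x86_64 mapping, assuming the usual
// kShadowCell == 8 and kShadowCnt == 4 from tsan_defs.h: for a heap address
// x == 0x7b0000000000,
//   x & ~(kAppMemMsk | 7)  == 0x030000000000
//   ... ^ kAppMemXor       == 0x070000000000
//   ... * kShadowCnt       == 0x1c0000000000
// which lies inside [kShadowBeg, kShadowEnd).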

ALWAYS_INLINE
uptr MemToShadow(uptr x) {
#ifdef __aarch64__
  switch (vmaSize) {
    case 39: return MemToShadowImpl<Mapping39>(x);
    case 42: return MemToShadowImpl<Mapping42>(x);
    case 48: return MemToShadowImpl<Mapping48>(x);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  if (vmaSize == 44)
    return MemToShadowImpl<Mapping44>(x);
  else
    return MemToShadowImpl<Mapping46>(x);
  DCHECK(0);
#else
  return MemToShadowImpl<Mapping>(x);
#endif
}


template<typename Mapping>
u32 *MemToMetaImpl(uptr x) {
  DCHECK(IsAppMem(x));
#if !SANITIZER_GO
  return (u32*)(((((x) & ~(Mapping::kAppMemMsk | (kMetaShadowCell - 1)))) /
      kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
#else
# ifndef SANITIZER_WINDOWS
  return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
      kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
# else
  return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
      kMetaShadowCell * kMetaShadowSize) + Mapping::kMetaShadowBeg);
# endif
#endif
}
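
// Analogous worked example for the x86_64 mapping, assuming
// kMetaShadowCell == 8 and kMetaShadowSize == 4 from tsan_defs.h: the heap
// address 0x7b0000000000 strips to 0x030000000000, scales by 4/8 to
// 0x018000000000 and lands at 0x318000000000, inside
// [kMetaShadowBeg, kMetaShadowEnd).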

ALWAYS_INLINE
u32 *MemToMeta(uptr x) {
#ifdef __aarch64__
  switch (vmaSize) {
    case 39: return MemToMetaImpl<Mapping39>(x);
    case 42: return MemToMetaImpl<Mapping42>(x);
    case 48: return MemToMetaImpl<Mapping48>(x);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  if (vmaSize == 44)
    return MemToMetaImpl<Mapping44>(x);
  else
    return MemToMetaImpl<Mapping46>(x);
  DCHECK(0);
#else
  return MemToMetaImpl<Mapping>(x);
#endif
}


template<typename Mapping>
uptr ShadowToMemImpl(uptr s) {
  DCHECK(IsShadowMem(s));
#if !SANITIZER_GO
  // The shadow mapping is non-linear and we've lost some bits, so we don't have
  // an easy way to restore the original app address. But the mapping is a
  // bijection, so we try to restore the address as belonging to low/mid/high
  // range consecutively and see if shadow->app->shadow mapping gives us the
  // same address.
  uptr p = (s / kShadowCnt) ^ Mapping::kAppMemXor;
  if (p >= Mapping::kLoAppMemBeg && p < Mapping::kLoAppMemEnd &&
      MemToShadow(p) == s)
    return p;
# ifdef TSAN_MID_APP_RANGE
  p = ((s / kShadowCnt) ^ Mapping::kAppMemXor) +
      (Mapping::kMidAppMemBeg & Mapping::kAppMemMsk);
  if (p >= Mapping::kMidAppMemBeg && p < Mapping::kMidAppMemEnd &&
      MemToShadow(p) == s)
    return p;
# endif
  return ((s / kShadowCnt) ^ Mapping::kAppMemXor) | Mapping::kAppMemMsk;
#else  // #if !SANITIZER_GO
# ifndef SANITIZER_WINDOWS
  return (s & ~Mapping::kShadowBeg) / kShadowCnt;
# else
  return (s - Mapping::kShadowBeg) / kShadowCnt;
# endif // SANITIZER_WINDOWS
#endif
}
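
// Continuing the x86_64 example from MemToShadowImpl(): for
// s == 0x1c0000000000, (s / kShadowCnt) ^ kAppMemXor == 0x030000000000,
// which is not a valid lo-app address, and the mid-range candidate
// 0x530000000000 is not a valid mid-app address either, so the final or with
// kAppMemMsk restores the original heap address 0x7b0000000000.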

ALWAYS_INLINE
uptr ShadowToMem(uptr s) {
#ifdef __aarch64__
  switch (vmaSize) {
    case 39: return ShadowToMemImpl<Mapping39>(s);
    case 42: return ShadowToMemImpl<Mapping42>(s);
    case 48: return ShadowToMemImpl<Mapping48>(s);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  if (vmaSize == 44)
    return ShadowToMemImpl<Mapping44>(s);
  else
    return ShadowToMemImpl<Mapping46>(s);
  DCHECK(0);
#else
  return ShadowToMemImpl<Mapping>(s);
#endif
}



// The additional 64K is to catch shadow stack overflow as a paging fault.
// Windows wants 64K alignment for mmaps.
const uptr kTotalTraceSize = (kTraceSize * sizeof(Event) + sizeof(Trace)
    + (64 << 10) + (64 << 10) - 1) & ~((64 << 10) - 1);
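// Equivalently: kTotalTraceSize ==
//   RoundUp(kTraceSize * sizeof(Event) + sizeof(Trace) + 64K, 64K),
// i.e. one thread's event buffer plus its Trace header plus a 64K guard
// region, rounded up to the 64K granularity required on Windows.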

template<typename Mapping>
uptr GetThreadTraceImpl(int tid) {
  uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize;
  DCHECK_LT(p, Mapping::kTraceMemEnd);
  return p;
}

ALWAYS_INLINE
uptr GetThreadTrace(int tid) {
#ifdef __aarch64__
  switch (vmaSize) {
    case 39: return GetThreadTraceImpl<Mapping39>(tid);
    case 42: return GetThreadTraceImpl<Mapping42>(tid);
    case 48: return GetThreadTraceImpl<Mapping48>(tid);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  if (vmaSize == 44)
    return GetThreadTraceImpl<Mapping44>(tid);
  else
    return GetThreadTraceImpl<Mapping46>(tid);
  DCHECK(0);
#else
  return GetThreadTraceImpl<Mapping>(tid);
#endif
}


template<typename Mapping>
uptr GetThreadTraceHeaderImpl(int tid) {
  uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize
      + kTraceSize * sizeof(Event);
  DCHECK_LT(p, Mapping::kTraceMemEnd);
  return p;
}

ALWAYS_INLINE
uptr GetThreadTraceHeader(int tid) {
#ifdef __aarch64__
  switch (vmaSize) {
    case 39: return GetThreadTraceHeaderImpl<Mapping39>(tid);
    case 42: return GetThreadTraceHeaderImpl<Mapping42>(tid);
    case 48: return GetThreadTraceHeaderImpl<Mapping48>(tid);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  if (vmaSize == 44)
    return GetThreadTraceHeaderImpl<Mapping44>(tid);
  else
    return GetThreadTraceHeaderImpl<Mapping46>(tid);
  DCHECK(0);
#else
  return GetThreadTraceHeaderImpl<Mapping>(tid);
#endif
}

void InitializePlatform();
void InitializePlatformEarly();
void CheckAndProtect();
void InitializeShadowMemoryPlatform();
void FlushShadowMemory();
void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive);
int ExtractResolvFDs(void *state, int *fds, int nfd);
int ExtractRecvmsgFDs(void *msg, int *fds, int nfd);

int call_pthread_cancel_with_cleanup(int(*fn)(void *c, void *m,
    void *abstime), void *c, void *m, void *abstime,
    void(*cleanup)(void *arg), void *arg);

void DestroyThreadState();

}  // namespace __tsan

#endif  // TSAN_PLATFORM_H