//===-- tsan_platform.h -----------------------------------------*- C++ -*-===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// Platform-specific code.
//===----------------------------------------------------------------------===//

#ifndef TSAN_PLATFORM_H
#define TSAN_PLATFORM_H

#if !defined(__LP64__) && !defined(_WIN64)
# error "Only 64-bit is supported"
#endif

#include "tsan_defs.h"
#include "tsan_trace.h"

namespace __tsan {

#if !SANITIZER_GO

#if defined(__x86_64__)
/*
C/C++ on linux/x86_64 and freebsd/x86_64
0000 0000 1000 - 0080 0000 0000: main binary and/or MAP_32BIT mappings (512GB)
0080 0000 0000 - 0100 0000 0000: -
0100 0000 0000 - 2000 0000 0000: shadow
2000 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 5500 0000 0000: -
5500 0000 0000 - 5680 0000 0000: pie binaries without ASLR or on 4.1+ kernels
5680 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 7b00 0000 0000: -
7b00 0000 0000 - 7c00 0000 0000: heap
7c00 0000 0000 - 7e80 0000 0000: -
7e80 0000 0000 - 8000 0000 0000: modules and main thread stack

C/C++ on netbsd/amd64 can reuse the same mapping:
 * The address space starts from 0x1000 (option with 0x0) and ends with
   0x7f7ffffff000.
 * LoAppMem-kHeapMemEnd can be reused as it is.
 * No VDSO support.
 * No MidAppMem region.
 * No additional HeapMem region.
 * HiAppMem contains the stack, loader, shared libraries and heap.
 * Stack on NetBSD/amd64 has prereserved 128MB.
 * Heap grows downwards (top-down).
 * ASLR must be disabled per-process or globally.

*/
// Compile-time address-space layout for this platform. kAppMemMsk/kAppMemXor
// define the app<->shadow address transformation used by MemToShadowImpl()
// and ShadowToMemImpl() below.
struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x340000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x200000000000ull;
  static const uptr kHeapMemBeg    = 0x7b0000000000ull;
  static const uptr kHeapMemEnd    = 0x7c0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x008000000000ull;
  static const uptr kMidAppMemBeg  = 0x550000000000ull;
  static const uptr kMidAppMemEnd  = 0x568000000000ull;
  static const uptr kHiAppMemBeg   = 0x7e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x800000000000ull;
  static const uptr kAppMemMsk     = 0x780000000000ull;
  static const uptr kAppMemXor     = 0x040000000000ull;
  static const uptr kVdsoBeg       = 0xf000000000000000ull;
};

#define TSAN_MID_APP_RANGE 1
#elif defined(__mips64)
/*
C/C++ on linux/mips64 (40-bit VMA)
0000 0000 00 - 0100 0000 00: -                                           (4 GB)
0100 0000 00 - 0200 0000 00: main binary                                 (4 GB)
0200 0000 00 - 2000 0000 00: -                                         (120 GB)
2000 0000 00 - 4000 0000 00: shadow                                    (128 GB)
4000 0000 00 - 5000 0000 00: metainfo (memory blocks and sync objects)  (64 GB)
5000 0000 00 - aa00 0000 00: -                                         (360 GB)
aa00 0000 00 - ab00 0000 00: main binary (PIE)                           (4 GB)
ab00 0000 00 - b000 0000 00: -                                          (20 GB)
b000 0000 00 - b200 0000 00: traces                                      (8 GB)
b200 0000 00 - fe00 0000 00: -                                         (304 GB)
fe00 0000 00 - ff00 0000 00: heap                                        (4 GB)
ff00 0000 00 - ff80 0000 00: -                                           (2 GB)
ff80 0000 00 - ffff ffff ff: modules and main thread stack              (<2 GB)
*/
struct Mapping {
  static const uptr kMetaShadowBeg = 0x4000000000ull;
  static const uptr kMetaShadowEnd = 0x5000000000ull;
  static const uptr kTraceMemBeg   = 0xb000000000ull;
  static const uptr kTraceMemEnd   = 0xb200000000ull;
  static const uptr kShadowBeg     = 0x2000000000ull;
  static const uptr kShadowEnd     = 0x4000000000ull;
  static const uptr kHeapMemBeg    = 0xfe00000000ull;
  static const uptr kHeapMemEnd    = 0xff00000000ull;
  static const uptr kLoAppMemBeg   = 0x0100000000ull;
  static const uptr kLoAppMemEnd   = 0x0200000000ull;
  static const uptr kMidAppMemBeg  = 0xaa00000000ull;
  static const uptr kMidAppMemEnd  = 0xab00000000ull;
  static const uptr kHiAppMemBeg   = 0xff80000000ull;
  static const uptr kHiAppMemEnd   = 0xffffffffffull;
  static const uptr kAppMemMsk     = 0xf800000000ull;
  static const uptr kAppMemXor     = 0x0800000000ull;
  static const uptr kVdsoBeg       = 0xfffff00000ull;
};

#define TSAN_MID_APP_RANGE 1
#elif defined(__aarch64__) && defined(__APPLE__)
/*
C/C++ on Darwin/iOS/ARM64 (36-bit VMA, 64 GB VM)
0000 0000 00 - 0100 0000 00: -                                    (4 GB)
0100 0000 00 - 0200 0000 00: main binary, modules, thread stacks  (4 GB)
0200 0000 00 - 0300 0000 00: heap                                 (4 GB)
0300 0000 00 - 0400 0000 00: -                                    (4 GB)
0400 0000 00 - 0c00 0000 00: shadow memory                       (32 GB)
0c00 0000 00 - 0d00 0000 00: -                                    (4 GB)
0d00 0000 00 - 0e00 0000 00: metainfo                             (4 GB)
0e00 0000 00 - 0f00 0000 00: -                                    (4 GB)
0f00 0000 00 - 0fc0 0000 00: traces                               (3 GB)
0fc0 0000 00 - 1000 0000 00: -
*/
// Note: kHiAppMemBeg == kHiAppMemEnd, i.e. the high app range is empty on
// this platform; the app lives entirely in LoAppMem + HeapMem.
struct Mapping {
  static const uptr kLoAppMemBeg   = 0x0100000000ull;
  static const uptr kLoAppMemEnd   = 0x0200000000ull;
  static const uptr kHeapMemBeg    = 0x0200000000ull;
  static const uptr kHeapMemEnd    = 0x0300000000ull;
  static const uptr kShadowBeg     = 0x0400000000ull;
  static const uptr kShadowEnd     = 0x0c00000000ull;
  static const uptr kMetaShadowBeg = 0x0d00000000ull;
  static const uptr kMetaShadowEnd = 0x0e00000000ull;
  static const uptr kTraceMemBeg   = 0x0f00000000ull;
  static const uptr kTraceMemEnd   = 0x0fc0000000ull;
  static const uptr kHiAppMemBeg   = 0x0fc0000000ull;
  static const uptr kHiAppMemEnd   = 0x0fc0000000ull;
  static const uptr kAppMemMsk     = 0x0ull;
  static const uptr kAppMemXor     = 0x0ull;
  static const uptr kVdsoBeg       = 0x7000000000000000ull;
};

#elif defined(__aarch64__)
// AArch64 supports multiple VMA which leads to multiple address transformation
// functions.  To support these multiple VMAS transformations and mappings TSAN
// runtime for AArch64 uses an external memory read (vmaSize) to select which
// mapping to use.  Although slower, it makes the same instrumented binary run
// on multiple kernels.

/*
C/C++ on linux/aarch64 (39-bit VMA)
0000 0010 00 - 0100 0000 00: main binary
0100 0000 00 - 0800 0000 00: -
0800 0000 00 - 2000 0000 00: shadow memory
2000 0000 00 - 3100 0000 00: -
3100 0000 00 - 3400 0000 00: metainfo
3400 0000 00 - 5500 0000 00: -
5500 0000 00 - 5600 0000 00: main binary (PIE)
5600 0000 00 - 6000 0000 00: -
6000 0000 00 - 6200 0000 00: traces
6200 0000 00 - 7c00 0000 00: -
7c00 0000 00 - 7d00 0000 00: heap
7d00 0000 00 - 7fff ffff ff: modules and main thread stack
*/
struct Mapping39 {
  static const uptr kLoAppMemBeg   = 0x0000001000ull;
  static const uptr kLoAppMemEnd   = 0x0100000000ull;
  static const uptr kShadowBeg     = 0x0800000000ull;
  static const uptr kShadowEnd     = 0x2000000000ull;
  static const uptr kMetaShadowBeg = 0x3100000000ull;
  static const uptr kMetaShadowEnd = 0x3400000000ull;
  static const uptr kMidAppMemBeg  = 0x5500000000ull;
  static const uptr kMidAppMemEnd  = 0x5600000000ull;
  static const uptr kTraceMemBeg   = 0x6000000000ull;
  static const uptr kTraceMemEnd   = 0x6200000000ull;
  static const uptr kHeapMemBeg    = 0x7c00000000ull;
  static const uptr kHeapMemEnd    = 0x7d00000000ull;
  // NOTE(review): the diagram above starts "modules and main thread stack" at
  // 7d00 0000 00, but kHiAppMemBeg is 0x7e00000000 — confirm which is intended.
  static const uptr kHiAppMemBeg   = 0x7e00000000ull;
  static const uptr kHiAppMemEnd   = 0x7fffffffffull;
  static const uptr kAppMemMsk     = 0x7800000000ull;
  static const uptr kAppMemXor     = 0x0200000000ull;
  static const uptr kVdsoBeg       = 0x7f00000000ull;
};

/*
C/C++ on linux/aarch64 (42-bit VMA)
00000 0010 00 - 01000 0000 00: main binary
01000 0000 00 - 10000 0000 00: -
10000 0000 00 - 20000 0000 00: shadow memory
20000 0000 00 - 26000 0000 00: -
26000 0000 00 - 28000 0000 00: metainfo
28000 0000 00 - 2aa00 0000 00: -
2aa00 0000 00 - 2ab00 0000 00: main binary (PIE)
2ab00 0000 00 - 36200 0000 00: -
36200 0000 00 - 36400 0000 00: traces
36400 0000 00 - 3e000 0000 00: -
3e000 0000 00 - 3f000 0000 00: heap
3f000 0000 00 - 3ffff ffff ff: modules and main thread stack
*/
struct Mapping42 {
  static const uptr kLoAppMemBeg   = 0x00000001000ull;
  static const uptr kLoAppMemEnd   = 0x01000000000ull;
  static const uptr kShadowBeg     = 0x10000000000ull;
  static const uptr kShadowEnd     = 0x20000000000ull;
  static const uptr kMetaShadowBeg = 0x26000000000ull;
  static const uptr kMetaShadowEnd = 0x28000000000ull;
  static const uptr kMidAppMemBeg  = 0x2aa00000000ull;
  static const uptr kMidAppMemEnd  = 0x2ab00000000ull;
  static const uptr kTraceMemBeg   = 0x36200000000ull;
  static const uptr kTraceMemEnd   = 0x36400000000ull;
  static const uptr kHeapMemBeg    = 0x3e000000000ull;
  static const uptr kHeapMemEnd    = 0x3f000000000ull;
  static const uptr kHiAppMemBeg   = 0x3f000000000ull;
  static const uptr kHiAppMemEnd   = 0x3ffffffffffull;
  static const uptr kAppMemMsk     = 0x3c000000000ull;
  static const uptr kAppMemXor     = 0x04000000000ull;
  static const uptr kVdsoBeg       = 0x37f00000000ull;
};

// C/C++ on linux/aarch64 (48-bit VMA).  Note: the heap range is empty
// (kHeapMemBeg == kHeapMemEnd); the heap falls into HiAppMem instead.
struct Mapping48 {
  static const uptr kLoAppMemBeg   = 0x0000000001000ull;
  static const uptr kLoAppMemEnd   = 0x0000200000000ull;
  static const uptr kShadowBeg     = 0x0002000000000ull;
  static const uptr kShadowEnd     = 0x0004000000000ull;
  static const uptr kMetaShadowBeg = 0x0005000000000ull;
  static const uptr kMetaShadowEnd = 0x0006000000000ull;
  static const uptr kMidAppMemBeg  = 0x0aaaa00000000ull;
  static const uptr kMidAppMemEnd  = 0x0aaaf00000000ull;
  static const uptr kTraceMemBeg   = 0x0f06000000000ull;
  static const uptr kTraceMemEnd   = 0x0f06200000000ull;
  static const uptr kHeapMemBeg    = 0x0ffff00000000ull;
  static const uptr kHeapMemEnd    = 0x0ffff00000000ull;
  static const uptr kHiAppMemBeg   = 0x0ffff00000000ull;
  static const uptr kHiAppMemEnd   = 0x1000000000000ull;
  static const uptr kAppMemMsk     = 0x0fff800000000ull;
  static const uptr kAppMemXor     = 0x0000800000000ull;
  static const uptr kVdsoBeg       = 0xffff000000000ull;
};

// Indicates the runtime will define the memory regions at runtime.
#define TSAN_RUNTIME_VMA 1
// Indicates that mapping defines a mid range memory segment.
#define TSAN_MID_APP_RANGE 1
#elif defined(__powerpc64__)
// PPC64 supports multiple VMA which leads to multiple address transformation
// functions.  To support these multiple VMAS transformations and mappings TSAN
// runtime for PPC64 uses an external memory read (vmaSize) to select which
// mapping to use.  Although slower, it makes the same instrumented binary run
// on multiple kernels.

/*
C/C++ on linux/powerpc64 (44-bit VMA)
0000 0000 0100 - 0001 0000 0000: main binary
0001 0000 0000 - 0001 0000 0000: -
0001 0000 0000 - 0b00 0000 0000: shadow
0b00 0000 0000 - 0b00 0000 0000: -
0b00 0000 0000 - 0d00 0000 0000: metainfo (memory blocks and sync objects)
0d00 0000 0000 - 0d00 0000 0000: -
0d00 0000 0000 - 0f00 0000 0000: traces
0f00 0000 0000 - 0f00 0000 0000: -
0f00 0000 0000 - 0f50 0000 0000: heap
0f50 0000 0000 - 0f60 0000 0000: -
0f60 0000 0000 - 1000 0000 0000: modules and main thread stack
*/
struct Mapping44 {
  static const uptr kMetaShadowBeg = 0x0b0000000000ull;
  static const uptr kMetaShadowEnd = 0x0d0000000000ull;
  static const uptr kTraceMemBeg   = 0x0d0000000000ull;
  static const uptr kTraceMemEnd   = 0x0f0000000000ull;
  static const uptr kShadowBeg     = 0x000100000000ull;
  static const uptr kShadowEnd     = 0x0b0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000000100ull;
  static const uptr kLoAppMemEnd   = 0x000100000000ull;
  static const uptr kHeapMemBeg    = 0x0f0000000000ull;
  static const uptr kHeapMemEnd    = 0x0f5000000000ull;
  static const uptr kHiAppMemBeg   = 0x0f6000000000ull;
  static const uptr kHiAppMemEnd   = 0x100000000000ull; // 44 bits
  static const uptr kAppMemMsk     = 0x0f0000000000ull;
  static const uptr kAppMemXor     = 0x002100000000ull;
  static const uptr kVdsoBeg       = 0x3c0000000000000ull;
};

/*
C/C++ on linux/powerpc64 (46-bit VMA)
0000 0000 1000 - 0100 0000 0000: main binary
0100 0000 0000 - 0200 0000 0000: -
0100 0000 0000 - 1000 0000 0000: shadow
1000 0000 0000 - 1000 0000 0000: -
1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
2000 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2200 0000 0000: traces
2200 0000 0000 - 3d00 0000 0000: -
3d00 0000 0000 - 3e00 0000 0000: heap
3e00 0000 0000 - 3e80 0000 0000: -
3e80 0000 0000 - 4000 0000 0000: modules and main thread stack
*/
struct Mapping46 {
  static const uptr kMetaShadowBeg = 0x100000000000ull;
  static const uptr kMetaShadowEnd = 0x200000000000ull;
  static const uptr kTraceMemBeg   = 0x200000000000ull;
  static const uptr kTraceMemEnd   = 0x220000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x100000000000ull;
  static const uptr kHeapMemBeg    = 0x3d0000000000ull;
  static const uptr kHeapMemEnd    = 0x3e0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x010000000000ull;
  static const uptr kHiAppMemBeg   = 0x3e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x400000000000ull; // 46 bits
  static const uptr kAppMemMsk     = 0x3c0000000000ull;
  static const uptr kAppMemXor     = 0x020000000000ull;
  static const uptr kVdsoBeg       = 0x7800000000000000ull;
};

/*
C/C++ on linux/powerpc64 (47-bit VMA)
0000 0000 1000 - 0100 0000 0000: main binary
0100 0000 0000 - 0200 0000 0000: -
0100 0000 0000 - 1000 0000 0000: shadow
1000 0000 0000 - 1000 0000 0000: -
1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
2000 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2200 0000 0000: traces
2200 0000 0000 - 7d00 0000 0000: -
7d00 0000 0000 - 7e00 0000 0000: heap
7e00 0000 0000 - 7e80 0000 0000: -
7e80 0000 0000 - 8000 0000 0000: modules and main thread stack
*/
struct Mapping47 {
  static const uptr kMetaShadowBeg = 0x100000000000ull;
  static const uptr kMetaShadowEnd = 0x200000000000ull;
  static const uptr kTraceMemBeg   = 0x200000000000ull;
  static const uptr kTraceMemEnd   = 0x220000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x100000000000ull;
  static const uptr kHeapMemBeg    = 0x7d0000000000ull;
  static const uptr kHeapMemEnd    = 0x7e0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x010000000000ull;
  static const uptr kHiAppMemBeg   = 0x7e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x800000000000ull; // 47 bits
  static const uptr kAppMemMsk     = 0x7c0000000000ull;
  static const uptr kAppMemXor     = 0x020000000000ull;
  static const uptr kVdsoBeg       = 0x7800000000000000ull;
};

// Indicates the runtime will define the memory regions at runtime.
#define TSAN_RUNTIME_VMA 1
#endif

#elif SANITIZER_GO && !SANITIZER_WINDOWS && defined(__x86_64__)

/* Go on linux, darwin and freebsd on x86_64
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2380 0000 0000: shadow
2380 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 8000 0000 0000: -
*/

// Go mappings use a single kAppMemBeg/kAppMemEnd range (no lo/mid/hi split,
// no separate heap range) covering both the executable and the Go heap.
struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x238000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};

#elif SANITIZER_GO && SANITIZER_WINDOWS

/* Go on windows
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 0100 0000 0000: -
0100 0000 0000 - 0500 0000 0000: shadow
0500 0000 0000 - 0560 0000 0000: -
0560 0000 0000 - 0760 0000 0000: traces
0760 0000 0000 - 07d0 0000 0000: metainfo (memory blocks and sync objects)
07d0 0000 0000 - 8000 0000 0000: -
*/

struct Mapping {
  static const uptr kMetaShadowBeg = 0x076000000000ull;
  static const uptr kMetaShadowEnd = 0x07d000000000ull;
  static const uptr kTraceMemBeg   = 0x056000000000ull;
  static const uptr kTraceMemEnd   = 0x076000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x050000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};

#elif SANITIZER_GO && defined(__powerpc64__)

/* Only Mapping46 and Mapping47 are currently supported for powerpc64 on Go. */

/* Go on linux/powerpc64 (46-bit VMA)
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2380 0000 0000: shadow
2380 0000 0000 - 2400 0000 0000: -
2400 0000 0000 - 3400 0000 0000: metainfo (memory blocks and sync objects)
3400 0000 0000 - 3600 0000 0000: -
3600 0000 0000 - 3800 0000 0000: traces
3800 0000 0000 - 4000 0000 0000: -
*/

struct Mapping46 {
  static const uptr kMetaShadowBeg = 0x240000000000ull;
  static const uptr kMetaShadowEnd = 0x340000000000ull;
  static const uptr kTraceMemBeg   = 0x360000000000ull;
  static const uptr kTraceMemEnd   = 0x380000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x238000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};

/* Go on linux/powerpc64 (47-bit VMA)
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 3000 0000 0000: shadow
3000 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 8000 0000 0000: -
*/

struct Mapping47 {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x300000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};

#elif SANITIZER_GO && defined(__aarch64__)

/* Go on linux/aarch64 (48-bit VMA)
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 3000 0000 0000: shadow
3000 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 8000 0000 0000: -
*/

struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x300000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};

// Indicates the runtime will define the memory regions at runtime.
#define TSAN_RUNTIME_VMA 1

#else
# error "Unknown platform"
#endif


#ifdef TSAN_RUNTIME_VMA
// Number of virtual-address bits of the running kernel; set by the runtime
// (see InitializePlatformEarly) and used to select the Mapping struct below.
extern uptr vmaSize;
#endif


// Keys for the compile-time Mapping-constant dispatch in MappingImpl().
enum MappingType {
  MAPPING_LO_APP_BEG,
  MAPPING_LO_APP_END,
  MAPPING_HI_APP_BEG,
  MAPPING_HI_APP_END,
#ifdef TSAN_MID_APP_RANGE
  MAPPING_MID_APP_BEG,
  MAPPING_MID_APP_END,
#endif
  MAPPING_HEAP_BEG,
  MAPPING_HEAP_END,
  MAPPING_APP_BEG,
  MAPPING_APP_END,
  MAPPING_SHADOW_BEG,
  MAPPING_SHADOW_END,
  MAPPING_META_SHADOW_BEG,
  MAPPING_META_SHADOW_END,
  MAPPING_TRACE_BEG,
  MAPPING_TRACE_END,
  MAPPING_VDSO_BEG,
};

// Returns the Mapping constant selected by the compile-time Type key.
// NOTE(review): there is no default case, so control falls off the end of a
// non-void function for a Type not handled in this build configuration (e.g.
// MAPPING_APP_BEG in a non-Go build); presumably such Types are never
// instantiated — confirm callers below.
template<typename Mapping, int Type>
uptr MappingImpl(void) {
  switch (Type) {
#if !SANITIZER_GO
    case MAPPING_LO_APP_BEG: return Mapping::kLoAppMemBeg;
    case MAPPING_LO_APP_END: return Mapping::kLoAppMemEnd;
# ifdef TSAN_MID_APP_RANGE
    case MAPPING_MID_APP_BEG: return Mapping::kMidAppMemBeg;
    case MAPPING_MID_APP_END: return Mapping::kMidAppMemEnd;
# endif
    case MAPPING_HI_APP_BEG: return Mapping::kHiAppMemBeg;
    case MAPPING_HI_APP_END: return Mapping::kHiAppMemEnd;
    case MAPPING_HEAP_BEG: return Mapping::kHeapMemBeg;
    case MAPPING_HEAP_END: return Mapping::kHeapMemEnd;
    case MAPPING_VDSO_BEG: return Mapping::kVdsoBeg;
#else
    case MAPPING_APP_BEG: return Mapping::kAppMemBeg;
    case MAPPING_APP_END: return Mapping::kAppMemEnd;
#endif
    case MAPPING_SHADOW_BEG: return Mapping::kShadowBeg;
    case MAPPING_SHADOW_END: return Mapping::kShadowEnd;
    case MAPPING_META_SHADOW_BEG: return Mapping::kMetaShadowBeg;
    case MAPPING_META_SHADOW_END: return Mapping::kMetaShadowEnd;
    case MAPPING_TRACE_BEG: return Mapping::kTraceMemBeg;
    case MAPPING_TRACE_END: return Mapping::kTraceMemEnd;
  }
}

// On multi-VMA architectures (aarch64, powerpc64) dispatches on the runtime
// vmaSize to pick the right Mapping; elsewhere uses the single Mapping.
template<int Type>
uptr MappingArchImpl(void) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return MappingImpl<Mapping39, Type>();
    case 42: return MappingImpl<Mapping42, Type>();
    case 48: return MappingImpl<Mapping48, Type>();
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return MappingImpl<Mapping44, Type>();
#endif
    case 46: return MappingImpl<Mapping46, Type>();
    case 47: return MappingImpl<Mapping47, Type>();
  }
  DCHECK(0);
  return 0;
#else
  return MappingImpl<Mapping, Type>();
#endif
}

#if !SANITIZER_GO
ALWAYS_INLINE
uptr LoAppMemBeg(void) {
  return MappingArchImpl<MAPPING_LO_APP_BEG>();
}
ALWAYS_INLINE
uptr LoAppMemEnd(void) {
  return MappingArchImpl<MAPPING_LO_APP_END>();
}

#ifdef TSAN_MID_APP_RANGE
ALWAYS_INLINE
uptr MidAppMemBeg(void) {
  return MappingArchImpl<MAPPING_MID_APP_BEG>();
}
ALWAYS_INLINE
uptr MidAppMemEnd(void) {
  return MappingArchImpl<MAPPING_MID_APP_END>();
}
#endif

ALWAYS_INLINE
uptr HeapMemBeg(void) {
  return MappingArchImpl<MAPPING_HEAP_BEG>();
}
ALWAYS_INLINE
uptr HeapMemEnd(void) {
  return MappingArchImpl<MAPPING_HEAP_END>();
}

ALWAYS_INLINE
uptr HiAppMemBeg(void) {
  return MappingArchImpl<MAPPING_HI_APP_BEG>();
}
ALWAYS_INLINE
uptr HiAppMemEnd(void) {
  return MappingArchImpl<MAPPING_HI_APP_END>();
}

ALWAYS_INLINE
uptr VdsoBeg(void) {
  return MappingArchImpl<MAPPING_VDSO_BEG>();
}

#else

ALWAYS_INLINE
uptr AppMemBeg(void) {
  return MappingArchImpl<MAPPING_APP_BEG>();
}
ALWAYS_INLINE
uptr AppMemEnd(void) {
  return MappingArchImpl<MAPPING_APP_END>();
}

#endif

// Enumerates application memory regions: stores the bounds of region i into
// *start/*end and returns true, or returns false when i is past the last
// region (the caller loops i = 0, 1, ... until false).
static inline
bool GetUserRegion(int i, uptr *start, uptr *end) {
  switch (i) {
  default:
    return false;
#if !SANITIZER_GO
  case 0:
    *start = LoAppMemBeg();
    *end = LoAppMemEnd();
    return true;
  case 1:
    *start = HiAppMemBeg();
    *end = HiAppMemEnd();
    return true;
  case 2:
    *start = HeapMemBeg();
    *end = HeapMemEnd();
    return true;
# ifdef TSAN_MID_APP_RANGE
  case 3:
    *start = MidAppMemBeg();
    *end = MidAppMemEnd();
    return true;
# endif
#else
  case 0:
    *start = AppMemBeg();
    *end = AppMemEnd();
    return true;
#endif
  }
}

ALWAYS_INLINE
uptr ShadowBeg(void) {
  return MappingArchImpl<MAPPING_SHADOW_BEG>();
}
ALWAYS_INLINE
uptr ShadowEnd(void) {
  return MappingArchImpl<MAPPING_SHADOW_END>();
}

ALWAYS_INLINE
uptr MetaShadowBeg(void) {
  return MappingArchImpl<MAPPING_META_SHADOW_BEG>();
}
ALWAYS_INLINE
uptr MetaShadowEnd(void) {
  return MappingArchImpl<MAPPING_META_SHADOW_END>();
}

ALWAYS_INLINE
uptr TraceMemBeg(void) {
  return MappingArchImpl<MAPPING_TRACE_BEG>();
}
ALWAYS_INLINE
uptr TraceMemEnd(void) {
  return MappingArchImpl<MAPPING_TRACE_END>();
}


// True if mem lies in any application range (heap/lo/mid/hi, or the single
// Go app range).  All range checks here are begin-inclusive, end-exclusive.
template<typename Mapping>
bool IsAppMemImpl(uptr mem) {
#if !SANITIZER_GO
  return (mem >= Mapping::kHeapMemBeg && mem < Mapping::kHeapMemEnd) ||
# ifdef TSAN_MID_APP_RANGE
         (mem >= Mapping::kMidAppMemBeg && mem < Mapping::kMidAppMemEnd) ||
# endif
         (mem >= Mapping::kLoAppMemBeg && mem < Mapping::kLoAppMemEnd) ||
         (mem >= Mapping::kHiAppMemBeg && mem < Mapping::kHiAppMemEnd);
#else
  return mem >= Mapping::kAppMemBeg && mem < Mapping::kAppMemEnd;
#endif
}

ALWAYS_INLINE
bool IsAppMem(uptr mem) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return IsAppMemImpl<Mapping39>(mem);
    case 42: return IsAppMemImpl<Mapping42>(mem);
    case 48: return IsAppMemImpl<Mapping48>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return IsAppMemImpl<Mapping44>(mem);
#endif
    case 46: return IsAppMemImpl<Mapping46>(mem);
    case 47: return IsAppMemImpl<Mapping47>(mem);
  }
  DCHECK(0);
  return false;
#else
  return IsAppMemImpl<Mapping>(mem);
#endif
}


// NOTE(review): the end bound is inclusive (<=) here and in IsMetaMemImpl,
// unlike the exclusive (<) bound in IsAppMemImpl — confirm this is intended.
template<typename Mapping>
bool IsShadowMemImpl(uptr mem) {
  return mem >= Mapping::kShadowBeg && mem <= Mapping::kShadowEnd;
}

ALWAYS_INLINE
bool IsShadowMem(uptr mem) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return IsShadowMemImpl<Mapping39>(mem);
    case 42: return IsShadowMemImpl<Mapping42>(mem);
    case 48: return IsShadowMemImpl<Mapping48>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return IsShadowMemImpl<Mapping44>(mem);
#endif
    case 46: return IsShadowMemImpl<Mapping46>(mem);
    case 47: return IsShadowMemImpl<Mapping47>(mem);
  }
  DCHECK(0);
  return false;
#else
  return IsShadowMemImpl<Mapping>(mem);
#endif
}


template<typename Mapping>
bool IsMetaMemImpl(uptr mem) {
  return mem >= Mapping::kMetaShadowBeg && mem <= Mapping::kMetaShadowEnd;
}

ALWAYS_INLINE
bool IsMetaMem(uptr mem) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return IsMetaMemImpl<Mapping39>(mem);
    case 42: return IsMetaMemImpl<Mapping42>(mem);
    case 48: return IsMetaMemImpl<Mapping48>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return IsMetaMemImpl<Mapping44>(mem);
#endif
    case 46: return IsMetaMemImpl<Mapping46>(mem);
    case 47: return IsMetaMemImpl<Mapping47>(mem);
  }
  DCHECK(0);
  return false;
#else
  return IsMetaMemImpl<Mapping>(mem);
#endif
}


// Maps an application address to its shadow address: round down to the
// kShadowCell granule, fold the app range into a canonical range via
// kAppMemMsk/kAppMemXor, then scale by kShadowCnt shadow words per cell.
// NOTE(review): in the Go branch, SANITIZER_WINDOWS is typically defined as
// 0/1 (not left undefined), which would make "# ifndef" always take the
// "+ kShadowBeg" arm; '+' and '|' coincide here only if the scaled address
// never overlaps kShadowBeg's bits — confirm against sanitizer_platform.h.
template<typename Mapping>
uptr MemToShadowImpl(uptr x) {
  DCHECK(IsAppMem(x));
#if !SANITIZER_GO
  return (((x) & ~(Mapping::kAppMemMsk | (kShadowCell - 1)))
      ^ Mapping::kAppMemXor) * kShadowCnt;
#else
# ifndef SANITIZER_WINDOWS
  return ((x & ~(kShadowCell - 1)) * kShadowCnt) | Mapping::kShadowBeg;
# else
  return ((x & ~(kShadowCell - 1)) * kShadowCnt) + Mapping::kShadowBeg;
# endif
#endif
}

ALWAYS_INLINE
uptr MemToShadow(uptr x) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return MemToShadowImpl<Mapping39>(x);
    case 42: return MemToShadowImpl<Mapping42>(x);
    case 48: return MemToShadowImpl<Mapping48>(x);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return MemToShadowImpl<Mapping44>(x);
#endif
    case 46: return MemToShadowImpl<Mapping46>(x);
    case 47: return MemToShadowImpl<Mapping47>(x);
  }
  DCHECK(0);
  return 0;
#else
  return MemToShadowImpl<Mapping>(x);
#endif
}


// Maps an application address to its metainfo shadow slot: one u32 of
// kMetaShadowSize per kMetaShadowCell-sized app granule, based at
// kMetaShadowBeg.
template<typename Mapping>
u32 *MemToMetaImpl(uptr x) {
  DCHECK(IsAppMem(x));
#if !SANITIZER_GO
  return (u32*)(((((x) & ~(Mapping::kAppMemMsk | (kMetaShadowCell - 1)))) /
      kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
#else
# ifndef SANITIZER_WINDOWS
  return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
      kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
# else
  return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
      kMetaShadowCell * kMetaShadowSize) + Mapping::kMetaShadowBeg);
# endif
#endif
}

ALWAYS_INLINE
u32 *MemToMeta(uptr x) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return MemToMetaImpl<Mapping39>(x);
    case 42: return MemToMetaImpl<Mapping42>(x);
    case 48: return MemToMetaImpl<Mapping48>(x);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return MemToMetaImpl<Mapping44>(x);
#endif
    case 46: return MemToMetaImpl<Mapping46>(x);
    case 47: return MemToMetaImpl<Mapping47>(x);
  }
  DCHECK(0);
  return 0;
#else
  return MemToMetaImpl<Mapping>(x);
#endif
}


// Inverse of MemToShadowImpl (up to kShadowCell rounding).
template<typename Mapping>
uptr ShadowToMemImpl(uptr s) {
  DCHECK(IsShadowMem(s));
#if !SANITIZER_GO
  // The shadow mapping is non-linear and we've lost some bits, so we don't
  // have an easy way to restore the original app address. But the mapping is
  // a bijection, so we try to restore the address as belonging to
  // low/mid/high range consecutively and see if shadow->app->shadow mapping
  // gives us the same address.
  uptr p = (s / kShadowCnt) ^ Mapping::kAppMemXor;
  if (p >= Mapping::kLoAppMemBeg && p < Mapping::kLoAppMemEnd &&
      MemToShadow(p) == s)
    return p;
# ifdef TSAN_MID_APP_RANGE
  p = ((s / kShadowCnt) ^ Mapping::kAppMemXor) +
      (Mapping::kMidAppMemBeg & Mapping::kAppMemMsk);
  if (p >= Mapping::kMidAppMemBeg && p < Mapping::kMidAppMemEnd &&
      MemToShadow(p) == s)
    return p;
# endif
  // Neither low nor mid matched, so reconstruct as a high-range address.
  return ((s / kShadowCnt) ^ Mapping::kAppMemXor) | Mapping::kAppMemMsk;
#else  // #if !SANITIZER_GO
# ifndef SANITIZER_WINDOWS
  return (s & ~Mapping::kShadowBeg) / kShadowCnt;
# else
  return (s - Mapping::kShadowBeg) / kShadowCnt;
# endif  // SANITIZER_WINDOWS
#endif
}

ALWAYS_INLINE
uptr ShadowToMem(uptr s) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return ShadowToMemImpl<Mapping39>(s);
    case 42: return ShadowToMemImpl<Mapping42>(s);
    case 48: return ShadowToMemImpl<Mapping48>(s);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return ShadowToMemImpl<Mapping44>(s);
#endif
    case 46: return ShadowToMemImpl<Mapping46>(s);
    case 47: return ShadowToMemImpl<Mapping47>(s);
  }
  DCHECK(0);
  return 0;
#else
  return ShadowToMemImpl<Mapping>(s);
#endif
}



// The additional page is to catch shadow stack overflow as paging fault.
// Windows wants 64K alignment for mmaps.  (Per-thread trace slot size,
// rounded up to a 64K boundary.)
const uptr kTotalTraceSize = (kTraceSize * sizeof(Event) + sizeof(Trace)
    + (64 << 10) + (64 << 10) - 1) & ~((64 << 10) - 1);

// Base address of the trace buffer for thread tid (one fixed slot per tid).
template<typename Mapping>
uptr GetThreadTraceImpl(int tid) {
  uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize;
  DCHECK_LT(p, Mapping::kTraceMemEnd);
  return p;
}

ALWAYS_INLINE
uptr GetThreadTrace(int tid) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return GetThreadTraceImpl<Mapping39>(tid);
    case 42: return GetThreadTraceImpl<Mapping42>(tid);
    case 48: return GetThreadTraceImpl<Mapping48>(tid);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return GetThreadTraceImpl<Mapping44>(tid);
#endif
    case 46: return GetThreadTraceImpl<Mapping46>(tid);
    case 47: return GetThreadTraceImpl<Mapping47>(tid);
  }
  DCHECK(0);
  return 0;
#else
  return GetThreadTraceImpl<Mapping>(tid);
#endif
}


// Address of the Trace header, which lives after the events in tid's slot.
template<typename Mapping>
uptr GetThreadTraceHeaderImpl(int tid) {
  uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize
      + kTraceSize * sizeof(Event);
  DCHECK_LT(p, Mapping::kTraceMemEnd);
  return p;
}

ALWAYS_INLINE
uptr GetThreadTraceHeader(int tid) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return GetThreadTraceHeaderImpl<Mapping39>(tid);
    case 42: return GetThreadTraceHeaderImpl<Mapping42>(tid);
    case 48: return GetThreadTraceHeaderImpl<Mapping48>(tid);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return GetThreadTraceHeaderImpl<Mapping44>(tid);
#endif
    case 46: return GetThreadTraceHeaderImpl<Mapping46>(tid);
    case 47: return GetThreadTraceHeaderImpl<Mapping47>(tid);
  }
  DCHECK(0);
  return 0;
#else
  return GetThreadTraceHeaderImpl<Mapping>(tid);
#endif
}

// Platform hooks implemented in the per-OS tsan_platform_*.cc files.
void InitializePlatform();
void InitializePlatformEarly();
void CheckAndProtect();
void InitializeShadowMemoryPlatform();
void FlushShadowMemory();
void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive);
int ExtractResolvFDs(void *state, int *fds, int nfd);
int ExtractRecvmsgFDs(void *msg, int *fds, int nfd);
void ImitateTlsWrite(ThreadState *thr, uptr tls_addr, uptr tls_size);

int call_pthread_cancel_with_cleanup(int(*fn)(void *c, void *m,
    void *abstime), void *c, void *m, void *abstime,
    void(*cleanup)(void *arg), void *arg);

void DestroyThreadState();

}  // namespace __tsan

#endif  // TSAN_PLATFORM_H