//===-- tsan_platform.h -----------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// Platform-specific code.
//===----------------------------------------------------------------------===//

#ifndef TSAN_PLATFORM_H
#define TSAN_PLATFORM_H

#if !defined(__LP64__) && !defined(_WIN64)
# error "Only 64-bit is supported"
#endif

#include "tsan_defs.h"
#include "tsan_trace.h"

namespace __tsan {

#if !SANITIZER_GO

#if defined(__x86_64__)
/*
C/C++ on linux/x86_64 and freebsd/x86_64
0000 0000 1000 - 0080 0000 0000: main binary and/or MAP_32BIT mappings (512GB)
0080 0000 0000 - 0100 0000 0000: -
0100 0000 0000 - 2000 0000 0000: shadow
2000 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 5500 0000 0000: -
5500 0000 0000 - 5680 0000 0000: pie binaries without ASLR or on 4.1+ kernels
5680 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 7b00 0000 0000: -
7b00 0000 0000 - 7c00 0000 0000: heap
7c00 0000 0000 - 7e80 0000 0000: -
7e80 0000 0000 - 8000 0000 0000: modules and main thread stack

C/C++ on netbsd/amd64 can reuse the same mapping:
 * The address space starts from 0x1000 (option with 0x0) and ends with
   0x7f7ffffff000.
 * LoAppMem-kHeapMemEnd can be reused as it is.
 * No VDSO support.
 * No MidAppMem region.
 * No additional HeapMem region.
 * HiAppMem contains the stack, loader, shared libraries and heap.
 * The stack on NetBSD/amd64 has 128MB prereserved.
 * The heap grows downwards (top-down).
 * ASLR must be disabled per-process or globally.

*/
struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x340000000000ull;
  static const uptr kTraceMemBeg = 0x600000000000ull;
  static const uptr kTraceMemEnd = 0x620000000000ull;
  static const uptr kShadowBeg = 0x010000000000ull;
  static const uptr kShadowEnd = 0x200000000000ull;
  static const uptr kHeapMemBeg = 0x7b0000000000ull;
  static const uptr kHeapMemEnd = 0x7c0000000000ull;
  static const uptr kLoAppMemBeg = 0x000000001000ull;
  static const uptr kLoAppMemEnd = 0x008000000000ull;
  static const uptr kMidAppMemBeg = 0x550000000000ull;
  static const uptr kMidAppMemEnd = 0x568000000000ull;
  static const uptr kHiAppMemBeg = 0x7e8000000000ull;
  static const uptr kHiAppMemEnd = 0x800000000000ull;
  static const uptr kAppMemMsk = 0x780000000000ull;
  static const uptr kAppMemXor = 0x040000000000ull;
  static const uptr kVdsoBeg = 0xf000000000000000ull;
};

#define TSAN_MID_APP_RANGE 1
#elif defined(__mips64)
/*
C/C++ on linux/mips64 (40-bit VMA)
0000 0000 00 - 0100 0000 00: -                             (4 GB)
0100 0000 00 - 0200 0000 00: main binary                   (4 GB)
0200 0000 00 - 2000 0000 00: -                             (120 GB)
2000 0000 00 - 4000 0000 00: shadow                        (128 GB)
4000 0000 00 - 5000 0000 00: metainfo (memory blocks and sync objects) (64 GB)
5000 0000 00 - aa00 0000 00: -                             (360 GB)
aa00 0000 00 - ab00 0000 00: main binary (PIE)             (4 GB)
ab00 0000 00 - b000 0000 00: -                             (20 GB)
b000 0000 00 - b200 0000 00: traces                        (8 GB)
b200 0000 00 - fe00 0000 00: -                             (304 GB)
fe00 0000 00 - ff00 0000 00: heap                          (4 GB)
ff00 0000 00 - ff80 0000 00: -                             (2 GB)
ff80 0000 00 - ffff ffff ff: modules and main thread stack (<2 GB)
*/
struct Mapping {
  static const uptr kMetaShadowBeg = 0x4000000000ull;
  static const uptr kMetaShadowEnd = 0x5000000000ull;
  static const uptr kTraceMemBeg = 0xb000000000ull;
  static const uptr kTraceMemEnd = 0xb200000000ull;
  static const uptr kShadowBeg = 0x2000000000ull;
  static const uptr kShadowEnd = 0x4000000000ull;
  static const uptr kHeapMemBeg = 0xfe00000000ull;
  static const uptr kHeapMemEnd = 0xff00000000ull;
  static const uptr kLoAppMemBeg = 0x0100000000ull;
  static const uptr kLoAppMemEnd = 0x0200000000ull;
  static const uptr kMidAppMemBeg = 0xaa00000000ull;
  static const uptr kMidAppMemEnd = 0xab00000000ull;
  static const uptr kHiAppMemBeg = 0xff80000000ull;
  static const uptr kHiAppMemEnd = 0xffffffffffull;
  static const uptr kAppMemMsk = 0xf800000000ull;
  static const uptr kAppMemXor = 0x0800000000ull;
  static const uptr kVdsoBeg = 0xfffff00000ull;
};

#define TSAN_MID_APP_RANGE 1
#elif defined(__aarch64__) && defined(__APPLE__)
/*
C/C++ on Darwin/iOS/ARM64 (36-bit VMA, 64 GB VM)
0000 0000 00 - 0100 0000 00: -                                    (4 GB)
0100 0000 00 - 0200 0000 00: main binary, modules, thread stacks  (4 GB)
0200 0000 00 - 0300 0000 00: heap                                 (4 GB)
0300 0000 00 - 0400 0000 00: -                                    (4 GB)
0400 0000 00 - 0c00 0000 00: shadow memory                        (32 GB)
0c00 0000 00 - 0d00 0000 00: -                                    (4 GB)
0d00 0000 00 - 0e00 0000 00: metainfo                             (4 GB)
0e00 0000 00 - 0f00 0000 00: -                                    (4 GB)
0f00 0000 00 - 0fc0 0000 00: traces                               (3 GB)
0fc0 0000 00 - 1000 0000 00: -
*/
struct Mapping {
  static const uptr kLoAppMemBeg = 0x0100000000ull;
  static const uptr kLoAppMemEnd = 0x0200000000ull;
  static const uptr kHeapMemBeg = 0x0200000000ull;
  static const uptr kHeapMemEnd = 0x0300000000ull;
  static const uptr kShadowBeg = 0x0400000000ull;
  static const uptr kShadowEnd = 0x0c00000000ull;
  static const uptr kMetaShadowBeg = 0x0d00000000ull;
  static const uptr kMetaShadowEnd = 0x0e00000000ull;
  static const uptr kTraceMemBeg = 0x0f00000000ull;
  static const uptr kTraceMemEnd = 0x0fc0000000ull;
  static const uptr kHiAppMemBeg = 0x0fc0000000ull;
  static const uptr kHiAppMemEnd = 0x0fc0000000ull;
  static const uptr kAppMemMsk = 0x0ull;
  static const uptr kAppMemXor = 0x0ull;
  static const uptr kVdsoBeg = 0x7000000000000000ull;
};

#elif defined(__aarch64__)
// AArch64 supports multiple VMA sizes, which leads to multiple address
// transformation functions. To support these multiple VMA transformations
// and mappings, the TSan runtime for AArch64 uses an external memory read
// (vmaSize) to select which mapping to use. Although slower, this lets the
// same instrumented binary run on multiple kernels.

/*
C/C++ on linux/aarch64 (39-bit VMA)
0000 0010 00 - 0100 0000 00: main binary
0100 0000 00 - 0800 0000 00: -
0800 0000 00 - 2000 0000 00: shadow memory
2000 0000 00 - 3100 0000 00: -
3100 0000 00 - 3400 0000 00: metainfo
3400 0000 00 - 5500 0000 00: -
5500 0000 00 - 5600 0000 00: main binary (PIE)
5600 0000 00 - 6000 0000 00: -
6000 0000 00 - 6200 0000 00: traces
6200 0000 00 - 7c00 0000 00: -
7c00 0000 00 - 7d00 0000 00: heap
7d00 0000 00 - 7fff ffff ff: modules and main thread stack
*/
struct Mapping39 {
  static const uptr kLoAppMemBeg = 0x0000001000ull;
  static const uptr kLoAppMemEnd = 0x0100000000ull;
  static const uptr kShadowBeg = 0x0800000000ull;
  static const uptr kShadowEnd = 0x2000000000ull;
  static const uptr kMetaShadowBeg = 0x3100000000ull;
  static const uptr kMetaShadowEnd = 0x3400000000ull;
  static const uptr kMidAppMemBeg = 0x5500000000ull;
  static const uptr kMidAppMemEnd = 0x5600000000ull;
  static const uptr kTraceMemBeg = 0x6000000000ull;
  static const uptr kTraceMemEnd = 0x6200000000ull;
  static const uptr kHeapMemBeg = 0x7c00000000ull;
  static const uptr kHeapMemEnd = 0x7d00000000ull;
  static const uptr kHiAppMemBeg = 0x7e00000000ull;
  static const uptr kHiAppMemEnd = 0x7fffffffffull;
  static const uptr kAppMemMsk = 0x7800000000ull;
  static const uptr kAppMemXor = 0x0200000000ull;
  static const uptr kVdsoBeg = 0x7f00000000ull;
};

/*
C/C++ on linux/aarch64 (42-bit VMA)
00000 0010 00 - 01000 0000 00: main binary
01000 0000 00 - 10000 0000 00: -
10000 0000 00 - 20000 0000 00: shadow memory
20000 0000 00 - 26000 0000 00: -
26000 0000 00 - 28000 0000 00: metainfo
28000 0000 00 - 2aa00 0000 00: -
2aa00 0000 00 - 2ab00 0000 00: main binary (PIE)
2ab00 0000 00 - 36200 0000 00: -
36200 0000 00 - 36400 0000 00: traces
36400 0000 00 - 3e000 0000 00: -
3e000 0000 00 - 3f000 0000 00: heap
3f000 0000 00 - 3ffff ffff ff: modules and main thread stack
*/
struct Mapping42 {
  static const uptr kLoAppMemBeg = 0x00000001000ull;
  static const uptr kLoAppMemEnd = 0x01000000000ull;
  static const uptr kShadowBeg = 0x10000000000ull;
  static const uptr kShadowEnd = 0x20000000000ull;
  static const uptr kMetaShadowBeg = 0x26000000000ull;
  static const uptr kMetaShadowEnd = 0x28000000000ull;
  static const uptr kMidAppMemBeg = 0x2aa00000000ull;
  static const uptr kMidAppMemEnd = 0x2ab00000000ull;
  static const uptr kTraceMemBeg = 0x36200000000ull;
  static const uptr kTraceMemEnd = 0x36400000000ull;
  static const uptr kHeapMemBeg = 0x3e000000000ull;
  static const uptr kHeapMemEnd = 0x3f000000000ull;
  static const uptr kHiAppMemBeg = 0x3f000000000ull;
  static const uptr kHiAppMemEnd = 0x3ffffffffffull;
  static const uptr kAppMemMsk = 0x3c000000000ull;
  static const uptr kAppMemXor = 0x04000000000ull;
  static const uptr kVdsoBeg = 0x37f00000000ull;
};

struct Mapping48 {
  static const uptr kLoAppMemBeg = 0x0000000001000ull;
  static const uptr kLoAppMemEnd = 0x0000200000000ull;
  static const uptr kShadowBeg = 0x0002000000000ull;
  static const uptr kShadowEnd = 0x0004000000000ull;
  static const uptr kMetaShadowBeg = 0x0005000000000ull;
  static const uptr kMetaShadowEnd = 0x0006000000000ull;
  static const uptr kMidAppMemBeg = 0x0aaaa00000000ull;
  static const uptr kMidAppMemEnd = 0x0aaaf00000000ull;
  static const uptr kTraceMemBeg = 0x0f06000000000ull;
  static const uptr kTraceMemEnd = 0x0f06200000000ull;
  static const uptr kHeapMemBeg = 0x0ffff00000000ull;
  static const uptr kHeapMemEnd = 0x0ffff00000000ull;
  static const uptr kHiAppMemBeg = 0x0ffff00000000ull;
  static const uptr kHiAppMemEnd = 0x1000000000000ull;
  static const uptr kAppMemMsk = 0x0fff800000000ull;
  static const uptr kAppMemXor = 0x0000800000000ull;
  static const uptr kVdsoBeg = 0xffff000000000ull;
};

// Indicates the runtime will define the memory regions at runtime.
#define TSAN_RUNTIME_VMA 1
// Indicates that mapping defines a mid range memory segment.
#define TSAN_MID_APP_RANGE 1
#elif defined(__powerpc64__)
// PPC64 supports multiple VMA sizes, which leads to multiple address
// transformation functions. To support these multiple VMA transformations
// and mappings, the TSan runtime for PPC64 uses an external memory read
// (vmaSize) to select which mapping to use. Although slower, this lets the
// same instrumented binary run on multiple kernels.

/*
C/C++ on linux/powerpc64 (44-bit VMA)
0000 0000 0100 - 0001 0000 0000: main binary
0001 0000 0000 - 0001 0000 0000: -
0001 0000 0000 - 0b00 0000 0000: shadow
0b00 0000 0000 - 0b00 0000 0000: -
0b00 0000 0000 - 0d00 0000 0000: metainfo (memory blocks and sync objects)
0d00 0000 0000 - 0d00 0000 0000: -
0d00 0000 0000 - 0f00 0000 0000: traces
0f00 0000 0000 - 0f00 0000 0000: -
0f00 0000 0000 - 0f50 0000 0000: heap
0f50 0000 0000 - 0f60 0000 0000: -
0f60 0000 0000 - 1000 0000 0000: modules and main thread stack
*/
struct Mapping44 {
  static const uptr kMetaShadowBeg = 0x0b0000000000ull;
  static const uptr kMetaShadowEnd = 0x0d0000000000ull;
  static const uptr kTraceMemBeg = 0x0d0000000000ull;
  static const uptr kTraceMemEnd = 0x0f0000000000ull;
  static const uptr kShadowBeg = 0x000100000000ull;
  static const uptr kShadowEnd = 0x0b0000000000ull;
  static const uptr kLoAppMemBeg = 0x000000000100ull;
  static const uptr kLoAppMemEnd = 0x000100000000ull;
  static const uptr kHeapMemBeg = 0x0f0000000000ull;
  static const uptr kHeapMemEnd = 0x0f5000000000ull;
  static const uptr kHiAppMemBeg = 0x0f6000000000ull;
  static const uptr kHiAppMemEnd = 0x100000000000ull; // 44 bits
  static const uptr kAppMemMsk = 0x0f0000000000ull;
  static const uptr kAppMemXor = 0x002100000000ull;
  static const uptr kVdsoBeg = 0x3c0000000000000ull;
};

/*
C/C++ on linux/powerpc64 (46-bit VMA)
0000 0000 1000 - 0100 0000 0000: main binary
0100 0000 0000 - 0100 0000 0000: -
0100 0000 0000 - 1000 0000 0000: shadow
1000 0000 0000 - 1000 0000 0000: -
1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
2000 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2200 0000 0000: traces
2200 0000 0000 - 3d00 0000 0000: -
3d00 0000 0000 - 3e00 0000 0000: heap
3e00 0000 0000 - 3e80 0000 0000: -
3e80 0000 0000 - 4000 0000 0000: modules and main thread stack
*/
struct Mapping46 {
  static const uptr kMetaShadowBeg = 0x100000000000ull;
  static const uptr kMetaShadowEnd = 0x200000000000ull;
  static const uptr kTraceMemBeg = 0x200000000000ull;
  static const uptr kTraceMemEnd = 0x220000000000ull;
  static const uptr kShadowBeg = 0x010000000000ull;
  static const uptr kShadowEnd = 0x100000000000ull;
  static const uptr kHeapMemBeg = 0x3d0000000000ull;
  static const uptr kHeapMemEnd = 0x3e0000000000ull;
  static const uptr kLoAppMemBeg = 0x000000001000ull;
  static const uptr kLoAppMemEnd = 0x010000000000ull;
  static const uptr kHiAppMemBeg = 0x3e8000000000ull;
  static const uptr kHiAppMemEnd = 0x400000000000ull; // 46 bits
  static const uptr kAppMemMsk = 0x3c0000000000ull;
  static const uptr kAppMemXor = 0x020000000000ull;
  static const uptr kVdsoBeg = 0x7800000000000000ull;
};

/*
C/C++ on linux/powerpc64 (47-bit VMA)
0000 0000 1000 - 0100 0000 0000: main binary
0100 0000 0000 - 0100 0000 0000: -
0100 0000 0000 - 1000 0000 0000: shadow
1000 0000 0000 - 1000 0000 0000: -
1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
2000 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2200 0000 0000: traces
2200 0000 0000 - 7d00 0000 0000: -
7d00 0000 0000 - 7e00 0000 0000: heap
7e00 0000 0000 - 7e80 0000 0000: -
7e80 0000 0000 - 8000 0000 0000: modules and main thread stack
*/
struct Mapping47 {
  static const uptr kMetaShadowBeg = 0x100000000000ull;
  static const uptr kMetaShadowEnd = 0x200000000000ull;
  static const uptr kTraceMemBeg = 0x200000000000ull;
  static const uptr kTraceMemEnd = 0x220000000000ull;
  static const uptr kShadowBeg = 0x010000000000ull;
  static const uptr kShadowEnd = 0x100000000000ull;
  static const uptr kHeapMemBeg = 0x7d0000000000ull;
  static const uptr kHeapMemEnd = 0x7e0000000000ull;
  static const uptr kLoAppMemBeg = 0x000000001000ull;
  static const uptr kLoAppMemEnd = 0x010000000000ull;
  static const uptr kHiAppMemBeg = 0x7e8000000000ull;
  static const uptr kHiAppMemEnd = 0x800000000000ull; // 47 bits
  static const uptr kAppMemMsk = 0x7c0000000000ull;
  static const uptr kAppMemXor = 0x020000000000ull;
  static const uptr kVdsoBeg = 0x7800000000000000ull;
};

// Indicates the runtime will define the memory regions at runtime.
#define TSAN_RUNTIME_VMA 1
#endif

#elif SANITIZER_GO && !SANITIZER_WINDOWS && defined(__x86_64__)

/* Go on linux, darwin and freebsd on x86_64
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2380 0000 0000: shadow
2380 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 8000 0000 0000: -
*/

struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg = 0x600000000000ull;
  static const uptr kTraceMemEnd = 0x620000000000ull;
  static const uptr kShadowBeg = 0x200000000000ull;
  static const uptr kShadowEnd = 0x238000000000ull;
  static const uptr kAppMemBeg = 0x000000001000ull;
  static const uptr kAppMemEnd = 0x00e000000000ull;
};

#elif SANITIZER_GO && SANITIZER_WINDOWS

/* Go on windows
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 0100 0000 0000: -
0100 0000 0000 - 0500 0000 0000: shadow
0500 0000 0000 - 0560 0000 0000: -
0560 0000 0000 - 0760 0000 0000: traces
0760 0000 0000 - 07d0 0000 0000: metainfo (memory blocks and sync objects)
07d0 0000 0000 - 8000 0000 0000: -
*/

struct Mapping {
  static const uptr kMetaShadowBeg = 0x076000000000ull;
  static const uptr kMetaShadowEnd = 0x07d000000000ull;
  static const uptr kTraceMemBeg = 0x056000000000ull;
  static const uptr kTraceMemEnd = 0x076000000000ull;
  static const uptr kShadowBeg = 0x010000000000ull;
  static const uptr kShadowEnd = 0x050000000000ull;
  static const uptr kAppMemBeg = 0x000000001000ull;
  static const uptr kAppMemEnd = 0x00e000000000ull;
};

#elif SANITIZER_GO && defined(__powerpc64__)

/* Only Mapping46 and Mapping47 are currently supported for powerpc64 on Go. */

/* Go on linux/powerpc64 (46-bit VMA)
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2380 0000 0000: shadow
2380 0000 0000 - 2400 0000 0000: -
2400 0000 0000 - 3400 0000 0000: metainfo (memory blocks and sync objects)
3400 0000 0000 - 3600 0000 0000: -
3600 0000 0000 - 3800 0000 0000: traces
3800 0000 0000 - 4000 0000 0000: -
*/

struct Mapping46 {
  static const uptr kMetaShadowBeg = 0x240000000000ull;
  static const uptr kMetaShadowEnd = 0x340000000000ull;
  static const uptr kTraceMemBeg = 0x360000000000ull;
  static const uptr kTraceMemEnd = 0x380000000000ull;
  static const uptr kShadowBeg = 0x200000000000ull;
  static const uptr kShadowEnd = 0x238000000000ull;
  static const uptr kAppMemBeg = 0x000000001000ull;
  static const uptr kAppMemEnd = 0x00e000000000ull;
};

/* Go on linux/powerpc64 (47-bit VMA)
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 3000 0000 0000: shadow
3000 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 8000 0000 0000: -
*/

struct Mapping47 {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg = 0x600000000000ull;
  static const uptr kTraceMemEnd = 0x620000000000ull;
  static const uptr kShadowBeg = 0x200000000000ull;
  static const uptr kShadowEnd = 0x300000000000ull;
  static const uptr kAppMemBeg = 0x000000001000ull;
  static const uptr kAppMemEnd = 0x00e000000000ull;
};

#define TSAN_RUNTIME_VMA 1

#elif SANITIZER_GO && defined(__aarch64__)

/* Go on linux/aarch64 (48-bit VMA) and darwin/aarch64 (47-bit VMA)
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 3000 0000 0000: shadow
3000 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 8000 0000 0000: -
*/

struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg = 0x600000000000ull;
  static const uptr kTraceMemEnd = 0x620000000000ull;
  static const uptr kShadowBeg = 0x200000000000ull;
  static const uptr kShadowEnd = 0x300000000000ull;
  static const uptr kAppMemBeg = 0x000000001000ull;
  static const uptr kAppMemEnd = 0x00e000000000ull;
};

// Indicates the runtime will define the memory regions at runtime.
#define TSAN_RUNTIME_VMA 1

#elif SANITIZER_GO && defined(__mips64)
/*
Go on linux/mips64 (47-bit VMA)
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 3000 0000 0000: shadow
3000 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 8000 0000 0000: -
*/
struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg = 0x600000000000ull;
  static const uptr kTraceMemEnd = 0x620000000000ull;
  static const uptr kShadowBeg = 0x200000000000ull;
  static const uptr kShadowEnd = 0x300000000000ull;
  static const uptr kAppMemBeg = 0x000000001000ull;
  static const uptr kAppMemEnd = 0x00e000000000ull;
};
#else
# error "Unknown platform"
#endif


#ifdef TSAN_RUNTIME_VMA
extern uptr vmaSize;
#endif


enum MappingType {
  MAPPING_LO_APP_BEG,
  MAPPING_LO_APP_END,
  MAPPING_HI_APP_BEG,
  MAPPING_HI_APP_END,
#ifdef TSAN_MID_APP_RANGE
  MAPPING_MID_APP_BEG,
  MAPPING_MID_APP_END,
#endif
  MAPPING_HEAP_BEG,
  MAPPING_HEAP_END,
  MAPPING_APP_BEG,
  MAPPING_APP_END,
  MAPPING_SHADOW_BEG,
  MAPPING_SHADOW_END,
  MAPPING_META_SHADOW_BEG,
  MAPPING_META_SHADOW_END,
  MAPPING_TRACE_BEG,
  MAPPING_TRACE_END,
  MAPPING_VDSO_BEG,
};

template<typename Mapping, int Type>
uptr MappingImpl(void) {
  switch (Type) {
#if !SANITIZER_GO
    case MAPPING_LO_APP_BEG: return Mapping::kLoAppMemBeg;
    case MAPPING_LO_APP_END: return Mapping::kLoAppMemEnd;
# ifdef TSAN_MID_APP_RANGE
    case MAPPING_MID_APP_BEG: return Mapping::kMidAppMemBeg;
    case MAPPING_MID_APP_END: return Mapping::kMidAppMemEnd;
# endif
    case MAPPING_HI_APP_BEG: return Mapping::kHiAppMemBeg;
    case MAPPING_HI_APP_END: return Mapping::kHiAppMemEnd;
    case MAPPING_HEAP_BEG: return Mapping::kHeapMemBeg;
    case MAPPING_HEAP_END: return Mapping::kHeapMemEnd;
    case MAPPING_VDSO_BEG: return Mapping::kVdsoBeg;
#else
    case MAPPING_APP_BEG: return Mapping::kAppMemBeg;
    case MAPPING_APP_END: return Mapping::kAppMemEnd;
#endif
    case MAPPING_SHADOW_BEG: return Mapping::kShadowBeg;
    case MAPPING_SHADOW_END: return Mapping::kShadowEnd;
    case MAPPING_META_SHADOW_BEG: return Mapping::kMetaShadowBeg;
    case MAPPING_META_SHADOW_END: return Mapping::kMetaShadowEnd;
    case MAPPING_TRACE_BEG: return Mapping::kTraceMemBeg;
    case MAPPING_TRACE_END: return Mapping::kTraceMemEnd;
  }
}

template<int Type>
uptr MappingArchImpl(void) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return MappingImpl<Mapping39, Type>();
    case 42: return MappingImpl<Mapping42, Type>();
    case 48: return MappingImpl<Mapping48, Type>();
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return MappingImpl<Mapping44, Type>();
#endif
    case 46: return MappingImpl<Mapping46, Type>();
    case 47: return MappingImpl<Mapping47, Type>();
  }
  DCHECK(0);
  return 0;
#else
  return MappingImpl<Mapping, Type>();
#endif
}
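
// Illustrative note (a sketch, not part of the interface): each accessor
// below is a thin wrapper over MappingArchImpl(). For example, on
// linux/aarch64 with vmaSize == 39, ShadowBeg() resolves to
// MappingImpl<Mapping39, MAPPING_SHADOW_BEG>(), i.e. Mapping39::kShadowBeg
// (0x0800000000), while on fixed-layout targets such as linux/x86_64 it
// returns Mapping::kShadowBeg directly.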
#if !SANITIZER_GO
ALWAYS_INLINE
uptr LoAppMemBeg(void) {
  return MappingArchImpl<MAPPING_LO_APP_BEG>();
}
ALWAYS_INLINE
uptr LoAppMemEnd(void) {
  return MappingArchImpl<MAPPING_LO_APP_END>();
}

#ifdef TSAN_MID_APP_RANGE
ALWAYS_INLINE
uptr MidAppMemBeg(void) {
  return MappingArchImpl<MAPPING_MID_APP_BEG>();
}
ALWAYS_INLINE
uptr MidAppMemEnd(void) {
  return MappingArchImpl<MAPPING_MID_APP_END>();
}
#endif

ALWAYS_INLINE
uptr HeapMemBeg(void) {
  return MappingArchImpl<MAPPING_HEAP_BEG>();
}
ALWAYS_INLINE
uptr HeapMemEnd(void) {
  return MappingArchImpl<MAPPING_HEAP_END>();
}

ALWAYS_INLINE
uptr HiAppMemBeg(void) {
  return MappingArchImpl<MAPPING_HI_APP_BEG>();
}
ALWAYS_INLINE
uptr HiAppMemEnd(void) {
  return MappingArchImpl<MAPPING_HI_APP_END>();
}

ALWAYS_INLINE
uptr VdsoBeg(void) {
  return MappingArchImpl<MAPPING_VDSO_BEG>();
}

#else

ALWAYS_INLINE
uptr AppMemBeg(void) {
  return MappingArchImpl<MAPPING_APP_BEG>();
}
ALWAYS_INLINE
uptr AppMemEnd(void) {
  return MappingArchImpl<MAPPING_APP_END>();
}

#endif

static inline
bool GetUserRegion(int i, uptr *start, uptr *end) {
  switch (i) {
  default:
    return false;
#if !SANITIZER_GO
  case 0:
    *start = LoAppMemBeg();
    *end = LoAppMemEnd();
    return true;
  case 1:
    *start = HiAppMemBeg();
    *end = HiAppMemEnd();
    return true;
  case 2:
    *start = HeapMemBeg();
    *end = HeapMemEnd();
    return true;
# ifdef TSAN_MID_APP_RANGE
  case 3:
    *start = MidAppMemBeg();
    *end = MidAppMemEnd();
    return true;
# endif
#else
  case 0:
    *start = AppMemBeg();
    *end = AppMemEnd();
    return true;
#endif
  }
}
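
// Illustrative usage of GetUserRegion() (a sketch, not part of the runtime
// interface): callers can enumerate all application regions by increasing
// the index until the function returns false:
//
//   uptr start, end;
//   for (int i = 0; GetUserRegion(i, &start, &end); i++) {
//     // inspect or protect [start, end) here
//   }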

ALWAYS_INLINE
uptr ShadowBeg(void) {
  return MappingArchImpl<MAPPING_SHADOW_BEG>();
}
ALWAYS_INLINE
uptr ShadowEnd(void) {
  return MappingArchImpl<MAPPING_SHADOW_END>();
}

ALWAYS_INLINE
uptr MetaShadowBeg(void) {
  return MappingArchImpl<MAPPING_META_SHADOW_BEG>();
}
ALWAYS_INLINE
uptr MetaShadowEnd(void) {
  return MappingArchImpl<MAPPING_META_SHADOW_END>();
}

ALWAYS_INLINE
uptr TraceMemBeg(void) {
  return MappingArchImpl<MAPPING_TRACE_BEG>();
}
ALWAYS_INLINE
uptr TraceMemEnd(void) {
  return MappingArchImpl<MAPPING_TRACE_END>();
}


template<typename Mapping>
bool IsAppMemImpl(uptr mem) {
#if !SANITIZER_GO
  return (mem >= Mapping::kHeapMemBeg && mem < Mapping::kHeapMemEnd) ||
# ifdef TSAN_MID_APP_RANGE
         (mem >= Mapping::kMidAppMemBeg && mem < Mapping::kMidAppMemEnd) ||
# endif
         (mem >= Mapping::kLoAppMemBeg && mem < Mapping::kLoAppMemEnd) ||
         (mem >= Mapping::kHiAppMemBeg && mem < Mapping::kHiAppMemEnd);
#else
  return mem >= Mapping::kAppMemBeg && mem < Mapping::kAppMemEnd;
#endif
}

ALWAYS_INLINE
bool IsAppMem(uptr mem) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return IsAppMemImpl<Mapping39>(mem);
    case 42: return IsAppMemImpl<Mapping42>(mem);
    case 48: return IsAppMemImpl<Mapping48>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return IsAppMemImpl<Mapping44>(mem);
#endif
    case 46: return IsAppMemImpl<Mapping46>(mem);
    case 47: return IsAppMemImpl<Mapping47>(mem);
  }
  DCHECK(0);
  return false;
#else
  return IsAppMemImpl<Mapping>(mem);
#endif
}


template<typename Mapping>
bool IsShadowMemImpl(uptr mem) {
  return mem >= Mapping::kShadowBeg && mem <= Mapping::kShadowEnd;
}

ALWAYS_INLINE
bool IsShadowMem(uptr mem) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return IsShadowMemImpl<Mapping39>(mem);
    case 42: return IsShadowMemImpl<Mapping42>(mem);
    case 48: return IsShadowMemImpl<Mapping48>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return IsShadowMemImpl<Mapping44>(mem);
#endif
    case 46: return IsShadowMemImpl<Mapping46>(mem);
    case 47: return IsShadowMemImpl<Mapping47>(mem);
  }
  DCHECK(0);
  return false;
#else
  return IsShadowMemImpl<Mapping>(mem);
#endif
}


template<typename Mapping>
bool IsMetaMemImpl(uptr mem) {
  return mem >= Mapping::kMetaShadowBeg && mem <= Mapping::kMetaShadowEnd;
}

ALWAYS_INLINE
bool IsMetaMem(uptr mem) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return IsMetaMemImpl<Mapping39>(mem);
    case 42: return IsMetaMemImpl<Mapping42>(mem);
    case 48: return IsMetaMemImpl<Mapping48>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return IsMetaMemImpl<Mapping44>(mem);
#endif
    case 46: return IsMetaMemImpl<Mapping46>(mem);
    case 47: return IsMetaMemImpl<Mapping47>(mem);
  }
  DCHECK(0);
  return false;
#else
  return IsMetaMemImpl<Mapping>(mem);
#endif
}


template<typename Mapping>
uptr MemToShadowImpl(uptr x) {
  DCHECK(IsAppMem(x));
#if !SANITIZER_GO
  return (((x) & ~(Mapping::kAppMemMsk | (kShadowCell - 1)))
      ^ Mapping::kAppMemXor) * kShadowCnt;
#else
# ifndef SANITIZER_WINDOWS
  return ((x & ~(kShadowCell - 1)) * kShadowCnt) | Mapping::kShadowBeg;
# else
  return ((x & ~(kShadowCell - 1)) * kShadowCnt) + Mapping::kShadowBeg;
# endif
#endif
}

ALWAYS_INLINE
uptr MemToShadow(uptr x) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return MemToShadowImpl<Mapping39>(x);
    case 42: return MemToShadowImpl<Mapping42>(x);
    case 48: return MemToShadowImpl<Mapping48>(x);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return MemToShadowImpl<Mapping44>(x);
#endif
    case 46: return MemToShadowImpl<Mapping46>(x);
    case 47: return MemToShadowImpl<Mapping47>(x);
  }
  DCHECK(0);
  return 0;
#else
  return MemToShadowImpl<Mapping>(x);
#endif
}
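
// Illustrative, hand-worked example (not used by the runtime). Assuming the
// x86_64 Mapping above and the usual constants from tsan_defs.h
// (kShadowCell == 8 bytes per cell, kShadowCnt == 4 shadow values per cell),
// MemToShadow() translates a heap address such as 0x7b0000001000 as:
//   0x7b0000001000 & ~(kAppMemMsk | 7) == 0x030000001000
//   0x030000001000 ^ kAppMemXor        == 0x070000001000
//   0x070000001000 * kShadowCnt        == 0x1c0000004000
// which lies inside [kShadowBeg, kShadowEnd) = [0x010000000000,
// 0x200000000000).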

template<typename Mapping>
u32 *MemToMetaImpl(uptr x) {
  DCHECK(IsAppMem(x));
#if !SANITIZER_GO
  return (u32*)(((((x) & ~(Mapping::kAppMemMsk | (kMetaShadowCell - 1)))) /
      kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
#else
# ifndef SANITIZER_WINDOWS
  return (u32*)(((x & ~(kMetaShadowCell - 1)) /
      kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
# else
  return (u32*)(((x & ~(kMetaShadowCell - 1)) /
      kMetaShadowCell * kMetaShadowSize) + Mapping::kMetaShadowBeg);
# endif
#endif
}

ALWAYS_INLINE
u32 *MemToMeta(uptr x) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return MemToMetaImpl<Mapping39>(x);
    case 42: return MemToMetaImpl<Mapping42>(x);
    case 48: return MemToMetaImpl<Mapping48>(x);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return MemToMetaImpl<Mapping44>(x);
#endif
    case 46: return MemToMetaImpl<Mapping46>(x);
    case 47: return MemToMetaImpl<Mapping47>(x);
  }
  DCHECK(0);
  return 0;
#else
  return MemToMetaImpl<Mapping>(x);
#endif
}


template<typename Mapping>
uptr ShadowToMemImpl(uptr s) {
  DCHECK(IsShadowMem(s));
#if !SANITIZER_GO
  // The shadow mapping is non-linear and we've lost some bits, so we don't
  // have an easy way to restore the original app address. But the mapping is
  // a bijection, so we try to restore the address as belonging to
  // low/mid/high range consecutively and see if shadow->app->shadow mapping
  // gives us the same address.
  uptr p = (s / kShadowCnt) ^ Mapping::kAppMemXor;
  if (p >= Mapping::kLoAppMemBeg && p < Mapping::kLoAppMemEnd &&
      MemToShadow(p) == s)
    return p;
# ifdef TSAN_MID_APP_RANGE
  p = ((s / kShadowCnt) ^ Mapping::kAppMemXor) +
      (Mapping::kMidAppMemBeg & Mapping::kAppMemMsk);
  if (p >= Mapping::kMidAppMemBeg && p < Mapping::kMidAppMemEnd &&
      MemToShadow(p) == s)
    return p;
# endif
  return ((s / kShadowCnt) ^ Mapping::kAppMemXor) | Mapping::kAppMemMsk;
#else  // #if !SANITIZER_GO
# ifndef SANITIZER_WINDOWS
  return (s & ~Mapping::kShadowBeg) / kShadowCnt;
# else
  return (s - Mapping::kShadowBeg) / kShadowCnt;
# endif // SANITIZER_WINDOWS
#endif
}

ALWAYS_INLINE
uptr ShadowToMem(uptr s) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return ShadowToMemImpl<Mapping39>(s);
    case 42: return ShadowToMemImpl<Mapping42>(s);
    case 48: return ShadowToMemImpl<Mapping48>(s);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return ShadowToMemImpl<Mapping44>(s);
#endif
    case 46: return ShadowToMemImpl<Mapping46>(s);
    case 47: return ShadowToMemImpl<Mapping47>(s);
  }
  DCHECK(0);
  return 0;
#else
  return ShadowToMemImpl<Mapping>(s);
#endif
}
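
// Continuing the illustrative example above (same assumptions: x86_64
// Mapping, kShadowCnt == 4), ShadowToMem(0x1c0000004000) first tries the low
// range: (0x1c0000004000 / 4) ^ kAppMemXor == 0x030000001000, which is not in
// [kLoAppMemBeg, kLoAppMemEnd). It then tries the mid range:
// 0x030000001000 + (kMidAppMemBeg & kAppMemMsk) == 0x530000001000, which is
// not in [kMidAppMemBeg, kMidAppMemEnd). So it falls through to
// 0x030000001000 | kAppMemMsk == 0x7b0000001000, recovering the original
// heap address.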

// The additional page is to catch shadow stack overflow as paging fault.
// Windows wants 64K alignment for mmaps.
const uptr kTotalTraceSize = (kTraceSize * sizeof(Event) + sizeof(Trace)
    + (64 << 10) + (64 << 10) - 1) & ~((64 << 10) - 1);

template<typename Mapping>
uptr GetThreadTraceImpl(int tid) {
  uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize;
  DCHECK_LT(p, Mapping::kTraceMemEnd);
  return p;
}

ALWAYS_INLINE
uptr GetThreadTrace(int tid) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return GetThreadTraceImpl<Mapping39>(tid);
    case 42: return GetThreadTraceImpl<Mapping42>(tid);
    case 48: return GetThreadTraceImpl<Mapping48>(tid);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return GetThreadTraceImpl<Mapping44>(tid);
#endif
    case 46: return GetThreadTraceImpl<Mapping46>(tid);
    case 47: return GetThreadTraceImpl<Mapping47>(tid);
  }
  DCHECK(0);
  return 0;
#else
  return GetThreadTraceImpl<Mapping>(tid);
#endif
}


template<typename Mapping>
uptr GetThreadTraceHeaderImpl(int tid) {
  uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize
      + kTraceSize * sizeof(Event);
  DCHECK_LT(p, Mapping::kTraceMemEnd);
  return p;
}

ALWAYS_INLINE
uptr GetThreadTraceHeader(int tid) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return GetThreadTraceHeaderImpl<Mapping39>(tid);
    case 42: return GetThreadTraceHeaderImpl<Mapping42>(tid);
    case 48: return GetThreadTraceHeaderImpl<Mapping48>(tid);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return GetThreadTraceHeaderImpl<Mapping44>(tid);
#endif
    case 46: return GetThreadTraceHeaderImpl<Mapping46>(tid);
    case 47: return GetThreadTraceHeaderImpl<Mapping47>(tid);
  }
  DCHECK(0);
  return 0;
#else
  return GetThreadTraceHeaderImpl<Mapping>(tid);
#endif
}

void InitializePlatform();
void InitializePlatformEarly();
void CheckAndProtect();
void InitializeShadowMemoryPlatform();
void FlushShadowMemory();
void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive);
int ExtractResolvFDs(void *state, int *fds, int nfd);
int ExtractRecvmsgFDs(void *msg, int *fds, int nfd);
uptr ExtractLongJmpSp(uptr *env);
void ImitateTlsWrite(ThreadState *thr, uptr tls_addr, uptr tls_size);

int call_pthread_cancel_with_cleanup(int (*fn)(void *arg),
    void (*cleanup)(void *arg), void *arg);

void DestroyThreadState();
void PlatformCleanUpThreadState(ThreadState *thr);

}  // namespace __tsan

#endif  // TSAN_PLATFORM_H