1 //===-- tsan_platform.h -----------------------------------------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file is a part of ThreadSanitizer (TSan), a race detector.
10 //
11 // Platform-specific code.
12 //===----------------------------------------------------------------------===//
13 
14 #ifndef TSAN_PLATFORM_H
15 #define TSAN_PLATFORM_H
16 
17 #if !defined(__LP64__) && !defined(_WIN64)
18 # error "Only 64-bit is supported"
19 #endif
20 
21 #include "tsan_defs.h"
22 #include "tsan_trace.h"
23 
24 namespace __tsan {
25 
26 #if !SANITIZER_GO
27 
28 #if defined(__x86_64__)
29 /*
30 C/C++ on linux/x86_64 and freebsd/x86_64
31 0000 0000 1000 - 0080 0000 0000: main binary and/or MAP_32BIT mappings (512GB)
32 0080 0000 0000 - 0100 0000 0000: -
33 0100 0000 0000 - 2000 0000 0000: shadow
34 2000 0000 0000 - 3000 0000 0000: -
35 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
36 4000 0000 0000 - 5500 0000 0000: -
37 5500 0000 0000 - 5680 0000 0000: pie binaries without ASLR or on 4.1+ kernels
38 5680 0000 0000 - 6000 0000 0000: -
39 6000 0000 0000 - 6200 0000 0000: traces
40 6200 0000 0000 - 7b00 0000 0000: -
41 7b00 0000 0000 - 7c00 0000 0000: heap
42 7c00 0000 0000 - 7e80 0000 0000: -
43 7e80 0000 0000 - 8000 0000 0000: modules and main thread stack
44 
45 C/C++ on netbsd/amd64 can reuse the same mapping:
46  * The address space starts at 0x1000 (optionally at 0x0) and ends at
47    0x7f7ffffff000.
48  * The LoAppMem-HeapMemEnd range can be reused as is.
49  * No VDSO support.
50  * No MidAppMem region.
51  * No additional HeapMem region.
52  * HiAppMem contains the stack, loader, shared libraries and heap.
53  * The stack on NetBSD/amd64 has 128MB prereserved.
54  * Heap grows downwards (top-down).
55  * ASLR must be disabled per-process or globally.
56 
57 */
58 struct Mapping {
59   static const uptr kMetaShadowBeg = 0x300000000000ull;
60   static const uptr kMetaShadowEnd = 0x340000000000ull;
61   static const uptr kTraceMemBeg   = 0x600000000000ull;
62   static const uptr kTraceMemEnd   = 0x620000000000ull;
63   static const uptr kShadowBeg     = 0x010000000000ull;
64   static const uptr kShadowEnd     = 0x200000000000ull;
65   static const uptr kHeapMemBeg    = 0x7b0000000000ull;
66   static const uptr kHeapMemEnd    = 0x7c0000000000ull;
67   static const uptr kLoAppMemBeg   = 0x000000001000ull;
68   static const uptr kLoAppMemEnd   = 0x008000000000ull;
69   static const uptr kMidAppMemBeg  = 0x550000000000ull;
70   static const uptr kMidAppMemEnd  = 0x568000000000ull;
71   static const uptr kHiAppMemBeg   = 0x7e8000000000ull;
72   static const uptr kHiAppMemEnd   = 0x800000000000ull;
73   static const uptr kAppMemMsk     = 0x780000000000ull;
74   static const uptr kAppMemXor     = 0x040000000000ull;
75   static const uptr kVdsoBeg       = 0xf000000000000000ull;
76 };
77 
78 #define TSAN_MID_APP_RANGE 1
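// Editor's note: the following compile-time checks are an illustrative
// sketch, not part of the original header.  They only restate, using the
// constants defined in Mapping above, the region ordering documented in the
// layout comment, so they hold for the current values by construction.
static_assert(Mapping::kLoAppMemEnd <= Mapping::kShadowBeg,
              "low app range must end before the shadow region");
static_assert(Mapping::kShadowEnd <= Mapping::kMetaShadowBeg,
              "shadow must end before the metainfo region");
static_assert(Mapping::kMetaShadowEnd <= Mapping::kMidAppMemBeg,
              "metainfo must end before the mid app range");
static_assert(Mapping::kMidAppMemEnd <= Mapping::kTraceMemBeg,
              "mid app range must end before the trace region");
static_assert(Mapping::kTraceMemEnd <= Mapping::kHeapMemBeg,
              "traces must end before the heap region");
static_assert(Mapping::kHeapMemEnd <= Mapping::kHiAppMemBeg,
              "heap must end before the high app range");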
79 #elif defined(__mips64)
80 /*
81 C/C++ on linux/mips64 (40-bit VMA)
82 0000 0000 00 - 0100 0000 00: -                                           (4 GB)
83 0100 0000 00 - 0200 0000 00: main binary                                 (4 GB)
84 0200 0000 00 - 2000 0000 00: -                                         (120 GB)
85 2000 0000 00 - 4000 0000 00: shadow                                    (128 GB)
86 4000 0000 00 - 5000 0000 00: metainfo (memory blocks and sync objects)  (64 GB)
87 5000 0000 00 - aa00 0000 00: -                                         (360 GB)
88 aa00 0000 00 - ab00 0000 00: main binary (PIE)                           (4 GB)
89 ab00 0000 00 - b000 0000 00: -                                          (20 GB)
90 b000 0000 00 - b200 0000 00: traces                                      (8 GB)
91 b200 0000 00 - fe00 0000 00: -                                         (304 GB)
92 fe00 0000 00 - ff00 0000 00: heap                                        (4 GB)
93 ff00 0000 00 - ff80 0000 00: -                                           (2 GB)
94 ff80 0000 00 - ffff ffff ff: modules and main thread stack              (<2 GB)
95 */
96 struct Mapping {
97   static const uptr kMetaShadowBeg = 0x4000000000ull;
98   static const uptr kMetaShadowEnd = 0x5000000000ull;
99   static const uptr kTraceMemBeg   = 0xb000000000ull;
100   static const uptr kTraceMemEnd   = 0xb200000000ull;
101   static const uptr kShadowBeg     = 0x2000000000ull;
102   static const uptr kShadowEnd     = 0x4000000000ull;
103   static const uptr kHeapMemBeg    = 0xfe00000000ull;
104   static const uptr kHeapMemEnd    = 0xff00000000ull;
105   static const uptr kLoAppMemBeg   = 0x0100000000ull;
106   static const uptr kLoAppMemEnd   = 0x0200000000ull;
107   static const uptr kMidAppMemBeg  = 0xaa00000000ull;
108   static const uptr kMidAppMemEnd  = 0xab00000000ull;
109   static const uptr kHiAppMemBeg   = 0xff80000000ull;
110   static const uptr kHiAppMemEnd   = 0xffffffffffull;
111   static const uptr kAppMemMsk     = 0xf800000000ull;
112   static const uptr kAppMemXor     = 0x0800000000ull;
113   static const uptr kVdsoBeg       = 0xfffff00000ull;
114 };
115 
116 #define TSAN_MID_APP_RANGE 1
117 #elif defined(__aarch64__) && defined(__APPLE__)
118 /*
119 C/C++ on Darwin/iOS/ARM64 (36-bit VMA, 64 GB VM)
120 0000 0000 00 - 0100 0000 00: -                                    (4 GB)
121 0100 0000 00 - 0200 0000 00: main binary, modules, thread stacks  (4 GB)
122 0200 0000 00 - 0300 0000 00: heap                                 (4 GB)
123 0300 0000 00 - 0400 0000 00: -                                    (4 GB)
124 0400 0000 00 - 0c00 0000 00: shadow memory                       (32 GB)
125 0c00 0000 00 - 0d00 0000 00: -                                    (4 GB)
126 0d00 0000 00 - 0e00 0000 00: metainfo                             (4 GB)
127 0e00 0000 00 - 0f00 0000 00: -                                    (4 GB)
128 0f00 0000 00 - 0fc0 0000 00: traces                               (3 GB)
129 0fc0 0000 00 - 1000 0000 00: -
130 */
131 struct Mapping {
132   static const uptr kLoAppMemBeg   = 0x0100000000ull;
133   static const uptr kLoAppMemEnd   = 0x0200000000ull;
134   static const uptr kHeapMemBeg    = 0x0200000000ull;
135   static const uptr kHeapMemEnd    = 0x0300000000ull;
136   static const uptr kShadowBeg     = 0x0400000000ull;
137   static const uptr kShadowEnd     = 0x0c00000000ull;
138   static const uptr kMetaShadowBeg = 0x0d00000000ull;
139   static const uptr kMetaShadowEnd = 0x0e00000000ull;
140   static const uptr kTraceMemBeg   = 0x0f00000000ull;
141   static const uptr kTraceMemEnd   = 0x0fc0000000ull;
142   static const uptr kHiAppMemBeg   = 0x0fc0000000ull;
143   static const uptr kHiAppMemEnd   = 0x0fc0000000ull;
144   static const uptr kAppMemMsk     =          0x0ull;
145   static const uptr kAppMemXor     =          0x0ull;
146   static const uptr kVdsoBeg       = 0x7000000000000000ull;
147 };
148 
149 #elif defined(__aarch64__)
150 // AArch64 supports multiple VMA sizes, which lead to different address
151 // transformation functions.  To support these VMA layouts, the TSAN runtime
152 // for AArch64 reads an external variable (vmaSize) to select which mapping to
153 // use.  Although slower, this lets the same instrumented binary run on kernels
154 // configured with different VMA sizes.
155 
156 /*
157 C/C++ on linux/aarch64 (39-bit VMA)
158 0000 0010 00 - 0100 0000 00: main binary
159 0100 0000 00 - 0800 0000 00: -
160 0800 0000 00 - 2000 0000 00: shadow memory
161 2000 0000 00 - 3100 0000 00: -
162 3100 0000 00 - 3400 0000 00: metainfo
163 3400 0000 00 - 5500 0000 00: -
164 5500 0000 00 - 5600 0000 00: main binary (PIE)
165 5600 0000 00 - 6000 0000 00: -
166 6000 0000 00 - 6200 0000 00: traces
167 6200 0000 00 - 7c00 0000 00: -
168 7c00 0000 00 - 7d00 0000 00: heap
169 7d00 0000 00 - 7fff ffff ff: modules and main thread stack
170 */
171 struct Mapping39 {
172   static const uptr kLoAppMemBeg   = 0x0000001000ull;
173   static const uptr kLoAppMemEnd   = 0x0100000000ull;
174   static const uptr kShadowBeg     = 0x0800000000ull;
175   static const uptr kShadowEnd     = 0x2000000000ull;
176   static const uptr kMetaShadowBeg = 0x3100000000ull;
177   static const uptr kMetaShadowEnd = 0x3400000000ull;
178   static const uptr kMidAppMemBeg  = 0x5500000000ull;
179   static const uptr kMidAppMemEnd  = 0x5600000000ull;
180   static const uptr kTraceMemBeg   = 0x6000000000ull;
181   static const uptr kTraceMemEnd   = 0x6200000000ull;
182   static const uptr kHeapMemBeg    = 0x7c00000000ull;
183   static const uptr kHeapMemEnd    = 0x7d00000000ull;
184   static const uptr kHiAppMemBeg   = 0x7e00000000ull;
185   static const uptr kHiAppMemEnd   = 0x7fffffffffull;
186   static const uptr kAppMemMsk     = 0x7800000000ull;
187   static const uptr kAppMemXor     = 0x0200000000ull;
188   static const uptr kVdsoBeg       = 0x7f00000000ull;
189 };
190 
191 /*
192 C/C++ on linux/aarch64 (42-bit VMA)
193 00000 0010 00 - 01000 0000 00: main binary
194 01000 0000 00 - 10000 0000 00: -
195 10000 0000 00 - 20000 0000 00: shadow memory
196 20000 0000 00 - 26000 0000 00: -
197 26000 0000 00 - 28000 0000 00: metainfo
198 28000 0000 00 - 2aa00 0000 00: -
199 2aa00 0000 00 - 2ab00 0000 00: main binary (PIE)
200 2ab00 0000 00 - 36200 0000 00: -
201 36200 0000 00 - 36400 0000 00: traces
202 36400 0000 00 - 3e000 0000 00: -
203 3e000 0000 00 - 3f000 0000 00: heap
204 3f000 0000 00 - 3ffff ffff ff: modules and main thread stack
205 */
206 struct Mapping42 {
207   static const uptr kLoAppMemBeg   = 0x00000001000ull;
208   static const uptr kLoAppMemEnd   = 0x01000000000ull;
209   static const uptr kShadowBeg     = 0x10000000000ull;
210   static const uptr kShadowEnd     = 0x20000000000ull;
211   static const uptr kMetaShadowBeg = 0x26000000000ull;
212   static const uptr kMetaShadowEnd = 0x28000000000ull;
213   static const uptr kMidAppMemBeg  = 0x2aa00000000ull;
214   static const uptr kMidAppMemEnd  = 0x2ab00000000ull;
215   static const uptr kTraceMemBeg   = 0x36200000000ull;
216   static const uptr kTraceMemEnd   = 0x36400000000ull;
217   static const uptr kHeapMemBeg    = 0x3e000000000ull;
218   static const uptr kHeapMemEnd    = 0x3f000000000ull;
219   static const uptr kHiAppMemBeg   = 0x3f000000000ull;
220   static const uptr kHiAppMemEnd   = 0x3ffffffffffull;
221   static const uptr kAppMemMsk     = 0x3c000000000ull;
222   static const uptr kAppMemXor     = 0x04000000000ull;
223   static const uptr kVdsoBeg       = 0x37f00000000ull;
224 };
225 
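/*
C/C++ on linux/aarch64 (48-bit VMA)
Editor's summary, not part of the original header: the ranges below are
transcribed directly from the Mapping48 constants; the dedicated heap region
is empty for this mapping (kHeapMemBeg == kHeapMemEnd).
0x0000000001000 - 0x0000200000000: main binary
0x0000200000000 - 0x0002000000000: -
0x0002000000000 - 0x0004000000000: shadow memory
0x0004000000000 - 0x0005000000000: -
0x0005000000000 - 0x0006000000000: metainfo (memory blocks and sync objects)
0x0006000000000 - 0x0aaaa00000000: -
0x0aaaa00000000 - 0x0aaaf00000000: main binary (PIE)
0x0aaaf00000000 - 0x0f06000000000: -
0x0f06000000000 - 0x0f06200000000: traces
0x0f06200000000 - 0x0ffff00000000: -
0x0ffff00000000 - 0x1000000000000: modules and main thread stack
*/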
226 struct Mapping48 {
227   static const uptr kLoAppMemBeg   = 0x0000000001000ull;
228   static const uptr kLoAppMemEnd   = 0x0000200000000ull;
229   static const uptr kShadowBeg     = 0x0002000000000ull;
230   static const uptr kShadowEnd     = 0x0004000000000ull;
231   static const uptr kMetaShadowBeg = 0x0005000000000ull;
232   static const uptr kMetaShadowEnd = 0x0006000000000ull;
233   static const uptr kMidAppMemBeg  = 0x0aaaa00000000ull;
234   static const uptr kMidAppMemEnd  = 0x0aaaf00000000ull;
235   static const uptr kTraceMemBeg   = 0x0f06000000000ull;
236   static const uptr kTraceMemEnd   = 0x0f06200000000ull;
237   static const uptr kHeapMemBeg    = 0x0ffff00000000ull;
238   static const uptr kHeapMemEnd    = 0x0ffff00000000ull;
239   static const uptr kHiAppMemBeg   = 0x0ffff00000000ull;
240   static const uptr kHiAppMemEnd   = 0x1000000000000ull;
241   static const uptr kAppMemMsk     = 0x0fff800000000ull;
242   static const uptr kAppMemXor     = 0x0000800000000ull;
243   static const uptr kVdsoBeg       = 0xffff000000000ull;
244 };
245 
246 // Indicates that the memory regions are selected at runtime via vmaSize.
247 #define TSAN_RUNTIME_VMA 1
248 // Indicates that the mapping defines a mid-range app memory segment.
249 #define TSAN_MID_APP_RANGE 1
250 #elif defined(__powerpc64__)
251 // PPC64 supports multiple VMA sizes, which lead to different address
252 // transformation functions.  To support these VMA layouts, the TSAN runtime
253 // for PPC64 reads an external variable (vmaSize) to select which mapping to
254 // use.  Although slower, this lets the same instrumented binary run on kernels
255 // configured with different VMA sizes.
256 
257 /*
258 C/C++ on linux/powerpc64 (44-bit VMA)
259 0000 0000 0100 - 0001 0000 0000: main binary
260 0001 0000 0000 - 0001 0000 0000: -
261 0001 0000 0000 - 0b00 0000 0000: shadow
262 0b00 0000 0000 - 0b00 0000 0000: -
263 0b00 0000 0000 - 0d00 0000 0000: metainfo (memory blocks and sync objects)
264 0d00 0000 0000 - 0d00 0000 0000: -
265 0d00 0000 0000 - 0f00 0000 0000: traces
266 0f00 0000 0000 - 0f00 0000 0000: -
267 0f00 0000 0000 - 0f50 0000 0000: heap
268 0f50 0000 0000 - 0f60 0000 0000: -
269 0f60 0000 0000 - 1000 0000 0000: modules and main thread stack
270 */
271 struct Mapping44 {
272   static const uptr kMetaShadowBeg = 0x0b0000000000ull;
273   static const uptr kMetaShadowEnd = 0x0d0000000000ull;
274   static const uptr kTraceMemBeg   = 0x0d0000000000ull;
275   static const uptr kTraceMemEnd   = 0x0f0000000000ull;
276   static const uptr kShadowBeg     = 0x000100000000ull;
277   static const uptr kShadowEnd     = 0x0b0000000000ull;
278   static const uptr kLoAppMemBeg   = 0x000000000100ull;
279   static const uptr kLoAppMemEnd   = 0x000100000000ull;
280   static const uptr kHeapMemBeg    = 0x0f0000000000ull;
281   static const uptr kHeapMemEnd    = 0x0f5000000000ull;
282   static const uptr kHiAppMemBeg   = 0x0f6000000000ull;
283   static const uptr kHiAppMemEnd   = 0x100000000000ull; // 44 bits
284   static const uptr kAppMemMsk     = 0x0f0000000000ull;
285   static const uptr kAppMemXor     = 0x002100000000ull;
286   static const uptr kVdsoBeg       = 0x3c0000000000000ull;
287 };
288 
289 /*
290 C/C++ on linux/powerpc64 (46-bit VMA)
291 0000 0000 1000 - 0100 0000 0000: main binary
292 0100 0000 0000 - 0100 0000 0000: -
293 0100 0000 0000 - 1000 0000 0000: shadow
294 1000 0000 0000 - 1000 0000 0000: -
295 1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
296 2000 0000 0000 - 2000 0000 0000: -
297 2000 0000 0000 - 2200 0000 0000: traces
298 2200 0000 0000 - 3d00 0000 0000: -
299 3d00 0000 0000 - 3e00 0000 0000: heap
300 3e00 0000 0000 - 3e80 0000 0000: -
301 3e80 0000 0000 - 4000 0000 0000: modules and main thread stack
302 */
303 struct Mapping46 {
304   static const uptr kMetaShadowBeg = 0x100000000000ull;
305   static const uptr kMetaShadowEnd = 0x200000000000ull;
306   static const uptr kTraceMemBeg   = 0x200000000000ull;
307   static const uptr kTraceMemEnd   = 0x220000000000ull;
308   static const uptr kShadowBeg     = 0x010000000000ull;
309   static const uptr kShadowEnd     = 0x100000000000ull;
310   static const uptr kHeapMemBeg    = 0x3d0000000000ull;
311   static const uptr kHeapMemEnd    = 0x3e0000000000ull;
312   static const uptr kLoAppMemBeg   = 0x000000001000ull;
313   static const uptr kLoAppMemEnd   = 0x010000000000ull;
314   static const uptr kHiAppMemBeg   = 0x3e8000000000ull;
315   static const uptr kHiAppMemEnd   = 0x400000000000ull; // 46 bits
316   static const uptr kAppMemMsk     = 0x3c0000000000ull;
317   static const uptr kAppMemXor     = 0x020000000000ull;
318   static const uptr kVdsoBeg       = 0x7800000000000000ull;
319 };
320 
321 /*
322 C/C++ on linux/powerpc64 (47-bit VMA)
323 0000 0000 1000 - 0100 0000 0000: main binary
324 0100 0000 0000 - 0100 0000 0000: -
325 0100 0000 0000 - 1000 0000 0000: shadow
326 1000 0000 0000 - 1000 0000 0000: -
327 1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
328 2000 0000 0000 - 2000 0000 0000: -
329 2000 0000 0000 - 2200 0000 0000: traces
330 2200 0000 0000 - 7d00 0000 0000: -
331 7d00 0000 0000 - 7e00 0000 0000: heap
332 7e00 0000 0000 - 7e80 0000 0000: -
333 7e80 0000 0000 - 8000 0000 0000: modules and main thread stack
334 */
335 struct Mapping47 {
336   static const uptr kMetaShadowBeg = 0x100000000000ull;
337   static const uptr kMetaShadowEnd = 0x200000000000ull;
338   static const uptr kTraceMemBeg   = 0x200000000000ull;
339   static const uptr kTraceMemEnd   = 0x220000000000ull;
340   static const uptr kShadowBeg     = 0x010000000000ull;
341   static const uptr kShadowEnd     = 0x100000000000ull;
342   static const uptr kHeapMemBeg    = 0x7d0000000000ull;
343   static const uptr kHeapMemEnd    = 0x7e0000000000ull;
344   static const uptr kLoAppMemBeg   = 0x000000001000ull;
345   static const uptr kLoAppMemEnd   = 0x010000000000ull;
346   static const uptr kHiAppMemBeg   = 0x7e8000000000ull;
347   static const uptr kHiAppMemEnd   = 0x800000000000ull; // 47 bits
348   static const uptr kAppMemMsk     = 0x7c0000000000ull;
349   static const uptr kAppMemXor     = 0x020000000000ull;
350   static const uptr kVdsoBeg       = 0x7800000000000000ull;
351 };
352 
353 // Indicates that the memory regions are selected at runtime via vmaSize.
354 #define TSAN_RUNTIME_VMA 1
355 #endif
356 
357 #elif SANITIZER_GO && !SANITIZER_WINDOWS && defined(__x86_64__)
358 
359 /* Go on linux, darwin and freebsd on x86_64
360 0000 0000 1000 - 0000 1000 0000: executable
361 0000 1000 0000 - 00c0 0000 0000: -
362 00c0 0000 0000 - 00e0 0000 0000: heap
363 00e0 0000 0000 - 2000 0000 0000: -
364 2000 0000 0000 - 2380 0000 0000: shadow
365 2380 0000 0000 - 3000 0000 0000: -
366 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
367 4000 0000 0000 - 6000 0000 0000: -
368 6000 0000 0000 - 6200 0000 0000: traces
369 6200 0000 0000 - 8000 0000 0000: -
370 */
371 
372 struct Mapping {
373   static const uptr kMetaShadowBeg = 0x300000000000ull;
374   static const uptr kMetaShadowEnd = 0x400000000000ull;
375   static const uptr kTraceMemBeg   = 0x600000000000ull;
376   static const uptr kTraceMemEnd   = 0x620000000000ull;
377   static const uptr kShadowBeg     = 0x200000000000ull;
378   static const uptr kShadowEnd     = 0x238000000000ull;
379   static const uptr kAppMemBeg     = 0x000000001000ull;
380   static const uptr kAppMemEnd     = 0x00e000000000ull;
381 };
382 
383 #elif SANITIZER_GO && SANITIZER_WINDOWS
384 
385 /* Go on windows
386 0000 0000 1000 - 0000 1000 0000: executable
387 0000 1000 0000 - 00c0 0000 0000: -
388 00c0 0000 0000 - 00e0 0000 0000: heap
389 00e0 0000 0000 - 0100 0000 0000: -
390 0100 0000 0000 - 0500 0000 0000: shadow
391 0500 0000 0000 - 0560 0000 0000: -
392 0560 0000 0000 - 0760 0000 0000: traces
393 0760 0000 0000 - 07d0 0000 0000: metainfo (memory blocks and sync objects)
394 07d0 0000 0000 - 8000 0000 0000: -
395 */
396 
397 struct Mapping {
398   static const uptr kMetaShadowBeg = 0x076000000000ull;
399   static const uptr kMetaShadowEnd = 0x07d000000000ull;
400   static const uptr kTraceMemBeg   = 0x056000000000ull;
401   static const uptr kTraceMemEnd   = 0x076000000000ull;
402   static const uptr kShadowBeg     = 0x010000000000ull;
403   static const uptr kShadowEnd     = 0x050000000000ull;
404   static const uptr kAppMemBeg     = 0x000000001000ull;
405   static const uptr kAppMemEnd     = 0x00e000000000ull;
406 };
407 
408 #elif SANITIZER_GO && defined(__powerpc64__)
409 
410 /* Only Mapping46 and Mapping47 are currently supported for powerpc64 on Go. */
411 
412 /* Go on linux/powerpc64 (46-bit VMA)
413 0000 0000 1000 - 0000 1000 0000: executable
414 0000 1000 0000 - 00c0 0000 0000: -
415 00c0 0000 0000 - 00e0 0000 0000: heap
416 00e0 0000 0000 - 2000 0000 0000: -
417 2000 0000 0000 - 2380 0000 0000: shadow
418 2380 0000 0000 - 2400 0000 0000: -
419 2400 0000 0000 - 3400 0000 0000: metainfo (memory blocks and sync objects)
420 3400 0000 0000 - 3600 0000 0000: -
421 3600 0000 0000 - 3800 0000 0000: traces
422 3800 0000 0000 - 4000 0000 0000: -
423 */
424 
425 struct Mapping46 {
426   static const uptr kMetaShadowBeg = 0x240000000000ull;
427   static const uptr kMetaShadowEnd = 0x340000000000ull;
428   static const uptr kTraceMemBeg   = 0x360000000000ull;
429   static const uptr kTraceMemEnd   = 0x380000000000ull;
430   static const uptr kShadowBeg     = 0x200000000000ull;
431   static const uptr kShadowEnd     = 0x238000000000ull;
432   static const uptr kAppMemBeg     = 0x000000001000ull;
433   static const uptr kAppMemEnd     = 0x00e000000000ull;
434 };
435 
436 /* Go on linux/powerpc64 (47-bit VMA)
437 0000 0000 1000 - 0000 1000 0000: executable
438 0000 1000 0000 - 00c0 0000 0000: -
439 00c0 0000 0000 - 00e0 0000 0000: heap
440 00e0 0000 0000 - 2000 0000 0000: -
441 2000 0000 0000 - 3000 0000 0000: shadow
442 3000 0000 0000 - 3000 0000 0000: -
443 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
444 4000 0000 0000 - 6000 0000 0000: -
445 6000 0000 0000 - 6200 0000 0000: traces
446 6200 0000 0000 - 8000 0000 0000: -
447 */
448 
449 struct Mapping47 {
450   static const uptr kMetaShadowBeg = 0x300000000000ull;
451   static const uptr kMetaShadowEnd = 0x400000000000ull;
452   static const uptr kTraceMemBeg   = 0x600000000000ull;
453   static const uptr kTraceMemEnd   = 0x620000000000ull;
454   static const uptr kShadowBeg     = 0x200000000000ull;
455   static const uptr kShadowEnd     = 0x300000000000ull;
456   static const uptr kAppMemBeg     = 0x000000001000ull;
457   static const uptr kAppMemEnd     = 0x00e000000000ull;
458 };
459 
460 #define TSAN_RUNTIME_VMA 1
461 
462 #elif SANITIZER_GO && defined(__aarch64__)
463 
464 /* Go on linux/aarch64 (48-bit VMA)
465 0000 0000 1000 - 0000 1000 0000: executable
466 0000 1000 0000 - 00c0 0000 0000: -
467 00c0 0000 0000 - 00e0 0000 0000: heap
468 00e0 0000 0000 - 2000 0000 0000: -
469 2000 0000 0000 - 3000 0000 0000: shadow
470 3000 0000 0000 - 3000 0000 0000: -
471 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
472 4000 0000 0000 - 6000 0000 0000: -
473 6000 0000 0000 - 6200 0000 0000: traces
474 6200 0000 0000 - 8000 0000 0000: -
475 */
476 
477 struct Mapping {
478   static const uptr kMetaShadowBeg = 0x300000000000ull;
479   static const uptr kMetaShadowEnd = 0x400000000000ull;
480   static const uptr kTraceMemBeg   = 0x600000000000ull;
481   static const uptr kTraceMemEnd   = 0x620000000000ull;
482   static const uptr kShadowBeg     = 0x200000000000ull;
483   static const uptr kShadowEnd     = 0x300000000000ull;
484   static const uptr kAppMemBeg     = 0x000000001000ull;
485   static const uptr kAppMemEnd     = 0x00e000000000ull;
486 };
487 
488 // Indicates that the memory regions are selected at runtime via vmaSize.
489 #define TSAN_RUNTIME_VMA 1
490 
491 #else
492 # error "Unknown platform"
493 #endif
494 
495 
496 #ifdef TSAN_RUNTIME_VMA
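// Size of the virtual address space in bits, detected at startup by the
// platform-specific initialization (see InitializePlatformEarly()); it
// selects which of the Mapping* layouts above is in effect.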
497 extern uptr vmaSize;
498 #endif
499 
500 
501 enum MappingType {
502   MAPPING_LO_APP_BEG,
503   MAPPING_LO_APP_END,
504   MAPPING_HI_APP_BEG,
505   MAPPING_HI_APP_END,
506 #ifdef TSAN_MID_APP_RANGE
507   MAPPING_MID_APP_BEG,
508   MAPPING_MID_APP_END,
509 #endif
510   MAPPING_HEAP_BEG,
511   MAPPING_HEAP_END,
512   MAPPING_APP_BEG,
513   MAPPING_APP_END,
514   MAPPING_SHADOW_BEG,
515   MAPPING_SHADOW_END,
516   MAPPING_META_SHADOW_BEG,
517   MAPPING_META_SHADOW_END,
518   MAPPING_TRACE_BEG,
519   MAPPING_TRACE_END,
520   MAPPING_VDSO_BEG,
521 };
522 
523 template<typename Mapping, int Type>
524 uptr MappingImpl(void) {
525   switch (Type) {
526 #if !SANITIZER_GO
527     case MAPPING_LO_APP_BEG: return Mapping::kLoAppMemBeg;
528     case MAPPING_LO_APP_END: return Mapping::kLoAppMemEnd;
529 # ifdef TSAN_MID_APP_RANGE
530     case MAPPING_MID_APP_BEG: return Mapping::kMidAppMemBeg;
531     case MAPPING_MID_APP_END: return Mapping::kMidAppMemEnd;
532 # endif
533     case MAPPING_HI_APP_BEG: return Mapping::kHiAppMemBeg;
534     case MAPPING_HI_APP_END: return Mapping::kHiAppMemEnd;
535     case MAPPING_HEAP_BEG: return Mapping::kHeapMemBeg;
536     case MAPPING_HEAP_END: return Mapping::kHeapMemEnd;
537     case MAPPING_VDSO_BEG: return Mapping::kVdsoBeg;
538 #else
539     case MAPPING_APP_BEG: return Mapping::kAppMemBeg;
540     case MAPPING_APP_END: return Mapping::kAppMemEnd;
541 #endif
542     case MAPPING_SHADOW_BEG: return Mapping::kShadowBeg;
543     case MAPPING_SHADOW_END: return Mapping::kShadowEnd;
544     case MAPPING_META_SHADOW_BEG: return Mapping::kMetaShadowBeg;
545     case MAPPING_META_SHADOW_END: return Mapping::kMetaShadowEnd;
546     case MAPPING_TRACE_BEG: return Mapping::kTraceMemBeg;
547     case MAPPING_TRACE_END: return Mapping::kTraceMemEnd;
548   }
549 }
550 
551 template<int Type>
552 uptr MappingArchImpl(void) {
553 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
554   switch (vmaSize) {
555     case 39: return MappingImpl<Mapping39, Type>();
556     case 42: return MappingImpl<Mapping42, Type>();
557     case 48: return MappingImpl<Mapping48, Type>();
558   }
559   DCHECK(0);
560   return 0;
561 #elif defined(__powerpc64__)
562   switch (vmaSize) {
563 #if !SANITIZER_GO
564     case 44: return MappingImpl<Mapping44, Type>();
565 #endif
566     case 46: return MappingImpl<Mapping46, Type>();
567     case 47: return MappingImpl<Mapping47, Type>();
568   }
569   DCHECK(0);
570   return 0;
571 #else
572   return MappingImpl<Mapping, Type>();
573 #endif
574 }
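// Editor's illustrative sketch (not part of the original header): every
// region boundary is obtained by instantiating MappingArchImpl with the
// matching MappingType value.  A hypothetical accessor for the start of the
// trace region would look exactly like the real accessors that follow:
//
//   ALWAYS_INLINE uptr ExampleTraceMemBeg(void) {
//     return MappingArchImpl<MAPPING_TRACE_BEG>();
//   }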
575 
576 #if !SANITIZER_GO
577 ALWAYS_INLINE
578 uptr LoAppMemBeg(void) {
579   return MappingArchImpl<MAPPING_LO_APP_BEG>();
580 }
581 ALWAYS_INLINE
582 uptr LoAppMemEnd(void) {
583   return MappingArchImpl<MAPPING_LO_APP_END>();
584 }
585 
586 #ifdef TSAN_MID_APP_RANGE
587 ALWAYS_INLINE
588 uptr MidAppMemBeg(void) {
589   return MappingArchImpl<MAPPING_MID_APP_BEG>();
590 }
591 ALWAYS_INLINE
592 uptr MidAppMemEnd(void) {
593   return MappingArchImpl<MAPPING_MID_APP_END>();
594 }
595 #endif
596 
597 ALWAYS_INLINE
598 uptr HeapMemBeg(void) {
599   return MappingArchImpl<MAPPING_HEAP_BEG>();
600 }
601 ALWAYS_INLINE
602 uptr HeapMemEnd(void) {
603   return MappingArchImpl<MAPPING_HEAP_END>();
604 }
605 
606 ALWAYS_INLINE
607 uptr HiAppMemBeg(void) {
608   return MappingArchImpl<MAPPING_HI_APP_BEG>();
609 }
610 ALWAYS_INLINE
611 uptr HiAppMemEnd(void) {
612   return MappingArchImpl<MAPPING_HI_APP_END>();
613 }
614 
615 ALWAYS_INLINE
616 uptr VdsoBeg(void) {
617   return MappingArchImpl<MAPPING_VDSO_BEG>();
618 }
619 
620 #else
621 
622 ALWAYS_INLINE
623 uptr AppMemBeg(void) {
624   return MappingArchImpl<MAPPING_APP_BEG>();
625 }
626 ALWAYS_INLINE
627 uptr AppMemEnd(void) {
628   return MappingArchImpl<MAPPING_APP_END>();
629 }
630 
631 #endif
632 
633 static inline
634 bool GetUserRegion(int i, uptr *start, uptr *end) {
635   switch (i) {
636   default:
637     return false;
638 #if !SANITIZER_GO
639   case 0:
640     *start = LoAppMemBeg();
641     *end = LoAppMemEnd();
642     return true;
643   case 1:
644     *start = HiAppMemBeg();
645     *end = HiAppMemEnd();
646     return true;
647   case 2:
648     *start = HeapMemBeg();
649     *end = HeapMemEnd();
650     return true;
651 # ifdef TSAN_MID_APP_RANGE
652   case 3:
653     *start = MidAppMemBeg();
654     *end = MidAppMemEnd();
655     return true;
656 # endif
657 #else
658   case 0:
659     *start = AppMemBeg();
660     *end = AppMemEnd();
661     return true;
662 #endif
663   }
664 }
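// Editor's usage sketch (illustrative only): GetUserRegion() enumerates the
// application address ranges that exist for the current build mode, so a
// caller can walk all of them without knowing how many are defined:
//
//   uptr start, end;
//   for (int i = 0; GetUserRegion(i, &start, &end); i++) {
//     // operate on the user range [start, end)
//   }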
665 
666 ALWAYS_INLINE
667 uptr ShadowBeg(void) {
668   return MappingArchImpl<MAPPING_SHADOW_BEG>();
669 }
670 ALWAYS_INLINE
671 uptr ShadowEnd(void) {
672   return MappingArchImpl<MAPPING_SHADOW_END>();
673 }
674 
675 ALWAYS_INLINE
676 uptr MetaShadowBeg(void) {
677   return MappingArchImpl<MAPPING_META_SHADOW_BEG>();
678 }
679 ALWAYS_INLINE
680 uptr MetaShadowEnd(void) {
681   return MappingArchImpl<MAPPING_META_SHADOW_END>();
682 }
683 
684 ALWAYS_INLINE
685 uptr TraceMemBeg(void) {
686   return MappingArchImpl<MAPPING_TRACE_BEG>();
687 }
688 ALWAYS_INLINE
689 uptr TraceMemEnd(void) {
690   return MappingArchImpl<MAPPING_TRACE_END>();
691 }
692 
693 
694 template<typename Mapping>
695 bool IsAppMemImpl(uptr mem) {
696 #if !SANITIZER_GO
697   return (mem >= Mapping::kHeapMemBeg && mem < Mapping::kHeapMemEnd) ||
698 # ifdef TSAN_MID_APP_RANGE
699          (mem >= Mapping::kMidAppMemBeg && mem < Mapping::kMidAppMemEnd) ||
700 # endif
701          (mem >= Mapping::kLoAppMemBeg && mem < Mapping::kLoAppMemEnd) ||
702          (mem >= Mapping::kHiAppMemBeg && mem < Mapping::kHiAppMemEnd);
703 #else
704   return mem >= Mapping::kAppMemBeg && mem < Mapping::kAppMemEnd;
705 #endif
706 }
707 
708 ALWAYS_INLINE
709 bool IsAppMem(uptr mem) {
710 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
711   switch (vmaSize) {
712     case 39: return IsAppMemImpl<Mapping39>(mem);
713     case 42: return IsAppMemImpl<Mapping42>(mem);
714     case 48: return IsAppMemImpl<Mapping48>(mem);
715   }
716   DCHECK(0);
717   return false;
718 #elif defined(__powerpc64__)
719   switch (vmaSize) {
720 #if !SANITIZER_GO
721     case 44: return IsAppMemImpl<Mapping44>(mem);
722 #endif
723     case 46: return IsAppMemImpl<Mapping46>(mem);
724     case 47: return IsAppMemImpl<Mapping47>(mem);
725   }
726   DCHECK(0);
727   return false;
728 #else
729   return IsAppMemImpl<Mapping>(mem);
730 #endif
731 }
732 
733 
734 template<typename Mapping>
735 bool IsShadowMemImpl(uptr mem) {
736   return mem >= Mapping::kShadowBeg && mem <= Mapping::kShadowEnd;
737 }
738 
739 ALWAYS_INLINE
740 bool IsShadowMem(uptr mem) {
741 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
742   switch (vmaSize) {
743     case 39: return IsShadowMemImpl<Mapping39>(mem);
744     case 42: return IsShadowMemImpl<Mapping42>(mem);
745     case 48: return IsShadowMemImpl<Mapping48>(mem);
746   }
747   DCHECK(0);
748   return false;
749 #elif defined(__powerpc64__)
750   switch (vmaSize) {
751 #if !SANITIZER_GO
752     case 44: return IsShadowMemImpl<Mapping44>(mem);
753 #endif
754     case 46: return IsShadowMemImpl<Mapping46>(mem);
755     case 47: return IsShadowMemImpl<Mapping47>(mem);
756   }
757   DCHECK(0);
758   return false;
759 #else
760   return IsShadowMemImpl<Mapping>(mem);
761 #endif
762 }
763 
764 
765 template<typename Mapping>
766 bool IsMetaMemImpl(uptr mem) {
767   return mem >= Mapping::kMetaShadowBeg && mem <= Mapping::kMetaShadowEnd;
768 }
769 
770 ALWAYS_INLINE
771 bool IsMetaMem(uptr mem) {
772 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
773   switch (vmaSize) {
774     case 39: return IsMetaMemImpl<Mapping39>(mem);
775     case 42: return IsMetaMemImpl<Mapping42>(mem);
776     case 48: return IsMetaMemImpl<Mapping48>(mem);
777   }
778   DCHECK(0);
779   return false;
780 #elif defined(__powerpc64__)
781   switch (vmaSize) {
782 #if !SANITIZER_GO
783     case 44: return IsMetaMemImpl<Mapping44>(mem);
784 #endif
785     case 46: return IsMetaMemImpl<Mapping46>(mem);
786     case 47: return IsMetaMemImpl<Mapping47>(mem);
787   }
788   DCHECK(0);
789   return false;
790 #else
791   return IsMetaMemImpl<Mapping>(mem);
792 #endif
793 }
794 
795 
796 template<typename Mapping>
797 uptr MemToShadowImpl(uptr x) {
798   DCHECK(IsAppMem(x));
799 #if !SANITIZER_GO
800   return (((x) & ~(Mapping::kAppMemMsk | (kShadowCell - 1)))
801       ^ Mapping::kAppMemXor) * kShadowCnt;
802 #else
803 # ifndef SANITIZER_WINDOWS
804   return ((x & ~(kShadowCell - 1)) * kShadowCnt) | Mapping::kShadowBeg;
805 # else
806   return ((x & ~(kShadowCell - 1)) * kShadowCnt) + Mapping::kShadowBeg;
807 # endif
808 #endif
809 }
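// Editor's worked examples (not part of the original header), assuming the
// usual kShadowCell == 8 and kShadowCnt == 4 from tsan_defs.h:
//  * C/C++ on linux/x86_64 (Mapping above), x = 0x7b0000000000 (kHeapMemBeg):
//      x & ~(kAppMemMsk | 7) = 0x030000000000
//      ^ kAppMemXor          = 0x070000000000
//      * kShadowCnt          = 0x1c0000000000,
//    which lies in [kShadowBeg, kShadowEnd) = [0x010000000000, 0x200000000000).
//  * Go on linux/x86_64, x = 0x00c000000000 (start of the Go heap):
//      (x & ~7) * kShadowCnt = 0x030000000000
//      | kShadowBeg          = 0x230000000000,
//    which lies in [0x200000000000, 0x238000000000).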
810 
811 ALWAYS_INLINE
812 uptr MemToShadow(uptr x) {
813 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
814   switch (vmaSize) {
815     case 39: return MemToShadowImpl<Mapping39>(x);
816     case 42: return MemToShadowImpl<Mapping42>(x);
817     case 48: return MemToShadowImpl<Mapping48>(x);
818   }
819   DCHECK(0);
820   return 0;
821 #elif defined(__powerpc64__)
822   switch (vmaSize) {
823 #if !SANITIZER_GO
824     case 44: return MemToShadowImpl<Mapping44>(x);
825 #endif
826     case 46: return MemToShadowImpl<Mapping46>(x);
827     case 47: return MemToShadowImpl<Mapping47>(x);
828   }
829   DCHECK(0);
830   return 0;
831 #else
832   return MemToShadowImpl<Mapping>(x);
833 #endif
834 }
835 
836 
837 template<typename Mapping>
838 u32 *MemToMetaImpl(uptr x) {
839   DCHECK(IsAppMem(x));
840 #if !SANITIZER_GO
841   return (u32*)(((((x) & ~(Mapping::kAppMemMsk | (kMetaShadowCell - 1)))) /
842       kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
843 #else
844 # ifndef SANITIZER_WINDOWS
845   return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
846       kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
847 # else
848   return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
849       kMetaShadowCell * kMetaShadowSize) + Mapping::kMetaShadowBeg);
850 # endif
851 #endif
852 }
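// Editor's worked example (not part of the original header), assuming the
// usual kMetaShadowCell == 8 and kMetaShadowSize == 4 from tsan_defs.h, for
// C/C++ on linux/x86_64 with x = 0x7b0000000000 (kHeapMemBeg):
//   x & ~(kAppMemMsk | 7)               = 0x030000000000
//   / kMetaShadowCell * kMetaShadowSize = 0x018000000000
//   | kMetaShadowBeg                    = 0x318000000000,
// which lies in [kMetaShadowBeg, kMetaShadowEnd) for this mapping.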
853 
854 ALWAYS_INLINE
855 u32 *MemToMeta(uptr x) {
856 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
857   switch (vmaSize) {
858     case 39: return MemToMetaImpl<Mapping39>(x);
859     case 42: return MemToMetaImpl<Mapping42>(x);
860     case 48: return MemToMetaImpl<Mapping48>(x);
861   }
862   DCHECK(0);
863   return 0;
864 #elif defined(__powerpc64__)
865   switch (vmaSize) {
866 #if !SANITIZER_GO
867     case 44: return MemToMetaImpl<Mapping44>(x);
868 #endif
869     case 46: return MemToMetaImpl<Mapping46>(x);
870     case 47: return MemToMetaImpl<Mapping47>(x);
871   }
872   DCHECK(0);
873   return 0;
874 #else
875   return MemToMetaImpl<Mapping>(x);
876 #endif
877 }
878 
879 
880 template<typename Mapping>
881 uptr ShadowToMemImpl(uptr s) {
882   DCHECK(IsShadowMem(s));
883 #if !SANITIZER_GO
884   // The shadow mapping is non-linear and we've lost some bits, so we don't have
885   // an easy way to restore the original app address. But the mapping is a
886   // bijection, so we try to restore the address as belonging to low/mid/high
887   // range consecutively and see if shadow->app->shadow mapping gives us the
888   // same address.
889   uptr p = (s / kShadowCnt) ^ Mapping::kAppMemXor;
890   if (p >= Mapping::kLoAppMemBeg && p < Mapping::kLoAppMemEnd &&
891       MemToShadow(p) == s)
892     return p;
893 # ifdef TSAN_MID_APP_RANGE
894   p = ((s / kShadowCnt) ^ Mapping::kAppMemXor) +
895       (Mapping::kMidAppMemBeg & Mapping::kAppMemMsk);
896   if (p >= Mapping::kMidAppMemBeg && p < Mapping::kMidAppMemEnd &&
897       MemToShadow(p) == s)
898     return p;
899 # endif
900   return ((s / kShadowCnt) ^ Mapping::kAppMemXor) | Mapping::kAppMemMsk;
901 #else  // #if !SANITIZER_GO
902 # ifndef SANITIZER_WINDOWS
903   return (s & ~Mapping::kShadowBeg) / kShadowCnt;
904 # else
905   return (s - Mapping::kShadowBeg) / kShadowCnt;
906 # endif // SANITIZER_WINDOWS
907 #endif
908 }
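// Editor's note (illustrative only): MemToShadow() discards the low bits of
// the address, so the round trip recovers the kShadowCell-aligned start of
// the accessed cell rather than the exact byte:
//
//   uptr app = ...;  // any address for which IsAppMem(app) holds
//   uptr cell = app & ~(kShadowCell - 1);
//   CHECK_EQ(ShadowToMem(MemToShadow(app)), cell);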
909 
910 ALWAYS_INLINE
911 uptr ShadowToMem(uptr s) {
912 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
913   switch (vmaSize) {
914     case 39: return ShadowToMemImpl<Mapping39>(s);
915     case 42: return ShadowToMemImpl<Mapping42>(s);
916     case 48: return ShadowToMemImpl<Mapping48>(s);
917   }
918   DCHECK(0);
919   return 0;
920 #elif defined(__powerpc64__)
921   switch (vmaSize) {
922 #if !SANITIZER_GO
923     case 44: return ShadowToMemImpl<Mapping44>(s);
924 #endif
925     case 46: return ShadowToMemImpl<Mapping46>(s);
926     case 47: return ShadowToMemImpl<Mapping47>(s);
927   }
928   DCHECK(0);
929   return 0;
930 #else
931   return ShadowToMemImpl<Mapping>(s);
932 #endif
933 }
934 
935 
936 
937 // The additional page is to catch shadow stack overflow as a paging fault.
938 // Windows wants 64K alignment for mmaps.
939 const uptr kTotalTraceSize = (kTraceSize * sizeof(Event) + sizeof(Trace)
940     + (64 << 10) + (64 << 10) - 1) & ~((64 << 10) - 1);
941 
942 template<typename Mapping>
943 uptr GetThreadTraceImpl(int tid) {
944   uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize;
945   DCHECK_LT(p, Mapping::kTraceMemEnd);
946   return p;
947 }
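// Editor's note (not part of the original header): each thread gets one slot
// of kTotalTraceSize bytes inside [kTraceMemBeg, kTraceMemEnd).  The slot for
// thread `tid` begins at kTraceMemBeg + tid * kTotalTraceSize; its first
// kTraceSize * sizeof(Event) bytes hold the event buffer returned here, and
// the Trace header follows at that offset (see GetThreadTraceHeaderImpl
// below).  kTotalTraceSize is the event buffer plus the Trace header plus a
// 64K guard, rounded up to the 64K granularity that Windows mmaps require.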
948 
949 ALWAYS_INLINE
950 uptr GetThreadTrace(int tid) {
951 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
952   switch (vmaSize) {
953     case 39: return GetThreadTraceImpl<Mapping39>(tid);
954     case 42: return GetThreadTraceImpl<Mapping42>(tid);
955     case 48: return GetThreadTraceImpl<Mapping48>(tid);
956   }
957   DCHECK(0);
958   return 0;
959 #elif defined(__powerpc64__)
960   switch (vmaSize) {
961 #if !SANITIZER_GO
962     case 44: return GetThreadTraceImpl<Mapping44>(tid);
963 #endif
964     case 46: return GetThreadTraceImpl<Mapping46>(tid);
965     case 47: return GetThreadTraceImpl<Mapping47>(tid);
966   }
967   DCHECK(0);
968   return 0;
969 #else
970   return GetThreadTraceImpl<Mapping>(tid);
971 #endif
972 }
973 
974 
975 template<typename Mapping>
976 uptr GetThreadTraceHeaderImpl(int tid) {
977   uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize
978       + kTraceSize * sizeof(Event);
979   DCHECK_LT(p, Mapping::kTraceMemEnd);
980   return p;
981 }
982 
983 ALWAYS_INLINE
984 uptr GetThreadTraceHeader(int tid) {
985 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
986   switch (vmaSize) {
987     case 39: return GetThreadTraceHeaderImpl<Mapping39>(tid);
988     case 42: return GetThreadTraceHeaderImpl<Mapping42>(tid);
989     case 48: return GetThreadTraceHeaderImpl<Mapping48>(tid);
990   }
991   DCHECK(0);
992   return 0;
993 #elif defined(__powerpc64__)
994   switch (vmaSize) {
995 #if !SANITIZER_GO
996     case 44: return GetThreadTraceHeaderImpl<Mapping44>(tid);
997 #endif
998     case 46: return GetThreadTraceHeaderImpl<Mapping46>(tid);
999     case 47: return GetThreadTraceHeaderImpl<Mapping47>(tid);
1000   }
1001   DCHECK(0);
1002   return 0;
1003 #else
1004   return GetThreadTraceHeaderImpl<Mapping>(tid);
1005 #endif
1006 }
1007 
1008 void InitializePlatform();
1009 void InitializePlatformEarly();
1010 void CheckAndProtect();
1011 void InitializeShadowMemoryPlatform();
1012 void FlushShadowMemory();
1013 void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive);
1014 int ExtractResolvFDs(void *state, int *fds, int nfd);
1015 int ExtractRecvmsgFDs(void *msg, int *fds, int nfd);
1016 uptr ExtractLongJmpSp(uptr *env);
1017 void ImitateTlsWrite(ThreadState *thr, uptr tls_addr, uptr tls_size);
1018 
1019 int call_pthread_cancel_with_cleanup(int(*fn)(void *c, void *m,
1020     void *abstime), void *c, void *m, void *abstime,
1021     void(*cleanup)(void *arg), void *arg);
1022 
1023 void DestroyThreadState();
1024 void PlatformCleanUpThreadState(ThreadState *thr);
1025 
1026 }  // namespace __tsan
1027 
1028 #endif  // TSAN_PLATFORM_H
1029