//===-- msan.h --------------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemorySanitizer.
//
// Private MSan header.
//===----------------------------------------------------------------------===//

#ifndef MSAN_H
#define MSAN_H

#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_internal_defs.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "msan_interface_internal.h"
#include "msan_flags.h"
#include "ubsan/ubsan_platform.h"

#ifndef MSAN_REPLACE_OPERATORS_NEW_AND_DELETE
# define MSAN_REPLACE_OPERATORS_NEW_AND_DELETE 1
#endif

#ifndef MSAN_CONTAINS_UBSAN
# define MSAN_CONTAINS_UBSAN CAN_SANITIZE_UB
#endif

struct MappingDesc {
  uptr start;
  uptr end;
  enum Type {
    INVALID, APP, SHADOW, ORIGIN
  } type;
  const char *name;
};

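// Each platform below defines a kMemoryLayout table that carves its address
// space into APP, SHADOW, ORIGIN and INVALID regions, together with the
// MEM_TO_SHADOW and SHADOW_TO_ORIGIN macros that translate an application
// address into the corresponding shadow and origin addresses.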
#if SANITIZER_LINUX && defined(__mips64)

// MIPS64 maps:
// - 0x0000000000-0x0200000000: the program's own segments
// - 0xa200000000-0xc000000000: PIE program segments
// - 0xe200000000-0xffffffffff: library segments.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x000200000000ULL, MappingDesc::APP, "app-1"},
    {0x000200000000ULL, 0x002200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x002200000000ULL, 0x004000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x004000000000ULL, 0x004200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x004200000000ULL, 0x006000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x006000000000ULL, 0x006200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x006200000000ULL, 0x008000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x008000000000ULL, 0x008200000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x008200000000ULL, 0x00a000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x00a000000000ULL, 0x00a200000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x00a200000000ULL, 0x00c000000000ULL, MappingDesc::APP, "app-2"},
    {0x00c000000000ULL, 0x00e200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x00e200000000ULL, 0x00ffffffffffULL, MappingDesc::APP, "app-3"}};

#define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x8000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x2000000000ULL)
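// Worked example: the app-2 address 0x00a200001000 maps to shadow
// 0x002200001000 (in shadow-2) and to origin 0x004200001000 (in origin-2).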

#elif SANITIZER_LINUX && defined(__aarch64__)

// The mapping assumes 48-bit VMA. AArch64 maps:
// - 0x0000000000000-0x0100000000000: 39/42/48-bit program's own segments
// - 0x0a00000000000-0x0b00000000000: 48-bit PIE program segments
//   Ideally, this would extend to 0x0c00000000000 (2^45 bytes - the
//   maximum ASLR region for 48-bit VMA) but it is too hard to fit in
//   the larger app/shadow/origin regions.
// - 0x0e00000000000-0x1000000000000: 48-bit library segments
const MappingDesc kMemoryLayout[] = {
    {0X0000000000000, 0X0100000000000, MappingDesc::APP, "app-10-13"},
    {0X0100000000000, 0X0200000000000, MappingDesc::SHADOW, "shadow-14"},
    {0X0200000000000, 0X0300000000000, MappingDesc::INVALID, "invalid"},
    {0X0300000000000, 0X0400000000000, MappingDesc::ORIGIN, "origin-14"},
    {0X0400000000000, 0X0600000000000, MappingDesc::SHADOW, "shadow-15"},
    {0X0600000000000, 0X0800000000000, MappingDesc::ORIGIN, "origin-15"},
    {0X0800000000000, 0X0A00000000000, MappingDesc::INVALID, "invalid"},
    {0X0A00000000000, 0X0B00000000000, MappingDesc::APP, "app-14"},
    {0X0B00000000000, 0X0C00000000000, MappingDesc::SHADOW, "shadow-10-13"},
    {0X0C00000000000, 0X0D00000000000, MappingDesc::INVALID, "invalid"},
    {0X0D00000000000, 0X0E00000000000, MappingDesc::ORIGIN, "origin-10-13"},
    {0X0E00000000000, 0X1000000000000, MappingDesc::APP, "app-15"},
};
# define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0xB00000000000ULL)
# define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x200000000000ULL)
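// Worked example: the app-14 address 0x0a00000001000 maps to shadow
// 0x0100000001000 (in shadow-14) and to origin 0x0300000001000 (in origin-14).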

#elif SANITIZER_LINUX && SANITIZER_LOONGARCH64
// LoongArch64 maps:
// - 0x000000000000-0x010000000000: the program's own segments
// - 0x555500000000-0x555600000000: PIE program segments
// - 0x7fff00000000-0x7fffffffffff: library segments.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "app-1"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x100000000000ULL, 0x110000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x110000000000ULL, 0x200000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x200000000000ULL, 0x300000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x300000000000ULL, 0x400000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x400000000000ULL, 0x500000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x500000000000ULL, 0x510000000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x510000000000ULL, 0x600000000000ULL, MappingDesc::APP, "app-2"},
    {0x600000000000ULL, 0x610000000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x610000000000ULL, 0x700000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x700000000000ULL, 0x800000000000ULL, MappingDesc::APP, "app-3"}};
#  define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x500000000000ULL)
#  define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x100000000000ULL)

#elif SANITIZER_LINUX && SANITIZER_PPC64
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x000200000000ULL, MappingDesc::APP, "low memory"},
    {0x000200000000ULL, 0x080000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x080000000000ULL, 0x180200000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x180200000000ULL, 0x1C0000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x1C0000000000ULL, 0x2C0200000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x2C0200000000ULL, 0x300000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x300000000000ULL, 0x800000000000ULL, MappingDesc::APP, "high memory"}};

// Various kernels use different low end ranges but we can combine them into one
// big range. They also use different high end ranges but we can map them all to
// one range.
// Maps low and high app ranges to contiguous space with zero base:
//   Low:  0000 0000 0000 - 0001 ffff ffff  ->  1000 0000 0000 - 1001 ffff ffff
//   High: 3000 0000 0000 - 3fff ffff ffff  ->  0000 0000 0000 - 0fff ffff ffff
//   High: 4000 0000 0000 - 4fff ffff ffff  ->  0000 0000 0000 - 0fff ffff ffff
//   High: 7000 0000 0000 - 7fff ffff ffff  ->  0000 0000 0000 - 0fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0xE00000000000ULL) ^ 0x100000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x080000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x140000000000ULL)
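// Worked example: the high-memory address 0x300000001000 linearizes to
// 0x000000001000, giving shadow 0x080000001000 and origin 0x1c0000001000.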

#elif SANITIZER_LINUX && SANITIZER_S390_64
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x040000000000ULL, MappingDesc::APP, "low memory"},
    {0x040000000000ULL, 0x080000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x080000000000ULL, 0x180000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x180000000000ULL, 0x1C0000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x1C0000000000ULL, 0x2C0000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x2C0000000000ULL, 0x440000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x440000000000ULL, 0x500000000000ULL, MappingDesc::APP, "high memory"}};

#define MEM_TO_SHADOW(mem) \
  ((((uptr)(mem)) & ~0xC00000000000ULL) + 0x080000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x140000000000ULL)

#elif SANITIZER_FREEBSD && defined(__aarch64__)

// Low memory: main binary, MAP_32BIT mappings and modules
// High memory: heap, modules and main thread stack
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x020000000000ULL, MappingDesc::APP, "low memory"},
    {0x020000000000ULL, 0x200000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x200000000000ULL, 0x620000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x620000000000ULL, 0x700000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x700000000000ULL, 0xb20000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0xb20000000000ULL, 0xc00000000000ULL, MappingDesc::INVALID, "invalid"},
    {0xc00000000000ULL, 0x1000000000000ULL, MappingDesc::APP, "high memory"}};

// Maps low and high app ranges to contiguous space with zero base:
//   Low:  0000 0000 0000 - 01ff ffff ffff -> 4000 0000 0000 - 41ff ffff ffff
//   High: c000 0000 0000 - ffff ffff ffff -> 0000 0000 0000 - 3fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0x1800000000000ULL) ^ 0x400000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x200000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x500000000000ULL)
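// Worked examples: the low-memory address 0x000000001000 maps to shadow
// 0x600000001000 and origin 0xb00000001000; the high-memory address
// 0xc00000001000 maps to shadow 0x200000001000 and origin 0x700000001000.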

#elif SANITIZER_FREEBSD && SANITIZER_WORDSIZE == 64

// Low memory: main binary, MAP_32BIT mappings and modules
// High memory: heap, modules and main thread stack
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "low memory"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x100000000000ULL, 0x310000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x310000000000ULL, 0x380000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x380000000000ULL, 0x590000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x590000000000ULL, 0x600000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x600000000000ULL, 0x800000000000ULL, MappingDesc::APP, "high memory"}};

// Maps low and high app ranges to contiguous space with zero base:
//   Low:  0000 0000 0000 - 00ff ffff ffff  ->  2000 0000 0000 - 20ff ffff ffff
//   High: 6000 0000 0000 - 7fff ffff ffff  ->  0000 0000 0000 - 1fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0xc00000000000ULL) ^ 0x200000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x100000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x280000000000ULL)

#elif SANITIZER_NETBSD || (SANITIZER_LINUX && SANITIZER_WORDSIZE == 64)

// All of the following configurations are supported.
// ASLR disabled: main executable and DSOs at 0x555550000000
// PIE and ASLR: main executable and DSOs at 0x7f0000000000
// non-PIE: main executable below 0x100000000, DSOs at 0x7f0000000000
// Heap at 0x700000000000.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "app-1"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x100000000000ULL, 0x110000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x110000000000ULL, 0x200000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x200000000000ULL, 0x300000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x300000000000ULL, 0x400000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x400000000000ULL, 0x500000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x500000000000ULL, 0x510000000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x510000000000ULL, 0x600000000000ULL, MappingDesc::APP, "app-2"},
    {0x600000000000ULL, 0x610000000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x610000000000ULL, 0x700000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x700000000000ULL, 0x800000000000ULL, MappingDesc::APP, "app-3"}};
#define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x500000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x100000000000ULL)
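// Worked example: the app-3 (heap) address 0x700000001000 maps to shadow
// 0x200000001000 (in shadow-3) and to origin 0x300000001000 (in origin-3).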

#else
#error "Unsupported platform"
#endif

const uptr kMemoryLayoutSize = sizeof(kMemoryLayout) / sizeof(kMemoryLayout[0]);

#define MEM_TO_ORIGIN(mem) (SHADOW_TO_ORIGIN(MEM_TO_SHADOW((mem))))

#ifndef __clang__
__attribute__((optimize("unroll-loops")))
#endif
inline bool addr_is_type(uptr addr, MappingDesc::Type mapping_type) {
// It is critical for performance that this loop is unrolled (because then it is
// simplified into just a few constant comparisons).
#ifdef __clang__
#pragma unroll
#endif
  for (unsigned i = 0; i < kMemoryLayoutSize; ++i)
    if (kMemoryLayout[i].type == mapping_type &&
        addr >= kMemoryLayout[i].start && addr < kMemoryLayout[i].end)
      return true;
  return false;
}

#define MEM_IS_APP(mem) addr_is_type((uptr)(mem), MappingDesc::APP)
#define MEM_IS_SHADOW(mem) addr_is_type((uptr)(mem), MappingDesc::SHADOW)
#define MEM_IS_ORIGIN(mem) addr_is_type((uptr)(mem), MappingDesc::ORIGIN)

// These constants must be kept in sync with the ones in MemorySanitizer.cpp.
const int kMsanParamTlsSize = 800;
const int kMsanRetvalTlsSize = 800;

namespace __msan {
extern int msan_inited;
extern bool msan_init_is_running;
extern int msan_report_count;

bool ProtectRange(uptr beg, uptr end);
bool InitShadow(bool init_origins);
char *GetProcSelfMaps();
void InitializeInterceptors();

void MsanAllocatorInit();
void MsanDeallocate(BufferedStackTrace *stack, void *ptr);

void *msan_malloc(uptr size, BufferedStackTrace *stack);
void *msan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack);
void *msan_realloc(void *ptr, uptr size, BufferedStackTrace *stack);
void *msan_reallocarray(void *ptr, uptr nmemb, uptr size,
                        BufferedStackTrace *stack);
void *msan_valloc(uptr size, BufferedStackTrace *stack);
void *msan_pvalloc(uptr size, BufferedStackTrace *stack);
void *msan_aligned_alloc(uptr alignment, uptr size, BufferedStackTrace *stack);
void *msan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack);
int msan_posix_memalign(void **memptr, uptr alignment, uptr size,
                        BufferedStackTrace *stack);

void InstallTrapHandler();
void InstallAtExitHandler();

const char *GetStackOriginDescr(u32 id, uptr *pc);

bool IsInSymbolizerOrUnwider();

void PrintWarning(uptr pc, uptr bp);
void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin);

// Unpoison first n function arguments.
void UnpoisonParam(uptr n);
void UnpoisonThreadLocalState();

// Returns a "chained" origin id, pointing to the given stack trace followed by
// the previous origin id.
u32 ChainOrigin(u32 id, StackTrace *stack);

const int STACK_TRACE_TAG_POISON = StackTrace::TAG_CUSTOM + 1;
const int STACK_TRACE_TAG_FIELDS = STACK_TRACE_TAG_POISON + 1;
const int STACK_TRACE_TAG_VPTR = STACK_TRACE_TAG_FIELDS + 1;

#define GET_MALLOC_STACK_TRACE                                             \
  UNINITIALIZED BufferedStackTrace stack;                                  \
  if (__msan_get_track_origins() && msan_inited) {                         \
    stack.Unwind(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME(), nullptr, \
                 common_flags()->fast_unwind_on_malloc,                    \
                 common_flags()->malloc_context_size);                     \
  }
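
// Typical (illustrative) use: the macro declares a local `stack` that the
// allocation path then passes down, e.g.
//   GET_MALLOC_STACK_TRACE;
//   return msan_malloc(size, &stack);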

// For platforms that only support the slow unwinder, we restrict the store
// context size to 1, i.e. we store only the current pc. We do this because the
// slow unwinder, which is based on libunwind, is not async-signal-safe and
// causes random freezes in forking applications as well as in signal handlers.
#define GET_STORE_STACK_TRACE_PC_BP(pc, bp)                              \
  UNINITIALIZED BufferedStackTrace stack;                                \
  if (__msan_get_track_origins() > 1 && msan_inited) {                   \
    int size = flags()->store_context_size;                              \
    if (!SANITIZER_CAN_FAST_UNWIND)                                      \
      size = Min(size, 1);                                               \
    stack.Unwind(pc, bp, nullptr, common_flags()->fast_unwind_on_malloc, \
                 size);                                                  \
  }

#define GET_STORE_STACK_TRACE \
  GET_STORE_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME())

#define GET_FATAL_STACK_TRACE_PC_BP(pc, bp)                              \
  UNINITIALIZED BufferedStackTrace stack;                                \
  if (msan_inited) {                                                     \
    stack.Unwind(pc, bp, nullptr, common_flags()->fast_unwind_on_fatal); \
  }

#define GET_FATAL_STACK_TRACE \
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME())

// Unwind the stack for a fatal error report, since the `stack` parameter is
// left empty when origins are not tracked.
#define GET_FATAL_STACK_TRACE_IF_EMPTY(STACK)                                 \
  if (msan_inited && (STACK)->size == 0) {                                    \
    (STACK)->Unwind(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME(), nullptr, \
                    common_flags()->fast_unwind_on_fatal);                    \
  }

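// RAII helper that backs up MSan's va_arg overflow size TLS slot on
// construction and restores it on destruction.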
class ScopedThreadLocalStateBackup {
 public:
  ScopedThreadLocalStateBackup() { Backup(); }
  ~ScopedThreadLocalStateBackup() { Restore(); }
  void Backup();
  void Restore();
 private:
  u64 va_arg_overflow_size_tls;
};

void MsanTSDInit(void (*destructor)(void *tsd));
void *MsanTSDGet();
void MsanTSDSet(void *tsd);
void MsanTSDDtor(void *tsd);

void InstallAtForkHandler();

}  // namespace __msan

#endif  // MSAN_H