//===-- msan.cpp ----------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemorySanitizer.
//
// MemorySanitizer runtime.
//===----------------------------------------------------------------------===//

#include "msan.h"

#include "msan_chained_origin_depot.h"
#include "msan_origin.h"
#include "msan_poisoning.h"
#include "msan_report.h"
#include "msan_thread.h"
#include "sanitizer_common/sanitizer_atomic.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_flag_parser.h"
#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_interface_internal.h"
#include "sanitizer_common/sanitizer_libc.h"
#include "sanitizer_common/sanitizer_procmaps.h"
#include "sanitizer_common/sanitizer_stackdepot.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "sanitizer_common/sanitizer_symbolizer.h"
#include "ubsan/ubsan_flags.h"
#include "ubsan/ubsan_init.h"

// ATTENTION! No system header includes in this file.

using namespace __sanitizer;

// Globals.
static THREADLOCAL int msan_expect_umr = 0;
static THREADLOCAL int msan_expected_umr_found = 0;

// Function argument shadow. Each argument starts at the next available 8-byte
// aligned address.
SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u64 __msan_param_tls[kMsanParamTlsSize / sizeof(u64)];

// Function argument origin. Each argument starts at the same offset as the
// corresponding shadow in (__msan_param_tls). Slightly weird, but changing this
// would break compatibility with older prebuilt binaries.
SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u32 __msan_param_origin_tls[kMsanParamTlsSize / sizeof(u32)];

SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u64 __msan_retval_tls[kMsanRetvalTlsSize / sizeof(u64)];

SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u32 __msan_retval_origin_tls;

SANITIZER_INTERFACE_ATTRIBUTE
ALIGNED(16) THREADLOCAL u64 __msan_va_arg_tls[kMsanParamTlsSize / sizeof(u64)];

SANITIZER_INTERFACE_ATTRIBUTE
ALIGNED(16)
THREADLOCAL u32 __msan_va_arg_origin_tls[kMsanParamTlsSize / sizeof(u32)];

SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u64 __msan_va_arg_overflow_size_tls;

SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u32 __msan_origin_tls;

extern "C" SANITIZER_WEAK_ATTRIBUTE const int __msan_track_origins;

int __msan_get_track_origins() {
  return &__msan_track_origins ? __msan_track_origins : 0;
}

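// Optionally defined by MSan-instrumented code; like __msan_track_origins
// above, its presence can be detected by checking the address of the weak
// symbol.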
extern "C" SANITIZER_WEAK_ATTRIBUTE const int __msan_keep_going;

namespace __msan {

static THREADLOCAL int is_in_symbolizer_or_unwinder;
static void EnterSymbolizerOrUnwider() { ++is_in_symbolizer_or_unwinder; }
static void ExitSymbolizerOrUnwider() { --is_in_symbolizer_or_unwinder; }
bool IsInSymbolizerOrUnwider() { return is_in_symbolizer_or_unwinder; }

struct UnwinderScope {
  UnwinderScope() { EnterSymbolizerOrUnwider(); }
  ~UnwinderScope() { ExitSymbolizerOrUnwider(); }
};

static Flags msan_flags;

Flags *flags() { return &msan_flags; }

int msan_inited = 0;
bool msan_init_is_running;

int msan_report_count = 0;

// Array of stack origins.
// FIXME: make it resizable.
static const uptr kNumStackOriginDescrs = 1024 * 1024;
static const char *StackOriginDescr[kNumStackOriginDescrs];
static uptr StackOriginPC[kNumStackOriginDescrs];
static atomic_uint32_t NumStackOriginDescrs;

void Flags::SetDefaults() {
#define MSAN_FLAG(Type, Name, DefaultValue, Description) Name = DefaultValue;
#include "msan_flags.inc"
#undef MSAN_FLAG
}

// keep_going is an old name for halt_on_error, and it has the inverse meaning.
class FlagHandlerKeepGoing final : public FlagHandlerBase {
  bool *halt_on_error_;

 public:
  explicit FlagHandlerKeepGoing(bool *halt_on_error)
      : halt_on_error_(halt_on_error) {}
  bool Parse(const char *value) final {
    bool tmp;
    FlagHandler<bool> h(&tmp);
    if (!h.Parse(value)) return false;
    *halt_on_error_ = !tmp;
    return true;
  }
  bool Format(char *buffer, uptr size) final {
    const char *keep_going_str = (*halt_on_error_) ? "false" : "true";
    return FormatString(buffer, size, keep_going_str);
  }
};

static void RegisterMsanFlags(FlagParser *parser, Flags *f) {
#define MSAN_FLAG(Type, Name, DefaultValue, Description) \
  RegisterFlag(parser, #Name, Description, &f->Name);
#include "msan_flags.inc"
#undef MSAN_FLAG

  FlagHandlerKeepGoing *fh_keep_going = new (GetGlobalLowLevelAllocator())
      FlagHandlerKeepGoing(&f->halt_on_error);
  parser->RegisterHandler("keep_going", fh_keep_going,
                          "deprecated, use halt_on_error");
}

static void InitializeFlags() {
  SetCommonFlagsDefaults();
  {
    CommonFlags cf;
    cf.CopyFrom(*common_flags());
    cf.external_symbolizer_path = GetEnv("MSAN_SYMBOLIZER_PATH");
    cf.malloc_context_size = 20;
    cf.handle_ioctl = true;
    // FIXME: test and enable.
    cf.check_printf = false;
    cf.intercept_tls_get_addr = true;
    OverrideCommonFlags(cf);
  }

  Flags *f = flags();
  f->SetDefaults();

  FlagParser parser;
  RegisterMsanFlags(&parser, f);
  RegisterCommonFlags(&parser);

#if MSAN_CONTAINS_UBSAN
  __ubsan::Flags *uf = __ubsan::flags();
  uf->SetDefaults();

  FlagParser ubsan_parser;
  __ubsan::RegisterUbsanFlags(&ubsan_parser, uf);
  RegisterCommonFlags(&ubsan_parser);
#endif

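  // Flag sources are applied in order of increasing priority: compiled-in
  // defaults, then __msan_default_options(), then the MSAN_OPTIONS (and
  // UBSAN_OPTIONS) environment variables parsed below.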
  // Override from user-specified string.
  parser.ParseString(__msan_default_options());
#if MSAN_CONTAINS_UBSAN
  const char *ubsan_default_options = __ubsan_default_options();
  ubsan_parser.ParseString(ubsan_default_options);
#endif

  parser.ParseStringFromEnv("MSAN_OPTIONS");
#if MSAN_CONTAINS_UBSAN
  ubsan_parser.ParseStringFromEnv("UBSAN_OPTIONS");
#endif

  InitializeCommonFlags();

  if (Verbosity()) ReportUnrecognizedFlags();

  if (common_flags()->help) parser.PrintFlagDescriptions();

  // Check if the deprecated exit_code MSan flag is set.
  if (f->exit_code != -1) {
    if (Verbosity())
      Printf("MSAN_OPTIONS=exit_code is deprecated! "
             "Please use MSAN_OPTIONS=exitcode instead.\n");
    CommonFlags cf;
    cf.CopyFrom(*common_flags());
    cf.exitcode = f->exit_code;
    OverrideCommonFlags(cf);
  }

  // Check flag values:
  if (f->origin_history_size < 0 ||
      f->origin_history_size > Origin::kMaxDepth) {
    Printf(
        "Origin history size invalid: %d. Must be 0 (unlimited) or in [1, %d] "
        "range.\n",
        f->origin_history_size, Origin::kMaxDepth);
    Die();
  }
  // Limiting to kStackDepotMaxUseCount / 2 to avoid overflow in
  // StackDepotHandle::inc_use_count_unsafe.
  if (f->origin_history_per_stack_limit < 0 ||
      f->origin_history_per_stack_limit > kStackDepotMaxUseCount / 2) {
    Printf(
        "Origin per-stack limit invalid: %d. Must be 0 (unlimited) or in [1, "
        "%d] range.\n",
        f->origin_history_per_stack_limit, kStackDepotMaxUseCount / 2);
    Die();
  }
  if (f->store_context_size < 1) f->store_context_size = 1;
}

void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin) {
  if (msan_expect_umr) {
    // Printf("Expected UMR\n");
    __msan_origin_tls = origin;
    msan_expected_umr_found = 1;
    return;
  }

  ++msan_report_count;

  GET_FATAL_STACK_TRACE_PC_BP(pc, bp);

  u32 report_origin =
      (__msan_get_track_origins() && Origin::isValidId(origin)) ? origin : 0;
  ReportUMR(&stack, report_origin);

  if (__msan_get_track_origins() && !Origin::isValidId(origin)) {
    Printf(
        " ORIGIN: invalid (%x). Might be a bug in MemorySanitizer origin "
        "tracking.\n This could still be a bug in your code, too!\n",
        origin);
  }
}

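// Clears the shadow for the first n function-argument TLS slots. The
// interceptors use this before transferring control to user code so that
// callback arguments appear fully initialized.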
void UnpoisonParam(uptr n) {
  internal_memset(__msan_param_tls, 0, n * sizeof(*__msan_param_tls));
}

// Backup MSan runtime TLS state.
// Implementation must be async-signal-safe.
// Instances of this class may live on the signal handler stack, and data size
// may be an issue.
void ScopedThreadLocalStateBackup::Backup() {
  va_arg_overflow_size_tls = __msan_va_arg_overflow_size_tls;
}

void ScopedThreadLocalStateBackup::Restore() {
  // A lame implementation that only keeps essential state and resets the rest.
  __msan_va_arg_overflow_size_tls = va_arg_overflow_size_tls;

  internal_memset(__msan_param_tls, 0, sizeof(__msan_param_tls));
  internal_memset(__msan_retval_tls, 0, sizeof(__msan_retval_tls));
  internal_memset(__msan_va_arg_tls, 0, sizeof(__msan_va_arg_tls));
  internal_memset(__msan_va_arg_origin_tls, 0,
                  sizeof(__msan_va_arg_origin_tls));

  if (__msan_get_track_origins()) {
    internal_memset(&__msan_retval_origin_tls, 0,
                    sizeof(__msan_retval_origin_tls));
    internal_memset(__msan_param_origin_tls, 0,
                    sizeof(__msan_param_origin_tls));
  }
}

void UnpoisonThreadLocalState() {
}

const char *GetStackOriginDescr(u32 id, uptr *pc) {
  CHECK_LT(id, kNumStackOriginDescrs);
  if (pc) *pc = StackOriginPC[id];
  return StackOriginDescr[id];
}

u32 ChainOrigin(u32 id, StackTrace *stack) {
  MsanThread *t = GetCurrentThread();
  if (t && t->InSignalHandler())
    return id;

  Origin o = Origin::FromRawId(id);
  stack->tag = StackTrace::TAG_UNKNOWN;
  Origin chained = Origin::CreateChainedOrigin(o, stack);
  return chained.raw_id();
}

// The current implementation separates 'id_ptr' from 'descr' and makes
// 'descr' constant.
// In the previous implementation, 'descr' was created at compile time and
// started with '----'. When we saw a descr for the first time, we replaced
// '----' with a unique id and set the origin to (id | (31st bit)).
static inline void SetAllocaOrigin(void *a, uptr size, u32 *id_ptr, char *descr,
                                   uptr pc) {
  static const u32 dash = '-';
  static const u32 first_timer =
      dash + (dash << 8) + (dash << 16) + (dash << 24);
  u32 id = *id_ptr;
  if (id == 0 || id == first_timer) {
    u32 idx = atomic_fetch_add(&NumStackOriginDescrs, 1, memory_order_relaxed);
    CHECK_LT(idx, kNumStackOriginDescrs);
    StackOriginDescr[idx] = descr;
    StackOriginPC[idx] = pc;
    id = Origin::CreateStackOrigin(idx).raw_id();
    *id_ptr = id;
  }
  __msan_set_origin(a, size, id);
}

}  // namespace __msan

void __sanitizer::BufferedStackTrace::UnwindImpl(
    uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
  using namespace __msan;
  MsanThread *t = GetCurrentThread();
  if (!t || !StackTrace::WillUseFastUnwind(request_fast)) {
    // Block reports from our interceptors during _Unwind_Backtrace.
    UnwinderScope sym_scope;
    return Unwind(max_depth, pc, bp, context, t ? t->stack_top() : 0,
                  t ? t->stack_bottom() : 0, false);
  }
  if (StackTrace::WillUseFastUnwind(request_fast))
    Unwind(max_depth, pc, bp, nullptr, t->stack_top(), t->stack_bottom(), true);
  else
    Unwind(max_depth, pc, 0, context, 0, 0, false);
}

// Interface.

using namespace __msan;

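// Callbacks emitted by the instrumentation for checked loads and stores:
// __msan_maybe_warning_N receives the shadow 's' of an N-byte value and its
// origin 'o', and reports a use of uninitialized memory if the shadow is
// nonzero. __msan_maybe_store_origin_N records the origin of a store whose
// value shadow is nonzero, chaining it when track_origins >= 2.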
#define MSAN_MAYBE_WARNING(type, size)              \
  void __msan_maybe_warning_##size(type s, u32 o) { \
    GET_CALLER_PC_BP;                               \
    if (UNLIKELY(s)) {                              \
      PrintWarningWithOrigin(pc, bp, o);            \
      if (__msan::flags()->halt_on_error) {         \
        Printf("Exiting\n");                        \
        Die();                                      \
      }                                             \
    }                                               \
  }

MSAN_MAYBE_WARNING(u8, 1)
MSAN_MAYBE_WARNING(u16, 2)
MSAN_MAYBE_WARNING(u32, 4)
MSAN_MAYBE_WARNING(u64, 8)

#define MSAN_MAYBE_STORE_ORIGIN(type, size)                       \
  void __msan_maybe_store_origin_##size(type s, void *p, u32 o) { \
    if (UNLIKELY(s)) {                                            \
      if (__msan_get_track_origins() > 1) {                       \
        GET_CALLER_PC_BP;                                         \
        GET_STORE_STACK_TRACE_PC_BP(pc, bp);                      \
        o = ChainOrigin(o, &stack);                               \
      }                                                           \
      *(u32 *)MEM_TO_ORIGIN((uptr)p & ~3UL) = o;                  \
    }                                                             \
  }

MSAN_MAYBE_STORE_ORIGIN(u8, 1)
MSAN_MAYBE_STORE_ORIGIN(u16, 2)
MSAN_MAYBE_STORE_ORIGIN(u32, 4)
MSAN_MAYBE_STORE_ORIGIN(u64, 8)

void __msan_warning() {
  GET_CALLER_PC_BP;
  PrintWarningWithOrigin(pc, bp, 0);
  if (__msan::flags()->halt_on_error) {
    if (__msan::flags()->print_stats)
      ReportStats();
    Printf("Exiting\n");
    Die();
  }
}

void __msan_warning_noreturn() {
  GET_CALLER_PC_BP;
  PrintWarningWithOrigin(pc, bp, 0);
  if (__msan::flags()->print_stats)
    ReportStats();
  Printf("Exiting\n");
  Die();
}

void __msan_warning_with_origin(u32 origin) {
  GET_CALLER_PC_BP;
  PrintWarningWithOrigin(pc, bp, origin);
  if (__msan::flags()->halt_on_error) {
    if (__msan::flags()->print_stats)
      ReportStats();
    Printf("Exiting\n");
    Die();
  }
}

void __msan_warning_with_origin_noreturn(u32 origin) {
  GET_CALLER_PC_BP;
  PrintWarningWithOrigin(pc, bp, origin);
  if (__msan::flags()->print_stats)
    ReportStats();
  Printf("Exiting\n");
  Die();
}

static void OnStackUnwind(const SignalContext &sig, const void *,
                          BufferedStackTrace *stack) {
  stack->Unwind(StackTrace::GetNextInstructionPc(sig.pc), sig.bp, sig.context,
                common_flags()->fast_unwind_on_fatal);
}

static void MsanOnDeadlySignal(int signo, void *siginfo, void *context) {
  HandleDeadlySignal(siginfo, context, GetTid(), &OnStackUnwind, nullptr);
}

static void CheckUnwind() {
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME());
  stack.Print();
}

void __msan_init() {
  CHECK(!msan_init_is_running);
  if (msan_inited) return;
  msan_init_is_running = 1;
  SanitizerToolName = "MemorySanitizer";

  AvoidCVE_2016_2143();

  CacheBinaryName();
  InitializeFlags();

  // Install tool-specific callbacks in sanitizer_common.
  SetCheckUnwindCallback(CheckUnwind);

  __sanitizer_set_report_path(common_flags()->log_path);

  InitializeInterceptors();
  InstallAtForkHandler();
  CheckASLR();
  InitTlsSize();
  InstallDeadlySignalHandlers(MsanOnDeadlySignal);
  InstallAtExitHandler();  // Needs __cxa_atexit interceptor.

  DisableCoreDumperIfNecessary();
  if (StackSizeIsUnlimited()) {
    VPrintf(1, "Unlimited stack, doing reexec\n");
    // A reasonably large stack size. It is bigger than the usual 8MB, because,
    // well, the program could have been run with unlimited stack for a reason.
    SetStackSizeLimitInBytes(32 * 1024 * 1024);
    ReExec();
  }

  __msan_clear_on_return();
  if (__msan_get_track_origins())
    VPrintf(1, "msan_track_origins\n");
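  // The shadow and origin regions live at fixed addresses computed by
  // MEM_TO_SHADOW/MEM_TO_ORIGIN. InitShadowWithReExec maps them and, where
  // supported, re-executes the process if pre-existing mappings (e.g. from
  // ASLR placement) conflict with those ranges.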
  if (!InitShadowWithReExec(__msan_get_track_origins())) {
    Printf("FATAL: MemorySanitizer can not mmap the shadow memory.\n");
    Printf("FATAL: Make sure to compile with -fPIE and to link with -pie.\n");
    Printf("FATAL: Disabling ASLR is known to cause this error.\n");
    Printf("FATAL: If running under GDB, try "
           "'set disable-randomization off'.\n");
    DumpProcessMap();
    Die();
  }

  Symbolizer::GetOrInit()->AddHooks(EnterSymbolizerOrUnwider,
                                    ExitSymbolizerOrUnwider);

  InitializeCoverage(common_flags()->coverage, common_flags()->coverage_dir);

  MsanTSDInit(MsanTSDDtor);

  MsanAllocatorInit();

  MsanThread *main_thread = MsanThread::Create(nullptr, nullptr);
  SetCurrentThread(main_thread);
  main_thread->Init();

#if MSAN_CONTAINS_UBSAN
  __ubsan::InitAsPlugin();
#endif

  VPrintf(1, "MemorySanitizer init done\n");

  msan_init_is_running = 0;
  msan_inited = 1;
}

void __msan_set_keep_going(int keep_going) {
  flags()->halt_on_error = !keep_going;
}

void __msan_set_expect_umr(int expect_umr) {
  if (expect_umr) {
    msan_expected_umr_found = 0;
  } else if (!msan_expected_umr_found) {
    GET_CALLER_PC_BP;
    GET_FATAL_STACK_TRACE_PC_BP(pc, bp);
    ReportExpectedUMRNotFound(&stack);
    Die();
  }
  msan_expect_umr = expect_umr;
}

void __msan_print_shadow(const void *x, uptr size) {
  if (!MEM_IS_APP(x)) {
    Printf("Not a valid application address: %p\n", x);
    return;
  }

  DescribeMemoryRange(x, size);
}

void __msan_dump_shadow(const void *x, uptr size) {
  if (!MEM_IS_APP(x)) {
    Printf("Not a valid application address: %p\n", x);
    return;
  }

  unsigned char *s = (unsigned char *)MEM_TO_SHADOW(x);
  Printf("%p[%p] ", (void *)s, x);
  for (uptr i = 0; i < size; i++)
    Printf("%x%x ", s[i] >> 4, s[i] & 0xf);
  Printf("\n");
}

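// Returns -1 if the first 'size' bytes at 'x' are fully initialized (or 'x'
// is not an application address); otherwise returns the offset of the first
// poisoned byte.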
sptr __msan_test_shadow(const void *x, uptr size) {
  if (!MEM_IS_APP(x)) return -1;
  unsigned char *s = (unsigned char *)MEM_TO_SHADOW((uptr)x);
  if (__sanitizer::mem_is_zero((const char *)s, size))
    return -1;
  // Slow path: loop through again to find the location.
  for (uptr i = 0; i < size; ++i)
    if (s[i])
      return i;
  return -1;
}

void __msan_check_mem_is_initialized(const void *x, uptr size) {
  if (!__msan::flags()->report_umrs) return;
  sptr offset = __msan_test_shadow(x, size);
  if (offset < 0)
    return;

  GET_CALLER_PC_BP;
  ReportUMRInsideAddressRange(__func__, x, size, offset);
  __msan::PrintWarningWithOrigin(pc, bp,
                                 __msan_get_origin(((const char *)x) + offset));
  if (__msan::flags()->halt_on_error) {
    Printf("Exiting\n");
    Die();
  }
}

int __msan_set_poison_in_malloc(int do_poison) {
  int old = flags()->poison_in_malloc;
  flags()->poison_in_malloc = do_poison;
  return old;
}

int __msan_has_dynamic_component() { return false; }

NOINLINE
void __msan_clear_on_return() {
  __msan_param_tls[0] = 0;
}

void __msan_partial_poison(const void *data, void *shadow, uptr size) {
  internal_memcpy((void *)MEM_TO_SHADOW((uptr)data), shadow, size);
}

void __msan_load_unpoisoned(const void *src, uptr size, void *dst) {
  internal_memcpy(dst, src, size);
  __msan_unpoison(dst, size);
}

void __msan_set_origin(const void *a, uptr size, u32 origin) {
  if (__msan_get_track_origins()) SetOrigin(a, size, origin);
}

void __msan_set_alloca_origin(void *a, uptr size, char *descr) {
  SetAllocaOrigin(a, size, reinterpret_cast<u32 *>(descr), descr + 4,
                  GET_CALLER_PC());
}

void __msan_set_alloca_origin4(void *a, uptr size, char *descr, uptr pc) {
  // Intentionally ignore pc and use the return address. This function is here
  // for compatibility, in case the program is linked with a library
  // instrumented by an older clang.
  SetAllocaOrigin(a, size, reinterpret_cast<u32 *>(descr), descr + 4,
                  GET_CALLER_PC());
}

void __msan_set_alloca_origin_with_descr(void *a, uptr size, u32 *id_ptr,
                                         char *descr) {
  SetAllocaOrigin(a, size, id_ptr, descr, GET_CALLER_PC());
}

void __msan_set_alloca_origin_no_descr(void *a, uptr size, u32 *id_ptr) {
  SetAllocaOrigin(a, size, id_ptr, nullptr, GET_CALLER_PC());
}

u32 __msan_chain_origin(u32 id) {
  GET_CALLER_PC_BP;
  GET_STORE_STACK_TRACE_PC_BP(pc, bp);
  return ChainOrigin(id, &stack);
}

u32 __msan_get_origin(const void *a) {
  if (!__msan_get_track_origins()) return 0;
  uptr x = (uptr)a;
  uptr aligned = x & ~3ULL;
  uptr origin_ptr = MEM_TO_ORIGIN(aligned);
  return *(u32 *)origin_ptr;
}

int __msan_origin_is_descendant_or_same(u32 this_id, u32 prev_id) {
  Origin o = Origin::FromRawId(this_id);
  while (o.raw_id() != prev_id && o.isChainedOrigin())
    o = o.getNextChainedOrigin(nullptr);
  return o.raw_id() == prev_id;
}

u32 __msan_get_umr_origin() {
  return __msan_origin_tls;
}

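// Unaligned load/store helpers. Argument shadows occupy 8-byte slots in
// __msan_param_tls (so the stored value 'x' is at slot [1], after the
// pointer), while origins share the same byte offsets in
// __msan_param_origin_tls, which is why the value's origin is at index [2].
// The shadow of a loaded value is returned through __msan_retval_tls.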
u16 __sanitizer_unaligned_load16(const uu16 *p) {
  internal_memcpy(&__msan_retval_tls[0], (void *)MEM_TO_SHADOW((uptr)p),
                  sizeof(uu16));
  if (__msan_get_track_origins())
    __msan_retval_origin_tls = GetOriginIfPoisoned((uptr)p, sizeof(*p));
  return *p;
}
u32 __sanitizer_unaligned_load32(const uu32 *p) {
  internal_memcpy(&__msan_retval_tls[0], (void *)MEM_TO_SHADOW((uptr)p),
                  sizeof(uu32));
  if (__msan_get_track_origins())
    __msan_retval_origin_tls = GetOriginIfPoisoned((uptr)p, sizeof(*p));
  return *p;
}
u64 __sanitizer_unaligned_load64(const uu64 *p) {
  internal_memcpy(&__msan_retval_tls[0], (void *)MEM_TO_SHADOW((uptr)p),
                  sizeof(uu64));
  if (__msan_get_track_origins())
    __msan_retval_origin_tls = GetOriginIfPoisoned((uptr)p, sizeof(*p));
  return *p;
}
void __sanitizer_unaligned_store16(uu16 *p, u16 x) {
  static_assert(sizeof(uu16) == sizeof(u16), "incompatible types");
  u16 s;
  internal_memcpy(&s, &__msan_param_tls[1], sizeof(uu16));
  internal_memcpy((void *)MEM_TO_SHADOW((uptr)p), &s, sizeof(uu16));
  if (s && __msan_get_track_origins())
    if (uu32 o = __msan_param_origin_tls[2])
      SetOriginIfPoisoned((uptr)p, (uptr)&s, sizeof(s), o);
  *p = x;
}
void __sanitizer_unaligned_store32(uu32 *p, u32 x) {
  static_assert(sizeof(uu32) == sizeof(u32), "incompatible types");
  u32 s;
  internal_memcpy(&s, &__msan_param_tls[1], sizeof(uu32));
  internal_memcpy((void *)MEM_TO_SHADOW((uptr)p), &s, sizeof(uu32));
  if (s && __msan_get_track_origins())
    if (uu32 o = __msan_param_origin_tls[2])
      SetOriginIfPoisoned((uptr)p, (uptr)&s, sizeof(s), o);
  *p = x;
}
void __sanitizer_unaligned_store64(uu64 *p, u64 x) {
  u64 s = __msan_param_tls[1];
  *(uu64 *)MEM_TO_SHADOW((uptr)p) = s;
  if (s && __msan_get_track_origins())
    if (uu32 o = __msan_param_origin_tls[2])
      SetOriginIfPoisoned((uptr)p, (uptr)&s, sizeof(s), o);
  *p = x;
}

void __msan_set_death_callback(void (*callback)(void)) {
  SetUserDieCallback(callback);
}

void __msan_start_switch_fiber(const void *bottom, uptr size) {
  MsanThread *t = GetCurrentThread();
  if (!t) {
    VReport(1, "__msan_start_switch_fiber called from unknown thread\n");
    return;
  }
  t->StartSwitchFiber((uptr)bottom, size);
}

void __msan_finish_switch_fiber(const void **bottom_old, uptr *size_old) {
  MsanThread *t = GetCurrentThread();
  if (!t) {
    VReport(1, "__msan_finish_switch_fiber called from unknown thread\n");
    return;
  }
  t->FinishSwitchFiber((uptr *)bottom_old, (uptr *)size_old);

  internal_memset(__msan_param_tls, 0, sizeof(__msan_param_tls));
  internal_memset(__msan_retval_tls, 0, sizeof(__msan_retval_tls));
  internal_memset(__msan_va_arg_tls, 0, sizeof(__msan_va_arg_tls));

  if (__msan_get_track_origins()) {
    internal_memset(__msan_param_origin_tls, 0,
                    sizeof(__msan_param_origin_tls));
    internal_memset(&__msan_retval_origin_tls, 0,
                    sizeof(__msan_retval_origin_tls));
    internal_memset(__msan_va_arg_origin_tls, 0,
                    sizeof(__msan_va_arg_origin_tls));
  }
}

SANITIZER_INTERFACE_WEAK_DEF(const char *, __msan_default_options, void) {
  return "";
}

extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_print_stack_trace() {
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME());
  stack.Print();
}
}  // extern "C"