//===-- msan.cpp ----------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemorySanitizer.
//
// MemorySanitizer runtime.
//===----------------------------------------------------------------------===//

#include "msan.h"

#include "msan_chained_origin_depot.h"
#include "msan_origin.h"
#include "msan_poisoning.h"
#include "msan_report.h"
#include "msan_thread.h"
#include "sanitizer_common/sanitizer_atomic.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_flag_parser.h"
#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_interface_internal.h"
#include "sanitizer_common/sanitizer_libc.h"
#include "sanitizer_common/sanitizer_procmaps.h"
#include "sanitizer_common/sanitizer_stackdepot.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "sanitizer_common/sanitizer_symbolizer.h"
#include "ubsan/ubsan_flags.h"
#include "ubsan/ubsan_init.h"

// ACHTUNG! No system header includes in this file.

using namespace __sanitizer;

// Globals.
static THREADLOCAL int msan_expect_umr = 0;
static THREADLOCAL int msan_expected_umr_found = 0;

// Function argument shadow. Each argument starts at the next available 8-byte
// aligned address.
SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u64 __msan_param_tls[kMsanParamTlsSize / sizeof(u64)];

// Function argument origin. Each argument starts at the same offset as the
// corresponding shadow in (__msan_param_tls). Slightly weird, but changing this
// would break compatibility with older prebuilt binaries.
SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u32 __msan_param_origin_tls[kMsanParamTlsSize / sizeof(u32)];

SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u64 __msan_retval_tls[kMsanRetvalTlsSize / sizeof(u64)];

SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u32 __msan_retval_origin_tls;

SANITIZER_INTERFACE_ATTRIBUTE
ALIGNED(16) THREADLOCAL u64 __msan_va_arg_tls[kMsanParamTlsSize / sizeof(u64)];

SANITIZER_INTERFACE_ATTRIBUTE
ALIGNED(16)
THREADLOCAL u32 __msan_va_arg_origin_tls[kMsanParamTlsSize / sizeof(u32)];

SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u64 __msan_va_arg_overflow_size_tls;

SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u32 __msan_origin_tls;

extern "C" SANITIZER_WEAK_ATTRIBUTE const int __msan_track_origins;

int __msan_get_track_origins() {
  return &__msan_track_origins ? __msan_track_origins : 0;
}
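
// Also weak: like __msan_track_origins above, this symbol is expected to be
// defined by MemorySanitizer-instrumented modules when the corresponding
// option (recovery/keep-going) is enabled at compile time.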
extern "C" SANITIZER_WEAK_ATTRIBUTE const int __msan_keep_going;

namespace __msan {

static THREADLOCAL int is_in_symbolizer_or_unwinder;
static void EnterSymbolizerOrUnwider() { ++is_in_symbolizer_or_unwinder; }
static void ExitSymbolizerOrUnwider() { --is_in_symbolizer_or_unwinder; }
bool IsInSymbolizerOrUnwider() { return is_in_symbolizer_or_unwinder; }

struct UnwinderScope {
  UnwinderScope() { EnterSymbolizerOrUnwider(); }
  ~UnwinderScope() { ExitSymbolizerOrUnwider(); }
};

static Flags msan_flags;

Flags *flags() { return &msan_flags; }

int msan_inited = 0;
bool msan_init_is_running;

int msan_report_count = 0;

// Array of stack origins.
// FIXME: make it resizable.
static const uptr kNumStackOriginDescrs = 1024 * 1024;
static const char *StackOriginDescr[kNumStackOriginDescrs];
static uptr StackOriginPC[kNumStackOriginDescrs];
static atomic_uint32_t NumStackOriginDescrs;

void Flags::SetDefaults() {
#define MSAN_FLAG(Type, Name, DefaultValue, Description) Name = DefaultValue;
#include "msan_flags.inc"
#undef MSAN_FLAG
}

// keep_going is an old name for halt_on_error,
// and it has the inverse meaning.
class FlagHandlerKeepGoing final : public FlagHandlerBase {
  bool *halt_on_error_;

 public:
  explicit FlagHandlerKeepGoing(bool *halt_on_error)
      : halt_on_error_(halt_on_error) {}
  bool Parse(const char *value) final {
    bool tmp;
    FlagHandler<bool> h(&tmp);
    if (!h.Parse(value)) return false;
    *halt_on_error_ = !tmp;
    return true;
  }
  bool Format(char *buffer, uptr size) final {
    const char *keep_going_str = (*halt_on_error_) ? "false" : "true";
    return FormatString(buffer, size, keep_going_str);
  }
};

static void RegisterMsanFlags(FlagParser *parser, Flags *f) {
#define MSAN_FLAG(Type, Name, DefaultValue, Description) \
  RegisterFlag(parser, #Name, Description, &f->Name);
#include "msan_flags.inc"
#undef MSAN_FLAG

  FlagHandlerKeepGoing *fh_keep_going =
      new (FlagParser::Alloc) FlagHandlerKeepGoing(&f->halt_on_error);
  parser->RegisterHandler("keep_going", fh_keep_going,
                          "deprecated, use halt_on_error");
}

static void InitializeFlags() {
  SetCommonFlagsDefaults();
  {
    CommonFlags cf;
    cf.CopyFrom(*common_flags());
    cf.external_symbolizer_path = GetEnv("MSAN_SYMBOLIZER_PATH");
    cf.malloc_context_size = 20;
    cf.handle_ioctl = true;
    // FIXME: test and enable.
    cf.check_printf = false;
    cf.intercept_tls_get_addr = true;
    OverrideCommonFlags(cf);
  }

  Flags *f = flags();
  f->SetDefaults();

  FlagParser parser;
  RegisterMsanFlags(&parser, f);
  RegisterCommonFlags(&parser);

#if MSAN_CONTAINS_UBSAN
  __ubsan::Flags *uf = __ubsan::flags();
  uf->SetDefaults();

  FlagParser ubsan_parser;
  __ubsan::RegisterUbsanFlags(&ubsan_parser, uf);
  RegisterCommonFlags(&ubsan_parser);
#endif

  // Override from user-specified string.
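  // (__msan_default_options() is a weak function that the user program may
  // define; MSAN_OPTIONS, parsed below, still takes precedence over it.)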
  parser.ParseString(__msan_default_options());
#if MSAN_CONTAINS_UBSAN
  const char *ubsan_default_options = __ubsan_default_options();
  ubsan_parser.ParseString(ubsan_default_options);
#endif

  parser.ParseStringFromEnv("MSAN_OPTIONS");
#if MSAN_CONTAINS_UBSAN
  ubsan_parser.ParseStringFromEnv("UBSAN_OPTIONS");
#endif

  InitializeCommonFlags();

  if (Verbosity()) ReportUnrecognizedFlags();

  if (common_flags()->help) parser.PrintFlagDescriptions();

  // Check if deprecated exit_code MSan flag is set.
  if (f->exit_code != -1) {
    if (Verbosity())
      Printf("MSAN_OPTIONS=exit_code is deprecated! "
             "Please use MSAN_OPTIONS=exitcode instead.\n");
    CommonFlags cf;
    cf.CopyFrom(*common_flags());
    cf.exitcode = f->exit_code;
    OverrideCommonFlags(cf);
  }

  // Check flag values:
  if (f->origin_history_size < 0 ||
      f->origin_history_size > Origin::kMaxDepth) {
    Printf(
        "Origin history size invalid: %d. Must be 0 (unlimited) or in [1, %d] "
        "range.\n",
        f->origin_history_size, Origin::kMaxDepth);
    Die();
  }
  // Limiting to kStackDepotMaxUseCount / 2 to avoid overflow in
  // StackDepotHandle::inc_use_count_unsafe.
  if (f->origin_history_per_stack_limit < 0 ||
      f->origin_history_per_stack_limit > kStackDepotMaxUseCount / 2) {
    Printf(
        "Origin per-stack limit invalid: %d. Must be 0 (unlimited) or in [1, "
        "%d] range.\n",
        f->origin_history_per_stack_limit, kStackDepotMaxUseCount / 2);
    Die();
  }
  if (f->store_context_size < 1) f->store_context_size = 1;
}

void PrintWarning(uptr pc, uptr bp) {
  PrintWarningWithOrigin(pc, bp, __msan_origin_tls);
}

void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin) {
  if (msan_expect_umr) {
    // Printf("Expected UMR\n");
    __msan_origin_tls = origin;
    msan_expected_umr_found = 1;
    return;
  }

  ++msan_report_count;

  GET_FATAL_STACK_TRACE_PC_BP(pc, bp);

  u32 report_origin =
      (__msan_get_track_origins() && Origin::isValidId(origin)) ? origin : 0;
  ReportUMR(&stack, report_origin);

  if (__msan_get_track_origins() && !Origin::isValidId(origin)) {
    Printf(
        "  ORIGIN: invalid (%x). Might be a bug in MemorySanitizer origin "
        "tracking.\n    This could still be a bug in your code, too!\n",
        origin);
  }
}

void UnpoisonParam(uptr n) {
  internal_memset(__msan_param_tls, 0, n * sizeof(*__msan_param_tls));
}

// Backup MSan runtime TLS state.
// Implementation must be async-signal-safe.
// Instances of this class may live on the signal handler stack, and data size
// may be an issue.
void ScopedThreadLocalStateBackup::Backup() {
  va_arg_overflow_size_tls = __msan_va_arg_overflow_size_tls;
}

void ScopedThreadLocalStateBackup::Restore() {
  // A lame implementation that only keeps essential state and resets the rest.
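  // Note: zero shadow means "fully initialized", so wiping the TLS arrays
  // below can only hide reports, never introduce false positives.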
  __msan_va_arg_overflow_size_tls = va_arg_overflow_size_tls;

  internal_memset(__msan_param_tls, 0, sizeof(__msan_param_tls));
  internal_memset(__msan_retval_tls, 0, sizeof(__msan_retval_tls));
  internal_memset(__msan_va_arg_tls, 0, sizeof(__msan_va_arg_tls));
  internal_memset(__msan_va_arg_origin_tls, 0,
                  sizeof(__msan_va_arg_origin_tls));

  if (__msan_get_track_origins()) {
    internal_memset(&__msan_retval_origin_tls, 0,
                    sizeof(__msan_retval_origin_tls));
    internal_memset(__msan_param_origin_tls, 0,
                    sizeof(__msan_param_origin_tls));
  }
}

void UnpoisonThreadLocalState() {
}

const char *GetStackOriginDescr(u32 id, uptr *pc) {
  CHECK_LT(id, kNumStackOriginDescrs);
  if (pc) *pc = StackOriginPC[id];
  return StackOriginDescr[id];
}

u32 ChainOrigin(u32 id, StackTrace *stack) {
  MsanThread *t = GetCurrentThread();
  if (t && t->InSignalHandler())
    return id;

  Origin o = Origin::FromRawId(id);
  stack->tag = StackTrace::TAG_UNKNOWN;
  Origin chained = Origin::CreateChainedOrigin(o, stack);
  return chained.raw_id();
}

}  // namespace __msan

void __sanitizer::BufferedStackTrace::UnwindImpl(
    uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
  using namespace __msan;
  MsanThread *t = GetCurrentThread();
  if (!t || !StackTrace::WillUseFastUnwind(request_fast)) {
    // Block reports from our interceptors during _Unwind_Backtrace.
    UnwinderScope sym_scope;
    return Unwind(max_depth, pc, bp, context, t ? t->stack_top() : 0,
                  t ? t->stack_bottom() : 0, false);
  }
  if (StackTrace::WillUseFastUnwind(request_fast))
    Unwind(max_depth, pc, bp, nullptr, t->stack_top(), t->stack_bottom(), true);
  else
    Unwind(max_depth, pc, 0, context, 0, 0, false);
}

// Interface.
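//
// Most of the entry points below are called directly by MemorySanitizer-
// instrumented code; several are also part of the public interface declared
// in <sanitizer/msan_interface.h>.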

using namespace __msan;

#define MSAN_MAYBE_WARNING(type, size)              \
  void __msan_maybe_warning_##size(type s, u32 o) { \
    GET_CALLER_PC_BP_SP;                            \
    (void) sp;                                      \
    if (UNLIKELY(s)) {                              \
      PrintWarningWithOrigin(pc, bp, o);            \
      if (__msan::flags()->halt_on_error) {         \
        Printf("Exiting\n");                        \
        Die();                                      \
      }                                             \
    }                                               \
  }

MSAN_MAYBE_WARNING(u8, 1)
MSAN_MAYBE_WARNING(u16, 2)
MSAN_MAYBE_WARNING(u32, 4)
MSAN_MAYBE_WARNING(u64, 8)

#define MSAN_MAYBE_STORE_ORIGIN(type, size)                       \
  void __msan_maybe_store_origin_##size(type s, void *p, u32 o) { \
    if (UNLIKELY(s)) {                                             \
      if (__msan_get_track_origins() > 1) {                        \
        GET_CALLER_PC_BP_SP;                                       \
        (void) sp;                                                 \
        GET_STORE_STACK_TRACE_PC_BP(pc, bp);                       \
        o = ChainOrigin(o, &stack);                                \
      }                                                            \
      *(u32 *)MEM_TO_ORIGIN((uptr)p & ~3UL) = o;                   \
    }                                                              \
  }

MSAN_MAYBE_STORE_ORIGIN(u8, 1)
MSAN_MAYBE_STORE_ORIGIN(u16, 2)
MSAN_MAYBE_STORE_ORIGIN(u32, 4)
MSAN_MAYBE_STORE_ORIGIN(u64, 8)

void __msan_warning() {
  GET_CALLER_PC_BP_SP;
  (void)sp;
  PrintWarning(pc, bp);
  if (__msan::flags()->halt_on_error) {
    if (__msan::flags()->print_stats)
      ReportStats();
    Printf("Exiting\n");
    Die();
  }
}

void __msan_warning_noreturn() {
  GET_CALLER_PC_BP_SP;
  (void)sp;
  PrintWarning(pc, bp);
  if (__msan::flags()->print_stats)
    ReportStats();
  Printf("Exiting\n");
  Die();
}

void __msan_warning_with_origin(u32 origin) {
  GET_CALLER_PC_BP_SP;
  (void)sp;
  PrintWarningWithOrigin(pc, bp, origin);
  if (__msan::flags()->halt_on_error) {
    if (__msan::flags()->print_stats)
      ReportStats();
    Printf("Exiting\n");
    Die();
  }
}

void __msan_warning_with_origin_noreturn(u32 origin) {
  GET_CALLER_PC_BP_SP;
  (void)sp;
  PrintWarningWithOrigin(pc, bp, origin);
  if (__msan::flags()->print_stats)
    ReportStats();
  Printf("Exiting\n");
  Die();
}

static void OnStackUnwind(const SignalContext &sig, const void *,
                          BufferedStackTrace *stack) {
  stack->Unwind(StackTrace::GetNextInstructionPc(sig.pc), sig.bp, sig.context,
                common_flags()->fast_unwind_on_fatal);
}

static void MsanOnDeadlySignal(int signo, void *siginfo, void *context) {
  HandleDeadlySignal(siginfo, context, GetTid(), &OnStackUnwind, nullptr);
}

static void CheckUnwind() {
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME());
  stack.Print();
}

void __msan_init() {
  CHECK(!msan_init_is_running);
  if (msan_inited) return;
  msan_init_is_running = 1;
  SanitizerToolName = "MemorySanitizer";

  AvoidCVE_2016_2143();

  CacheBinaryName();
  InitializeFlags();

  // Install tool-specific callbacks in sanitizer_common.
  SetCheckUnwindCallback(CheckUnwind);

  __sanitizer_set_report_path(common_flags()->log_path);

  InitializeInterceptors();
  CheckASLR();
  InitTlsSize();
  InstallDeadlySignalHandlers(MsanOnDeadlySignal);
  InstallAtExitHandler();  // Needs __cxa_atexit interceptor.

  DisableCoreDumperIfNecessary();
  if (StackSizeIsUnlimited()) {
    VPrintf(1, "Unlimited stack, doing reexec\n");
    // A reasonably large stack size. It is bigger than the usual 8MB, because,
    // well, the program could have been run with unlimited stack for a reason.
    SetStackSizeLimitInBytes(32 * 1024 * 1024);
    ReExec();
  }

  __msan_clear_on_return();
  if (__msan_get_track_origins())
    VPrintf(1, "msan_track_origins\n");
  if (!InitShadow(__msan_get_track_origins())) {
    Printf("FATAL: MemorySanitizer cannot mmap the shadow memory.\n");
    Printf("FATAL: Make sure to compile with -fPIE and to link with -pie.\n");
    Printf("FATAL: Disabling ASLR is known to cause this error.\n");
    Printf("FATAL: If running under GDB, try "
           "'set disable-randomization off'.\n");
    DumpProcessMap();
    Die();
  }

  Symbolizer::GetOrInit()->AddHooks(EnterSymbolizerOrUnwider,
                                    ExitSymbolizerOrUnwider);

  InitializeCoverage(common_flags()->coverage, common_flags()->coverage_dir);

  MsanTSDInit(MsanTSDDtor);

  MsanAllocatorInit();

  MsanThread *main_thread = MsanThread::Create(nullptr, nullptr);
  SetCurrentThread(main_thread);
  main_thread->Init();

#if MSAN_CONTAINS_UBSAN
  __ubsan::InitAsPlugin();
#endif

  VPrintf(1, "MemorySanitizer init done\n");

  msan_init_is_running = 0;
  msan_inited = 1;
}

void __msan_set_keep_going(int keep_going) {
  flags()->halt_on_error = !keep_going;
}

void __msan_set_expect_umr(int expect_umr) {
  if (expect_umr) {
    msan_expected_umr_found = 0;
  } else if (!msan_expected_umr_found) {
    GET_CALLER_PC_BP_SP;
    (void)sp;
    GET_FATAL_STACK_TRACE_PC_BP(pc, bp);
    ReportExpectedUMRNotFound(&stack);
    Die();
  }
  msan_expect_umr = expect_umr;
}

void __msan_print_shadow(const void *x, uptr size) {
  if (!MEM_IS_APP(x)) {
    Printf("Not a valid application address: %p\n", x);
    return;
  }

  DescribeMemoryRange(x, size);
}

void __msan_dump_shadow(const void *x, uptr size) {
  if (!MEM_IS_APP(x)) {
    Printf("Not a valid application address: %p\n", x);
    return;
  }

  unsigned char *s = (unsigned char *)MEM_TO_SHADOW(x);
  Printf("%p[%p] ", (void *)s, x);
  for (uptr i = 0; i < size; i++)
    Printf("%x%x ", s[i] >> 4, s[i] & 0xf);
  Printf("\n");
}

sptr __msan_test_shadow(const void *x, uptr size) {
  if (!MEM_IS_APP(x)) return -1;
  unsigned char *s = (unsigned char *)MEM_TO_SHADOW((uptr)x);
  if (__sanitizer::mem_is_zero((const char *)s, size))
    return -1;
  // Slow path: loop through again to find the location.
  for (uptr i = 0; i < size; ++i)
    if (s[i])
      return i;
  return -1;
}

void __msan_check_mem_is_initialized(const void *x, uptr size) {
  if (!__msan::flags()->report_umrs) return;
  sptr offset = __msan_test_shadow(x, size);
  if (offset < 0)
    return;

  GET_CALLER_PC_BP_SP;
  (void)sp;
  ReportUMRInsideAddressRange(__func__, x, size, offset);
  __msan::PrintWarningWithOrigin(pc, bp,
                                 __msan_get_origin(((const char *)x) + offset));
  if (__msan::flags()->halt_on_error) {
    Printf("Exiting\n");
    Die();
  }
}

int __msan_set_poison_in_malloc(int do_poison) {
  int old = flags()->poison_in_malloc;
  flags()->poison_in_malloc = do_poison;
  return old;
}

int __msan_has_dynamic_component() { return false; }

NOINLINE
void __msan_clear_on_return() {
  __msan_param_tls[0] = 0;
}

void __msan_partial_poison(const void* data, void* shadow, uptr size) {
  internal_memcpy((void*)MEM_TO_SHADOW((uptr)data), shadow, size);
}

void __msan_load_unpoisoned(const void *src, uptr size, void *dst) {
  internal_memcpy(dst, src, size);
  __msan_unpoison(dst, size);
}

void __msan_set_origin(const void *a, uptr size, u32 origin) {
  if (__msan_get_track_origins()) SetOrigin(a, size, origin);
}

// 'descr' is created at compile time and contains '----' at the beginning.
// When we see descr for the first time we replace '----' with a unique id
// and set the origin to (id | (31st bit)).
void __msan_set_alloca_origin(void *a, uptr size, char *descr) {
  __msan_set_alloca_origin4(a, size, descr, 0);
}

void __msan_set_alloca_origin4(void *a, uptr size, char *descr, uptr pc) {
  static const u32 dash = '-';
  static const u32 first_timer =
      dash + (dash << 8) + (dash << 16) + (dash << 24);
  u32 *id_ptr = (u32*)descr;
  bool print = false;  // internal_strstr(descr + 4, "AllocaTOTest") != 0;
  u32 id = *id_ptr;
  if (id == first_timer) {
    u32 idx = atomic_fetch_add(&NumStackOriginDescrs, 1, memory_order_relaxed);
    CHECK_LT(idx, kNumStackOriginDescrs);
    StackOriginDescr[idx] = descr + 4;
#if SANITIZER_PPC64V1
    // On PowerPC64 ELFv1, the address of a function actually points to a
    // three-doubleword data structure with the first field containing
    // the address of the function's code.
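    // I.e. 'pc' here is a function descriptor; read its first word to get
    // the actual code address.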
    if (pc)
      pc = *reinterpret_cast<uptr*>(pc);
#endif
    StackOriginPC[idx] = pc;
    id = Origin::CreateStackOrigin(idx).raw_id();
    *id_ptr = id;
    if (print)
      Printf("First time: idx=%d id=%d %s 0x%zx \n", idx, id, descr + 4, pc);
  }
  if (print)
    Printf("__msan_set_alloca_origin: descr=%s id=%x\n", descr + 4, id);
  __msan_set_origin(a, size, id);
}

u32 __msan_chain_origin(u32 id) {
  GET_CALLER_PC_BP_SP;
  (void)sp;
  GET_STORE_STACK_TRACE_PC_BP(pc, bp);
  return ChainOrigin(id, &stack);
}

u32 __msan_get_origin(const void *a) {
  if (!__msan_get_track_origins()) return 0;
  uptr x = (uptr)a;
  uptr aligned = x & ~3ULL;
  uptr origin_ptr = MEM_TO_ORIGIN(aligned);
  return *(u32*)origin_ptr;
}

int __msan_origin_is_descendant_or_same(u32 this_id, u32 prev_id) {
  Origin o = Origin::FromRawId(this_id);
  while (o.raw_id() != prev_id && o.isChainedOrigin())
    o = o.getNextChainedOrigin(nullptr);
  return o.raw_id() == prev_id;
}

u32 __msan_get_umr_origin() {
  return __msan_origin_tls;
}

u16 __sanitizer_unaligned_load16(const uu16 *p) {
  internal_memcpy(&__msan_retval_tls[0], (void *)MEM_TO_SHADOW((uptr)p),
                  sizeof(uu16));
  if (__msan_get_track_origins())
    __msan_retval_origin_tls = GetOriginIfPoisoned((uptr)p, sizeof(*p));
  return *p;
}
u32 __sanitizer_unaligned_load32(const uu32 *p) {
  internal_memcpy(&__msan_retval_tls[0], (void *)MEM_TO_SHADOW((uptr)p),
                  sizeof(uu32));
  if (__msan_get_track_origins())
    __msan_retval_origin_tls = GetOriginIfPoisoned((uptr)p, sizeof(*p));
  return *p;
}
u64 __sanitizer_unaligned_load64(const uu64 *p) {
  internal_memcpy(&__msan_retval_tls[0], (void *)MEM_TO_SHADOW((uptr)p),
                  sizeof(uu64));
  if (__msan_get_track_origins())
    __msan_retval_origin_tls = GetOriginIfPoisoned((uptr)p, sizeof(*p));
  return *p;
}
void __sanitizer_unaligned_store16(uu16 *p, u16 x) {
  static_assert(sizeof(uu16) == sizeof(u16), "incompatible types");
  u16 s;
  internal_memcpy(&s, &__msan_param_tls[1], sizeof(uu16));
  internal_memcpy((void *)MEM_TO_SHADOW((uptr)p), &s, sizeof(uu16));
  if (s && __msan_get_track_origins())
    if (uu32 o = __msan_param_origin_tls[2])
      SetOriginIfPoisoned((uptr)p, (uptr)&s, sizeof(s), o);
  *p = x;
}
void __sanitizer_unaligned_store32(uu32 *p, u32 x) {
  static_assert(sizeof(uu32) == sizeof(u32), "incompatible types");
  u32 s;
  internal_memcpy(&s, &__msan_param_tls[1], sizeof(uu32));
  internal_memcpy((void *)MEM_TO_SHADOW((uptr)p), &s, sizeof(uu32));
  if (s && __msan_get_track_origins())
    if (uu32 o = __msan_param_origin_tls[2])
      SetOriginIfPoisoned((uptr)p, (uptr)&s, sizeof(s), o);
  *p = x;
}
void __sanitizer_unaligned_store64(uu64 *p, u64 x) {
  u64 s = __msan_param_tls[1];
  *(uu64 *)MEM_TO_SHADOW((uptr)p) = s;
  if (s && __msan_get_track_origins())
    if (uu32 o = __msan_param_origin_tls[2])
      SetOriginIfPoisoned((uptr)p, (uptr)&s, sizeof(s), o);
  *p = x;
}

void __msan_set_death_callback(void (*callback)(void)) {
  SetUserDieCallback(callback);
}

void __msan_start_switch_fiber(const void *bottom, uptr size) {
  MsanThread *t = GetCurrentThread();
  if (!t) {
    VReport(1, "__msan_start_switch_fiber called from unknown thread\n");
    return;
  }
  t->StartSwitchFiber((uptr)bottom, size);
}

void __msan_finish_switch_fiber(const void **bottom_old, uptr *size_old) {
  MsanThread *t = GetCurrentThread();
  if (!t) {
    VReport(1, "__msan_finish_switch_fiber called from unknown thread\n");
    return;
  }
  t->FinishSwitchFiber((uptr *)bottom_old, (uptr *)size_old);

  internal_memset(__msan_param_tls, 0, sizeof(__msan_param_tls));
  internal_memset(__msan_retval_tls, 0, sizeof(__msan_retval_tls));
  internal_memset(__msan_va_arg_tls, 0, sizeof(__msan_va_arg_tls));

  if (__msan_get_track_origins()) {
    internal_memset(__msan_param_origin_tls, 0,
                    sizeof(__msan_param_origin_tls));
    internal_memset(&__msan_retval_origin_tls, 0,
                    sizeof(__msan_retval_origin_tls));
    internal_memset(__msan_va_arg_origin_tls, 0,
                    sizeof(__msan_va_arg_origin_tls));
  }
}

SANITIZER_INTERFACE_WEAK_DEF(const char *, __msan_default_options, void) {
  return "";
}

extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_print_stack_trace() {
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME());
  stack.Print();
}
}  // extern "C"