//===-- msan.cpp ----------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemorySanitizer.
//
// MemorySanitizer runtime.
//===----------------------------------------------------------------------===//

#include "msan.h"

#include "msan_chained_origin_depot.h"
#include "msan_origin.h"
#include "msan_poisoning.h"
#include "msan_report.h"
#include "msan_thread.h"
#include "sanitizer_common/sanitizer_atomic.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_flag_parser.h"
#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_interface_internal.h"
#include "sanitizer_common/sanitizer_libc.h"
#include "sanitizer_common/sanitizer_procmaps.h"
#include "sanitizer_common/sanitizer_stackdepot.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "sanitizer_common/sanitizer_symbolizer.h"
#include "ubsan/ubsan_flags.h"
#include "ubsan/ubsan_init.h"

// ACHTUNG! No system header includes in this file.

using namespace __sanitizer;

// Globals.
static THREADLOCAL int msan_expect_umr = 0;
static THREADLOCAL int msan_expected_umr_found = 0;

// Function argument shadow. Each argument starts at the next available 8-byte
// aligned address.
SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u64 __msan_param_tls[kMsanParamTlsSize / sizeof(u64)];

// Function argument origin. Each argument starts at the same offset as the
// corresponding shadow in (__msan_param_tls). Slightly weird, but changing this
// would break compatibility with older prebuilt binaries.
SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u32 __msan_param_origin_tls[kMsanParamTlsSize / sizeof(u32)];

SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u64 __msan_retval_tls[kMsanRetvalTlsSize / sizeof(u64)];

SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u32 __msan_retval_origin_tls;

SANITIZER_INTERFACE_ATTRIBUTE
ALIGNED(16) THREADLOCAL u64 __msan_va_arg_tls[kMsanParamTlsSize / sizeof(u64)];

SANITIZER_INTERFACE_ATTRIBUTE
ALIGNED(16)
THREADLOCAL u32 __msan_va_arg_origin_tls[kMsanParamTlsSize / sizeof(u32)];

SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u64 __msan_va_arg_overflow_size_tls;

SANITIZER_INTERFACE_ATTRIBUTE
THREADLOCAL u32 __msan_origin_tls;

extern "C" SANITIZER_WEAK_ATTRIBUTE const int __msan_track_origins;

int __msan_get_track_origins() {
  return &__msan_track_origins ? __msan_track_origins : 0;
}

extern "C" SANITIZER_WEAK_ATTRIBUTE const int __msan_keep_going;

namespace __msan {

static THREADLOCAL int is_in_symbolizer_or_unwinder;
static void EnterSymbolizerOrUnwider() { ++is_in_symbolizer_or_unwinder; }
static void ExitSymbolizerOrUnwider() { --is_in_symbolizer_or_unwinder; }
bool IsInSymbolizerOrUnwider() { return is_in_symbolizer_or_unwinder; }

struct UnwinderScope {
  UnwinderScope() { EnterSymbolizerOrUnwider(); }
  ~UnwinderScope() { ExitSymbolizerOrUnwider(); }
};

static Flags msan_flags;

Flags *flags() { return &msan_flags; }

int msan_inited = 0;
bool msan_init_is_running;

int msan_report_count = 0;

// Array of stack origins.
// FIXME: make it resizable.
static const uptr kNumStackOriginDescrs = 1024 * 1024;
static const char *StackOriginDescr[kNumStackOriginDescrs];
static uptr StackOriginPC[kNumStackOriginDescrs];
static atomic_uint32_t NumStackOriginDescrs;

void Flags::SetDefaults() {
#define MSAN_FLAG(Type, Name, DefaultValue, Description) Name = DefaultValue;
#include "msan_flags.inc"
#undef MSAN_FLAG
}

// keep_going is an old name for halt_on_error,
// and it has inverse meaning.
class FlagHandlerKeepGoing final : public FlagHandlerBase {
  bool *halt_on_error_;

 public:
  explicit FlagHandlerKeepGoing(bool *halt_on_error)
      : halt_on_error_(halt_on_error) {}
  bool Parse(const char *value) final {
    bool tmp;
    FlagHandler<bool> h(&tmp);
    if (!h.Parse(value)) return false;
    *halt_on_error_ = !tmp;
    return true;
  }
  bool Format(char *buffer, uptr size) final {
    const char *keep_going_str = (*halt_on_error_) ? "false" : "true";
    return FormatString(buffer, size, keep_going_str);
  }
};

static void RegisterMsanFlags(FlagParser *parser, Flags *f) {
#define MSAN_FLAG(Type, Name, DefaultValue, Description) \
  RegisterFlag(parser, #Name, Description, &f->Name);
#include "msan_flags.inc"
#undef MSAN_FLAG

  FlagHandlerKeepGoing *fh_keep_going =
      new (FlagParser::Alloc) FlagHandlerKeepGoing(&f->halt_on_error);
  parser->RegisterHandler("keep_going", fh_keep_going,
                          "deprecated, use halt_on_error");
}

static void InitializeFlags() {
  SetCommonFlagsDefaults();
  {
    CommonFlags cf;
    cf.CopyFrom(*common_flags());
    cf.external_symbolizer_path = GetEnv("MSAN_SYMBOLIZER_PATH");
    cf.malloc_context_size = 20;
    cf.handle_ioctl = true;
    // FIXME: test and enable.
    cf.check_printf = false;
    cf.intercept_tls_get_addr = true;
    OverrideCommonFlags(cf);
  }

  Flags *f = flags();
  f->SetDefaults();

  FlagParser parser;
  RegisterMsanFlags(&parser, f);
  RegisterCommonFlags(&parser);

#if MSAN_CONTAINS_UBSAN
  __ubsan::Flags *uf = __ubsan::flags();
  uf->SetDefaults();

  FlagParser ubsan_parser;
  __ubsan::RegisterUbsanFlags(&ubsan_parser, uf);
  RegisterCommonFlags(&ubsan_parser);
#endif

  // Override from user-specified string.
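  // Later ParseString/ParseStringFromEnv calls override earlier ones, so
  // MSAN_OPTIONS from the environment takes precedence over the defaults
  // returned by __msan_default_options().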
  parser.ParseString(__msan_default_options());
#if MSAN_CONTAINS_UBSAN
  const char *ubsan_default_options = __ubsan_default_options();
  ubsan_parser.ParseString(ubsan_default_options);
#endif

  parser.ParseStringFromEnv("MSAN_OPTIONS");
#if MSAN_CONTAINS_UBSAN
  ubsan_parser.ParseStringFromEnv("UBSAN_OPTIONS");
#endif

  InitializeCommonFlags();

  if (Verbosity()) ReportUnrecognizedFlags();

  if (common_flags()->help) parser.PrintFlagDescriptions();

  // Check if deprecated exit_code MSan flag is set.
  if (f->exit_code != -1) {
    if (Verbosity())
      Printf("MSAN_OPTIONS=exit_code is deprecated! "
             "Please use MSAN_OPTIONS=exitcode instead.\n");
    CommonFlags cf;
    cf.CopyFrom(*common_flags());
    cf.exitcode = f->exit_code;
    OverrideCommonFlags(cf);
  }

  // Check flag values:
  if (f->origin_history_size < 0 ||
      f->origin_history_size > Origin::kMaxDepth) {
    Printf(
        "Origin history size invalid: %d. Must be 0 (unlimited) or in [1, %d] "
        "range.\n",
        f->origin_history_size, Origin::kMaxDepth);
    Die();
  }
  // Limiting to kStackDepotMaxUseCount / 2 to avoid overflow in
  // StackDepotHandle::inc_use_count_unsafe.
  if (f->origin_history_per_stack_limit < 0 ||
      f->origin_history_per_stack_limit > kStackDepotMaxUseCount / 2) {
    Printf(
        "Origin per-stack limit invalid: %d. Must be 0 (unlimited) or in [1, "
        "%d] range.\n",
        f->origin_history_per_stack_limit, kStackDepotMaxUseCount / 2);
    Die();
  }
  if (f->store_context_size < 1) f->store_context_size = 1;
}

void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin) {
  if (msan_expect_umr) {
    // Printf("Expected UMR\n");
    __msan_origin_tls = origin;
    msan_expected_umr_found = 1;
    return;
  }

  ++msan_report_count;

  GET_FATAL_STACK_TRACE_PC_BP(pc, bp);

  u32 report_origin =
      (__msan_get_track_origins() && Origin::isValidId(origin)) ? origin : 0;
  ReportUMR(&stack, report_origin);

  if (__msan_get_track_origins() && !Origin::isValidId(origin)) {
    Printf(
        " ORIGIN: invalid (%x). Might be a bug in MemorySanitizer origin "
        "tracking.\n This could still be a bug in your code, too!\n",
        origin);
  }
}

void UnpoisonParam(uptr n) {
  internal_memset(__msan_param_tls, 0, n * sizeof(*__msan_param_tls));
}

// Backup MSan runtime TLS state.
// Implementation must be async-signal-safe.
// Instances of this class may live on the signal handler stack, and data size
// may be an issue.
void ScopedThreadLocalStateBackup::Backup() {
  va_arg_overflow_size_tls = __msan_va_arg_overflow_size_tls;
}

void ScopedThreadLocalStateBackup::Restore() {
  // A lame implementation that only keeps essential state and resets the rest.
  __msan_va_arg_overflow_size_tls = va_arg_overflow_size_tls;

  internal_memset(__msan_param_tls, 0, sizeof(__msan_param_tls));
  internal_memset(__msan_retval_tls, 0, sizeof(__msan_retval_tls));
  internal_memset(__msan_va_arg_tls, 0, sizeof(__msan_va_arg_tls));
  internal_memset(__msan_va_arg_origin_tls, 0,
                  sizeof(__msan_va_arg_origin_tls));

  if (__msan_get_track_origins()) {
    internal_memset(&__msan_retval_origin_tls, 0,
                    sizeof(__msan_retval_origin_tls));
    internal_memset(__msan_param_origin_tls, 0,
                    sizeof(__msan_param_origin_tls));
  }
}

void UnpoisonThreadLocalState() {
}

const char *GetStackOriginDescr(u32 id, uptr *pc) {
  CHECK_LT(id, kNumStackOriginDescrs);
  if (pc) *pc = StackOriginPC[id];
  return StackOriginDescr[id];
}

u32 ChainOrigin(u32 id, StackTrace *stack) {
  MsanThread *t = GetCurrentThread();
  if (t && t->InSignalHandler())
    return id;

  Origin o = Origin::FromRawId(id);
  stack->tag = StackTrace::TAG_UNKNOWN;
  Origin chained = Origin::CreateChainedOrigin(o, stack);
  return chained.raw_id();
}

// The current implementation separates 'id_ptr' from 'descr' and keeps 'descr'
// constant.
// In the previous implementation, 'descr' was created at compile time and
// started with '----'. On first use, the '----' was replaced with a unique id
// and the origin was set to (id | (1 << 31)).
static inline void SetAllocaOrigin(void *a, uptr size, u32 *id_ptr, char *descr,
                                   uptr pc) {
  static const u32 dash = '-';
  static const u32 first_timer =
      dash + (dash << 8) + (dash << 16) + (dash << 24);
  u32 id = *id_ptr;
  if (id == 0 || id == first_timer) {
    u32 idx = atomic_fetch_add(&NumStackOriginDescrs, 1, memory_order_relaxed);
    CHECK_LT(idx, kNumStackOriginDescrs);
    StackOriginDescr[idx] = descr;
    StackOriginPC[idx] = pc;
    id = Origin::CreateStackOrigin(idx).raw_id();
    *id_ptr = id;
  }
  __msan_set_origin(a, size, id);
}

}  // namespace __msan

void __sanitizer::BufferedStackTrace::UnwindImpl(
    uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
  using namespace __msan;
  MsanThread *t = GetCurrentThread();
  if (!t || !StackTrace::WillUseFastUnwind(request_fast)) {
    // Block reports from our interceptors during _Unwind_Backtrace.
    UnwinderScope sym_scope;
    return Unwind(max_depth, pc, bp, context, t ? t->stack_top() : 0,
                  t ? t->stack_bottom() : 0, false);
  }
  if (StackTrace::WillUseFastUnwind(request_fast))
    Unwind(max_depth, pc, bp, nullptr, t->stack_top(), t->stack_bottom(), true);
  else
    Unwind(max_depth, pc, 0, context, 0, 0, false);
}

// Interface.
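// The functions below are the external MemorySanitizer runtime interface; they
// are called from MSan-instrumented code and, for some entry points such as
// __msan_print_shadow, directly from user code.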

using namespace __msan;

#define MSAN_MAYBE_WARNING(type, size)              \
  void __msan_maybe_warning_##size(type s, u32 o) { \
    GET_CALLER_PC_BP;                               \
    if (UNLIKELY(s)) {                              \
      PrintWarningWithOrigin(pc, bp, o);            \
      if (__msan::flags()->halt_on_error) {         \
        Printf("Exiting\n");                        \
        Die();                                      \
      }                                             \
    }                                               \
  }

MSAN_MAYBE_WARNING(u8, 1)
MSAN_MAYBE_WARNING(u16, 2)
MSAN_MAYBE_WARNING(u32, 4)
MSAN_MAYBE_WARNING(u64, 8)

#define MSAN_MAYBE_STORE_ORIGIN(type, size)                       \
  void __msan_maybe_store_origin_##size(type s, void *p, u32 o) { \
    if (UNLIKELY(s)) {                                            \
      if (__msan_get_track_origins() > 1) {                       \
        GET_CALLER_PC_BP;                                         \
        GET_STORE_STACK_TRACE_PC_BP(pc, bp);                      \
        o = ChainOrigin(o, &stack);                               \
      }                                                           \
      *(u32 *)MEM_TO_ORIGIN((uptr)p & ~3UL) = o;                  \
    }                                                             \
  }

MSAN_MAYBE_STORE_ORIGIN(u8, 1)
MSAN_MAYBE_STORE_ORIGIN(u16, 2)
MSAN_MAYBE_STORE_ORIGIN(u32, 4)
MSAN_MAYBE_STORE_ORIGIN(u64, 8)

void __msan_warning() {
  GET_CALLER_PC_BP;
  PrintWarningWithOrigin(pc, bp, 0);
  if (__msan::flags()->halt_on_error) {
    if (__msan::flags()->print_stats)
      ReportStats();
    Printf("Exiting\n");
    Die();
  }
}

void __msan_warning_noreturn() {
  GET_CALLER_PC_BP;
  PrintWarningWithOrigin(pc, bp, 0);
  if (__msan::flags()->print_stats)
    ReportStats();
  Printf("Exiting\n");
  Die();
}

void __msan_warning_with_origin(u32 origin) {
  GET_CALLER_PC_BP;
  PrintWarningWithOrigin(pc, bp, origin);
  if (__msan::flags()->halt_on_error) {
    if (__msan::flags()->print_stats)
      ReportStats();
    Printf("Exiting\n");
    Die();
  }
}

void __msan_warning_with_origin_noreturn(u32 origin) {
  GET_CALLER_PC_BP;
  PrintWarningWithOrigin(pc, bp, origin);
  if (__msan::flags()->print_stats)
    ReportStats();
  Printf("Exiting\n");
  Die();
}

static void OnStackUnwind(const SignalContext &sig, const void *,
                          BufferedStackTrace *stack) {
  stack->Unwind(StackTrace::GetNextInstructionPc(sig.pc), sig.bp, sig.context,
                common_flags()->fast_unwind_on_fatal);
}

static void MsanOnDeadlySignal(int signo, void *siginfo, void *context) {
  HandleDeadlySignal(siginfo, context, GetTid(), &OnStackUnwind, nullptr);
}

static void CheckUnwind() {
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME());
  stack.Print();
}

void __msan_init() {
  CHECK(!msan_init_is_running);
  if (msan_inited) return;
  msan_init_is_running = 1;
  SanitizerToolName = "MemorySanitizer";

  AvoidCVE_2016_2143();

  CacheBinaryName();
  InitializeFlags();

  // Install tool-specific callbacks in sanitizer_common.
  SetCheckUnwindCallback(CheckUnwind);

  __sanitizer_set_report_path(common_flags()->log_path);

  InitializeInterceptors();
  CheckASLR();
  InitTlsSize();
  InstallDeadlySignalHandlers(MsanOnDeadlySignal);
  InstallAtExitHandler();  // Needs __cxa_atexit interceptor.

  DisableCoreDumperIfNecessary();
  if (StackSizeIsUnlimited()) {
    VPrintf(1, "Unlimited stack, doing reexec\n");
    // A reasonably large stack size. It is bigger than the usual 8Mb, because,
    // well, the program could have been run with unlimited stack for a reason.
    SetStackSizeLimitInBytes(32 * 1024 * 1024);
    ReExec();
  }

  __msan_clear_on_return();
  if (__msan_get_track_origins())
    VPrintf(1, "msan_track_origins\n");
  if (!InitShadow(__msan_get_track_origins())) {
    Printf("FATAL: MemorySanitizer can not mmap the shadow memory.\n");
    Printf("FATAL: Make sure to compile with -fPIE and to link with -pie.\n");
    Printf("FATAL: Disabling ASLR is known to cause this error.\n");
    Printf("FATAL: If running under GDB, try "
           "'set disable-randomization off'.\n");
    DumpProcessMap();
    Die();
  }

  Symbolizer::GetOrInit()->AddHooks(EnterSymbolizerOrUnwider,
                                    ExitSymbolizerOrUnwider);

  InitializeCoverage(common_flags()->coverage, common_flags()->coverage_dir);

  MsanTSDInit(MsanTSDDtor);

  MsanAllocatorInit();

  MsanThread *main_thread = MsanThread::Create(nullptr, nullptr);
  SetCurrentThread(main_thread);
  main_thread->Init();

#if MSAN_CONTAINS_UBSAN
  __ubsan::InitAsPlugin();
#endif

  VPrintf(1, "MemorySanitizer init done\n");

  msan_init_is_running = 0;
  msan_inited = 1;
}

void __msan_set_keep_going(int keep_going) {
  flags()->halt_on_error = !keep_going;
}

void __msan_set_expect_umr(int expect_umr) {
  if (expect_umr) {
    msan_expected_umr_found = 0;
  } else if (!msan_expected_umr_found) {
    GET_CALLER_PC_BP;
    GET_FATAL_STACK_TRACE_PC_BP(pc, bp);
    ReportExpectedUMRNotFound(&stack);
    Die();
  }
  msan_expect_umr = expect_umr;
}

void __msan_print_shadow(const void *x, uptr size) {
  if (!MEM_IS_APP(x)) {
    Printf("Not a valid application address: %p\n", x);
    return;
  }

  DescribeMemoryRange(x, size);
}

void __msan_dump_shadow(const void *x, uptr size) {
  if (!MEM_IS_APP(x)) {
    Printf("Not a valid application address: %p\n", x);
    return;
  }

  unsigned char *s = (unsigned char*)MEM_TO_SHADOW(x);
  Printf("%p[%p] ", (void *)s, x);
  for (uptr i = 0; i < size; i++)
    Printf("%x%x ", s[i] >> 4, s[i] & 0xf);
  Printf("\n");
}

sptr __msan_test_shadow(const void *x, uptr size) {
  if (!MEM_IS_APP(x)) return -1;
  unsigned char *s = (unsigned char *)MEM_TO_SHADOW((uptr)x);
  if (__sanitizer::mem_is_zero((const char *)s, size))
    return -1;
  // Slow path: loop through again to find the location.
  for (uptr i = 0; i < size; ++i)
    if (s[i])
      return i;
  return -1;
}

void __msan_check_mem_is_initialized(const void *x, uptr size) {
  if (!__msan::flags()->report_umrs) return;
  sptr offset = __msan_test_shadow(x, size);
  if (offset < 0)
    return;

  GET_CALLER_PC_BP;
  ReportUMRInsideAddressRange(__func__, x, size, offset);
  __msan::PrintWarningWithOrigin(pc, bp,
                                 __msan_get_origin(((const char *)x) + offset));
  if (__msan::flags()->halt_on_error) {
    Printf("Exiting\n");
    Die();
  }
}

int __msan_set_poison_in_malloc(int do_poison) {
  int old = flags()->poison_in_malloc;
  flags()->poison_in_malloc = do_poison;
  return old;
}

int __msan_has_dynamic_component() { return false; }

NOINLINE
void __msan_clear_on_return() {
  __msan_param_tls[0] = 0;
}

void __msan_partial_poison(const void* data, void* shadow, uptr size) {
  internal_memcpy((void*)MEM_TO_SHADOW((uptr)data), shadow, size);
}

void __msan_load_unpoisoned(const void *src, uptr size, void *dst) {
  internal_memcpy(dst, src, size);
  __msan_unpoison(dst, size);
}

void __msan_set_origin(const void *a, uptr size, u32 origin) {
  if (__msan_get_track_origins()) SetOrigin(a, size, origin);
}

void __msan_set_alloca_origin(void *a, uptr size, char *descr) {
  SetAllocaOrigin(a, size, reinterpret_cast<u32 *>(descr), descr + 4,
                  GET_CALLER_PC());
}

void __msan_set_alloca_origin4(void *a, uptr size, char *descr, uptr pc) {
  // Intentionally ignore the passed-in pc and use the return address instead.
  // This function is kept for compatibility, in case the program is linked
  // with a library instrumented by an older clang.
  SetAllocaOrigin(a, size, reinterpret_cast<u32 *>(descr), descr + 4,
                  GET_CALLER_PC());
}

void __msan_set_alloca_origin_with_descr(void *a, uptr size, u32 *id_ptr,
                                         char *descr) {
  SetAllocaOrigin(a, size, id_ptr, descr, GET_CALLER_PC());
}

void __msan_set_alloca_origin_no_descr(void *a, uptr size, u32 *id_ptr) {
  SetAllocaOrigin(a, size, id_ptr, nullptr, GET_CALLER_PC());
}

u32 __msan_chain_origin(u32 id) {
  GET_CALLER_PC_BP;
  GET_STORE_STACK_TRACE_PC_BP(pc, bp);
  return ChainOrigin(id, &stack);
}

u32 __msan_get_origin(const void *a) {
  if (!__msan_get_track_origins()) return 0;
  uptr x = (uptr)a;
  uptr aligned = x & ~3ULL;
  uptr origin_ptr = MEM_TO_ORIGIN(aligned);
  return *(u32*)origin_ptr;
}

int __msan_origin_is_descendant_or_same(u32 this_id, u32 prev_id) {
  Origin o = Origin::FromRawId(this_id);
  while (o.raw_id() != prev_id && o.isChainedOrigin())
    o = o.getNextChainedOrigin(nullptr);
  return o.raw_id() == prev_id;
}

u32 __msan_get_umr_origin() {
  return __msan_origin_tls;
}

u16 __sanitizer_unaligned_load16(const uu16 *p) {
  internal_memcpy(&__msan_retval_tls[0], (void *)MEM_TO_SHADOW((uptr)p),
                  sizeof(uu16));
  if (__msan_get_track_origins())
    __msan_retval_origin_tls = GetOriginIfPoisoned((uptr)p, sizeof(*p));
  return *p;
}
u32 __sanitizer_unaligned_load32(const uu32 *p) {
  internal_memcpy(&__msan_retval_tls[0], (void *)MEM_TO_SHADOW((uptr)p),
                  sizeof(uu32));
  if (__msan_get_track_origins())
    __msan_retval_origin_tls = GetOriginIfPoisoned((uptr)p, sizeof(*p));
  return *p;
}
u64 __sanitizer_unaligned_load64(const uu64 *p) {
  internal_memcpy(&__msan_retval_tls[0], (void *)MEM_TO_SHADOW((uptr)p),
                  sizeof(uu64));
  if (__msan_get_track_origins())
    __msan_retval_origin_tls = GetOriginIfPoisoned((uptr)p, sizeof(*p));
  return *p;
}
void __sanitizer_unaligned_store16(uu16 *p, u16 x) {
  static_assert(sizeof(uu16) == sizeof(u16), "incompatible types");
  u16 s;
  internal_memcpy(&s, &__msan_param_tls[1], sizeof(uu16));
  internal_memcpy((void *)MEM_TO_SHADOW((uptr)p), &s, sizeof(uu16));
  if (s && __msan_get_track_origins())
    if (uu32 o = __msan_param_origin_tls[2])
      SetOriginIfPoisoned((uptr)p, (uptr)&s, sizeof(s), o);
  *p = x;
}
void __sanitizer_unaligned_store32(uu32 *p, u32 x) {
  static_assert(sizeof(uu32) == sizeof(u32), "incompatible types");
  u32 s;
  internal_memcpy(&s, &__msan_param_tls[1], sizeof(uu32));
  internal_memcpy((void *)MEM_TO_SHADOW((uptr)p), &s, sizeof(uu32));
  if (s && __msan_get_track_origins())
    if (uu32 o = __msan_param_origin_tls[2])
      SetOriginIfPoisoned((uptr)p, (uptr)&s, sizeof(s), o);
  *p = x;
}
void __sanitizer_unaligned_store64(uu64 *p, u64 x) {
  u64 s = __msan_param_tls[1];
  *(uu64 *)MEM_TO_SHADOW((uptr)p) = s;
  if (s && __msan_get_track_origins())
    if (uu32 o = __msan_param_origin_tls[2])
      SetOriginIfPoisoned((uptr)p, (uptr)&s, sizeof(s), o);
  *p = x;
}

void __msan_set_death_callback(void (*callback)(void)) {
  SetUserDieCallback(callback);
}

void __msan_start_switch_fiber(const void *bottom, uptr size) {
  MsanThread *t = GetCurrentThread();
  if (!t) {
    VReport(1, "__msan_start_switch_fiber called from unknown thread\n");
    return;
  }
  t->StartSwitchFiber((uptr)bottom, size);
}

void __msan_finish_switch_fiber(const void **bottom_old, uptr *size_old) {
  MsanThread *t = GetCurrentThread();
  if (!t) {
    VReport(1, "__msan_finish_switch_fiber called from unknown thread\n");
    return;
  }
  t->FinishSwitchFiber((uptr *)bottom_old, (uptr *)size_old);

  internal_memset(__msan_param_tls, 0, sizeof(__msan_param_tls));
  internal_memset(__msan_retval_tls, 0, sizeof(__msan_retval_tls));
  internal_memset(__msan_va_arg_tls, 0, sizeof(__msan_va_arg_tls));

  if (__msan_get_track_origins()) {
    internal_memset(__msan_param_origin_tls, 0,
                    sizeof(__msan_param_origin_tls));
    internal_memset(&__msan_retval_origin_tls, 0,
                    sizeof(__msan_retval_origin_tls));
    internal_memset(__msan_va_arg_origin_tls, 0,
                    sizeof(__msan_va_arg_origin_tls));
  }
}

SANITIZER_INTERFACE_WEAK_DEF(const char *, __msan_default_options, void) {
  return "";
}

extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_print_stack_trace() {
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME());
  stack.Print();
}
}  // extern "C"