//===-- hwasan.cpp --------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of HWAddressSanitizer.
//
// HWAddressSanitizer runtime.
//===----------------------------------------------------------------------===//

#include "hwasan.h"

#include "hwasan_checks.h"
#include "hwasan_dynamic_shadow.h"
#include "hwasan_globals.h"
#include "hwasan_mapping.h"
#include "hwasan_poisoning.h"
#include "hwasan_report.h"
#include "hwasan_thread.h"
#include "hwasan_thread_list.h"
#include "sanitizer_common/sanitizer_atomic.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_flag_parser.h"
#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_interface_internal.h"
#include "sanitizer_common/sanitizer_libc.h"
#include "sanitizer_common/sanitizer_procmaps.h"
#include "sanitizer_common/sanitizer_stackdepot.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "sanitizer_common/sanitizer_symbolizer.h"
#include "ubsan/ubsan_flags.h"
#include "ubsan/ubsan_init.h"

// ATTENTION! No system header includes in this file.

using namespace __sanitizer;

namespace __hwasan {

static Flags hwasan_flags;

Flags *flags() {
  return &hwasan_flags;
}

int hwasan_inited = 0;
int hwasan_instrumentation_inited = 0;
bool hwasan_init_is_running;

int hwasan_report_count = 0;

uptr kLowShadowStart;
uptr kLowShadowEnd;
uptr kHighShadowStart;
uptr kHighShadowEnd;

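// Both the flag defaults and the flag registration are generated from
// hwasan_flags.inc via the HWASAN_FLAG X-macro: each
// HWASAN_FLAG(Type, Name, DefaultValue, Description) entry in that file
// expands to "Name = DefaultValue;" in SetDefaults() and to a RegisterFlag()
// call in RegisterHwasanFlags().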
void Flags::SetDefaults() {
#define HWASAN_FLAG(Type, Name, DefaultValue, Description) Name = DefaultValue;
#include "hwasan_flags.inc"
#undef HWASAN_FLAG
}

static void RegisterHwasanFlags(FlagParser *parser, Flags *f) {
#define HWASAN_FLAG(Type, Name, DefaultValue, Description) \
  RegisterFlag(parser, #Name, Description, &f->Name);
#include "hwasan_flags.inc"
#undef HWASAN_FLAG
}

static void InitializeFlags() {
  SetCommonFlagsDefaults();
  {
    CommonFlags cf;
    cf.CopyFrom(*common_flags());
    cf.external_symbolizer_path = GetEnv("HWASAN_SYMBOLIZER_PATH");
    cf.malloc_context_size = 20;
    cf.handle_ioctl = true;
    // FIXME: test and enable.
    cf.check_printf = false;
    cf.intercept_tls_get_addr = true;
    cf.exitcode = 99;
    // 8 shadow pages cover ~512kB of application memory, small enough to
    // cover common stack sizes.
    cf.clear_shadow_mmap_threshold = 4096 * (SANITIZER_ANDROID ? 2 : 8);
    // Sigtrap is used in error reporting.
    cf.handle_sigtrap = kHandleSignalExclusive;
    // For now only tested on Linux. Other platforms can be turned on as they
    // become ready.
    cf.detect_leaks = cf.detect_leaks && SANITIZER_LINUX && !SANITIZER_ANDROID;

#if SANITIZER_ANDROID
    // Let the platform handle other signals. It is better at reporting them
    // than we are.
    cf.handle_segv = kHandleSignalNo;
    cf.handle_sigbus = kHandleSignalNo;
    cf.handle_abort = kHandleSignalNo;
    cf.handle_sigill = kHandleSignalNo;
    cf.handle_sigfpe = kHandleSignalNo;
#endif
    OverrideCommonFlags(cf);
  }

  Flags *f = flags();
  f->SetDefaults();

  FlagParser parser;
  RegisterHwasanFlags(&parser, f);
  RegisterCommonFlags(&parser);

#if CAN_SANITIZE_LEAKS
  __lsan::Flags *lf = __lsan::flags();
  lf->SetDefaults();

  FlagParser lsan_parser;
  __lsan::RegisterLsanFlags(&lsan_parser, lf);
  RegisterCommonFlags(&lsan_parser);
#endif

#if HWASAN_CONTAINS_UBSAN
  __ubsan::Flags *uf = __ubsan::flags();
  uf->SetDefaults();

  FlagParser ubsan_parser;
  __ubsan::RegisterUbsanFlags(&ubsan_parser, uf);
  RegisterCommonFlags(&ubsan_parser);
#endif

  // Override from user-specified string.
  if (__hwasan_default_options)
    parser.ParseString(__hwasan_default_options());
#if CAN_SANITIZE_LEAKS
  lsan_parser.ParseString(__lsan_default_options());
#endif
#if HWASAN_CONTAINS_UBSAN
  const char *ubsan_default_options = __ubsan_default_options();
  ubsan_parser.ParseString(ubsan_default_options);
#endif

  parser.ParseStringFromEnv("HWASAN_OPTIONS");
#if CAN_SANITIZE_LEAKS
  lsan_parser.ParseStringFromEnv("LSAN_OPTIONS");
#endif
#if HWASAN_CONTAINS_UBSAN
  ubsan_parser.ParseStringFromEnv("UBSAN_OPTIONS");
#endif

  InitializeCommonFlags();

  if (Verbosity()) ReportUnrecognizedFlags();

  if (common_flags()->help) parser.PrintFlagDescriptions();
  // Flag validation:
  if (!CAN_SANITIZE_LEAKS && common_flags()->detect_leaks) {
    Report("%s: detect_leaks is not supported on this platform.\n",
           SanitizerToolName);
    Die();
  }
}

static void CheckUnwind() {
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME());
  stack.Print();
}

static void HwasanFormatMemoryUsage(InternalScopedString &s) {
  HwasanThreadList &thread_list = hwasanThreadList();
  auto thread_stats = thread_list.GetThreadStats();
  auto sds = StackDepotGetStats();
  AllocatorStatCounters asc;
  GetAllocatorStats(asc);
  s.append(
      "HWASAN pid: %d rss: %zd threads: %zd stacks: %zd"
      " thr_aux: %zd stack_depot: %zd uniq_stacks: %zd"
      " heap: %zd",
      internal_getpid(), GetRSS(), thread_stats.n_live_threads,
      thread_stats.total_stack_size,
      thread_stats.n_live_threads * thread_list.MemoryUsedPerThread(),
      sds.allocated, sds.n_uniq_ids, asc[AllocatorStatMapped]);
}

#if SANITIZER_ANDROID
static constexpr uptr kMemoryUsageBufferSize = 4096;

static char *memory_usage_buffer = nullptr;

static void InitMemoryUsage() {
  memory_usage_buffer =
      (char *)MmapOrDie(kMemoryUsageBufferSize, "memory usage string");
  CHECK(memory_usage_buffer);
  memory_usage_buffer[0] = '\0';
  DecorateMapping((uptr)memory_usage_buffer, kMemoryUsageBufferSize,
                  memory_usage_buffer);
}

void UpdateMemoryUsage() {
  if (!flags()->export_memory_stats)
    return;
  if (!memory_usage_buffer)
    InitMemoryUsage();
  InternalScopedString s;
  HwasanFormatMemoryUsage(s);
  internal_strncpy(memory_usage_buffer, s.data(), kMemoryUsageBufferSize - 1);
  memory_usage_buffer[kMemoryUsageBufferSize - 1] = '\0';
}
#else
void UpdateMemoryUsage() {}
#endif

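// Runs at process exit (see InstallAtExitHandler() in __hwasan_init):
// optionally dumps the module map and statistics, and forces the configured
// exit code if any tag mismatches were reported.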
void HwasanAtExit() {
  if (common_flags()->print_module_map)
    DumpProcessMap();
  if (flags()->print_stats && (flags()->atexit || hwasan_report_count > 0))
    ReportStats();
  if (hwasan_report_count > 0) {
    // ReportAtExitStatistics();
    if (common_flags()->exitcode)
      internal__exit(common_flags()->exitcode);
  }
}

void HandleTagMismatch(AccessInfo ai, uptr pc, uptr frame, void *uc,
                       uptr *registers_frame) {
  InternalMmapVector<BufferedStackTrace> stack_buffer(1);
  BufferedStackTrace *stack = stack_buffer.data();
  stack->Reset();
  stack->Unwind(pc, frame, uc, common_flags()->fast_unwind_on_fatal);

  // The second stack frame contains the failing __hwasan_check function, as
  // we have a stack frame for the registers saved in __hwasan_tag_mismatch
  // that we wish to ignore. This (currently) only occurs on AArch64, as x64
  // implementations use SIGTRAP to implement the failure, and thus do not go
  // through the stack saver.
  if (registers_frame && stack->trace && stack->size > 0) {
    stack->trace++;
    stack->size--;
  }

  bool fatal = flags()->halt_on_error || !ai.recover;
  ReportTagMismatch(stack, ai.addr, ai.size, ai.is_store, fatal,
                    registers_frame);
}

void HwasanTagMismatch(uptr addr, uptr pc, uptr frame, uptr access_info,
                       uptr *registers_frame, size_t outsize) {
  // Decode access_info: bit 4 distinguishes stores from loads, bit 5 marks
  // recoverable accesses, and the low four bits hold log2(access size), with
  // 0xf meaning the size is passed separately in outsize.
  __hwasan::AccessInfo ai;
  ai.is_store = access_info & 0x10;
  ai.is_load = !ai.is_store;
  ai.recover = access_info & 0x20;
  ai.addr = addr;
  if ((access_info & 0xf) == 0xf)
    ai.size = outsize;
  else
    ai.size = 1 << (access_info & 0xf);

  HandleTagMismatch(ai, pc, frame, nullptr, registers_frame);
}

Thread *GetCurrentThread() {
  uptr *ThreadLongPtr = GetCurrentThreadLongPtr();
  if (UNLIKELY(*ThreadLongPtr == 0))
    return nullptr;
  auto *R = (StackAllocationsRingBuffer *)ThreadLongPtr;
  return hwasanThreadList().GetThreadByBufferAddress((uptr)R->Next());
}

}  // namespace __hwasan

using namespace __hwasan;

void __sanitizer::BufferedStackTrace::UnwindImpl(
    uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
  Thread *t = GetCurrentThread();
  if (!t) {
    // The thread is still being created, or has already been destroyed.
    size = 0;
    return;
  }
  Unwind(max_depth, pc, bp, context, t->stack_top(), t->stack_bottom(),
         request_fast);
}

static bool InitializeSingleGlobal(const hwasan_global &global) {
  // Tag all full 16-byte granules of the global with its tag; a trailing
  // partial granule gets a short-granule tag equal to the number of used
  // bytes.
  uptr full_granule_size = RoundDownTo(global.size(), 16);
  TagMemoryAligned(global.addr(), full_granule_size, global.tag());
  if (global.size() % 16)
    TagMemoryAligned(global.addr() + full_granule_size, 16, global.size() % 16);
  return false;
}

static void InitLoadedGlobals() {
  // Fuchsia's libc provides a hook (__sanitizer_module_loaded) that runs on
  // the startup path and calls into __hwasan_library_loaded on all initially
  // loaded modules, so explicitly registering the globals here isn't needed.
  if constexpr (!SANITIZER_FUCHSIA) {
    dl_iterate_phdr(
        [](dl_phdr_info *info, size_t /* size */, void * /* data */) -> int {
          for (const hwasan_global &global : HwasanGlobalsFor(
                   info->dlpi_addr, info->dlpi_phdr, info->dlpi_phnum))
            InitializeSingleGlobal(global);
          return 0;
        },
        nullptr);
  }
}

// Prepare to run instrumented code on the main thread.
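// Sets up OS support, maps the shadow region, and initializes the thread
// list; guarded by hwasan_instrumentation_inited so repeated calls are no-ops.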
static void InitInstrumentation() {
  if (hwasan_instrumentation_inited) return;

  InitializeOsSupport();

  if (!InitShadow()) {
    Printf("FATAL: HWAddressSanitizer cannot mmap the shadow memory.\n");
    DumpProcessMap();
    Die();
  }

  InitThreads();

  hwasan_instrumentation_inited = 1;
}

// Interface.

uptr __hwasan_shadow_memory_dynamic_address;  // Global interface symbol.

// This function was used by the old frame descriptor mechanism. We keep it
// around to avoid breaking ABI.
void __hwasan_init_frames(uptr beg, uptr end) {}

void __hwasan_init_static() {
  InitShadowGOT();
  InitInstrumentation();

  // In the non-static code path we call dl_iterate_phdr here. But at this
  // point libc might not have been initialized enough for dl_iterate_phdr to
  // work. Fortunately, since this is a statically linked executable we can
  // use the linker-defined symbol __ehdr_start to find the only relevant set
  // of phdrs.
  extern ElfW(Ehdr) __ehdr_start;
  for (const hwasan_global &global : HwasanGlobalsFor(
           /* base */ 0,
           reinterpret_cast<const ElfW(Phdr) *>(
               reinterpret_cast<const char *>(&__ehdr_start) +
               __ehdr_start.e_phoff),
           __ehdr_start.e_phnum))
    InitializeSingleGlobal(global);
}

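// Main runtime initialization. Declared as an early (priority 0) constructor
// so the runtime is set up before ordinary static constructors in
// instrumented code run.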
__attribute__((constructor(0))) void __hwasan_init() {
  CHECK(!hwasan_init_is_running);
  if (hwasan_inited) return;
  hwasan_init_is_running = 1;
  SanitizerToolName = "HWAddressSanitizer";

  InitTlsSize();

  CacheBinaryName();
  InitializeFlags();

  // Install tool-specific callbacks in sanitizer_common.
  SetCheckUnwindCallback(CheckUnwind);

  __sanitizer_set_report_path(common_flags()->log_path);

  AndroidTestTlsSlot();

  DisableCoreDumperIfNecessary();

  InitInstrumentation();
  InitLoadedGlobals();

  // Needs to be called here because flags()->random_tags might not have been
  // initialized when InitInstrumentation() was called.
  GetCurrentThread()->EnsureRandomStateInited();

  SetPrintfAndReportCallback(AppendToErrorMessageBuffer);
  // This may call libc -> needs initialized shadow.
  AndroidLogInit();

  InitializeInterceptors();
  InstallDeadlySignalHandlers(HwasanOnDeadlySignal);
  InstallAtExitHandler();  // Needs __cxa_atexit interceptor.

  InitializeCoverage(common_flags()->coverage, common_flags()->coverage_dir);

  HwasanTSDInit();
  HwasanTSDThreadInit();

  HwasanAllocatorInit();
  HwasanInstallAtForkHandler();

  if (CAN_SANITIZE_LEAKS) {
    __lsan::InitCommonLsan();
    InstallAtExitCheckLeaks();
  }

#if HWASAN_CONTAINS_UBSAN
  __ubsan::InitAsPlugin();
#endif

  if (CAN_SANITIZE_LEAKS && common_flags()->detect_leaks) {
    __lsan::ScopedInterceptorDisabler disabler;
    Symbolizer::LateInitialize();
  }

  VPrintf(1, "HWAddressSanitizer init done\n");

  hwasan_init_is_running = 0;
  hwasan_inited = 1;
}

void __hwasan_library_loaded(ElfW(Addr) base, const ElfW(Phdr) * phdr,
                             ElfW(Half) phnum) {
  for (const hwasan_global &global : HwasanGlobalsFor(base, phdr, phnum))
    InitializeSingleGlobal(global);
}

void __hwasan_library_unloaded(ElfW(Addr) base, const ElfW(Phdr) * phdr,
                               ElfW(Half) phnum) {
  for (; phnum != 0; ++phdr, --phnum)
    if (phdr->p_type == PT_LOAD)
      TagMemory(base + phdr->p_vaddr, phdr->p_memsz, 0);
}

void __hwasan_print_shadow(const void *p, uptr sz) {
  uptr ptr_raw = UntagAddr(reinterpret_cast<uptr>(p));
  uptr shadow_first = MemToShadow(ptr_raw);
  uptr shadow_last = MemToShadow(ptr_raw + sz - 1);
  Printf("HWASan shadow map for %zx .. %zx (pointer tag %x)\n", ptr_raw,
         ptr_raw + sz, GetTagFromPointer((uptr)p));
  for (uptr s = shadow_first; s <= shadow_last; ++s) {
    tag_t mem_tag = *reinterpret_cast<tag_t *>(s);
    uptr granule_addr = ShadowToMem(s);
    if (mem_tag && mem_tag < kShadowAlignment)
      Printf("  %zx: %02x(%02x)\n", granule_addr, mem_tag,
             *reinterpret_cast<tag_t *>(granule_addr + kShadowAlignment - 1));
    else
      Printf("  %zx: %02x\n", granule_addr, mem_tag);
  }
}

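// Returns -1 if every byte in [p, p+sz) is accessible through a pointer with
// p's tag (taking short-granule tags into account), otherwise the offset
// (clamped to be non-negative) of the first mismatching byte.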
sptr __hwasan_test_shadow(const void *p, uptr sz) {
  if (sz == 0)
    return -1;
  uptr ptr = reinterpret_cast<uptr>(p);
  tag_t ptr_tag = GetTagFromPointer(ptr);
  uptr ptr_raw = UntagAddr(ptr);
  uptr shadow_first = MemToShadow(ptr_raw);
  uptr shadow_last = MemToShadow(ptr_raw + sz);
  for (uptr s = shadow_first; s < shadow_last; ++s) {
    if (UNLIKELY(*(tag_t *)s != ptr_tag)) {
      uptr short_size =
          ShortTagSize(*(tag_t *)s, AddTagToPointer(ShadowToMem(s), ptr_tag));
      sptr offset = ShadowToMem(s) - ptr_raw + short_size;
      return offset < 0 ? 0 : offset;
    }
  }

  uptr end = ptr + sz;
  uptr tail_sz = end & (kShadowAlignment - 1);
  if (!tail_sz)
    return -1;

  uptr short_size =
      ShortTagSize(*(tag_t *)shadow_last, end & ~(kShadowAlignment - 1));
  if (LIKELY(tail_sz <= short_size))
    return -1;

  sptr offset = sz - tail_sz + short_size;
  return offset < 0 ? 0 : offset;
}

u16 __sanitizer_unaligned_load16(const uu16 *p) {
  return *p;
}
u32 __sanitizer_unaligned_load32(const uu32 *p) {
  return *p;
}
u64 __sanitizer_unaligned_load64(const uu64 *p) {
  return *p;
}
void __sanitizer_unaligned_store16(uu16 *p, u16 x) {
  *p = x;
}
void __sanitizer_unaligned_store32(uu32 *p, u32 x) {
  *p = x;
}
void __sanitizer_unaligned_store64(uu64 *p, u64 x) {
  *p = x;
}

void __hwasan_loadN(uptr p, uptr sz) {
  CheckAddressSized<ErrorAction::Abort, AccessType::Load>(p, sz);
}
void __hwasan_load1(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Load, 0>(p);
}
void __hwasan_load2(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Load, 1>(p);
}
void __hwasan_load4(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Load, 2>(p);
}
void __hwasan_load8(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Load, 3>(p);
}
void __hwasan_load16(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Load, 4>(p);
}

void __hwasan_loadN_noabort(uptr p, uptr sz) {
  CheckAddressSized<ErrorAction::Recover, AccessType::Load>(p, sz);
}
void __hwasan_load1_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Load, 0>(p);
}
void __hwasan_load2_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Load, 1>(p);
}
void __hwasan_load4_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Load, 2>(p);
}
void __hwasan_load8_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Load, 3>(p);
}
void __hwasan_load16_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Load, 4>(p);
}

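// The *_match_all variants skip the check entirely when the pointer carries
// the given match-all tag (typically the tag of untagged pointers), so such
// accesses are never reported.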
void __hwasan_loadN_match_all(uptr p, uptr sz, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddressSized<ErrorAction::Abort, AccessType::Load>(p, sz);
}
void __hwasan_load1_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Load, 0>(p);
}
void __hwasan_load2_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Load, 1>(p);
}
void __hwasan_load4_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Load, 2>(p);
}
void __hwasan_load8_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Load, 3>(p);
}
void __hwasan_load16_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Load, 4>(p);
}

void __hwasan_loadN_match_all_noabort(uptr p, uptr sz, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddressSized<ErrorAction::Recover, AccessType::Load>(p, sz);
}
void __hwasan_load1_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Load, 0>(p);
}
void __hwasan_load2_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Load, 1>(p);
}
void __hwasan_load4_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Load, 2>(p);
}
void __hwasan_load8_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Load, 3>(p);
}
void __hwasan_load16_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Load, 4>(p);
}

void __hwasan_storeN(uptr p, uptr sz) {
  CheckAddressSized<ErrorAction::Abort, AccessType::Store>(p, sz);
}
void __hwasan_store1(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Store, 0>(p);
}
void __hwasan_store2(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Store, 1>(p);
}
void __hwasan_store4(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Store, 2>(p);
}
void __hwasan_store8(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Store, 3>(p);
}
void __hwasan_store16(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Store, 4>(p);
}

void __hwasan_storeN_noabort(uptr p, uptr sz) {
  CheckAddressSized<ErrorAction::Recover, AccessType::Store>(p, sz);
}
void __hwasan_store1_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Store, 0>(p);
}
void __hwasan_store2_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Store, 1>(p);
}
void __hwasan_store4_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Store, 2>(p);
}
void __hwasan_store8_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Store, 3>(p);
}
void __hwasan_store16_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Store, 4>(p);
}

void __hwasan_storeN_match_all(uptr p, uptr sz, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddressSized<ErrorAction::Abort, AccessType::Store>(p, sz);
}
void __hwasan_store1_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Store, 0>(p);
}
void __hwasan_store2_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Store, 1>(p);
}
void __hwasan_store4_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Store, 2>(p);
}
void __hwasan_store8_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Store, 3>(p);
}
void __hwasan_store16_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Store, 4>(p);
}

void __hwasan_storeN_match_all_noabort(uptr p, uptr sz, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddressSized<ErrorAction::Recover, AccessType::Store>(p, sz);
}
void __hwasan_store1_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Store, 0>(p);
}
void __hwasan_store2_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Store, 1>(p);
}
void __hwasan_store4_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Store, 2>(p);
}
void __hwasan_store8_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Store, 3>(p);
}
void __hwasan_store16_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Store, 4>(p);
}

void __hwasan_tag_memory(uptr p, u8 tag, uptr sz) {
  TagMemoryAligned(UntagAddr(p), sz, tag);
}

uptr __hwasan_tag_pointer(uptr p, u8 tag) {
  return AddTagToPointer(p, tag);
}

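// longjmp-style unwinding skips the frames between the current SP and the
// destination SP without running their epilogues, so that region is re-tagged
// to 0 here; otherwise stale stack tags could later cause false positives.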
void __hwasan_handle_longjmp(const void *sp_dst) {
  uptr dst = (uptr)sp_dst;
  // HWASan does not support tagged SP.
  CHECK_EQ(GetTagFromPointer(dst), 0);

  uptr sp = (uptr)__builtin_frame_address(0);
  static const uptr kMaxExpectedCleanupSize = 64 << 20;  // 64M
  if (dst < sp || dst - sp > kMaxExpectedCleanupSize) {
    Report(
        "WARNING: HWASan is ignoring requested __hwasan_handle_longjmp: "
        "stack top: %p; target %p; distance: %zd\n"
        "False positive error reports may follow\n",
        (void *)sp, (void *)dst, dst - sp);
    return;
  }
  TagMemory(sp, dst - sp, 0);
}

void __hwasan_handle_vfork(const void *sp_dst) {
  uptr sp = (uptr)sp_dst;
  Thread *t = GetCurrentThread();
  CHECK(t);
  uptr top = t->stack_top();
  uptr bottom = t->stack_bottom();
  if (top == 0 || bottom == 0 || sp < bottom || sp >= top) {
    Report(
        "WARNING: HWASan is ignoring requested __hwasan_handle_vfork: "
        "stack top: %zx; current %zx; bottom: %zx\n"
        "False positive error reports may follow\n",
        top, sp, bottom);
    return;
  }
  TagMemory(bottom, sp - bottom, 0);
}

extern "C" void *__hwasan_extra_spill_area() {
  Thread *t = GetCurrentThread();
  return &t->vfork_spill();
}

void __hwasan_print_memory_usage() {
  InternalScopedString s;
  HwasanFormatMemoryUsage(s);
  Printf("%s\n", s.data());
}

static const u8 kFallbackTag = 0xBB & kTagMask;

u8 __hwasan_generate_tag() {
  Thread *t = GetCurrentThread();
  if (!t) return kFallbackTag;
  return t->GenerateRandomTag();
}

void __hwasan_add_frame_record(u64 frame_record_info) {
  Thread *t = GetCurrentThread();
  if (t)
    t->stack_allocations()->push(frame_record_info);
}

#if !SANITIZER_SUPPORTS_WEAK_HOOKS
extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE SANITIZER_WEAK_ATTRIBUTE
const char *__hwasan_default_options() { return ""; }
}  // extern "C"
#endif

extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_print_stack_trace() {
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME());
  stack.Print();
}

// Entry point for interoperability between __hwasan_tag_mismatch (ASM) and the
// rest of the mismatch handling code (C++).
void __hwasan_tag_mismatch4(uptr addr, uptr access_info, uptr *registers_frame,
                            size_t outsize) {
  __hwasan::HwasanTagMismatch(addr, (uptr)__builtin_return_address(0),
                              (uptr)__builtin_frame_address(0), access_info,
                              registers_frame, outsize);
}

}  // extern "C"