//===-- asan_poisoning.cpp ------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// Shadow memory poisoning by ASan RTL and by user application.
//===----------------------------------------------------------------------===//

#include "asan_poisoning.h"

#include "asan_report.h"
#include "asan_stack.h"
#include "sanitizer_common/sanitizer_atomic.h"
#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_interface_internal.h"
#include "sanitizer_common/sanitizer_libc.h"

namespace __asan {

static atomic_uint8_t can_poison_memory;

void SetCanPoisonMemory(bool value) {
  atomic_store(&can_poison_memory, value, memory_order_release);
}

bool CanPoisonMemory() {
  return atomic_load(&can_poison_memory, memory_order_acquire);
}

void PoisonShadow(uptr addr, uptr size, u8 value) {
  if (value && !CanPoisonMemory()) return;
  CHECK(AddrIsAlignedByGranularity(addr));
  CHECK(AddrIsInMem(addr));
  CHECK(AddrIsAlignedByGranularity(addr + size));
  CHECK(AddrIsInMem(addr + size - ASAN_SHADOW_GRANULARITY));
  CHECK(REAL(memset));
  FastPoisonShadow(addr, size, value);
}

void PoisonShadowPartialRightRedzone(uptr addr,
                                     uptr size,
                                     uptr redzone_size,
                                     u8 value) {
  if (!CanPoisonMemory()) return;
  CHECK(AddrIsAlignedByGranularity(addr));
  CHECK(AddrIsInMem(addr));
  FastPoisonShadowPartialRightRedzone(addr, size, redzone_size, value);
}

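// A reminder of the shadow encoding used throughout this file: one shadow
// byte describes ASAN_SHADOW_GRANULARITY (8 by default) application bytes.
// A shadow value of 0 means the whole granule is addressable, a value k in
// (0, ASAN_SHADOW_GRANULARITY) means only the first k bytes are addressable,
// and a negative value (one of the kAsan*Magic constants) marks the whole
// granule as poisoned for the corresponding reason.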
struct ShadowSegmentEndpoint {
  u8 *chunk;
  s8 offset;  // in [0, ASAN_SHADOW_GRANULARITY)
  s8 value;  // = *chunk;

  explicit ShadowSegmentEndpoint(uptr address) {
    chunk = (u8*)MemToShadow(address);
    offset = address & (ASAN_SHADOW_GRANULARITY - 1);
    value = *chunk;
  }
};

void AsanPoisonOrUnpoisonIntraObjectRedzone(uptr ptr, uptr size, bool poison) {
  uptr end = ptr + size;
  if (Verbosity()) {
    Printf("__asan_%spoison_intra_object_redzone [%p,%p) %zd\n",
           poison ? "" : "un", (void *)ptr, (void *)end, size);
    if (Verbosity() >= 2)
      PRINT_CURRENT_STACK();
  }
  CHECK(size);
  CHECK_LE(size, 4096);
  CHECK(IsAligned(end, ASAN_SHADOW_GRANULARITY));
  if (!IsAligned(ptr, ASAN_SHADOW_GRANULARITY)) {
    *(u8 *)MemToShadow(ptr) =
        poison ? static_cast<u8>(ptr % ASAN_SHADOW_GRANULARITY) : 0;
    ptr |= ASAN_SHADOW_GRANULARITY - 1;
    ptr++;
  }
  for (; ptr < end; ptr += ASAN_SHADOW_GRANULARITY)
    *(u8*)MemToShadow(ptr) = poison ? kAsanIntraObjectRedzone : 0;
}

}  // namespace __asan

// ---------------------- Interface ---------------- {{{1
using namespace __asan;

// The current implementation of __asan_(un)poison_memory_region doesn't check
// that the user program (un)poisons the memory it owns. It poisons memory
// conservatively, and unpoisons progressively to make sure the ASan shadow
// mapping invariant is preserved (see the detailed mapping description here:
// https://github.com/google/sanitizers/wiki/AddressSanitizerAlgorithm).
//
// * if the user asks to poison the region [left, right), the program poisons
//   at least [left, AlignDown(right)).
// * if the user asks to unpoison the region [left, right), the program
//   unpoisons at most [AlignDown(left), right).
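//
// For example (purely illustrative, not part of the runtime):
//
//   char buf[128];
//   __asan_poison_memory_region(buf + 32, 64);    // buf[32..95] now reports
//   __asan_unpoison_memory_region(buf + 32, 64);  // accesses allowed again
//
// assuming buf + 32 and the size 64 are multiples of ASAN_SHADOW_GRANULARITY;
// otherwise the edges are rounded as described above.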
void __asan_poison_memory_region(void const volatile *addr, uptr size) {
  if (!flags()->allow_user_poisoning || size == 0) return;
  uptr beg_addr = (uptr)addr;
  uptr end_addr = beg_addr + size;
  VPrintf(3, "Trying to poison memory region [%p, %p)\n", (void *)beg_addr,
          (void *)end_addr);
  ShadowSegmentEndpoint beg(beg_addr);
  ShadowSegmentEndpoint end(end_addr);
  if (beg.chunk == end.chunk) {
    CHECK_LT(beg.offset, end.offset);
    s8 value = beg.value;
    CHECK_EQ(value, end.value);
    // We can only poison memory if the byte in end.offset is unaddressable.
    // No need to re-poison memory if it is poisoned already.
    if (value > 0 && value <= end.offset) {
      if (beg.offset > 0) {
        *beg.chunk = Min(value, beg.offset);
      } else {
        *beg.chunk = kAsanUserPoisonedMemoryMagic;
      }
    }
    return;
  }
  CHECK_LT(beg.chunk, end.chunk);
  if (beg.offset > 0) {
    // Mark bytes from beg.offset as unaddressable.
    if (beg.value == 0) {
      *beg.chunk = beg.offset;
    } else {
      *beg.chunk = Min(beg.value, beg.offset);
    }
    beg.chunk++;
  }
  REAL(memset)(beg.chunk, kAsanUserPoisonedMemoryMagic, end.chunk - beg.chunk);
  // Poison if byte in end.offset is unaddressable.
  if (end.value > 0 && end.value <= end.offset) {
    *end.chunk = kAsanUserPoisonedMemoryMagic;
  }
}

void __asan_unpoison_memory_region(void const volatile *addr, uptr size) {
  if (!flags()->allow_user_poisoning || size == 0) return;
  uptr beg_addr = (uptr)addr;
  uptr end_addr = beg_addr + size;
  VPrintf(3, "Trying to unpoison memory region [%p, %p)\n", (void *)beg_addr,
          (void *)end_addr);
  ShadowSegmentEndpoint beg(beg_addr);
  ShadowSegmentEndpoint end(end_addr);
  if (beg.chunk == end.chunk) {
    CHECK_LT(beg.offset, end.offset);
    s8 value = beg.value;
    CHECK_EQ(value, end.value);
    // We unpoison memory bytes up to end.offset if they are not
    // unpoisoned already.
    if (value != 0) {
      *beg.chunk = Max(value, end.offset);
    }
    return;
  }
  CHECK_LT(beg.chunk, end.chunk);
  if (beg.offset > 0) {
    *beg.chunk = 0;
    beg.chunk++;
  }
  REAL(memset)(beg.chunk, 0, end.chunk - beg.chunk);
  if (end.offset > 0 && end.value != 0) {
    *end.chunk = Max(end.value, end.offset);
  }
}

int __asan_address_is_poisoned(void const volatile *addr) {
  return __asan::AddressIsPoisoned((uptr)addr);
}

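// Returns 0 if [beg, beg + size) contains no poisoned bytes; otherwise
// returns the address of the first poisoned byte, or the offending endpoint
// if the region does not lie entirely within the application address space.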
uptr __asan_region_is_poisoned(uptr beg, uptr size) {
  if (!size)
    return 0;
  uptr end = beg + size;
  if (!AddrIsInMem(beg))
    return beg;
  if (!AddrIsInMem(end))
    return end;
  CHECK_LT(beg, end);
  uptr aligned_b = RoundUpTo(beg, ASAN_SHADOW_GRANULARITY);
  uptr aligned_e = RoundDownTo(end, ASAN_SHADOW_GRANULARITY);
  uptr shadow_beg = MemToShadow(aligned_b);
  uptr shadow_end = MemToShadow(aligned_e);
  // First check the first and the last application bytes,
  // then check the ASAN_SHADOW_GRANULARITY-aligned region by calling
  // mem_is_zero on the corresponding shadow.
  if (!__asan::AddressIsPoisoned(beg) && !__asan::AddressIsPoisoned(end - 1) &&
      (shadow_end <= shadow_beg ||
       __sanitizer::mem_is_zero((const char *)shadow_beg,
                                shadow_end - shadow_beg)))
    return 0;
  // The fast check failed, so we have a poisoned byte somewhere.
  // Find it slowly.
  for (; beg < end; beg++)
    if (__asan::AddressIsPoisoned(beg))
      return beg;
  UNREACHABLE("mem_is_zero returned false, but poisoned byte was not found");
  return 0;
}

#define CHECK_SMALL_REGION(p, size, isWrite)                  \
  do {                                                        \
    uptr __p = reinterpret_cast<uptr>(p);                     \
    uptr __size = size;                                       \
    if (UNLIKELY(__asan::AddressIsPoisoned(__p) ||            \
        __asan::AddressIsPoisoned(__p + __size - 1))) {       \
      GET_CURRENT_PC_BP_SP;                                   \
      uptr __bad = __asan_region_is_poisoned(__p, __size);    \
      __asan_report_error(pc, bp, sp, __bad, isWrite, __size, 0);\
    }                                                         \
  } while (false)

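// Checked unaligned load/store helpers. CHECK_SMALL_REGION above inspects the
// first and last byte of [p, p + sizeof(*p)) and, if either is poisoned,
// reports a regular ASan error at the exact bad address found by
// __asan_region_is_poisoned.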
extern "C" SANITIZER_INTERFACE_ATTRIBUTE
u16 __sanitizer_unaligned_load16(const uu16 *p) {
  CHECK_SMALL_REGION(p, sizeof(*p), false);
  return *p;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
u32 __sanitizer_unaligned_load32(const uu32 *p) {
  CHECK_SMALL_REGION(p, sizeof(*p), false);
  return *p;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
u64 __sanitizer_unaligned_load64(const uu64 *p) {
  CHECK_SMALL_REGION(p, sizeof(*p), false);
  return *p;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_unaligned_store16(uu16 *p, u16 x) {
  CHECK_SMALL_REGION(p, sizeof(*p), true);
  *p = x;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_unaligned_store32(uu32 *p, u32 x) {
  CHECK_SMALL_REGION(p, sizeof(*p), true);
  *p = x;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_unaligned_store64(uu64 *p, u64 x) {
  CHECK_SMALL_REGION(p, sizeof(*p), true);
  *p = x;
}

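// On 64-bit targets, operator new[] stores the element count (the C++ "array
// cookie") in the word just before the array. Poisoning that word's shadow
// with kAsanArrayCookieMagic lets ASan flag direct accesses to the cookie,
// while __asan_load_cxx_array_cookie below keeps the legitimate cookie read
// done on array deletion working.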
extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __asan_poison_cxx_array_cookie(uptr p) {
  if (SANITIZER_WORDSIZE != 64) return;
  if (!flags()->poison_array_cookie) return;
  uptr s = MEM_TO_SHADOW(p);
  *reinterpret_cast<u8*>(s) = kAsanArrayCookieMagic;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
uptr __asan_load_cxx_array_cookie(uptr *p) {
  if (SANITIZER_WORDSIZE != 64) return *p;
  if (!flags()->poison_array_cookie) return *p;
  uptr s = MEM_TO_SHADOW(reinterpret_cast<uptr>(p));
  u8 sval = *reinterpret_cast<u8*>(s);
  if (sval == kAsanArrayCookieMagic) return *p;
  // If sval is not kAsanArrayCookieMagic it can only be freed memory,
  // which means that we are going to get double-free. So, return 0 to avoid
  // infinite loop of destructors. We don't want to report a double-free here
  // though, so print a warning just in case.
  // CHECK_EQ(sval, kAsanHeapFreeMagic);
  if (sval == kAsanHeapFreeMagic) {
    Report("AddressSanitizer: loaded array cookie from free-d memory; "
           "expect a double-free report\n");
    return 0;
  }
  // The cookie may remain unpoisoned if e.g. it comes from a custom
  // operator new defined inside a class.
  return *p;
}

// This is a simplified version of __asan_(un)poison_memory_region, which
// assumes that the left border of the region to be poisoned is properly
// aligned.
static void PoisonAlignedStackMemory(uptr addr, uptr size, bool do_poison) {
  if (size == 0) return;
  uptr aligned_size = size & ~(ASAN_SHADOW_GRANULARITY - 1);
  PoisonShadow(addr, aligned_size,
               do_poison ? kAsanStackUseAfterScopeMagic : 0);
  if (size == aligned_size)
    return;
  s8 end_offset = (s8)(size - aligned_size);
  s8* shadow_end = (s8*)MemToShadow(addr + aligned_size);
  s8 end_value = *shadow_end;
  if (do_poison) {
    // If possible, mark all the bytes mapping to last shadow byte as
    // unaddressable.
    if (end_value > 0 && end_value <= end_offset)
      *shadow_end = (s8)kAsanStackUseAfterScopeMagic;
  } else {
    // If necessary, mark the first few bytes mapping to the last shadow byte
    // as addressable.
    if (end_value != 0)
      *shadow_end = Max(end_value, end_offset);
  }
}

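// The __asan_set_shadow_XX entry points write raw shadow values directly:
// addr is a shadow address here, not an application address. Compiler
// instrumentation typically emits calls to them instead of inlining the
// shadow stores when poisoning large stack frames.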
void __asan_set_shadow_00(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0, size);
}

void __asan_set_shadow_01(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x01, size);
}

void __asan_set_shadow_02(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x02, size);
}

void __asan_set_shadow_03(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x03, size);
}

void __asan_set_shadow_04(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x04, size);
}

void __asan_set_shadow_05(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x05, size);
}

void __asan_set_shadow_06(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x06, size);
}

void __asan_set_shadow_07(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x07, size);
}

void __asan_set_shadow_f1(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0xf1, size);
}

void __asan_set_shadow_f2(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0xf2, size);
}

void __asan_set_shadow_f3(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0xf3, size);
}

void __asan_set_shadow_f5(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0xf5, size);
}

void __asan_set_shadow_f8(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0xf8, size);
}

void __asan_poison_stack_memory(uptr addr, uptr size) {
  VReport(1, "poisoning: %p %zx\n", (void *)addr, size);
  PoisonAlignedStackMemory(addr, size, true);
}

void __asan_unpoison_stack_memory(uptr addr, uptr size) {
  VReport(1, "unpoisoning: %p %zx\n", (void *)addr, size);
  PoisonAlignedStackMemory(addr, size, false);
}

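// Shared helper for the container annotation entry points below. When the
// storage is not granule-aligned, its first and last granules may be shared
// with neighbouring objects and cannot always be (un)poisoned exactly; this
// clips the old/new container bounds to the part of the storage that can be
// annotated safely and fixes up the shadow of the first granule when needed.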
static void FixUnalignedStorage(uptr storage_beg, uptr storage_end,
                                uptr &old_beg, uptr &old_end, uptr &new_beg,
                                uptr &new_end) {
  constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
  if (UNLIKELY(!AddrIsAlignedByGranularity(storage_end))) {
    uptr end_down = RoundDownTo(storage_end, granularity);
    // Ignore the last unaligned granule if the storage is followed by an
    // unpoisoned byte, because we can't poison the prefix anyway. Don't call
    // AddressIsPoisoned at all if the container changes do not affect the
    // last granule at all.
    if ((((old_end != new_end) && Max(old_end, new_end) > end_down) ||
         ((old_beg != new_beg) && Max(old_beg, new_beg) > end_down)) &&
        !AddressIsPoisoned(storage_end)) {
      old_beg = Min(end_down, old_beg);
      old_end = Min(end_down, old_end);
      new_beg = Min(end_down, new_beg);
      new_end = Min(end_down, new_end);
    }
  }

  // Handle misaligned begin and cut it off.
  if (UNLIKELY(!AddrIsAlignedByGranularity(storage_beg))) {
    uptr beg_up = RoundUpTo(storage_beg, granularity);
    // The first unaligned granule needs special handling only if we had bytes
    // there before and will have none after.
    if ((new_beg == new_end || new_beg >= beg_up) && old_beg != old_end &&
        old_beg < beg_up) {
      // Keep granule prefix outside of the storage unpoisoned.
      uptr beg_down = RoundDownTo(storage_beg, granularity);
      *(u8 *)MemToShadow(beg_down) = storage_beg - beg_down;
      old_beg = Max(beg_up, old_beg);
      old_end = Max(beg_up, old_end);
      new_beg = Max(beg_up, new_beg);
      new_end = Max(beg_up, new_end);
    }
  }
}

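// Marks the unused capacity of a contiguous container as unaddressable.
// [beg_p, end_p) is the storage owned by the container, [beg_p, old_mid_p)
// held the valid elements before the call, and [beg_p, new_mid_p) holds them
// after it; everything in [new_mid_p, end_p) ends up poisoned, up to
// granularity rounding at the edges. Illustrative call for a vector-like
// container growing from 10 to 50 used bytes inside a 100-byte buffer (not
// part of the runtime):
//
//   __sanitizer_annotate_contiguous_container(buf, buf + 100, buf + 10,
//                                             buf + 50);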
void __sanitizer_annotate_contiguous_container(const void *beg_p,
                                               const void *end_p,
                                               const void *old_mid_p,
                                               const void *new_mid_p) {
  if (!flags()->detect_container_overflow)
    return;
  VPrintf(2, "contiguous_container: %p %p %p %p\n", beg_p, end_p, old_mid_p,
          new_mid_p);
  uptr storage_beg = reinterpret_cast<uptr>(beg_p);
  uptr storage_end = reinterpret_cast<uptr>(end_p);
  uptr old_end = reinterpret_cast<uptr>(old_mid_p);
  uptr new_end = reinterpret_cast<uptr>(new_mid_p);
  uptr old_beg = storage_beg;
  uptr new_beg = storage_beg;
  uptr granularity = ASAN_SHADOW_GRANULARITY;
  if (!(storage_beg <= old_end && storage_beg <= new_end &&
        old_end <= storage_end && new_end <= storage_end)) {
    GET_STACK_TRACE_FATAL_HERE;
    ReportBadParamsToAnnotateContiguousContainer(storage_beg, storage_end,
                                                 old_end, new_end, &stack);
  }
  CHECK_LE(storage_end - storage_beg,
           FIRST_32_SECOND_64(1UL << 30, 1ULL << 40));  // Sanity check.

  if (old_end == new_end)
    return;  // Nothing to do here.

  FixUnalignedStorage(storage_beg, storage_end, old_beg, old_end, new_beg,
                      new_end);

  uptr a = RoundDownTo(Min(old_end, new_end), granularity);
  uptr c = RoundUpTo(Max(old_end, new_end), granularity);
  uptr d1 = RoundDownTo(old_end, granularity);
  // uptr d2 = RoundUpTo(old_mid, granularity);
  // Currently we should be in this state:
  // [a, d1) is good, [d2, c) is bad, [d1, d2) is partially good.
  // Make a quick sanity check that we are indeed in this state.
  //
  // FIXME: Two of these three checks are disabled until we fix
  // https://github.com/google/sanitizers/issues/258.
  // if (d1 != d2)
  //  CHECK_EQ(*(u8*)MemToShadow(d1), old_mid - d1);
  if (a + granularity <= d1)
    CHECK_EQ(*(u8 *)MemToShadow(a), 0);
  // if (d2 + granularity <= c && c <= end)
  //   CHECK_EQ(*(u8 *)MemToShadow(c - granularity),
  //            kAsanContiguousContainerOOBMagic);

  uptr b1 = RoundDownTo(new_end, granularity);
  uptr b2 = RoundUpTo(new_end, granularity);
  // New state:
  // [a, b1) is good, [b2, c) is bad, [b1, b2) is partially good.
  if (b1 > a)
    PoisonShadow(a, b1 - a, 0);
  else if (c > b2)
    PoisonShadow(b2, c - b2, kAsanContiguousContainerOOBMagic);
  if (b1 != b2) {
    CHECK_EQ(b2 - b1, granularity);
    *(u8 *)MemToShadow(b1) = static_cast<u8>(new_end - b1);
  }
}

// Annotates a double-ended contiguous memory area, such as one of
// std::deque's chunks. It allows detecting buggy accesses to allocated but
// not yet used beginning or end items of such a container.
void __sanitizer_annotate_double_ended_contiguous_container(
    const void *storage_beg_p, const void *storage_end_p,
    const void *old_container_beg_p, const void *old_container_end_p,
    const void *new_container_beg_p, const void *new_container_end_p) {
  if (!flags()->detect_container_overflow)
    return;

  VPrintf(2, "contiguous_container: %p %p %p %p %p %p\n", storage_beg_p,
          storage_end_p, old_container_beg_p, old_container_end_p,
          new_container_beg_p, new_container_end_p);

  uptr storage_beg = reinterpret_cast<uptr>(storage_beg_p);
  uptr storage_end = reinterpret_cast<uptr>(storage_end_p);
  uptr old_beg = reinterpret_cast<uptr>(old_container_beg_p);
  uptr old_end = reinterpret_cast<uptr>(old_container_end_p);
  uptr new_beg = reinterpret_cast<uptr>(new_container_beg_p);
  uptr new_end = reinterpret_cast<uptr>(new_container_end_p);

  constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;

  if (!(old_beg <= old_end && new_beg <= new_end) ||
      !(storage_beg <= new_beg && new_end <= storage_end) ||
      !(storage_beg <= old_beg && old_end <= storage_end)) {
    GET_STACK_TRACE_FATAL_HERE;
    ReportBadParamsToAnnotateDoubleEndedContiguousContainer(
        storage_beg, storage_end, old_beg, old_end, new_beg, new_end, &stack);
  }
  CHECK_LE(storage_end - storage_beg,
           FIRST_32_SECOND_64(1UL << 30, 1ULL << 40));  // Sanity check.

  if ((old_beg == old_end && new_beg == new_end) ||
      (old_beg == new_beg && old_end == new_end))
    return;  // Nothing to do here.

  FixUnalignedStorage(storage_beg, storage_end, old_beg, old_end, new_beg,
                      new_end);

  // Handle non-intersecting new/old containers separately to keep the
  // intersecting case simpler.
  if (old_beg == old_end || new_beg == new_end || new_end <= old_beg ||
      old_end <= new_beg) {
    if (old_beg != old_end) {
      // Poisoning the old container.
      uptr a = RoundDownTo(old_beg, granularity);
      uptr b = RoundUpTo(old_end, granularity);
      PoisonShadow(a, b - a, kAsanContiguousContainerOOBMagic);
    }

    if (new_beg != new_end) {
      // Unpoisoning the new container.
      uptr a = RoundDownTo(new_beg, granularity);
      uptr b = RoundDownTo(new_end, granularity);
      PoisonShadow(a, b - a, 0);
      if (!AddrIsAlignedByGranularity(new_end))
        *(u8 *)MemToShadow(b) = static_cast<u8>(new_end - b);
    }

    return;
  }

  // Intersection of old and new containers is not empty.
  CHECK_LT(new_beg, old_end);
  CHECK_GT(new_end, old_beg);

  if (new_beg < old_beg) {
    // Round down because we can't poison prefixes.
    uptr a = RoundDownTo(new_beg, granularity);
    // Round down and ignore [c, old_beg), as its state is defined by the
    // unchanged [old_beg, old_end).
    uptr c = RoundDownTo(old_beg, granularity);
    PoisonShadow(a, c - a, 0);
  } else if (new_beg > old_beg) {
    // Round down and poison [a, old_beg) because it was unpoisoned only as a
    // prefix.
    uptr a = RoundDownTo(old_beg, granularity);
    // Round down and ignore [c, new_beg), as its state is defined by the
    // unchanged [new_beg, old_end).
    uptr c = RoundDownTo(new_beg, granularity);

    PoisonShadow(a, c - a, kAsanContiguousContainerOOBMagic);
  }

  if (new_end > old_end) {
    // Round down to poison the prefix.
    uptr a = RoundDownTo(old_end, granularity);
    // Round down and handle remainder below.
    uptr c = RoundDownTo(new_end, granularity);
    PoisonShadow(a, c - a, 0);
    if (!AddrIsAlignedByGranularity(new_end))
      *(u8 *)MemToShadow(c) = static_cast<u8>(new_end - c);
  } else if (new_end < old_end) {
    // Round up and handle the remainder below.
    uptr a2 = RoundUpTo(new_end, granularity);
    // Round up to poison entire granule as we had nothing in [old_end, c2).
    uptr c2 = RoundUpTo(old_end, granularity);
    PoisonShadow(a2, c2 - a2, kAsanContiguousContainerOOBMagic);

    if (!AddrIsAlignedByGranularity(new_end)) {
      uptr a = RoundDownTo(new_end, granularity);
      *(u8 *)MemToShadow(a) = static_cast<u8>(new_end - a);
    }
  }
}

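// Returns the first address in [begin, end) whose poisoned state differs from
// `poisoned`, or nullptr if every byte matches. For long ranges the first and
// last kMaxRangeToCheck bytes are probed first so that a mismatch near either
// edge is reported without scanning the whole range.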
static const void *FindBadAddress(uptr begin, uptr end, bool poisoned) {
  CHECK_LE(begin, end);
  constexpr uptr kMaxRangeToCheck = 32;
  if (end - begin > kMaxRangeToCheck * 2) {
    if (auto *bad = FindBadAddress(begin, begin + kMaxRangeToCheck, poisoned))
      return bad;
    if (auto *bad = FindBadAddress(end - kMaxRangeToCheck, end, poisoned))
      return bad;
  }

  for (uptr i = begin; i < end; ++i)
    if (AddressIsPoisoned(i) != poisoned)
      return reinterpret_cast<const void *>(i);
  return nullptr;
}

const void *__sanitizer_contiguous_container_find_bad_address(
    const void *beg_p, const void *mid_p, const void *end_p) {
  if (!flags()->detect_container_overflow)
    return nullptr;
  uptr granularity = ASAN_SHADOW_GRANULARITY;
  uptr beg = reinterpret_cast<uptr>(beg_p);
  uptr end = reinterpret_cast<uptr>(end_p);
  uptr mid = reinterpret_cast<uptr>(mid_p);
  CHECK_LE(beg, mid);
  CHECK_LE(mid, end);
  // If the byte after the storage is unpoisoned, everything in the granule
  // before must stay unpoisoned.
  uptr annotations_end =
      (!AddrIsAlignedByGranularity(end) && !AddressIsPoisoned(end))
          ? RoundDownTo(end, granularity)
          : end;
  beg = Min(beg, annotations_end);
  mid = Min(mid, annotations_end);
  if (auto *bad = FindBadAddress(beg, mid, false))
    return bad;
  if (auto *bad = FindBadAddress(mid, annotations_end, true))
    return bad;
  return FindBadAddress(annotations_end, end, false);
}

int __sanitizer_verify_contiguous_container(const void *beg_p,
                                            const void *mid_p,
                                            const void *end_p) {
  return __sanitizer_contiguous_container_find_bad_address(beg_p, mid_p,
                                                           end_p) == nullptr;
}

const void *__sanitizer_double_ended_contiguous_container_find_bad_address(
    const void *storage_beg_p, const void *container_beg_p,
    const void *container_end_p, const void *storage_end_p) {
  if (!flags()->detect_container_overflow)
    return nullptr;
  uptr granularity = ASAN_SHADOW_GRANULARITY;
  uptr storage_beg = reinterpret_cast<uptr>(storage_beg_p);
  uptr storage_end = reinterpret_cast<uptr>(storage_end_p);
  uptr beg = reinterpret_cast<uptr>(container_beg_p);
  uptr end = reinterpret_cast<uptr>(container_end_p);

  // The prefix of the first granule of the container is unpoisoned.
  if (beg != end)
    beg = Max(storage_beg, RoundDownTo(beg, granularity));

  // If the byte after the storage is unpoisoned, the prefix of the last
  // granule is unpoisoned.
  uptr annotations_end = (!AddrIsAlignedByGranularity(storage_end) &&
                          !AddressIsPoisoned(storage_end))
                             ? RoundDownTo(storage_end, granularity)
                             : storage_end;
  storage_beg = Min(storage_beg, annotations_end);
  beg = Min(beg, annotations_end);
  end = Min(end, annotations_end);

  if (auto *bad = FindBadAddress(storage_beg, beg, true))
    return bad;
  if (auto *bad = FindBadAddress(beg, end, false))
    return bad;
  if (auto *bad = FindBadAddress(end, annotations_end, true))
    return bad;
  return FindBadAddress(annotations_end, storage_end, false);
}

int __sanitizer_verify_double_ended_contiguous_container(
    const void *storage_beg_p, const void *container_beg_p,
    const void *container_end_p, const void *storage_end_p) {
  return __sanitizer_double_ended_contiguous_container_find_bad_address(
             storage_beg_p, container_beg_p, container_end_p, storage_end_p) ==
         nullptr;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __asan_poison_intra_object_redzone(uptr ptr, uptr size) {
  AsanPoisonOrUnpoisonIntraObjectRedzone(ptr, size, true);
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __asan_unpoison_intra_object_redzone(uptr ptr, uptr size) {
  AsanPoisonOrUnpoisonIntraObjectRedzone(ptr, size, false);
}

// --- Implementation of LSan-specific functions --- {{{1
namespace __lsan {
bool WordIsPoisoned(uptr addr) {
  return (__asan_region_is_poisoned(addr, sizeof(uptr)) != 0);
}
}  // namespace __lsan