#include "hwasan_thread.h"

#include "hwasan.h"
#include "hwasan_interface_internal.h"
#include "hwasan_mapping.h"
#include "hwasan_poisoning.h"
#include "hwasan_thread_list.h"
#include "sanitizer_common/sanitizer_atomic.h"
#include "sanitizer_common/sanitizer_file.h"
#include "sanitizer_common/sanitizer_placement_new.h"
#include "sanitizer_common/sanitizer_tls_get_addr.h"

namespace __hwasan {

static u32 RandomSeed() {
  u32 seed;
  do {
    if (UNLIKELY(!GetRandom(reinterpret_cast<void *>(&seed), sizeof(seed),
                            /*blocking=*/false))) {
      // Fallback when the system RNG is unavailable: mix the clock with the
      // current frame address.
      seed = static_cast<u32>(
          (NanoTime() >> 12) ^
          (reinterpret_cast<uptr>(__builtin_frame_address(0)) >> 4));
    }
  } while (!seed);
  return seed;
}

void Thread::InitRandomState() {
  random_state_ = flags()->random_tags ? RandomSeed() : unique_id_;
  random_state_inited_ = true;

  // Push a random number of zeros onto the ring buffer so that the first stack
  // tag base will be random.
  for (tag_t i = 0, e = GenerateRandomTag(); i != e; ++i)
    stack_allocations_->push(0);
}

void Thread::Init(uptr stack_buffer_start, uptr stack_buffer_size,
                  const InitState *state) {
  CHECK_EQ(0, unique_id_);  // try to catch bad stack reuse
  CHECK_EQ(0, stack_top_);
  CHECK_EQ(0, stack_bottom_);

  static atomic_uint64_t unique_id;
  unique_id_ = atomic_fetch_add(&unique_id, 1, memory_order_relaxed);
  if (!IsMainThread())
    os_id_ = GetTid();

  if (auto sz = flags()->heap_history_size)
    heap_allocations_ = HeapAllocationsRingBuffer::New(sz);

#if !SANITIZER_FUCHSIA
  // Do not initialize the stack ring buffer here on Fuchsia: there, threads
  // are set up before we enter the thread itself, so InitStackRingBuffer is
  // called later instead.
  InitStackRingBuffer(stack_buffer_start, stack_buffer_size);
#endif
  InitStackAndTls(state);
  dtls_ = DTLS_Get();
  AllocatorThreadStart(allocator_cache());

  if (flags()->verbose_threads) {
    if (IsMainThread()) {
      Printf("sizeof(Thread): %zd sizeof(HeapRB): %zd sizeof(StackRB): %zd\n",
             sizeof(Thread), heap_allocations_->SizeInBytes(),
             stack_allocations_->size() * sizeof(uptr));
    }
    Print("Creating  : ");
  }
  ClearShadowForThreadStackAndTLS();
}
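// Note: InitStackRingBuffer doubles as thread registration. Constructing the
// ring buffer in the thread-long TLS slot is what makes GetCurrentThread()
// return this thread from then on; the CHECK inside verifies it.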
void Thread::InitStackRingBuffer(uptr stack_buffer_start,
                                 uptr stack_buffer_size) {
  HwasanTSDThreadInit();  // Only needed with interceptors.
  uptr *ThreadLong = GetCurrentThreadLongPtr();
  // The following implicitly sets (this) as the current thread.
  stack_allocations_ = new (ThreadLong)
      StackAllocationsRingBuffer((void *)stack_buffer_start, stack_buffer_size);
  // Check that it worked.
  CHECK_EQ(GetCurrentThread(), this);

  // ScopedTaggingDisabler needs GetCurrentThread to be set up.
  ScopedTaggingDisabler disabler;

  if (stack_bottom_) {
    int local;
    CHECK(AddrIsInStack((uptr)&local));
    CHECK(MemIsApp(stack_bottom_));
    CHECK(MemIsApp(stack_top_ - 1));
  }
}

void Thread::ClearShadowForThreadStackAndTLS() {
  if (stack_top_ != stack_bottom_)
    TagMemory(UntagAddr(stack_bottom_),
              UntagAddr(stack_top_) - UntagAddr(stack_bottom_),
              GetTagFromPointer(stack_top_));
  if (tls_begin_ != tls_end_)
    TagMemory(UntagAddr(tls_begin_),
              UntagAddr(tls_end_) - UntagAddr(tls_begin_),
              GetTagFromPointer(tls_begin_));
}

void Thread::Destroy() {
  if (flags()->verbose_threads)
    Print("Destroying: ");
  AllocatorThreadFinish(allocator_cache());
  ClearShadowForThreadStackAndTLS();
  if (heap_allocations_)
    heap_allocations_->Delete();
  DTLS_Destroy();
  // Unregister this as the current thread.
  // Instrumented code cannot run on this thread from this point onwards, but
  // malloc/free can still be served. Glibc may call free() very late, after
  // all TSD destructors are done.
  CHECK_EQ(GetCurrentThread(), this);
  *GetCurrentThreadLongPtr() = 0;
}

void Thread::Print(const char *Prefix) {
  Printf("%sT%zd %p stack: [%p,%p) sz: %zd tls: [%p,%p)\n", Prefix, unique_id_,
         (void *)this, stack_bottom(), stack_top(),
         stack_top() - stack_bottom(), tls_begin(), tls_end());
}

static u32 xorshift(u32 state) {
  state ^= state << 13;
  state ^= state >> 17;
  state ^= state << 5;
  return state;
}
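// xorshift above is Marsaglia's 32-bit xorshift generator (shift triple
// 13/17/5). Each step is an invertible linear map that fixes zero, so a
// non-zero state never decays to zero; RandomSeed() guarantees the initial
// state is non-zero.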
// Generate a (pseudo-)random non-zero tag.
tag_t Thread::GenerateRandomTag(uptr num_bits) {
  DCHECK_GT(num_bits, 0);
  if (tagging_disabled_)
    return 0;
  tag_t tag;
  const uptr tag_mask = (1ULL << num_bits) - 1;
  do {
    if (flags()->random_tags) {
      if (!random_buffer_) {
        EnsureRandomStateInited();
        random_buffer_ = random_state_ = xorshift(random_state_);
      }
      CHECK(random_buffer_);
      tag = random_buffer_ & tag_mask;
      random_buffer_ >>= num_bits;
    } else {
      EnsureRandomStateInited();
      random_state_ += 1;
      tag = random_state_ & tag_mask;
    }
  } while (!tag);
  return tag;
}

void EnsureMainThreadIDIsCorrect() {
  // The main thread's os id is set lazily here rather than in Thread::Init
  // (note the IsMainThread() check there).
  auto *t = __hwasan::GetCurrentThread();
  if (t && (t->IsMainThread()))
    t->set_os_id(GetTid());
}

}  // namespace __hwasan

// --- Implementation of LSan-specific functions --- {{{1
namespace __lsan {

static __hwasan::HwasanThreadList *GetHwasanThreadListLocked() {
  auto &tl = __hwasan::hwasanThreadList();
  tl.CheckLocked();
  return &tl;
}

static __hwasan::Thread *GetThreadByOsIDLocked(tid_t os_id) {
  return GetHwasanThreadListLocked()->FindThreadLocked(
      [os_id](__hwasan::Thread *t) { return t->os_id() == os_id; });
}

void LockThreads() {
  __hwasan::hwasanThreadList().Lock();
  __hwasan::hwasanThreadArgRetval().Lock();
}

void UnlockThreads() {
  // Unlock in the reverse order of LockThreads().
  __hwasan::hwasanThreadArgRetval().Unlock();
  __hwasan::hwasanThreadList().Unlock();
}

void EnsureMainThreadIDIsCorrect() { __hwasan::EnsureMainThreadIDIsCorrect(); }

bool GetThreadRangesLocked(tid_t os_id, uptr *stack_begin, uptr *stack_end,
                           uptr *tls_begin, uptr *tls_end, uptr *cache_begin,
                           uptr *cache_end, DTLS **dtls) {
  auto *t = GetThreadByOsIDLocked(os_id);
  if (!t)
    return false;
  *stack_begin = t->stack_bottom();
  *stack_end = t->stack_top();
  *tls_begin = t->tls_begin();
  *tls_end = t->tls_end();
  // FIXME: is this correct for HWASan?
  *cache_begin = 0;
  *cache_end = 0;
  *dtls = t->dtls();
  return true;
}

void GetAllThreadAllocatorCachesLocked(InternalMmapVector<uptr> *caches) {}

void GetThreadExtraStackRangesLocked(tid_t os_id,
                                     InternalMmapVector<Range> *ranges) {}
void GetThreadExtraStackRangesLocked(InternalMmapVector<Range> *ranges) {}

void GetAdditionalThreadContextPtrsLocked(InternalMmapVector<uptr> *ptrs) {
  __hwasan::hwasanThreadArgRetval().GetAllPtrsLocked(ptrs);
}

void GetRunningThreadsLocked(InternalMmapVector<tid_t> *threads) {}

}  // namespace __lsan