//===- FuzzerTracePC.h - Internal header for the Fuzzer --------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// fuzzer::TracePC
//===----------------------------------------------------------------------===//

#ifndef LLVM_FUZZER_TRACE_PC
#define LLVM_FUZZER_TRACE_PC

#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerValueBitMap.h"

#include <algorithm>
#include <limits>
#include <set>
#include <string>
#include <unordered_map>

namespace fuzzer {

// TableOfRecentCompares (TORC) remembers the most recently performed
// comparisons of type T.
// We record the arguments of CMP instructions in this table unconditionally
// because it seems cheaper this way than to compute some expensive
// conditions inside __sanitizer_cov_trace_cmp*.
// After the unit has been executed we may decide to use the contents of
// this table to populate a Dictionary.
template<class T, size_t kSizeT>
struct TableOfRecentCompares {
  static const size_t kSize = kSizeT;
  struct Pair {
    T A, B;
  };
  ATTRIBUTE_NO_SANITIZE_ALL
  void Insert(size_t Idx, const T &Arg1, const T &Arg2) {
    Idx = Idx % kSize;
    Table[Idx].A = Arg1;
    Table[Idx].B = Arg2;
  }

  Pair Get(size_t I) { return Table[I % kSize]; }

  Pair Table[kSize];
};
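
// Illustrative sketch only (not code that ships with the fuzzer): the comment
// above mentions populating a Dictionary from this table once a unit has
// executed. A consumer could walk the table roughly like this, where
// SuggestDictionaryEntry is a hypothetical callback:
//
//   TableOfRecentCompares<uint64_t, 32> &T = TPC.TORC8; // see TracePC below
//   for (size_t i = 0; i < T.kSize; i++) {
//     auto P = T.Get(i); // operands of a recently executed 64-bit comparison
//     SuggestDictionaryEntry(P.A);
//     SuggestDictionaryEntry(P.B);
//   }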

template <size_t kSizeT>
struct MemMemTable {
  static const size_t kSize = kSizeT;
  Word MemMemWords[kSize];
  Word EmptyWord;

  void Add(const uint8_t *Data, size_t Size) {
    if (Size <= 2) return;
    Size = std::min(Size, Word::GetMaxSize());
    auto Idx = SimpleFastHash(Data, Size) % kSize;
    MemMemWords[Idx].Set(Data, Size);
  }
  const Word &Get(size_t Idx) {
    for (size_t i = 0; i < kSize; i++) {
      const Word &W = MemMemWords[(Idx + i) % kSize];
      if (W.size()) return W;
    }
    EmptyWord.Set(nullptr, 0);
    return EmptyWord;
  }
};

class TracePC {
 public:
  void HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop);
  void HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop);
  void HandleCallerCallee(uintptr_t Caller, uintptr_t Callee);
  template <class T> void HandleCmp(uintptr_t PC, T Arg1, T Arg2);
  size_t GetTotalPCCoverage();
  void SetUseCounters(bool UC) { UseCounters = UC; }
  void SetUseValueProfileMask(uint32_t VPMask) { UseValueProfileMask = VPMask; }
  void SetPrintNewPCs(bool P) { DoPrintNewPCs = P; }
  void SetPrintNewFuncs(size_t P) { NumPrintNewFuncs = P; }
  void UpdateObservedPCs();
  template <class Callback> size_t CollectFeatures(Callback CB) const;

  void ResetMaps() {
    ValueProfileMap.Reset();
    ClearExtraCounters();
    ClearInlineCounters();
  }

  void ClearInlineCounters();

  void UpdateFeatureSet(size_t CurrentElementIdx, size_t CurrentElementSize);
  void PrintFeatureSet();

  void PrintModuleInfo();

  void PrintCoverage(bool PrintAllCounters);

  template<class CallBack>
  void IterateCoveredFunctions(CallBack CB);

  void AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                         size_t n, bool StopAtZero);

  TableOfRecentCompares<uint32_t, 32> TORC4;
  TableOfRecentCompares<uint64_t, 32> TORC8;
  TableOfRecentCompares<Word, 32> TORCW;
  MemMemTable<1024> MMT;

  void RecordInitialStack();
  uintptr_t GetMaxStackOffset() const;

  template<class CallBack>
  void ForEachObservedPC(CallBack CB) {
    for (auto PC : ObservedPCs)
      CB(PC);
  }

  void SetFocusFunction(const std::string &FuncName);
  bool ObservedFocusFunction();

  struct PCTableEntry {
    uintptr_t PC, PCFlags;
  };

  uintptr_t PCTableEntryIdx(const PCTableEntry *TE);
  const PCTableEntry *PCTableEntryByIdx(uintptr_t Idx);
  static uintptr_t GetNextInstructionPc(uintptr_t PC);
  bool PcIsFuncEntry(const PCTableEntry *TE) { return TE->PCFlags & 1; }

 private:
  bool UseCounters = false;
  uint32_t UseValueProfileMask = 0;
  bool DoPrintNewPCs = false;
  size_t NumPrintNewFuncs = 0;

  // Module represents the array of 8-bit counters split into regions
  // such that every region, except maybe the first and the last one, is one
  // full page.
  struct Module {
    struct Region {
      uint8_t *Start, *Stop;
      bool Enabled;
      bool OneFullPage;
    };
    Region *Regions;
    size_t NumRegions;
    uint8_t *Start() { return Regions[0].Start; }
    uint8_t *Stop() { return Regions[NumRegions - 1].Stop; }
    size_t Size() { return Stop() - Start(); }
    size_t Idx(uint8_t *P) {
      assert(P >= Start() && P < Stop());
      return P - Start();
    }
  };

  Module Modules[4096];
  size_t NumModules;  // linker-initialized.
  size_t NumInline8bitCounters;

  template <class Callback>
  void IterateCounterRegions(Callback CB) {
    for (size_t m = 0; m < NumModules; m++)
      for (size_t r = 0; r < Modules[m].NumRegions; r++)
        CB(Modules[m].Regions[r]);
  }

  struct { const PCTableEntry *Start, *Stop; } ModulePCTable[4096];
  size_t NumPCTables;
  size_t NumPCsInPCTables;

  std::set<const PCTableEntry *> ObservedPCs;
  std::unordered_map<uintptr_t, uintptr_t> ObservedFuncs;  // PC => Counter.

  uint8_t *FocusFunctionCounterPtr = nullptr;

  ValueBitMap ValueProfileMap;
  uintptr_t InitialStack;
};

template <class Callback>
// void Callback(size_t FirstFeature, size_t Idx, uint8_t Value);
ATTRIBUTE_NO_SANITIZE_ALL
size_t ForEachNonZeroByte(const uint8_t *Begin, const uint8_t *End,
                          size_t FirstFeature, Callback Handle8bitCounter) {
  typedef uintptr_t LargeType;
  const size_t Step = sizeof(LargeType) / sizeof(uint8_t);
  const size_t StepMask = Step - 1;
  auto P = Begin;
  // Iterate by 1 byte until either the alignment boundary or the end.
  for (; reinterpret_cast<uintptr_t>(P) & StepMask && P < End; P++)
    if (uint8_t V = *P)
      Handle8bitCounter(FirstFeature, P - Begin, V);

  // Iterate by Step bytes at a time.
  for (; P + Step <= End; P += Step)
    if (LargeType Bundle = *reinterpret_cast<const LargeType *>(P)) {
      Bundle = HostToLE(Bundle);
      for (size_t I = 0; I < Step; I++, Bundle >>= 8)
        if (uint8_t V = Bundle & 0xff)
          Handle8bitCounter(FirstFeature, P - Begin + I, V);
    }

  // Iterate by 1 byte until the end.
  for (; P < End; P++)
    if (uint8_t V = *P)
      Handle8bitCounter(FirstFeature, P - Begin, V);
  return End - Begin;
}

// Given a non-zero Counter returns a number in the range [0,7].
template<class T>
unsigned CounterToFeature(T Counter) {
  // Returns a feature number by placing Counters into buckets as illustrated
  // below.
  //
  // Counter bucket: [1] [2] [3] [4-7] [8-15] [16-31] [32-127] [128+]
  // Feature number:  0   1   2    3      4       5        6       7
  //
  // This is a heuristic taken from AFL (see
  // http://lcamtuf.coredump.cx/afl/technical_details.txt).
  //
  // This implementation may change in the future so clients should
  // not rely on it.
  assert(Counter);
  unsigned Bit = 0;
  /**/ if (Counter >= 128) Bit = 7;
  else if (Counter >= 32) Bit = 6;
  else if (Counter >= 16) Bit = 5;
  else if (Counter >= 8) Bit = 4;
  else if (Counter >= 4) Bit = 3;
  else if (Counter >= 3) Bit = 2;
  else if (Counter >= 2) Bit = 1;
  return Bit;
}

template <class Callback>  // void Callback(uint32_t Feature)
ATTRIBUTE_NO_SANITIZE_ADDRESS ATTRIBUTE_NOINLINE size_t
TracePC::CollectFeatures(Callback HandleFeature) const {
  auto Handle8bitCounter = [&](size_t FirstFeature,
                               size_t Idx, uint8_t Counter) {
    if (UseCounters)
      HandleFeature(static_cast<uint32_t>(FirstFeature + Idx * 8 +
                                          CounterToFeature(Counter)));
    else
      HandleFeature(static_cast<uint32_t>(FirstFeature + Idx));
  };

  size_t FirstFeature = 0;

  for (size_t i = 0; i < NumModules; i++) {
    for (size_t r = 0; r < Modules[i].NumRegions; r++) {
      if (!Modules[i].Regions[r].Enabled) continue;
      FirstFeature += 8 * ForEachNonZeroByte(Modules[i].Regions[r].Start,
                                             Modules[i].Regions[r].Stop,
                                             FirstFeature, Handle8bitCounter);
    }
  }

  FirstFeature +=
      8 * ForEachNonZeroByte(ExtraCountersBegin(), ExtraCountersEnd(),
                             FirstFeature, Handle8bitCounter);

  if (UseValueProfileMask) {
    ValueProfileMap.ForEach([&](size_t Idx) {
      HandleFeature(static_cast<uint32_t>(FirstFeature + Idx));
    });
    FirstFeature += ValueProfileMap.SizeInBits();
  }

  // Step function, grows similar to 8 * Log_2(A).
  auto StackDepthStepFunction = [](size_t A) -> size_t {
    if (!A)
      return A;
    auto Log2 = Log(A);
    if (Log2 < 3)
      return A;
    Log2 -= 3;
    return (Log2 + 1) * 8 + ((A >> Log2) & 7);
  };
  assert(StackDepthStepFunction(1024) == 64);
  assert(StackDepthStepFunction(1024 * 4) == 80);
  assert(StackDepthStepFunction(1024 * 1024) == 144);

  if (auto MaxStackOffset = GetMaxStackOffset()) {
    HandleFeature(static_cast<uint32_t>(
        FirstFeature + StackDepthStepFunction(MaxStackOffset / 8)));
    FirstFeature += StackDepthStepFunction(std::numeric_limits<size_t>::max());
  }

  return FirstFeature;
}
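
// Illustrative sketch only (not the fuzzer's actual call site):
// CollectFeatures is meant to run after a unit has executed, with a callback
// that receives each observed feature as a uint32_t. For example, to gather
// the features into a container:
//
//   std::vector<uint32_t> Features;
//   TPC.CollectFeatures(
//       [&](uint32_t Feature) { Features.push_back(Feature); });
//
// The returned value is the total size of the feature domain that was scanned.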

extern TracePC TPC;

} // namespace fuzzer

#endif // LLVM_FUZZER_TRACE_PC