//===- FuzzerMerge.cpp - merging corpora ----------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// Merging corpora.
//===----------------------------------------------------------------------===//

#include "FuzzerCommand.h"
#include "FuzzerMerge.h"
#include "FuzzerIO.h"
#include "FuzzerInternal.h"
#include "FuzzerTracePC.h"
#include "FuzzerUtil.h"

#include <fstream>
#include <iterator>
#include <set>
#include <sstream>
#include <unordered_set>

namespace fuzzer {

bool Merger::Parse(const std::string &Str, bool ParseCoverage) {
  std::istringstream SS(Str);
  return Parse(SS, ParseCoverage);
}

void Merger::ParseOrExit(std::istream &IS, bool ParseCoverage) {
  if (!Parse(IS, ParseCoverage)) {
    Printf("MERGE: failed to parse the control file (unexpected error)\n");
    exit(1);
  }
}

// The control file example:
//
// 3 # The number of inputs
// 1 # The number of inputs in the first corpus, <= the previous number
// file0
// file1
// file2  # One file name per line.
// STARTED 0 123  # FileID, file size
// FT 0 1 4 6 8  # FileID COV1 COV2 ...
// COV 0 7 8 9  # FileID COV1 COV2 ...
// STARTED 1 456  # If FT is missing, the input crashed while processing.
// STARTED 2 567
// FT 2 8 9
// COV 2 11 12
bool Merger::Parse(std::istream &IS, bool ParseCoverage) {
  LastFailure.clear();
  std::string Line;

  // Parse NumFiles.
  if (!std::getline(IS, Line, '\n')) return false;
  std::istringstream L1(Line);
  size_t NumFiles = 0;
  L1 >> NumFiles;
  if (NumFiles == 0 || NumFiles > 10000000) return false;

  // Parse NumFilesInFirstCorpus.
  if (!std::getline(IS, Line, '\n')) return false;
  std::istringstream L2(Line);
  NumFilesInFirstCorpus = NumFiles + 1;
  L2 >> NumFilesInFirstCorpus;
  if (NumFilesInFirstCorpus > NumFiles) return false;

  // Parse file names.
  Files.resize(NumFiles);
  for (size_t i = 0; i < NumFiles; i++)
    if (!std::getline(IS, Files[i].Name, '\n'))
      return false;

  // Parse STARTED, FT, and COV lines.
  size_t ExpectedStartMarker = 0;
  const size_t kInvalidStartMarker = -1;
  size_t LastSeenStartMarker = kInvalidStartMarker;
  std::vector<uint32_t> TmpFeatures;
  std::set<uint32_t> PCs;
  while (std::getline(IS, Line, '\n')) {
    std::istringstream ISS1(Line);
    std::string Marker;
    uint32_t N;
    if (!(ISS1 >> Marker) || !(ISS1 >> N))
      return false;
    if (Marker == "STARTED") {
      // STARTED FILE_ID FILE_SIZE
      if (ExpectedStartMarker != N)
        return false;
      ISS1 >> Files[ExpectedStartMarker].Size;
      LastSeenStartMarker = ExpectedStartMarker;
      assert(ExpectedStartMarker < Files.size());
      ExpectedStartMarker++;
    } else if (Marker == "FT") {
      // FT FILE_ID COV1 COV2 COV3 ...
      size_t CurrentFileIdx = N;
      if (CurrentFileIdx != LastSeenStartMarker)
        return false;
      LastSeenStartMarker = kInvalidStartMarker;
      if (ParseCoverage) {
        TmpFeatures.clear();  // use a vector from outer scope to avoid resizes.
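        // Features on an FT line are read into TmpFeatures and sorted before
        // being stored; Merger::Merge() relies on each file's feature list
        // being sorted when it applies std::set_difference below.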
        while (ISS1 >> N)
          TmpFeatures.push_back(N);
        std::sort(TmpFeatures.begin(), TmpFeatures.end());
        Files[CurrentFileIdx].Features = TmpFeatures;
      }
    } else if (Marker == "COV") {
      size_t CurrentFileIdx = N;
      if (ParseCoverage)
        while (ISS1 >> N)
          if (PCs.insert(N).second)
            Files[CurrentFileIdx].Cov.push_back(N);
    } else {
      return false;
    }
  }
  if (LastSeenStartMarker != kInvalidStartMarker)
    LastFailure = Files[LastSeenStartMarker].Name;

  FirstNotProcessedFile = ExpectedStartMarker;
  return true;
}

size_t Merger::ApproximateMemoryConsumption() const {
  size_t Res = 0;
  for (const auto &F: Files)
    Res += sizeof(F) + F.Features.size() * sizeof(F.Features[0]);
  return Res;
}

// Decides which files need to be merged (add those to NewFiles).
// Returns the number of new features added.
size_t Merger::Merge(const std::set<uint32_t> &InitialFeatures,
                     std::set<uint32_t> *NewFeatures,
                     const std::set<uint32_t> &InitialCov,
                     std::set<uint32_t> *NewCov,
                     std::vector<std::string> *NewFiles) {
  NewFiles->clear();
  NewFeatures->clear();
  NewCov->clear();
  assert(NumFilesInFirstCorpus <= Files.size());
  std::set<uint32_t> AllFeatures = InitialFeatures;

  // What features are in the initial corpus?
  for (size_t i = 0; i < NumFilesInFirstCorpus; i++) {
    auto &Cur = Files[i].Features;
    AllFeatures.insert(Cur.begin(), Cur.end());
  }
  // Remove all features that we already know from all other inputs.
  for (size_t i = NumFilesInFirstCorpus; i < Files.size(); i++) {
    auto &Cur = Files[i].Features;
    std::vector<uint32_t> Tmp;
    std::set_difference(Cur.begin(), Cur.end(), AllFeatures.begin(),
                        AllFeatures.end(), std::inserter(Tmp, Tmp.begin()));
    Cur.swap(Tmp);
  }

  // Sort. Give preference to
  //   * smaller files
  //   * files with more features.
  std::sort(Files.begin() + NumFilesInFirstCorpus, Files.end(),
            [&](const MergeFileInfo &a, const MergeFileInfo &b) -> bool {
              if (a.Size != b.Size)
                return a.Size < b.Size;
              return a.Features.size() > b.Features.size();
            });

  // One greedy pass: add the file's features to AllFeatures.
  // If new features were added, add this file to NewFiles.
  for (size_t i = NumFilesInFirstCorpus; i < Files.size(); i++) {
    auto &Cur = Files[i].Features;
    // Printf("%s -> sz %zd ft %zd\n", Files[i].Name.c_str(),
    //        Files[i].Size, Cur.size());
    bool FoundNewFeatures = false;
    for (auto Fe: Cur) {
      if (AllFeatures.insert(Fe).second) {
        FoundNewFeatures = true;
        NewFeatures->insert(Fe);
      }
    }
    if (FoundNewFeatures)
      NewFiles->push_back(Files[i].Name);
    for (auto Cov : Files[i].Cov)
      if (InitialCov.find(Cov) == InitialCov.end())
        NewCov->insert(Cov);
  }
  return NewFeatures->size();
}

std::set<uint32_t> Merger::AllFeatures() const {
  std::set<uint32_t> S;
  for (auto &File : Files)
    S.insert(File.Features.begin(), File.Features.end());
  return S;
}

// Inner process. May crash if the target crashes.
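// Each input is recorded in the control file with a STARTED line before it is
// executed and an FT/COV pair afterwards, so if the target crashes the outer
// process can tell which input was responsible and resume past it.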
void Fuzzer::CrashResistantMergeInternalStep(const std::string &CFPath,
                                             bool IsSetCoverMerge) {
  Printf("MERGE-INNER: using the control file '%s'\n", CFPath.c_str());
  Merger M;
  std::ifstream IF(CFPath);
  M.ParseOrExit(IF, false);
  IF.close();
  if (!M.LastFailure.empty())
    Printf("MERGE-INNER: '%s' caused a failure at the previous merge step\n",
           M.LastFailure.c_str());

  Printf("MERGE-INNER: %zd total files;"
         " %zd processed earlier; will process %zd files now\n",
         M.Files.size(), M.FirstNotProcessedFile,
         M.Files.size() - M.FirstNotProcessedFile);

  std::ofstream OF(CFPath, std::ofstream::out | std::ofstream::app);
  std::set<size_t> AllFeatures;
  auto PrintStatsWrapper = [this, &AllFeatures](const char* Where) {
    this->PrintStats(Where, "\n", 0, AllFeatures.size());
  };
  std::set<const TracePC::PCTableEntry *> AllPCs;
  for (size_t i = M.FirstNotProcessedFile; i < M.Files.size(); i++) {
    Fuzzer::MaybeExitGracefully();
    auto U = FileToVector(M.Files[i].Name);
    if (U.size() > MaxInputLen) {
      U.resize(MaxInputLen);
      U.shrink_to_fit();
    }

    // Write the pre-run marker.
    OF << "STARTED " << i << " " << U.size() << "\n";
    OF.flush();  // Flush is important since Command::Execute may crash.
    // Run.
    TPC.ResetMaps();
    ExecuteCallback(U.data(), U.size());
    // Collect coverage. We are iterating over the files in this order:
    // * First, files in the initial corpus ordered by size, smallest first.
    // * Then, all other files, smallest first.
    std::set<size_t> Features;
    if (IsSetCoverMerge)
      TPC.CollectFeatures([&](size_t Feature) { Features.insert(Feature); });
    else
      TPC.CollectFeatures([&](size_t Feature) {
        if (AllFeatures.insert(Feature).second)
          Features.insert(Feature);
      });
    TPC.UpdateObservedPCs();
    // Show stats.
    if (!(TotalNumberOfRuns & (TotalNumberOfRuns - 1)))
      PrintStatsWrapper("pulse ");
    if (TotalNumberOfRuns == M.NumFilesInFirstCorpus)
      PrintStatsWrapper("LOADED");
    // Write the post-run marker and the coverage.
    OF << "FT " << i;
    for (size_t F : Features)
      OF << " " << F;
    OF << "\n";
    OF << "COV " << i;
    TPC.ForEachObservedPC([&](const TracePC::PCTableEntry *TE) {
      if (AllPCs.insert(TE).second)
        OF << " " << TPC.PCTableEntryIdx(TE);
    });
    OF << "\n";
    OF.flush();
  }
  PrintStatsWrapper("DONE  ");
}

// Merges all corpora into the first corpus. A file is added into
// the first corpus only if it adds new features. Unlike `Merger::Merge`,
// this implementation calculates an approximation of the minimum set
// of corpora files that cover all known features (set cover problem).
// Generally, this means that files with more features are preferred for
// merge into the first corpus. When two files have the same number of
// features, the smaller one is preferred.
size_t Merger::SetCoverMerge(const std::set<uint32_t> &InitialFeatures,
                             std::set<uint32_t> *NewFeatures,
                             const std::set<uint32_t> &InitialCov,
                             std::set<uint32_t> *NewCov,
                             std::vector<std::string> *NewFiles) {
  assert(NumFilesInFirstCorpus <= Files.size());
  NewFiles->clear();
  NewFeatures->clear();
  NewCov->clear();
  std::set<uint32_t> AllFeatures;
  // (1 << 21) - 1 is the maximum feature index.
  // See 'kFeatureSetSize' in 'FuzzerCorpus.h'.
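  // Features are tracked modulo kFeatureSetSize below, so distinct feature
  // values may collide; the computed cover is therefore an approximation.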
  const uint32_t kFeatureSetSize = 1 << 21;
  std::vector<bool> Covered(kFeatureSetSize, false);
  size_t NumCovered = 0;

  std::set<uint32_t> ExistingFeatures = InitialFeatures;
  for (size_t i = 0; i < NumFilesInFirstCorpus; ++i)
    ExistingFeatures.insert(Files[i].Features.begin(), Files[i].Features.end());

  // Mark the existing features as covered.
  for (const auto &F : ExistingFeatures) {
    if (!Covered[F % kFeatureSetSize]) {
      ++NumCovered;
      Covered[F % kFeatureSetSize] = true;
    }
    // Calculate an underestimation of the set of covered features
    // since the `Covered` bitvector is smaller than the feature range.
    AllFeatures.insert(F % kFeatureSetSize);
  }

  std::set<size_t> RemainingFiles;
  for (size_t i = NumFilesInFirstCorpus; i < Files.size(); ++i) {
    // Construct an incremental sequence which represents the
    // indices to all files (excluding those in the initial corpus).
    // RemainingFiles = range(NumFilesInFirstCorpus..Files.size()).
    RemainingFiles.insert(i);
    // Insert this file's unique features into AllFeatures.
    for (const auto &F : Files[i].Features)
      AllFeatures.insert(F % kFeatureSetSize);
  }

  // Integrate files into Covered until the set is complete.
  while (NumCovered != AllFeatures.size()) {
    // Index to the file with the largest number of unique features.
    size_t MaxFeaturesIndex = NumFilesInFirstCorpus;
    // Indices to remove from RemainingFiles.
    std::set<size_t> RemoveIndices;
    // Running max unique feature count.
    // Updated upon finding a file with more features.
    size_t MaxNumFeatures = 0;

    // Iterate over all files not yet integrated into Covered,
    // to find the file which has the largest number of
    // features that are not already in Covered.
    for (const auto &i : RemainingFiles) {
      const auto &File = Files[i];
      size_t CurrentUnique = 0;
      // Count the number of features in this file
      // which are not yet in Covered.
      for (const auto &F : File.Features)
        if (!Covered[F % kFeatureSetSize])
          ++CurrentUnique;

      if (CurrentUnique == 0) {
        // All features in this file are already in Covered: skip it next time.
        RemoveIndices.insert(i);
      } else if (CurrentUnique > MaxNumFeatures ||
                 (CurrentUnique == MaxNumFeatures &&
                  File.Size < Files[MaxFeaturesIndex].Size)) {
        // Update the max features file based on unique features.
        // Break ties by selecting the smaller file.
        MaxNumFeatures = CurrentUnique;
        MaxFeaturesIndex = i;
      }
    }
    // Must be a valid index.
    assert(MaxFeaturesIndex < Files.size());
    // Remove any feature-less files found.
    for (const auto &i : RemoveIndices)
      RemainingFiles.erase(i);
    if (MaxNumFeatures == 0) {
      // Did not find a file that adds unique features.
      // This means that we should have no remaining files.
      assert(RemainingFiles.size() == 0);
      assert(NumCovered == AllFeatures.size());
      break;
    }

    // MaxFeaturesIndex must be an element of RemainingFiles.
    assert(RemainingFiles.find(MaxFeaturesIndex) != RemainingFiles.end());
    // Remove the file with the most features from RemainingFiles.
    RemainingFiles.erase(MaxFeaturesIndex);
    const auto &MaxFeatureFile = Files[MaxFeaturesIndex];
    // Add the features of the max feature file to Covered.
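    // Any feature seen here for the first time is also reported via
    // NewFeatures so the caller knows what this file contributes.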
    for (const auto &F : MaxFeatureFile.Features) {
      if (!Covered[F % kFeatureSetSize]) {
        ++NumCovered;
        Covered[F % kFeatureSetSize] = true;
        NewFeatures->insert(F);
      }
    }
    // Add the name of this file to the result.
    NewFiles->push_back(MaxFeatureFile.Name);
    // Update NewCov with the additional coverage
    // that MaxFeatureFile provides.
    for (const auto &C : MaxFeatureFile.Cov)
      if (InitialCov.find(C) == InitialCov.end())
        NewCov->insert(C);
  }

  return NewFeatures->size();
}

static size_t
WriteNewControlFile(const std::string &CFPath,
                    const std::vector<SizedFile> &OldCorpus,
                    const std::vector<SizedFile> &NewCorpus,
                    const std::vector<MergeFileInfo> &KnownFiles) {
  std::unordered_set<std::string> FilesToSkip;
  for (auto &SF: KnownFiles)
    FilesToSkip.insert(SF.Name);

  std::vector<std::string> FilesToUse;
  auto MaybeUseFile = [=, &FilesToUse](std::string Name) {
    if (FilesToSkip.find(Name) == FilesToSkip.end())
      FilesToUse.push_back(Name);
  };
  for (auto &SF: OldCorpus)
    MaybeUseFile(SF.File);
  auto FilesToUseFromOldCorpus = FilesToUse.size();
  for (auto &SF: NewCorpus)
    MaybeUseFile(SF.File);

  RemoveFile(CFPath);
  std::ofstream ControlFile(CFPath);
  ControlFile << FilesToUse.size() << "\n";
  ControlFile << FilesToUseFromOldCorpus << "\n";
  for (auto &FN: FilesToUse)
    ControlFile << FN << "\n";

  if (!ControlFile) {
    Printf("MERGE-OUTER: failed to write to the control file: %s\n",
           CFPath.c_str());
    exit(1);
  }

  return FilesToUse.size();
}

// Outer process. Does not call the target code and thus should not fail.
void CrashResistantMerge(const std::vector<std::string> &Args,
                         const std::vector<SizedFile> &OldCorpus,
                         const std::vector<SizedFile> &NewCorpus,
                         std::vector<std::string> *NewFiles,
                         const std::set<uint32_t> &InitialFeatures,
                         std::set<uint32_t> *NewFeatures,
                         const std::set<uint32_t> &InitialCov,
                         std::set<uint32_t> *NewCov, const std::string &CFPath,
                         bool V, /*Verbose*/
                         bool IsSetCoverMerge) {
  if (NewCorpus.empty() && OldCorpus.empty()) return;  // Nothing to merge.
  size_t NumAttempts = 0;
  std::vector<MergeFileInfo> KnownFiles;
  if (FileSize(CFPath)) {
    VPrintf(V, "MERGE-OUTER: non-empty control file provided: '%s'\n",
            CFPath.c_str());
    Merger M;
    std::ifstream IF(CFPath);
    if (M.Parse(IF, /*ParseCoverage=*/true)) {
      VPrintf(V, "MERGE-OUTER: control file ok, %zd files total,"
              " first not processed file %zd\n",
              M.Files.size(), M.FirstNotProcessedFile);
      if (!M.LastFailure.empty())
        VPrintf(V, "MERGE-OUTER: '%s' will be skipped as unlucky "
                "(merge has stumbled on it the last time)\n",
                M.LastFailure.c_str());
      if (M.FirstNotProcessedFile >= M.Files.size()) {
        // Merge has already been completed with the given merge control file.
        if (M.Files.size() == OldCorpus.size() + NewCorpus.size()) {
          VPrintf(
              V,
              "MERGE-OUTER: nothing to do, merge has been completed before\n");
          exit(0);
        }

        // The number of input files likely changed; start the merge from
        // scratch, but reuse coverage information from the given merge
        // control file.
        VPrintf(
            V,
            "MERGE-OUTER: starting merge from scratch, but reusing coverage "
            "information from the given control file\n");
        KnownFiles = M.Files;
      } else {
        // There is a merge in progress, continue.
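        // Resume with one attempt per unprocessed input; every relaunch of
        // the inner process advances past at least one more input.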
        NumAttempts = M.Files.size() - M.FirstNotProcessedFile;
      }
    } else {
      VPrintf(V, "MERGE-OUTER: bad control file, will overwrite it\n");
    }
  }

  if (!NumAttempts) {
    // The supplied control file is empty or bad; create a fresh one.
    VPrintf(V, "MERGE-OUTER: "
            "%zd files, %zd in the initial corpus, %zd processed earlier\n",
            OldCorpus.size() + NewCorpus.size(), OldCorpus.size(),
            KnownFiles.size());
    NumAttempts = WriteNewControlFile(CFPath, OldCorpus, NewCorpus, KnownFiles);
  }

  // Execute the inner process until it passes.
  // Every inner process should execute at least one input.
  Command BaseCmd(Args);
  BaseCmd.removeFlag("merge");
  BaseCmd.removeFlag("set_cover_merge");
  BaseCmd.removeFlag("fork");
  BaseCmd.removeFlag("collect_data_flow");
  for (size_t Attempt = 1; Attempt <= NumAttempts; Attempt++) {
    Fuzzer::MaybeExitGracefully();
    VPrintf(V, "MERGE-OUTER: attempt %zd\n", Attempt);
    Command Cmd(BaseCmd);
    Cmd.addFlag("merge_control_file", CFPath);
    // If we are going to use the set cover implementation for
    // minimization, add the merge_inner=2 internal flag.
    Cmd.addFlag("merge_inner", IsSetCoverMerge ? "2" : "1");
    if (!V) {
      Cmd.setOutputFile(getDevNull());
      Cmd.combineOutAndErr();
    }
    auto ExitCode = ExecuteCommand(Cmd);
    if (!ExitCode) {
      VPrintf(V, "MERGE-OUTER: successful in %zd attempt(s)\n", Attempt);
      break;
    }
  }
  // Read the control file and do the merge.
  Merger M;
  std::ifstream IF(CFPath);
  IF.seekg(0, IF.end);
  VPrintf(V, "MERGE-OUTER: the control file has %zd bytes\n",
          (size_t)IF.tellg());
  IF.seekg(0, IF.beg);
  M.ParseOrExit(IF, true);
  IF.close();
  VPrintf(V,
          "MERGE-OUTER: consumed %zdMb (%zdMb rss) to parse the control file\n",
          M.ApproximateMemoryConsumption() >> 20, GetPeakRSSMb());

  M.Files.insert(M.Files.end(), KnownFiles.begin(), KnownFiles.end());
  if (IsSetCoverMerge)
    M.SetCoverMerge(InitialFeatures, NewFeatures, InitialCov, NewCov, NewFiles);
  else
    M.Merge(InitialFeatures, NewFeatures, InitialCov, NewCov, NewFiles);
  VPrintf(V, "MERGE-OUTER: %zd new files with %zd new features added; "
          "%zd new coverage edges\n",
          NewFiles->size(), NewFeatures->size(), NewCov->size());
}

} // namespace fuzzer