//===- InstrProfWriter.cpp - Instrumented profiling writer ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains support for writing profiling data for clang's
// instrumentation-based PGO and coverage.
//
//===----------------------------------------------------------------------===//

#include "llvm/ProfileData/InstrProfWriter.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/IR/ProfileSummary.h"
#include "llvm/ProfileData/InstrProf.h"
#include "llvm/ProfileData/ProfileCommon.h"
#include "llvm/Support/Endian.h"
#include "llvm/Support/EndianStream.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/OnDiskHashTable.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cstdint>
#include <memory>
#include <string>
#include <tuple>
#include <utility>
#include <vector>

using namespace llvm;
extern cl::opt<bool> DebugInfoCorrelate;

// A struct to define how the data stream should be patched. For Indexed
// profiling, only the uint64_t data type is needed.
struct PatchItem {
  uint64_t Pos; // Where to patch.
  uint64_t *D;  // Pointer to an array of source data.
  int N;        // Number of elements in \c D array.
};

namespace llvm {

// A wrapper class to abstract the writer stream with support for byte
// back patching.
class ProfOStream {
public:
  ProfOStream(raw_fd_ostream &FD)
      : IsFDOStream(true), OS(FD), LE(FD, support::little) {}
  ProfOStream(raw_string_ostream &STR)
      : IsFDOStream(false), OS(STR), LE(STR, support::little) {}

  uint64_t tell() { return OS.tell(); }
  void write(uint64_t V) { LE.write<uint64_t>(V); }

  // \c patch can only be called when all data is written and flushed.
  // For raw_string_ostream, the patch is done on the target string
  // directly and it won't be reflected in the stream's internal buffer.
  void patch(PatchItem *P, int NItems) {
    using namespace support;

    if (IsFDOStream) {
      raw_fd_ostream &FDOStream = static_cast<raw_fd_ostream &>(OS);
      for (int K = 0; K < NItems; K++) {
        FDOStream.seek(P[K].Pos);
        for (int I = 0; I < P[K].N; I++)
          write(P[K].D[I]);
      }
    } else {
      raw_string_ostream &SOStream = static_cast<raw_string_ostream &>(OS);
      std::string &Data = SOStream.str(); // with flush
      for (int K = 0; K < NItems; K++) {
        for (int I = 0; I < P[K].N; I++) {
          uint64_t Bytes = endian::byte_swap<uint64_t, little>(P[K].D[I]);
          Data.replace(P[K].Pos + I * sizeof(uint64_t), sizeof(uint64_t),
                       (const char *)&Bytes, sizeof(uint64_t));
        }
      }
    }
  }

  // If \c OS is an instance of \c raw_fd_ostream, this field will be
  // true. Otherwise, \c OS will be a raw_string_ostream.
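  // \c patch uses this flag to decide between seeking on the file stream and
  // rewriting the backing string in place; both paths emit little-endian
  // uint64_t values.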
  bool IsFDOStream;
  raw_ostream &OS;
  support::endian::Writer LE;
};

class InstrProfRecordWriterTrait {
public:
  using key_type = StringRef;
  using key_type_ref = StringRef;

  using data_type = const InstrProfWriter::ProfilingData *const;
  using data_type_ref = const InstrProfWriter::ProfilingData *const;

  using hash_value_type = uint64_t;
  using offset_type = uint64_t;

  support::endianness ValueProfDataEndianness = support::little;
  InstrProfSummaryBuilder *SummaryBuilder;
  InstrProfSummaryBuilder *CSSummaryBuilder;

  InstrProfRecordWriterTrait() = default;

  static hash_value_type ComputeHash(key_type_ref K) {
    return IndexedInstrProf::ComputeHash(K);
  }

  static std::pair<offset_type, offset_type>
  EmitKeyDataLength(raw_ostream &Out, key_type_ref K, data_type_ref V) {
    using namespace support;

    endian::Writer LE(Out, little);

    offset_type N = K.size();
    LE.write<offset_type>(N);

    offset_type M = 0;
    for (const auto &ProfileData : *V) {
      const InstrProfRecord &ProfRecord = ProfileData.second;
      M += sizeof(uint64_t); // The function hash
      M += sizeof(uint64_t); // The size of the Counts vector
      M += ProfRecord.Counts.size() * sizeof(uint64_t);

      // Value data
      M += ValueProfData::getSize(ProfileData.second);
    }
    LE.write<offset_type>(M);

    return std::make_pair(N, M);
  }

  void EmitKey(raw_ostream &Out, key_type_ref K, offset_type N) {
    Out.write(K.data(), N);
  }

  void EmitData(raw_ostream &Out, key_type_ref, data_type_ref V, offset_type) {
    using namespace support;

    endian::Writer LE(Out, little);
    for (const auto &ProfileData : *V) {
      const InstrProfRecord &ProfRecord = ProfileData.second;
      if (NamedInstrProfRecord::hasCSFlagInHash(ProfileData.first))
        CSSummaryBuilder->addRecord(ProfRecord);
      else
        SummaryBuilder->addRecord(ProfRecord);

      LE.write<uint64_t>(ProfileData.first); // Function hash
      LE.write<uint64_t>(ProfRecord.Counts.size());
      for (uint64_t I : ProfRecord.Counts)
        LE.write<uint64_t>(I);

      // Write value data
      std::unique_ptr<ValueProfData> VDataPtr =
          ValueProfData::serializeFrom(ProfileData.second);
      uint32_t S = VDataPtr->getSize();
      VDataPtr->swapBytesFromHost(ValueProfDataEndianness);
      Out.write((const char *)VDataPtr.get(), S);
    }
  }
};

} // end namespace llvm

InstrProfWriter::InstrProfWriter(bool Sparse)
    : Sparse(Sparse), InfoObj(new InstrProfRecordWriterTrait()) {}

InstrProfWriter::~InstrProfWriter() { delete InfoObj; }

// Internal interface for testing purposes only.
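// Overrides the endianness used when serializing value profile data, so unit
// tests can exercise the byte-swapping path in \c EmitData.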
void InstrProfWriter::setValueProfDataEndianness(
    support::endianness Endianness) {
  InfoObj->ValueProfDataEndianness = Endianness;
}

void InstrProfWriter::setOutputSparse(bool Sparse) {
  this->Sparse = Sparse;
}

void InstrProfWriter::addRecord(NamedInstrProfRecord &&I, uint64_t Weight,
                                function_ref<void(Error)> Warn) {
  auto Name = I.Name;
  auto Hash = I.Hash;
  addRecord(Name, Hash, std::move(I), Weight, Warn);
}

void InstrProfWriter::overlapRecord(NamedInstrProfRecord &&Other,
                                    OverlapStats &Overlap,
                                    OverlapStats &FuncLevelOverlap,
                                    const OverlapFuncFilters &FuncFilter) {
  auto Name = Other.Name;
  auto Hash = Other.Hash;
  Other.accumulateCounts(FuncLevelOverlap.Test);
  if (FunctionData.find(Name) == FunctionData.end()) {
    Overlap.addOneUnique(FuncLevelOverlap.Test);
    return;
  }
  if (FuncLevelOverlap.Test.CountSum < 1.0f) {
    Overlap.Overlap.NumEntries += 1;
    return;
  }
  auto &ProfileDataMap = FunctionData[Name];
  bool NewFunc;
  ProfilingData::iterator Where;
  std::tie(Where, NewFunc) =
      ProfileDataMap.insert(std::make_pair(Hash, InstrProfRecord()));
  if (NewFunc) {
    Overlap.addOneMismatch(FuncLevelOverlap.Test);
    return;
  }
  InstrProfRecord &Dest = Where->second;

  uint64_t ValueCutoff = FuncFilter.ValueCutoff;
  if (!FuncFilter.NameFilter.empty() && Name.contains(FuncFilter.NameFilter))
    ValueCutoff = 0;

  Dest.overlap(Other, Overlap, FuncLevelOverlap, ValueCutoff);
}

void InstrProfWriter::addRecord(StringRef Name, uint64_t Hash,
                                InstrProfRecord &&I, uint64_t Weight,
                                function_ref<void(Error)> Warn) {
  auto &ProfileDataMap = FunctionData[Name];

  bool NewFunc;
  ProfilingData::iterator Where;
  std::tie(Where, NewFunc) =
      ProfileDataMap.insert(std::make_pair(Hash, InstrProfRecord()));
  InstrProfRecord &Dest = Where->second;

  auto MapWarn = [&](instrprof_error E) {
    Warn(make_error<InstrProfError>(E));
  };

  if (NewFunc) {
    // We've never seen a function with this name and hash, add it.
    Dest = std::move(I);
    if (Weight > 1)
      Dest.scale(Weight, 1, MapWarn);
  } else {
    // We're updating a function we've seen before.
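    // Merge the incoming counts and value data into the existing record;
    // \c Weight scales the incoming counts as they are added.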
    Dest.merge(I, Weight, MapWarn);
  }

  Dest.sortValueData();
}

void InstrProfWriter::mergeRecordsFromWriter(InstrProfWriter &&IPW,
                                             function_ref<void(Error)> Warn) {
  for (auto &I : IPW.FunctionData)
    for (auto &Func : I.getValue())
      addRecord(I.getKey(), Func.first, std::move(Func.second), 1, Warn);
}

bool InstrProfWriter::shouldEncodeData(const ProfilingData &PD) {
  if (!Sparse)
    return true;
  for (const auto &Func : PD) {
    const InstrProfRecord &IPR = Func.second;
    if (llvm::any_of(IPR.Counts, [](uint64_t Count) { return Count > 0; }))
      return true;
  }
  return false;
}

static void setSummary(IndexedInstrProf::Summary *TheSummary,
                       ProfileSummary &PS) {
  using namespace IndexedInstrProf;

  const std::vector<ProfileSummaryEntry> &Res = PS.getDetailedSummary();
  TheSummary->NumSummaryFields = Summary::NumKinds;
  TheSummary->NumCutoffEntries = Res.size();
  TheSummary->set(Summary::MaxFunctionCount, PS.getMaxFunctionCount());
  TheSummary->set(Summary::MaxBlockCount, PS.getMaxCount());
  TheSummary->set(Summary::MaxInternalBlockCount, PS.getMaxInternalCount());
  TheSummary->set(Summary::TotalBlockCount, PS.getTotalCount());
  TheSummary->set(Summary::TotalNumBlocks, PS.getNumCounts());
  TheSummary->set(Summary::TotalNumFunctions, PS.getNumFunctions());
  for (unsigned I = 0; I < Res.size(); I++)
    TheSummary->setEntry(I, Res[I]);
}

Error InstrProfWriter::writeImpl(ProfOStream &OS) {
  using namespace IndexedInstrProf;

  OnDiskChainedHashTableGenerator<InstrProfRecordWriterTrait> Generator;

  InstrProfSummaryBuilder ISB(ProfileSummaryBuilder::DefaultCutoffs);
  InfoObj->SummaryBuilder = &ISB;
  InstrProfSummaryBuilder CSISB(ProfileSummaryBuilder::DefaultCutoffs);
  InfoObj->CSSummaryBuilder = &CSISB;

  // Populate the hash table generator.
  for (const auto &I : FunctionData)
    if (shouldEncodeData(I.getValue()))
      Generator.insert(I.getKey(), &I.getValue());

  // Write the header.
  IndexedInstrProf::Header Header;
  Header.Magic = IndexedInstrProf::Magic;
  Header.Version = IndexedInstrProf::ProfVersion::CurrentVersion;
  if (static_cast<bool>(ProfileKind & InstrProfKind::IR))
    Header.Version |= VARIANT_MASK_IR_PROF;
  if (static_cast<bool>(ProfileKind & InstrProfKind::CS))
    Header.Version |= VARIANT_MASK_CSIR_PROF;
  if (static_cast<bool>(ProfileKind & InstrProfKind::BB))
    Header.Version |= VARIANT_MASK_INSTR_ENTRY;
  if (static_cast<bool>(ProfileKind & InstrProfKind::SingleByteCoverage))
    Header.Version |= VARIANT_MASK_BYTE_COVERAGE;
  if (static_cast<bool>(ProfileKind & InstrProfKind::FunctionEntryOnly))
    Header.Version |= VARIANT_MASK_FUNCTION_ENTRY_ONLY;

  Header.Unused = 0;
  Header.HashType = static_cast<uint64_t>(IndexedInstrProf::HashType);
  Header.HashOffset = 0;
  int N = sizeof(IndexedInstrProf::Header) / sizeof(uint64_t);

  // Write out all the header fields except 'HashOffset'. We need to
  // remember the offset of that field to allow back patching it later.
  for (int I = 0; I < N - 1; I++)
    OS.write(reinterpret_cast<uint64_t *>(&Header)[I]);

  // Save the location of the Header.HashOffset field in \c OS.
  uint64_t HashTableStartFieldOffset = OS.tell();
  // Reserve the space for the HashOffset field.
  OS.write(0);

  // Reserve space to write profile summary data.
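  // The summary is not known until every record has been emitted through the
  // hash table generator (which feeds the summary builders), so write zeros
  // here and back patch the real values further down.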
  uint32_t NumEntries = ProfileSummaryBuilder::DefaultCutoffs.size();
  uint32_t SummarySize = Summary::getSize(Summary::NumKinds, NumEntries);
  // Remember the summary offset.
  uint64_t SummaryOffset = OS.tell();
  for (unsigned I = 0; I < SummarySize / sizeof(uint64_t); I++)
    OS.write(0);
  uint64_t CSSummaryOffset = 0;
  uint64_t CSSummarySize = 0;
  if (static_cast<bool>(ProfileKind & InstrProfKind::CS)) {
    CSSummaryOffset = OS.tell();
    CSSummarySize = SummarySize / sizeof(uint64_t);
    for (unsigned I = 0; I < CSSummarySize; I++)
      OS.write(0);
  }

  // Write the hash table.
  uint64_t HashTableStart = Generator.Emit(OS.OS, *InfoObj);

  // Allocate space for data to be serialized out.
  std::unique_ptr<IndexedInstrProf::Summary> TheSummary =
      IndexedInstrProf::allocSummary(SummarySize);
  // Compute the Summary and copy the data to the data
  // structure to be serialized out (to disk or buffer).
  std::unique_ptr<ProfileSummary> PS = ISB.getSummary();
  setSummary(TheSummary.get(), *PS);
  InfoObj->SummaryBuilder = nullptr;

  // For the context-sensitive summary.
  std::unique_ptr<IndexedInstrProf::Summary> TheCSSummary = nullptr;
  if (static_cast<bool>(ProfileKind & InstrProfKind::CS)) {
    TheCSSummary = IndexedInstrProf::allocSummary(SummarySize);
    std::unique_ptr<ProfileSummary> CSPS = CSISB.getSummary();
    setSummary(TheCSSummary.get(), *CSPS);
  }
  InfoObj->CSSummaryBuilder = nullptr;

  // Now do the final patch:
  PatchItem PatchItems[] = {
      // Patch the Header.HashOffset field.
      {HashTableStartFieldOffset, &HashTableStart, 1},
      // Patch the summary data.
      {SummaryOffset, reinterpret_cast<uint64_t *>(TheSummary.get()),
       (int)(SummarySize / sizeof(uint64_t))},
      {CSSummaryOffset, reinterpret_cast<uint64_t *>(TheCSSummary.get()),
       (int)CSSummarySize}};

  OS.patch(PatchItems, sizeof(PatchItems) / sizeof(*PatchItems));

  for (const auto &I : FunctionData)
    for (const auto &F : I.getValue())
      if (Error E = validateRecord(F.second))
        return E;

  return Error::success();
}

Error InstrProfWriter::write(raw_fd_ostream &OS) {
  // Write the hash table.
  ProfOStream POS(OS);
  return writeImpl(POS);
}

std::unique_ptr<MemoryBuffer> InstrProfWriter::writeBuffer() {
  std::string Data;
  raw_string_ostream OS(Data);
  ProfOStream POS(OS);
  // Write the hash table.
  if (Error E = writeImpl(POS)) {
    // Consume the error so the unchecked Error does not abort in builds with
    // LLVM's ABI-breaking checks enabled.
    consumeError(std::move(E));
    return nullptr;
  }
  // Return this in an aligned memory buffer.
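  // \c getMemBufferCopy copies \c Data into a freshly allocated buffer, so the
  // result stays valid after the local string goes out of scope.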
  return MemoryBuffer::getMemBufferCopy(Data);
}

static const char *ValueProfKindStr[] = {
#define VALUE_PROF_KIND(Enumerator, Value, Descr) #Enumerator,
#include "llvm/ProfileData/InstrProfData.inc"
};

Error InstrProfWriter::validateRecord(const InstrProfRecord &Func) {
  for (uint32_t VK = 0; VK <= IPVK_Last; VK++) {
    uint32_t NS = Func.getNumValueSites(VK);
    if (!NS)
      continue;
    for (uint32_t S = 0; S < NS; S++) {
      uint32_t ND = Func.getNumValueDataForSite(VK, S);
      std::unique_ptr<InstrProfValueData[]> VD = Func.getValueForSite(VK, S);
      bool WasZero = false;
      for (uint32_t I = 0; I < ND; I++)
        if ((VK != IPVK_IndirectCallTarget) && (VD[I].Value == 0)) {
          if (WasZero)
            return make_error<InstrProfError>(instrprof_error::invalid_prof);
          WasZero = true;
        }
    }
  }

  return Error::success();
}

void InstrProfWriter::writeRecordInText(StringRef Name, uint64_t Hash,
                                        const InstrProfRecord &Func,
                                        InstrProfSymtab &Symtab,
                                        raw_fd_ostream &OS) {
  OS << Name << "\n";
  OS << "# Func Hash:\n" << Hash << "\n";
  OS << "# Num Counters:\n" << Func.Counts.size() << "\n";
  OS << "# Counter Values:\n";
  for (uint64_t Count : Func.Counts)
    OS << Count << "\n";

  uint32_t NumValueKinds = Func.getNumValueKinds();
  if (!NumValueKinds) {
    OS << "\n";
    return;
  }

  OS << "# Num Value Kinds:\n" << NumValueKinds << "\n";
  for (uint32_t VK = 0; VK < IPVK_Last + 1; VK++) {
    uint32_t NS = Func.getNumValueSites(VK);
    if (!NS)
      continue;
    OS << "# ValueKind = " << ValueProfKindStr[VK] << ":\n" << VK << "\n";
    OS << "# NumValueSites:\n" << NS << "\n";
    for (uint32_t S = 0; S < NS; S++) {
      uint32_t ND = Func.getNumValueDataForSite(VK, S);
      OS << ND << "\n";
      std::unique_ptr<InstrProfValueData[]> VD = Func.getValueForSite(VK, S);
      for (uint32_t I = 0; I < ND; I++) {
        if (VK == IPVK_IndirectCallTarget)
          OS << Symtab.getFuncNameOrExternalSymbol(VD[I].Value) << ":"
             << VD[I].Count << "\n";
        else
          OS << VD[I].Value << ":" << VD[I].Count << "\n";
      }
    }
  }

  OS << "\n";
}

Error InstrProfWriter::writeText(raw_fd_ostream &OS) {
  // Check CS first since it implies an IR level profile.
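  // A context-sensitive profile is also an IR-level profile, so emitting the
  // :csir flag alone is enough.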
  if (static_cast<bool>(ProfileKind & InstrProfKind::CS))
    OS << "# CSIR level Instrumentation Flag\n:csir\n";
  else if (static_cast<bool>(ProfileKind & InstrProfKind::IR))
    OS << "# IR level Instrumentation Flag\n:ir\n";

  if (static_cast<bool>(ProfileKind & InstrProfKind::BB))
    OS << "# Always instrument the function entry block\n:entry_first\n";

  InstrProfSymtab Symtab;

  using FuncPair = detail::DenseMapPair<uint64_t, InstrProfRecord>;
  using RecordType = std::pair<StringRef, FuncPair>;
  SmallVector<RecordType, 4> OrderedFuncData;

  for (const auto &I : FunctionData) {
    if (shouldEncodeData(I.getValue())) {
      if (Error E = Symtab.addFuncName(I.getKey()))
        return E;
      for (const auto &Func : I.getValue())
        OrderedFuncData.push_back(std::make_pair(I.getKey(), Func));
    }
  }

  llvm::sort(OrderedFuncData, [](const RecordType &A, const RecordType &B) {
    return std::tie(A.first, A.second.first) <
           std::tie(B.first, B.second.first);
  });

  for (const auto &Record : OrderedFuncData) {
    const StringRef &Name = Record.first;
    const FuncPair &Func = Record.second;
    writeRecordInText(Name, Func.first, Func.second, Symtab, OS);
  }

  for (const auto &Record : OrderedFuncData) {
    const FuncPair &Func = Record.second;
    if (Error E = validateRecord(Func.second))
      return E;
  }

  return Error::success();
}