//===- StackMaps.cpp ------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackMaps.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Twine.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "stackmaps"

static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(3), cl::Hidden,
    cl::desc("Specify the stackmap encoding version (default = 3)"));

const char *StackMaps::WSMP = "Stack Maps: ";

StackMapOpers::StackMapOpers(const MachineInstr *MI)
  : MI(MI) {
  assert(getVarIdx() <= MI->getNumOperands() &&
         "invalid stackmap definition");
}

PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}

unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber)
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}

StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 3)
    llvm_unreachable("Unsupported stackmap version!");
}

/// Go up the super-register chain until we hit a valid dwarf register number.
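/// (For example, a sub-register that has no DWARF encoding of its own resolves
/// to the number of one of its super-registers; the exact mapping is
/// target-dependent.)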
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum = TRI->getDwarfRegNum(Reg, false);
  for (MCSuperRegIterator SR(Reg, TRI); SR.isValid() && RegNum < 0; ++SR)
    RegNum = TRI->getDwarfRegNum(*SR, false);

  assert(RegNum >= 0 && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}

MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) const {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      unsigned Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      unsigned Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers)
    if (MOI->isImplicit())
      return ++MOI;

    assert(TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");

    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    unsigned LLVMRegNum = TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, TRI->getSpillSize(*RC),
                      DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}
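// Sketch of the operand sequences consumed by parseOperand (illustrative only;
// the exact MachineOperands are produced by stackmap/patchpoint lowering and
// frame-index elimination):
//   <DirectMemRefOp,   Reg, Imm>        -> Location::Direct   (Reg + Imm)
//   <IndirectMemRefOp, Size, Reg, Imm>  -> Location::Indirect [Reg + Imm]
//   <ConstantOp, Imm>                   -> Location::Constant (promoted to
//                                          ConstantIndex later if it does not
//                                          fit in 32 bits)
//   <Reg>                               -> Location::Register
// Implicit register operands are skipped and register-mask operands become the
// live-out list.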
void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << " has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte 0"
         << ", .short " << Loc.Size << ", .short " << Loc.Reg << ", .short 0"
         << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << printReg(LO.Reg, TRI);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}

/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getSpillSize(*TRI->getMinimalPhysRegClass(Reg));
  return LiveOutReg(Reg, DwarfRegNum, Size);
}

/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> Reg % 32) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  llvm::sort(LiveOuts, [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
    // Only sort by the dwarf register number.
    return LHS.DwarfRegNum < RHS.DwarfRegNum;
  });

  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
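        // (--II leaves I on the last entry of the current dwarf-register
        // group, so the outer loop's ++I resumes at the first entry of the
        // next group.)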
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  LiveOuts.erase(
      llvm::remove_if(LiveOuts,
                      [](const LiveOutReg &LO) { return LO.Reg == 0; }),
      LiveOuts.end());

  return LiveOuts;
}

void StackMaps::recordStackMapOpers(const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {
  MCContext &OutContext = AP.OutStreamer->getContext();
  MCSymbol *MILabel = OutContext.createTempSymbol();
  AP.OutStreamer->EmitLabel(MILabel);

  LocationVec Locations;
  LiveOutVec LiveOuts;

  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()), Locations,
                 LiveOuts);
  }

  // Parse operands.
  while (MOI != MOE) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
  }

  // Move large constants into the constant pool.
  for (auto &Loc : Locations) {
    // Constants are encoded as sign-extended integers.
    // -1 is directly encoded as .long 0xFFFFFFFF with no constant pool.
    if (Loc.Type == Location::Constant && !isInt<32>(Loc.Offset)) {
      Loc.Type = Location::ConstantIndex;
      // ConstPool is intentionally a MapVector of 'uint64_t's (as
      // opposed to 'int64_t's). We should never be in a situation
      // where we have to insert either the tombstone or the empty
      // keys into a map, and for a DenseMap<uint64_t, T> these are
      // (uint64_t)0 and (uint64_t)-1. They can be and are
      // represented using 32 bit integers.
      assert((uint64_t)Loc.Offset != DenseMapInfo<uint64_t>::getEmptyKey() &&
             (uint64_t)Loc.Offset !=
                 DenseMapInfo<uint64_t>::getTombstoneKey() &&
             "empty and tombstone keys should fit in 32 bits!");
      auto Result = ConstPool.insert(std::make_pair(Loc.Offset, Loc.Offset));
      Loc.Offset = Result.first - ConstPool.begin();
    }
  }

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function and update callsite count.
  const MachineFrameInfo &MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
  bool HasDynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->needsStackRealignment(*(AP.MF));
  uint64_t FrameSize = HasDynamicFrameSize ? UINT64_MAX : MFI.getStackSize();

  auto CurrentIt = FnInfos.find(AP.CurrentFnSym);
  if (CurrentIt != FnInfos.end())
    CurrentIt->second.RecordCount++;
  else
    FnInfos.insert(std::make_pair(AP.CurrentFnSym, FunctionInfo(FrameSize)));
}
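// The record* entry points below all funnel into recordStackMapOpers; they
// differ only in where the variable-length operand list begins and in whether
// the call's result register is recorded as the first location (anyregcc
// patchpoints with a def).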
void StackMaps::recordStackMap(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  StackMapOpers opers(&MI);
  const int64_t ID = MI.getOperand(PatchPointOpers::IDPos).getImm();
  recordStackMapOpers(MI, ID, std::next(MI.operands_begin(), opers.getVarIdx()),
                      MI.operands_end());
}

void StackMaps::recordPatchPoint(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  const int64_t ID = opers.getID();
  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // verify anyregcc
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getNumCallArgs();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}

void StackMaps::recordStatepoint(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  // Record all the deopt and gc operands (they're contiguous and run from the
  // initial index to the end of the operand list)
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}

/// Emit the stackmap header.
///
/// Header {
///   uint8  : Stack Map Version (currently 3)
///   uint8  : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.EmitIntValue(StackMapVersion, 1); // Version.
  OS.EmitIntValue(0, 1);               // Reserved.
  OS.EmitIntValue(0, 2);               // Reserved.

  // Num functions.
  LLVM_DEBUG(dbgs() << WSMP << "#functions = " << FnInfos.size() << '\n');
  OS.EmitIntValue(FnInfos.size(), 4);
  // Num constants.
  LLVM_DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.EmitIntValue(ConstPool.size(), 4);
  // Num callsites.
  LLVM_DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.EmitIntValue(CSInfos.size(), 4);
}

/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
///   uint64 : Record Count
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  LLVM_DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnInfos) {
    LLVM_DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                      << " frame size: " << FR.second.StackSize
                      << " callsite count: " << FR.second.RecordCount << '\n');
    OS.EmitSymbolValue(FR.first, 8);
    OS.EmitIntValue(FR.second.StackSize, 8);
    OS.EmitIntValue(FR.second.RecordCount, 8);
  }
}
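// Note: a Stack Size of UINT64_MAX marks a function whose frame size is not
// statically known (variable-sized objects or stack realignment); see
// recordStackMapOpers above.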
/// Emit the constant pool.
///
/// int64  : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  LLVM_DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    LLVM_DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.EmitIntValue(ConstEntry.second, 8);
  }
}

/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Size in Bytes
///     uint16 : Dwarf RegNum
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 byte)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg                 (value in register)
///   0x2, Direct, Reg + Offset          (frame index)
///   0x3, Indirect, [Reg + Offset]      (spilled value)
///   0x4, Constant, Offset              (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  LLVM_DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.EmitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.EmitValue(CSI.CSOffsetExpr, 4);
      OS.EmitIntValue(0, 2); // Reserved.
      OS.EmitIntValue(0, 2); // 0 locations.
      OS.EmitIntValue(0, 2); // padding.
      OS.EmitIntValue(0, 2); // 0 live-out registers.
      OS.EmitIntValue(0, 4); // padding.
      continue;
    }

    OS.EmitIntValue(CSI.ID, 8);
    OS.EmitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.EmitIntValue(0, 2);
    OS.EmitIntValue(CSLocs.size(), 2);

    for (const auto &Loc : CSLocs) {
      OS.EmitIntValue(Loc.Type, 1);
      OS.EmitIntValue(0, 1); // Reserved
      OS.EmitIntValue(Loc.Size, 2);
      OS.EmitIntValue(Loc.Reg, 2);
      OS.EmitIntValue(0, 2); // Reserved
      OS.EmitIntValue(Loc.Offset, 4);
    }

    // Emit alignment to 8 byte.
    OS.EmitValueToAlignment(8);

    // Num live-out registers and padding to align to 4 byte.
    OS.EmitIntValue(0, 2);
    OS.EmitIntValue(LiveOuts.size(), 2);

    for (const auto &LO : LiveOuts) {
      OS.EmitIntValue(LO.DwarfRegNum, 2);
      OS.EmitIntValue(0, 1);
      OS.EmitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 byte.
    OS.EmitValueToAlignment(8);
  }
}
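// Each callsite record above is padded to an 8-byte boundary (see the
// EmitValueToAlignment calls), so consumers can walk the records with fixed
// alignment.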
/// Serialize the stackmap data.
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnInfos.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.SwitchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.EmitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  LLVM_DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);
  OS.AddBlankLine();

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}
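// Typical usage (sketch, not prescriptive): the target AsmPrinter calls
// recordStackMap/recordPatchPoint/recordStatepoint while lowering the
// corresponding pseudo instructions, then invokes serializeToStackMapSection()
// from its end-of-file hook to emit the accumulated data.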