//===- StackMaps.cpp ------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackMaps.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Twine.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "stackmaps"

static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(3), cl::Hidden,
    cl::desc("Specify the stackmap encoding version (default = 3)"));

const char *StackMaps::WSMP = "Stack Maps: ";

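// Constants in the stackmap meta-operand area are encoded as an immediate
// pair <StackMaps::ConstantOp, value>. For illustration, the operand
// sequence "..., ConstantOp, 3, ..." denotes the constant 3; the helper
// below asserts the marker at Idx and returns the value at Idx + 1.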
static uint64_t getConstMetaVal(const MachineInstr &MI, unsigned Idx) {
  assert(MI.getOperand(Idx).isImm() &&
         MI.getOperand(Idx).getImm() == StackMaps::ConstantOp);
  const auto &MO = MI.getOperand(Idx + 1);
  assert(MO.isImm());
  return MO.getImm();
}

StackMapOpers::StackMapOpers(const MachineInstr *MI)
  : MI(MI) {
  assert(getVarIdx() <= MI->getNumOperands() &&
         "invalid stackmap definition");
}

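// Rough sketch of the PATCHPOINT operand layout these accessors assume (the
// authoritative description is the PatchPointOpers comment in
// llvm/CodeGen/StackMaps.h):
//   [optional explicit def], <id>, <num patch bytes>, <target>,
//   <num call args>, <calling conv>, call args..., stackmap meta operands...
// Scratch registers appear later as implicit, early-clobber defs, which is
// what getNextScratchIdx() below searches for.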
PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}

unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber)
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}

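// The StatepointOpers helpers below walk the statepoint's variable-length
// operand tail, which (starting at getVarIdx()) is laid out roughly as:
//   <ConstantOp, calling convention>, <ConstantOp, statepoint flags>,
//   <ConstantOp, num deopt args>, deopt args...,
//   <ConstantOp, num gc pointers>, gc pointers...,
//   <ConstantOp, num gc allocas>, gc allocas...,
//   <ConstantOp, num gc-map entries>, (base index, derived index) pairs...
// Each helper skips one group via StackMaps::getNextMetaArgIdx() to locate
// the start of the next one.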
unsigned StatepointOpers::getNumGcMapEntriesIdx() {
  // Take index of num of allocas and skip all allocas records.
  unsigned CurIdx = getNumAllocaIdx();
  unsigned NumAllocas = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumAllocas--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}

unsigned StatepointOpers::getNumAllocaIdx() {
  // Take index of num of gc ptrs and skip all gc ptr records.
  unsigned CurIdx = getNumGCPtrIdx();
  unsigned NumGCPtrs = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumGCPtrs--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}

unsigned StatepointOpers::getNumGCPtrIdx() {
  // Take index of num of deopt args and skip all deopt records.
  unsigned CurIdx = getNumDeoptArgsIdx();
  unsigned NumDeoptArgs = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumDeoptArgs--) {
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  }
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}

int StatepointOpers::getFirstGCPtrIdx() {
  unsigned NumGCPtrsIdx = getNumGCPtrIdx();
  unsigned NumGCPtrs = getConstMetaVal(*MI, NumGCPtrsIdx - 1);
  if (NumGCPtrs == 0)
    return -1;
  ++NumGCPtrsIdx; // skip <num gc ptrs>
  assert(NumGCPtrsIdx < MI->getNumOperands());
  return (int)NumGCPtrsIdx;
}

unsigned StatepointOpers::getGCPointerMap(
    SmallVectorImpl<std::pair<unsigned, unsigned>> &GCMap) {
  unsigned CurIdx = getNumGcMapEntriesIdx();
  unsigned GCMapSize = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  for (unsigned N = 0; N < GCMapSize; ++N) {
    unsigned B = MI->getOperand(CurIdx++).getImm();
    unsigned D = MI->getOperand(CurIdx++).getImm();
    GCMap.push_back(std::make_pair(B, D));
  }

  return GCMapSize;
}

bool StatepointOpers::isFoldableReg(Register Reg) const {
  unsigned FoldableAreaStart = getVarIdx();
  for (const MachineOperand &MO : MI->uses()) {
    if (MO.getOperandNo() >= FoldableAreaStart)
      break;
    if (MO.isReg() && MO.getReg() == Reg)
      return false;
  }
  return true;
}

bool StatepointOpers::isFoldableReg(const MachineInstr *MI, Register Reg) {
  if (MI->getOpcode() != TargetOpcode::STATEPOINT)
    return false;
  return StatepointOpers(MI).isFoldableReg(Reg);
}

StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 3)
    llvm_unreachable("Unsupported stackmap version!");
}

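// A meta operand occupies a different number of MI operands depending on how
// it is encoded; getNextMetaArgIdx() steps over exactly one of them:
//   register                           -> 1 operand
//   <ConstantOp, imm>                  -> 2 operands
//   <DirectMemRefOp, reg, imm>         -> 3 operands
//   <IndirectMemRefOp, size, reg, imm> -> 4 operands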
unsigned StackMaps::getNextMetaArgIdx(const MachineInstr *MI, unsigned CurIdx) {
  assert(CurIdx < MI->getNumOperands() && "Bad meta arg index");
  const auto &MO = MI->getOperand(CurIdx);
  if (MO.isImm()) {
    switch (MO.getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp:
      CurIdx += 2;
      break;
    case StackMaps::IndirectMemRefOp:
      CurIdx += 3;
      break;
    case StackMaps::ConstantOp:
      ++CurIdx;
      break;
    }
  }
  ++CurIdx;
  assert(CurIdx < MI->getNumOperands() && "points past operand list");
  return CurIdx;
}

/// Go up the super-register chain until we hit a valid dwarf register number.
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum;
  for (MCPhysReg SR : TRI->superregs_inclusive(Reg)) {
    RegNum = TRI->getDwarfRegNum(SR, false);
    if (RegNum >= 0)
      break;
  }

  assert(RegNum >= 0 && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}

MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) const {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers)
    if (MOI->isImplicit())
      return ++MOI;

    if (MOI->isUndef()) {
      // Record `undef` register as constant. Use same value as ISel uses.
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, 0xFEFEFEFE);
      return ++MOI;
    }

    assert(MOI->getReg().isPhysical() &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");

    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    unsigned LLVMRegNum = *TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, TRI->getSpillSize(*RC),
                      DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}

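// Each location gathered by parseOperand() is printed by print() below and
// ultimately emitted by emitCallsiteEntries() as a 12-byte record: type,
// reserved byte, size, DWARF register number, reserved halfword, offset.
// As an illustrative x86-64 example, a value spilled to [rbp - 24] becomes
// an Indirect location with Size = 8, Reg = 6 (the DWARF number of %rbp)
// and Offset = -24.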
void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << "  has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte 0"
         << ", .short " << Loc.Size << ", .short " << Loc.Reg << ", .short 0"
         << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << printReg(LO.Reg, TRI);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}

/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getSpillSize(*TRI->getMinimalPhysRegClass(Reg));
  return LiveOutReg(Reg, DwarfRegNum, Size);
}

/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
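///
/// For illustration: if the mask happens to mark both %eax and %rax as live
/// on x86-64, both resolve to DWARF register 0, and the merge loop below
/// keeps a single entry sized for the wider register.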
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> (Reg % 32)) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  llvm::sort(LiveOuts, [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
    // Only sort by the dwarf register number.
    return LHS.DwarfRegNum < RHS.DwarfRegNum;
  });

  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto *II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (I->Reg && TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  llvm::erase_if(LiveOuts, [](const LiveOutReg &LO) { return LO.Reg == 0; });

  return LiveOuts;
}

// See statepoint MI format description in StatepointOpers' class comment
// in include/llvm/CodeGen/StackMaps.h
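//
// The locations recorded here end up in the following order: calling
// convention, statepoint flags, the number of deopt args, each deopt arg,
// then one (base, derived) pair of locations per gc pointer (in gc-map
// order), and finally the gc allocas.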
void StackMaps::parseStatepointOpers(const MachineInstr &MI,
                                     MachineInstr::const_mop_iterator MOI,
                                     MachineInstr::const_mop_iterator MOE,
                                     LocationVec &Locations,
                                     LiveOutVec &LiveOuts) {
  LLVM_DEBUG(dbgs() << "record statepoint : " << MI << "\n");
  StatepointOpers SO(&MI);
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // CC
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Flags
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Num Deopts

  // Record Deopt Args.
  unsigned NumDeoptArgs = Locations.back().Offset;
  assert(Locations.back().Type == Location::Constant);
  assert(NumDeoptArgs == SO.getNumDeoptArgs());

  while (NumDeoptArgs--)
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Record gc base/derived pairs
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  assert(MOI->isImm());
  unsigned NumGCPointers = MOI->getImm();
  ++MOI;
  if (NumGCPointers) {
    // Map logical index of GC ptr to MI operand index.
    SmallVector<unsigned, 8> GCPtrIndices;
    unsigned GCPtrIdx = (unsigned)SO.getFirstGCPtrIdx();
    assert((int)GCPtrIdx != -1);
    assert(MOI - MI.operands_begin() == GCPtrIdx + 0LL);
    while (NumGCPointers--) {
      GCPtrIndices.push_back(GCPtrIdx);
      GCPtrIdx = StackMaps::getNextMetaArgIdx(&MI, GCPtrIdx);
    }

    SmallVector<std::pair<unsigned, unsigned>, 8> GCPairs;
    unsigned NumGCPairs = SO.getGCPointerMap(GCPairs);
    (void)NumGCPairs;
    LLVM_DEBUG(dbgs() << "NumGCPairs = " << NumGCPairs << "\n");

    auto MOB = MI.operands_begin();
    for (auto &P : GCPairs) {
      assert(P.first < GCPtrIndices.size() && "base pointer index not found");
      assert(P.second < GCPtrIndices.size() &&
             "derived pointer index not found");
      unsigned BaseIdx = GCPtrIndices[P.first];
      unsigned DerivedIdx = GCPtrIndices[P.second];
      LLVM_DEBUG(dbgs() << "Base : " << BaseIdx << " Derived : " << DerivedIdx
                        << "\n");
      (void)parseOperand(MOB + BaseIdx, MOE, Locations, LiveOuts);
      (void)parseOperand(MOB + DerivedIdx, MOE, Locations, LiveOuts);
    }

    MOI = MOB + GCPtrIdx;
  }

  // Record gc allocas
  assert(MOI < MOE);
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  unsigned NumAllocas = MOI->getImm();
  ++MOI;
  while (NumAllocas--) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
    assert(MOI < MOE);
  }
}

void StackMaps::recordStackMapOpers(const MCSymbol &MILabel,
                                    const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {
  MCContext &OutContext = AP.OutStreamer->getContext();

  LocationVec Locations;
  LiveOutVec LiveOuts;

  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()), Locations,
                 LiveOuts);
  }

  // Parse operands.
  if (MI.getOpcode() == TargetOpcode::STATEPOINT)
    parseStatepointOpers(MI, MOI, MOE, Locations, LiveOuts);
  else
    while (MOI != MOE)
      MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Move large constants into the constant pool.
  for (auto &Loc : Locations) {
    // Constants are encoded as sign-extended integers.
    // -1 is directly encoded as .long 0xFFFFFFFF with no constant pool.
    if (Loc.Type == Location::Constant && !isInt<32>(Loc.Offset)) {
      Loc.Type = Location::ConstantIndex;
      // ConstPool is intentionally a MapVector of 'uint64_t's (as
      // opposed to 'int64_t's).  We should never be in a situation
      // where we have to insert either the tombstone or the empty
      // keys into a map, and for a DenseMap<uint64_t, T> these are
      // (uint64_t)0 and (uint64_t)-1.  They can be and are
      // represented using 32 bit integers.
      assert((uint64_t)Loc.Offset != DenseMapInfo<uint64_t>::getEmptyKey() &&
             (uint64_t)Loc.Offset !=
                 DenseMapInfo<uint64_t>::getTombstoneKey() &&
             "empty and tombstone keys should fit in 32 bits!");
      auto Result = ConstPool.insert(std::make_pair(Loc.Offset, Loc.Offset));
      Loc.Offset = Result.first - ConstPool.begin();
    }
  }

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(&MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function and update callsite count.
  const MachineFrameInfo &MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
  bool HasDynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->hasStackRealignment(*(AP.MF));
  uint64_t FrameSize = HasDynamicFrameSize ? UINT64_MAX : MFI.getStackSize();

  auto CurrentIt = FnInfos.find(AP.CurrentFnSym);
  if (CurrentIt != FnInfos.end())
    CurrentIt->second.RecordCount++;
  else
    FnInfos.insert(std::make_pair(AP.CurrentFnSym, FunctionInfo(FrameSize)));
}

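// For reference, a stackmap record typically originates from IR such as
// (illustrative):
//   call void (i64, i32, ...)
//       @llvm.experimental.stackmap(i64 42, i32 0, i32 %x, i64 %y)
// which is lowered to a STACKMAP machine instruction; the live operands to
// record start at StackMapOpers::getVarIdx().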
void StackMaps::recordStackMap(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  StackMapOpers opers(&MI);
  const int64_t ID = MI.getOperand(PatchPointOpers::IDPos).getImm();
  recordStackMapOpers(L, MI, ID, std::next(MI.operands_begin(),
                                           opers.getVarIdx()),
                      MI.operands_end());
}

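// For reference, a patchpoint record roughly corresponds to IR such as
// (illustrative; see the llvm.experimental.patchpoint LangRef entry for the
// exact signatures and pointer-type spelling):
//   call anyregcc void (i64, i32, ptr, i32, ...)
//       @llvm.experimental.patchpoint.void(i64 7, i32 15, ptr null, i32 0)
// With the anyregcc convention, the call arguments (and the result, if any)
// must be recorded as register locations, which the NDEBUG block below
// verifies.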
void StackMaps::recordPatchPoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  const int64_t ID = opers.getID();
  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(L, MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // verify anyregcc
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getNumCallArgs();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}

void StackMaps::recordStatepoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(L, MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}

/// Emit the stackmap header.
///
/// Header {
///   uint8  : Stack Map Version (currently 3)
///   uint8  : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
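///
/// For example, a module with one function, no constant pool entries and
/// two records starts with the bytes (on a little-endian target):
///   03 00 00 00  01 00 00 00  00 00 00 00  02 00 00 00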
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.emitIntValue(StackMapVersion, 1); // Version.
  OS.emitIntValue(0, 1);               // Reserved.
  OS.emitInt16(0);                     // Reserved.

  // Num functions.
  LLVM_DEBUG(dbgs() << WSMP << "#functions = " << FnInfos.size() << '\n');
  OS.emitInt32(FnInfos.size());
  // Num constants.
  LLVM_DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.emitInt32(ConstPool.size());
  // Num callsites.
  LLVM_DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.emitInt32(CSInfos.size());
}

/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
///   uint64 : Record Count
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  LLVM_DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnInfos) {
    LLVM_DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                      << " frame size: " << FR.second.StackSize
                      << " callsite count: " << FR.second.RecordCount << '\n');
    OS.emitSymbolValue(FR.first, 8);
    OS.emitIntValue(FR.second.StackSize, 8);
    OS.emitIntValue(FR.second.RecordCount, 8);
  }
}

/// Emit the constant pool.
///
/// int64  : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  LLVM_DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    LLVM_DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.emitIntValue(ConstEntry.second, 8);
  }
}

/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Reserved (expected to be 0)
///     uint16 : Size in Bytes
///     uint16 : Dwarf RegNum
///     uint16 : Reserved (expected to be 0)
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 bytes)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg                 (value in register)
///   0x2, Direct, Reg + Offset          (frame index)
///   0x3, Indirect, [Reg + Offset]      (spilled value)
///   0x4, Constant, Offset              (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
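///
/// For example, a Register location holding an 8-byte value in DWARF
/// register 13 is emitted as the 12 bytes (on a little-endian target):
///   01 00 08 00 0d 00 00 00 00 00 00 00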
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  LLVM_DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.emitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.emitValue(CSI.CSOffsetExpr, 4);
      OS.emitInt16(0); // Reserved.
      OS.emitInt16(0); // 0 locations.
      OS.emitInt16(0); // padding.
      OS.emitInt16(0); // 0 live-out registers.
      OS.emitInt32(0); // padding.
      continue;
    }

    OS.emitIntValue(CSI.ID, 8);
    OS.emitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.emitInt16(0);
    OS.emitInt16(CSLocs.size());

    for (const auto &Loc : CSLocs) {
      OS.emitIntValue(Loc.Type, 1);
      OS.emitIntValue(0, 1);  // Reserved
      OS.emitInt16(Loc.Size);
      OS.emitInt16(Loc.Reg);
      OS.emitInt16(0); // Reserved
      OS.emitInt32(Loc.Offset);
    }

    // Emit alignment to 8 bytes.
    OS.emitValueToAlignment(Align(8));

    // Num live-out registers and padding to align to 4 bytes.
    OS.emitInt16(0);
    OS.emitInt16(LiveOuts.size());

    for (const auto &LO : LiveOuts) {
      OS.emitInt16(LO.DwarfRegNum);
      OS.emitIntValue(0, 1);
      OS.emitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 bytes.
    OS.emitValueToAlignment(Align(8));
  }
}

/// Serialize the stackmap data.
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnInfos.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.switchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.emitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  LLVM_DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);
  OS.addBlankLine();

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}
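
// A runtime typically locates this data through the __LLVM_StackMaps symbol
// or by looking up the stack map section itself (whatever
// MCObjectFileInfo::getStackMapSection() names for the target, e.g.
// ".llvm_stackmaps" on ELF), and then walks it using the layout emitted
// above.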
761