//===- StackMaps.cpp ------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackMaps.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Twine.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "stackmaps"

static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(3), cl::Hidden,
    cl::desc("Specify the stackmap encoding version (default = 3)"));

const char *StackMaps::WSMP = "Stack Maps: ";

static uint64_t getConstMetaVal(const MachineInstr &MI, unsigned Idx) {
  assert(MI.getOperand(Idx).isImm() &&
         MI.getOperand(Idx).getImm() == StackMaps::ConstantOp);
  const auto &MO = MI.getOperand(Idx + 1);
  assert(MO.isImm());
  return MO.getImm();
}

StackMapOpers::StackMapOpers(const MachineInstr *MI)
  : MI(MI) {
  assert(getVarIdx() <= MI->getNumOperands() &&
         "invalid stackmap definition");
}

PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}

unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber)
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}

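// Return the machine-operand index of the first GC pointer operand of this
// statepoint, or -1 if it carries no GC pointers. The GC pointer section
// starts after the deopt args and the <ConstantOp, num gc pointers> pair.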
int StatepointOpers::getFirstGCPtrIdx() {
  unsigned NumDeoptsIdx = getNumDeoptArgsIdx();
  unsigned NumDeoptArgs = MI->getOperand(NumDeoptsIdx).getImm();

  unsigned CurIdx = NumDeoptsIdx + 1;
  while (NumDeoptArgs--) {
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  }
  ++CurIdx; // <StackMaps::ConstantOp>
  unsigned NumGCPtrs = MI->getOperand(CurIdx).getImm();
  if (NumGCPtrs == 0)
    return -1;
  ++CurIdx; // <num gc ptrs>
  assert(CurIdx < MI->getNumOperands() && "Index points past operand list");
  return (int)CurIdx;
}

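// Fill \p GCMap with the (base, derived) pairs recorded in the statepoint's
// gc-map section and return the number of pairs. The pair elements are
// logical indices into the GC pointer operand list, not machine-operand
// numbers; parseStatepointOpers translates them using getFirstGCPtrIdx.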
unsigned StatepointOpers::getGCPointerMap(
    SmallVectorImpl<std::pair<unsigned, unsigned>> &GCMap) {
  int FirstGCIdx = getFirstGCPtrIdx();
  if (FirstGCIdx == -1)
    return 0;
  unsigned NumGCPtr = getConstMetaVal(*MI, (unsigned)FirstGCIdx - 2);
  unsigned CurIdx = (unsigned)FirstGCIdx;
  while (NumGCPtr--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);

  unsigned NumAllocas = getConstMetaVal(*MI, CurIdx);
  CurIdx += 2;
  while (NumAllocas--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);

  assert(CurIdx < MI->getNumOperands());
  unsigned GCMapSize = getConstMetaVal(*MI, CurIdx);
  CurIdx += 2;
  for (unsigned N = 0; N < GCMapSize; ++N) {
    unsigned B = MI->getOperand(CurIdx++).getImm();
    unsigned D = MI->getOperand(CurIdx++).getImm();
    GCMap.push_back(std::make_pair(B, D));
  }

  return GCMapSize;
}

StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 3)
    llvm_unreachable("Unsupported stackmap version!");
}

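// Meta operands are encoded as one of the following tuples (matching the
// cases handled in parseOperand below):
//   <Reg>                                    : value live in a register
//   <ConstantOp>, <Imm>                      : small constant
//   <DirectMemRefOp>, <Reg>, <Imm>           : frame address (Reg + Offset)
//   <IndirectMemRefOp>, <Size>, <Reg>, <Imm> : spilled value at [Reg + Offset]
// getNextMetaArgIdx advances past exactly one such tuple.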
unsigned StackMaps::getNextMetaArgIdx(const MachineInstr *MI, unsigned CurIdx) {
  assert(CurIdx < MI->getNumOperands() && "Bad meta arg index");
  const auto &MO = MI->getOperand(CurIdx);
  if (MO.isImm()) {
    switch (MO.getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp:
      CurIdx += 2;
      break;
    case StackMaps::IndirectMemRefOp:
      CurIdx += 3;
      break;
    case StackMaps::ConstantOp:
      ++CurIdx;
      break;
    }
  }
  ++CurIdx;
  assert(CurIdx < MI->getNumOperands() && "points past operand list");
  return CurIdx;
}

/// Go up the super-register chain until we hit a valid dwarf register number.
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum = TRI->getDwarfRegNum(Reg, false);
  for (MCSuperRegIterator SR(Reg, TRI); SR.isValid() && RegNum < 0; ++SR)
    RegNum = TRI->getDwarfRegNum(*SR, false);

  assert(RegNum >= 0 && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}

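/// Parse a single meta operand tuple starting at \p MOI, appending the
/// resulting Location (or, for register masks, the live-out set) to the
/// output vectors, and return an iterator past the consumed operands.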
MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) const {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers)
    if (MOI->isImplicit())
      return ++MOI;

    assert(Register::isPhysicalRegister(MOI->getReg()) &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");

    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    unsigned LLVMRegNum = *TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, TRI->getSpillSize(*RC),
                      DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}

void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << "  has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte 0"
         << ", .short " << Loc.Size << ", .short " << Loc.Reg << ", .short 0"
         << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << printReg(LO.Reg, TRI);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}

/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getSpillSize(*TRI->getMinimalPhysRegClass(Reg));
  return LiveOutReg(Reg, DwarfRegNum, Size);
}

/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> (Reg % 32)) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  llvm::sort(LiveOuts, [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
    // Only sort by the dwarf register number.
    return LHS.DwarfRegNum < RHS.DwarfRegNum;
  });

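  // The entries are now sorted by Dwarf register number, so each run of equal
  // numbers can be folded into its first entry: keep the largest spill size
  // and the widest (super-) register, and mark the duplicates for removal.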
  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  llvm::erase_if(LiveOuts, [](const LiveOutReg &LO) { return LO.Reg == 0; });

  return LiveOuts;
}

// See statepoint MI format description in StatepointOpers' class comment
// in include/llvm/CodeGen/StackMaps.h
void StackMaps::parseStatepointOpers(const MachineInstr &MI,
                                     MachineInstr::const_mop_iterator MOI,
                                     MachineInstr::const_mop_iterator MOE,
                                     LocationVec &Locations,
                                     LiveOutVec &LiveOuts) {
  LLVM_DEBUG(dbgs() << "record statepoint : " << MI << "\n");
  StatepointOpers SO(&MI);
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // CC
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Flags
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Num Deopts

  // Record Deopt Args.
  unsigned NumDeoptArgs = Locations.back().Offset;
  assert(Locations.back().Type == Location::Constant);
  assert(NumDeoptArgs == SO.getNumDeoptArgs());

  while (NumDeoptArgs--)
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Record gc base/derived pairs
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  assert(MOI->isImm());
  unsigned NumGCPointers = MOI->getImm();
  ++MOI;
  if (NumGCPointers) {
    // Map logical index of GC ptr to MI operand index.
    SmallVector<unsigned, 8> GCPtrIndices;
    unsigned GCPtrIdx = (unsigned)SO.getFirstGCPtrIdx();
    assert((int)GCPtrIdx != -1);
    assert(MOI - MI.operands_begin() == GCPtrIdx + 0LL);
    while (NumGCPointers--) {
      GCPtrIndices.push_back(GCPtrIdx);
      GCPtrIdx = StackMaps::getNextMetaArgIdx(&MI, GCPtrIdx);
    }

    SmallVector<std::pair<unsigned, unsigned>, 8> GCPairs;
    unsigned NumGCPairs = SO.getGCPointerMap(GCPairs);
    (void)NumGCPairs;
    LLVM_DEBUG(dbgs() << "NumGCPairs = " << NumGCPairs << "\n");

    auto MOB = MI.operands_begin();
    for (auto &P : GCPairs) {
      assert(P.first < GCPtrIndices.size() && "base pointer index not found");
      assert(P.second < GCPtrIndices.size() &&
             "derived pointer index not found");
      unsigned BaseIdx = GCPtrIndices[P.first];
      unsigned DerivedIdx = GCPtrIndices[P.second];
      LLVM_DEBUG(dbgs() << "Base : " << BaseIdx << " Derived : " << DerivedIdx
                        << "\n");
      (void)parseOperand(MOB + BaseIdx, MOE, Locations, LiveOuts);
      (void)parseOperand(MOB + DerivedIdx, MOE, Locations, LiveOuts);
    }

    MOI = MOB + GCPtrIdx;
  }

  // Record gc allocas
  assert(MOI < MOE);
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  unsigned NumAllocas = MOI->getImm();
  ++MOI;
  while (NumAllocas--) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
    assert(MOI < MOE);
  }
}

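/// Record a single stackmap/patchpoint/statepoint instruction: parse its meta
/// operands into Location and LiveOut records, spill constants that do not
/// fit in 32 bits to the constant pool, and remember the callsite's offset
/// from the start of the current function.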
void StackMaps::recordStackMapOpers(const MCSymbol &MILabel,
                                    const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {
  MCContext &OutContext = AP.OutStreamer->getContext();

  LocationVec Locations;
  LiveOutVec LiveOuts;

  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()), Locations,
                 LiveOuts);
  }

  // Parse operands.
  if (MI.getOpcode() == TargetOpcode::STATEPOINT)
    parseStatepointOpers(MI, MOI, MOE, Locations, LiveOuts);
  else
    while (MOI != MOE)
      MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Move large constants into the constant pool.
  for (auto &Loc : Locations) {
    // Constants are encoded as sign-extended integers.
    // -1 is directly encoded as .long 0xFFFFFFFF with no constant pool.
    if (Loc.Type == Location::Constant && !isInt<32>(Loc.Offset)) {
      Loc.Type = Location::ConstantIndex;
      // ConstPool is intentionally a MapVector of 'uint64_t's (as
      // opposed to 'int64_t's).  We should never be in a situation
      // where we have to insert either the tombstone or the empty
      // keys into a map, and for a DenseMap<uint64_t, T> these are
      // (uint64_t)0 and (uint64_t)-1.  They can be and are
      // represented using 32 bit integers.
      assert((uint64_t)Loc.Offset != DenseMapInfo<uint64_t>::getEmptyKey() &&
             (uint64_t)Loc.Offset !=
                 DenseMapInfo<uint64_t>::getTombstoneKey() &&
             "empty and tombstone keys should fit in 32 bits!");
      auto Result = ConstPool.insert(std::make_pair(Loc.Offset, Loc.Offset));
      Loc.Offset = Result.first - ConstPool.begin();
    }
  }

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(&MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function and update callsite count.
  const MachineFrameInfo &MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
  bool HasDynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->needsStackRealignment(*(AP.MF));
  uint64_t FrameSize = HasDynamicFrameSize ? UINT64_MAX : MFI.getStackSize();

  auto CurrentIt = FnInfos.find(AP.CurrentFnSym);
  if (CurrentIt != FnInfos.end())
    CurrentIt->second.RecordCount++;
  else
    FnInfos.insert(std::make_pair(AP.CurrentFnSym, FunctionInfo(FrameSize)));
}

void StackMaps::recordStackMap(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  StackMapOpers opers(&MI);
  const int64_t ID = MI.getOperand(PatchPointOpers::IDPos).getImm();
  recordStackMapOpers(L, MI, ID, std::next(MI.operands_begin(),
                                           opers.getVarIdx()),
                      MI.operands_end());
}

void StackMaps::recordPatchPoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  const int64_t ID = opers.getID();
  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(L, MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // verify anyregcc
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getNumCallArgs();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}

void StackMaps::recordStatepoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(L, MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}

/// Emit the stackmap header.
///
/// Header {
///   uint8  : Stack Map Version (currently 3)
///   uint8  : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.emitIntValue(StackMapVersion, 1); // Version.
  OS.emitIntValue(0, 1);               // Reserved.
  OS.emitInt16(0);                     // Reserved.

  // Num functions.
  LLVM_DEBUG(dbgs() << WSMP << "#functions = " << FnInfos.size() << '\n');
  OS.emitInt32(FnInfos.size());
  // Num constants.
  LLVM_DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.emitInt32(ConstPool.size());
  // Num callsites.
  LLVM_DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.emitInt32(CSInfos.size());
}

/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
///   uint64 : Record Count
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  LLVM_DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnInfos) {
    LLVM_DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                      << " frame size: " << FR.second.StackSize
                      << " callsite count: " << FR.second.RecordCount << '\n');
    OS.emitSymbolValue(FR.first, 8);
    OS.emitIntValue(FR.second.StackSize, 8);
    OS.emitIntValue(FR.second.RecordCount, 8);
  }
}

/// Emit the constant pool.
///
/// int64  : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  LLVM_DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    LLVM_DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.emitIntValue(ConstEntry.second, 8);
  }
}

/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Size in Bytes
///     uint16 : Dwarf RegNum
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 byte)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg                 (value in register)
///   0x2, Direct, Reg + Offset          (frame index)
///   0x3, Indirect, [Reg + Offset]      (spilled value)
///   0x4, Constant, Offset              (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  LLVM_DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.emitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.emitValue(CSI.CSOffsetExpr, 4);
      OS.emitInt16(0); // Reserved.
      OS.emitInt16(0); // 0 locations.
      OS.emitInt16(0); // padding.
      OS.emitInt16(0); // 0 live-out registers.
      OS.emitInt32(0); // padding.
      continue;
    }

    OS.emitIntValue(CSI.ID, 8);
    OS.emitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.emitInt16(0);
    OS.emitInt16(CSLocs.size());

    for (const auto &Loc : CSLocs) {
      OS.emitIntValue(Loc.Type, 1);
      OS.emitIntValue(0, 1);  // Reserved
      OS.emitInt16(Loc.Size);
      OS.emitInt16(Loc.Reg);
      OS.emitInt16(0); // Reserved
      OS.emitInt32(Loc.Offset);
    }

    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(8);

    // Num live-out registers and padding to align to 4 byte.
    OS.emitInt16(0);
    OS.emitInt16(LiveOuts.size());

    for (const auto &LO : LiveOuts) {
      OS.emitInt16(LO.DwarfRegNum);
      OS.emitIntValue(0, 1);
      OS.emitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(8);
  }
}

/// Serialize the stackmap data.
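/// The emitted layout follows the version-3 format: header, function frame
/// records, constant pool entries, then callsite records.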
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnInfos.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.SwitchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.emitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  LLVM_DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);
  OS.AddBlankLine();

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}