//===- StackMaps.cpp ------------------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Twine.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/StackMaps.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetOpcodes.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "stackmaps"

static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(3),
    cl::desc("Specify the stackmap encoding version (default = 3)"));

const char *StackMaps::WSMP = "Stack Maps: ";

StackMapOpers::StackMapOpers(const MachineInstr *MI)
  : MI(MI) {
  assert(getVarIdx() <= MI->getNumOperands() &&
         "invalid stackmap definition");
}

PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}

unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber)
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}

StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 3)
    llvm_unreachable("Unsupported stackmap version!");
}

/// Go up the super-register chain until we hit a valid dwarf register number.
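/// Some registers (sub-registers in particular) may not have a DWARF number
/// of their own, so the number of an enclosing super-register is used
/// instead.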
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum = TRI->getDwarfRegNum(Reg, false);
  for (MCSuperRegIterator SR(Reg, TRI); SR.isValid() && RegNum < 0; ++SR)
    RegNum = TRI->getDwarfRegNum(*SR, false);

  assert(RegNum >= 0 && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}

MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) const {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      unsigned Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      unsigned Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers)
    if (MOI->isImplicit())
      return ++MOI;

    assert(TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");

    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    unsigned LLVMRegNum = TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, TRI->getSpillSize(*RC),
                      DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}

void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << "  has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << TRI->getName(Loc.Reg);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << TRI->getName(Loc.Reg);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << TRI->getName(Loc.Reg);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte 0"
         << ", .short " << Loc.Size << ", .short " << Loc.Reg << ", .short 0"
         << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << TRI->getName(LO.Reg);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}

/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getSpillSize(*TRI->getMinimalPhysRegClass(Reg));
  return LiveOutReg(Reg, DwarfRegNum, Size);
}

/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> Reg % 32) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  std::sort(LiveOuts.begin(), LiveOuts.end(),
            [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
              // Only sort by the dwarf register number.
              return LHS.DwarfRegNum < RHS.DwarfRegNum;
            });

  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
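        // II is the first entry with a different DWARF register; point I at
        // the last entry of the current group so the outer loop's ++I resumes
        // at the start of the next group.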
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  LiveOuts.erase(
      llvm::remove_if(LiveOuts,
                      [](const LiveOutReg &LO) { return LO.Reg == 0; }),
      LiveOuts.end());

  return LiveOuts;
}

void StackMaps::recordStackMapOpers(const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {
  MCContext &OutContext = AP.OutStreamer->getContext();
  MCSymbol *MILabel = OutContext.createTempSymbol();
  AP.OutStreamer->EmitLabel(MILabel);

  LocationVec Locations;
  LiveOutVec LiveOuts;

  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()), Locations,
                 LiveOuts);
  }

  // Parse operands.
  while (MOI != MOE) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
  }

  // Move large constants into the constant pool.
  for (auto &Loc : Locations) {
    // Constants are encoded as sign-extended integers.
    // -1 is directly encoded as .long 0xFFFFFFFF with no constant pool.
    if (Loc.Type == Location::Constant && !isInt<32>(Loc.Offset)) {
      Loc.Type = Location::ConstantIndex;
      // ConstPool is intentionally a MapVector of 'uint64_t's (as
      // opposed to 'int64_t's). We should never be in a situation
      // where we have to insert either the tombstone or the empty
      // keys into a map, and for a DenseMap<uint64_t, T> these are
      // (uint64_t)0 and (uint64_t)-1. They can be and are
      // represented using 32 bit integers.
      assert((uint64_t)Loc.Offset != DenseMapInfo<uint64_t>::getEmptyKey() &&
             (uint64_t)Loc.Offset !=
                 DenseMapInfo<uint64_t>::getTombstoneKey() &&
             "empty and tombstone keys should fit in 32 bits!");
      auto Result = ConstPool.insert(std::make_pair(Loc.Offset, Loc.Offset));
      Loc.Offset = Result.first - ConstPool.begin();
    }
  }

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function and update callsite count.
  const MachineFrameInfo &MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
  bool HasDynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->needsStackRealignment(*(AP.MF));
  uint64_t FrameSize = HasDynamicFrameSize ? UINT64_MAX : MFI.getStackSize();

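  // Only the first record emitted for a function establishes its stack size;
  // subsequent records for the same function just increment its RecordCount.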
  auto CurrentIt = FnInfos.find(AP.CurrentFnSym);
  if (CurrentIt != FnInfos.end())
    CurrentIt->second.RecordCount++;
  else
    FnInfos.insert(std::make_pair(AP.CurrentFnSym, FunctionInfo(FrameSize)));
}

void StackMaps::recordStackMap(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  StackMapOpers opers(&MI);
  const int64_t ID = MI.getOperand(PatchPointOpers::IDPos).getImm();
  recordStackMapOpers(MI, ID, std::next(MI.operands_begin(), opers.getVarIdx()),
                      MI.operands_end());
}

void StackMaps::recordPatchPoint(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  const int64_t ID = opers.getID();
  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // verify anyregcc
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getNumCallArgs();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}

void StackMaps::recordStatepoint(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  // Record all the deopt and gc operands (they're contiguous and run from the
  // initial index to the end of the operand list)
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}

/// Emit the stackmap header.
///
/// Header {
///   uint8  : Stack Map Version (currently 3)
///   uint8  : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.EmitIntValue(StackMapVersion, 1); // Version.
  OS.EmitIntValue(0, 1);               // Reserved.
  OS.EmitIntValue(0, 2);               // Reserved.

  // Num functions.
  DEBUG(dbgs() << WSMP << "#functions = " << FnInfos.size() << '\n');
  OS.EmitIntValue(FnInfos.size(), 4);
  // Num constants.
  DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.EmitIntValue(ConstPool.size(), 4);
  // Num callsites.
  DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.EmitIntValue(CSInfos.size(), 4);
}

/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
///   uint64 : Record Count
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnInfos) {
    DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                 << " frame size: " << FR.second.StackSize
                 << " callsite count: " << FR.second.RecordCount << '\n');
    OS.EmitSymbolValue(FR.first, 8);
    OS.EmitIntValue(FR.second.StackSize, 8);
    OS.EmitIntValue(FR.second.RecordCount, 8);
  }
}

/// Emit the constant pool.
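/// Entries hold 64-bit constants that did not fit in the signed 32-bit Offset
/// field of a location record; ConstantIndex locations refer to them by index.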
///
/// int64 : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.EmitIntValue(ConstEntry.second, 8);
  }
}

/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Size in Bytes
///     uint16 : Dwarf RegNum
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 byte)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg                 (value in register)
///   0x2, Direct, Reg + Offset          (frame index)
///   0x3, Indirect, [Reg + Offset]      (spilled value)
///   0x4, Constant, Offset              (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.EmitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.EmitValue(CSI.CSOffsetExpr, 4);
      OS.EmitIntValue(0, 2); // Reserved.
      OS.EmitIntValue(0, 2); // 0 locations.
      OS.EmitIntValue(0, 2); // padding.
      OS.EmitIntValue(0, 2); // 0 live-out registers.
      OS.EmitIntValue(0, 4); // padding.
      continue;
    }

    OS.EmitIntValue(CSI.ID, 8);
    OS.EmitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.EmitIntValue(0, 2);
    OS.EmitIntValue(CSLocs.size(), 2);

    for (const auto &Loc : CSLocs) {
      OS.EmitIntValue(Loc.Type, 1);
      OS.EmitIntValue(0, 1); // Reserved
      OS.EmitIntValue(Loc.Size, 2);
      OS.EmitIntValue(Loc.Reg, 2);
      OS.EmitIntValue(0, 2); // Reserved
      OS.EmitIntValue(Loc.Offset, 4);
    }

    // Emit alignment to 8 byte.
    OS.EmitValueToAlignment(8);

    // Num live-out registers and padding to align to 4 byte.
    OS.EmitIntValue(0, 2);
    OS.EmitIntValue(LiveOuts.size(), 2);

    for (const auto &LO : LiveOuts) {
      OS.EmitIntValue(LO.DwarfRegNum, 2);
      OS.EmitIntValue(0, 1);
      OS.EmitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 byte.
    OS.EmitValueToAlignment(8);
  }
}

/// Serialize the stackmap data.
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnInfos.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.SwitchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.EmitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);
  OS.AddBlankLine();

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}