//===- StackMaps.cpp ------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackMaps.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Twine.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "stackmaps"

static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(3), cl::Hidden,
    cl::desc("Specify the stackmap encoding version (default = 3)"));

const char *StackMaps::WSMP = "Stack Maps: ";

static uint64_t getConstMetaVal(const MachineInstr &MI, unsigned Idx) {
  assert(MI.getOperand(Idx).isImm() &&
         MI.getOperand(Idx).getImm() == StackMaps::ConstantOp);
  const auto &MO = MI.getOperand(Idx + 1);
  assert(MO.isImm());
  return MO.getImm();
}

StackMapOpers::StackMapOpers(const MachineInstr *MI)
  : MI(MI) {
  assert(getVarIdx() <= MI->getNumOperands() &&
         "invalid stackmap definition");
}

PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}

unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber).
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}

unsigned StatepointOpers::getNumGcMapEntriesIdx() {
  // Take index of num of allocas and skip all allocas records.
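  // (Each variable-length section of the statepoint (deopt args, gc pointers,
  // allocas, gc-map entries) is preceded by a <ConstantOp, count> pair, and a
  // single record may span several operands, so we walk with
  // StackMaps::getNextMetaArgIdx rather than plain index arithmetic.)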
  unsigned CurIdx = getNumAllocaIdx();
  unsigned NumAllocas = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumAllocas--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}

unsigned StatepointOpers::getNumAllocaIdx() {
  // Take index of num of gc ptrs and skip all gc ptr records.
  unsigned CurIdx = getNumGCPtrIdx();
  unsigned NumGCPtrs = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumGCPtrs--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}

unsigned StatepointOpers::getNumGCPtrIdx() {
  // Take index of num of deopt args and skip all deopt records.
  unsigned CurIdx = getNumDeoptArgsIdx();
  unsigned NumDeoptArgs = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumDeoptArgs--) {
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  }
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}

int StatepointOpers::getFirstGCPtrIdx() {
  unsigned NumGCPtrsIdx = getNumGCPtrIdx();
  unsigned NumGCPtrs = getConstMetaVal(*MI, NumGCPtrsIdx - 1);
  if (NumGCPtrs == 0)
    return -1;
  ++NumGCPtrsIdx; // skip <num gc ptrs>
  assert(NumGCPtrsIdx < MI->getNumOperands());
  return (int)NumGCPtrsIdx;
}

unsigned StatepointOpers::getGCPointerMap(
    SmallVectorImpl<std::pair<unsigned, unsigned>> &GCMap) {
  unsigned CurIdx = getNumGcMapEntriesIdx();
  unsigned GCMapSize = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  for (unsigned N = 0; N < GCMapSize; ++N) {
    unsigned B = MI->getOperand(CurIdx++).getImm();
    unsigned D = MI->getOperand(CurIdx++).getImm();
    GCMap.push_back(std::make_pair(B, D));
  }

  return GCMapSize;
}

bool StatepointOpers::isFoldableReg(Register Reg) const {
  unsigned FoldableAreaStart = getVarIdx();
  for (const MachineOperand &MO : MI->uses()) {
    if (MO.getOperandNo() >= FoldableAreaStart)
      break;
    if (MO.isReg() && MO.getReg() == Reg)
      return false;
  }
  return true;
}

bool StatepointOpers::isFoldableReg(const MachineInstr *MI, Register Reg) {
  if (MI->getOpcode() != TargetOpcode::STATEPOINT)
    return false;
  return StatepointOpers(MI).isFoldableReg(Reg);
}

StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 3)
    llvm_unreachable("Unsupported stackmap version!");
}

unsigned StackMaps::getNextMetaArgIdx(const MachineInstr *MI, unsigned CurIdx) {
  assert(CurIdx < MI->getNumOperands() && "Bad meta arg index");
  const auto &MO = MI->getOperand(CurIdx);
  if (MO.isImm()) {
    switch (MO.getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp:
      CurIdx += 2;
      break;
    case StackMaps::IndirectMemRefOp:
      CurIdx += 3;
      break;
    case StackMaps::ConstantOp:
      ++CurIdx;
      break;
    }
  }
  ++CurIdx;
  assert(CurIdx < MI->getNumOperands() && "points past operand list");
  return CurIdx;
}

/// Go up the super-register chain until we hit a valid dwarf register number.
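/// Some sub-registers (for example, EAX in 64-bit x86 code) have no DWARF
/// number of their own; in that case the number of the enclosing
/// super-register is used instead.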
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum;
  for (MCPhysReg SR : TRI->superregs_inclusive(Reg)) {
    RegNum = TRI->getDwarfRegNum(SR, false);
    if (RegNum >= 0)
      break;
  }

  assert(RegNum >= 0 && isUInt<16>(RegNum) && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}

MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      if (isInt<32>(Imm)) {
        Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      } else {
        // ConstPool is intentionally a MapVector of 'uint64_t's (as
        // opposed to 'int64_t's). We should never be in a situation
        // where we have to insert either the tombstone or the empty
        // keys into a map, and for a DenseMap<uint64_t, T> these are
        // (uint64_t)0 and (uint64_t)-1. They can be and are
        // represented using 32 bit integers.
        assert((uint64_t)Imm != DenseMapInfo<uint64_t>::getEmptyKey() &&
               (uint64_t)Imm != DenseMapInfo<uint64_t>::getTombstoneKey() &&
               "empty and tombstone keys should fit in 32 bits!");
        auto Result = ConstPool.insert(std::make_pair(Imm, Imm));
        Locs.emplace_back(Location::ConstantIndex, sizeof(int64_t), 0,
                          Result.first - ConstPool.begin());
      }
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
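  // For a sub-register operand, the location below names the super-register's
  // DWARF number and records the sub-register's offset within it.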
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers).
    if (MOI->isImplicit())
      return ++MOI;

    assert(MOI->getReg().isPhysical() &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");

    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    MCRegister LLVMRegNum = *TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, TRI->getSpillSize(*RC),
                      DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}

void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << "  has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte 0"
         << ", .short " << Loc.Size << ", .short " << Loc.Reg << ", .short 0"
         << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << printReg(LO.Reg, TRI);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}

/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getSpillSize(*TRI->getMinimalPhysRegClass(Reg));
  return LiveOutReg(Reg, DwarfRegNum, Size);
}

/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
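/// Entries that resolve to the same DWARF register are merged into a single
/// record that keeps the super-register and the largest spill size.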
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> (Reg % 32)) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  llvm::sort(LiveOuts, [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
    // Only sort by the dwarf register number.
    return LHS.DwarfRegNum < RHS.DwarfRegNum;
  });

  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto *II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (I->Reg && TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  llvm::erase_if(LiveOuts, [](const LiveOutReg &LO) { return LO.Reg == 0; });

  return LiveOuts;
}

// See statepoint MI format description in StatepointOpers' class comment
// in include/llvm/CodeGen/StackMaps.h
void StackMaps::parseStatepointOpers(const MachineInstr &MI,
                                     MachineInstr::const_mop_iterator MOI,
                                     MachineInstr::const_mop_iterator MOE,
                                     LocationVec &Locations,
                                     LiveOutVec &LiveOuts) {
  LLVM_DEBUG(dbgs() << "record statepoint : " << MI << "\n");
  StatepointOpers SO(&MI);
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // CC
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Flags
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Num Deopts

  // Record Deopt Args.
  unsigned NumDeoptArgs = Locations.back().Offset;
  assert(Locations.back().Type == Location::Constant);
  assert(NumDeoptArgs == SO.getNumDeoptArgs());

  while (NumDeoptArgs--)
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Record gc base/derived pairs.
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  assert(MOI->isImm());
  unsigned NumGCPointers = MOI->getImm();
  ++MOI;
  if (NumGCPointers) {
    // Map logical index of GC ptr to MI operand index.
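    // The base/derived pairs returned by getGCPointerMap refer to pointers by
    // this logical index, so build the translation table first.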
    SmallVector<unsigned, 8> GCPtrIndices;
    unsigned GCPtrIdx = (unsigned)SO.getFirstGCPtrIdx();
    assert((int)GCPtrIdx != -1);
    assert(MOI - MI.operands_begin() == GCPtrIdx + 0LL);
    while (NumGCPointers--) {
      GCPtrIndices.push_back(GCPtrIdx);
      GCPtrIdx = StackMaps::getNextMetaArgIdx(&MI, GCPtrIdx);
    }

    SmallVector<std::pair<unsigned, unsigned>, 8> GCPairs;
    unsigned NumGCPairs = SO.getGCPointerMap(GCPairs);
    (void)NumGCPairs;
    LLVM_DEBUG(dbgs() << "NumGCPairs = " << NumGCPairs << "\n");

    auto MOB = MI.operands_begin();
    for (auto &P : GCPairs) {
      assert(P.first < GCPtrIndices.size() && "base pointer index not found");
      assert(P.second < GCPtrIndices.size() &&
             "derived pointer index not found");
      unsigned BaseIdx = GCPtrIndices[P.first];
      unsigned DerivedIdx = GCPtrIndices[P.second];
      LLVM_DEBUG(dbgs() << "Base : " << BaseIdx << " Derived : " << DerivedIdx
                        << "\n");
      (void)parseOperand(MOB + BaseIdx, MOE, Locations, LiveOuts);
      (void)parseOperand(MOB + DerivedIdx, MOE, Locations, LiveOuts);
    }

    MOI = MOB + GCPtrIdx;
  }

  // Record gc allocas.
  assert(MOI < MOE);
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  unsigned NumAllocas = MOI->getImm();
  ++MOI;
  while (NumAllocas--) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
    assert(MOI < MOE);
  }
}

void StackMaps::recordStackMapOpers(const MCSymbol &MILabel,
                                    const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {
  MCContext &OutContext = AP.OutStreamer->getContext();

  LocationVec Locations;
  LiveOutVec LiveOuts;

  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()),
                 Locations, LiveOuts);
  }

  // Parse operands.
  if (MI.getOpcode() == TargetOpcode::STATEPOINT)
    parseStatepointOpers(MI, MOI, MOE, Locations, LiveOuts);
  else
    while (MOI != MOE)
      MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(&MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function and update callsite count.
  const MachineFrameInfo &MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
  bool HasDynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->hasStackRealignment(*(AP.MF));
  uint64_t FrameSize = HasDynamicFrameSize ? UINT64_MAX : MFI.getStackSize();

  auto CurrentIt = FnInfos.find(AP.CurrentFnSym);
  if (CurrentIt != FnInfos.end())
    CurrentIt->second.RecordCount++;
  else
    FnInfos.insert(std::make_pair(AP.CurrentFnSym, FunctionInfo(FrameSize)));
}

void StackMaps::recordStackMap(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  StackMapOpers opers(&MI);
  const int64_t ID = MI.getOperand(PatchPointOpers::IDPos).getImm();
  recordStackMapOpers(L, MI, ID, std::next(MI.operands_begin(),
                                           opers.getVarIdx()),
                      MI.operands_end());
}

void StackMaps::recordPatchPoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  const int64_t ID = opers.getID();
  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(L, MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // Verify anyregcc.
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getNumCallArgs();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}

void StackMaps::recordStatepoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(L, MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}

/// Emit the stackmap header.
///
/// Header {
///   uint8  : Stack Map Version (currently 3)
///   uint8  : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.emitIntValue(StackMapVersion, 1); // Version.
  OS.emitIntValue(0, 1);               // Reserved.
  OS.emitInt16(0);                     // Reserved.

  // Num functions.
  LLVM_DEBUG(dbgs() << WSMP << "#functions = " << FnInfos.size() << '\n');
  OS.emitInt32(FnInfos.size());
  // Num constants.
  LLVM_DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.emitInt32(ConstPool.size());
  // Num callsites.
  LLVM_DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.emitInt32(CSInfos.size());
}

/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
///   uint64 : Record Count
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  LLVM_DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnInfos) {
    LLVM_DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                      << " frame size: " << FR.second.StackSize
                      << " callsite count: " << FR.second.RecordCount << '\n');
    OS.emitSymbolValue(FR.first, 8);
    OS.emitIntValue(FR.second.StackSize, 8);
    OS.emitIntValue(FR.second.RecordCount, 8);
  }
}

/// Emit the constant pool.
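/// Only constants that do not fit in the signed 32-bit Offset field of a
/// Location end up here; smaller constants are emitted inline as
/// Location::Constant.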
///
/// int64 : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  LLVM_DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    LLVM_DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.emitIntValue(ConstEntry.second, 8);
  }
}

/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Reserved (expected to be 0)
///     uint16 : Size in Bytes
///     uint16 : Dwarf RegNum
///     uint16 : Reserved (expected to be 0)
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 byte)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg                 (value in register)
///   0x2, Direct, Reg + Offset          (frame index)
///   0x3, Indirect, [Reg + Offset]      (spilled value)
///   0x4, Constant, Offset              (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  LLVM_DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.emitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.emitValue(CSI.CSOffsetExpr, 4);
      OS.emitInt16(0); // Reserved.
      OS.emitInt16(0); // 0 locations.
      OS.emitInt16(0); // padding.
      OS.emitInt16(0); // 0 live-out registers.
      OS.emitInt32(0); // padding.
      continue;
    }

    OS.emitIntValue(CSI.ID, 8);
    OS.emitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.emitInt16(0);
    OS.emitInt16(CSLocs.size());

    for (const auto &Loc : CSLocs) {
      OS.emitIntValue(Loc.Type, 1);
      OS.emitIntValue(0, 1); // Reserved
      OS.emitInt16(Loc.Size);
      OS.emitInt16(Loc.Reg);
      OS.emitInt16(0); // Reserved
      OS.emitInt32(Loc.Offset);
    }

    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(Align(8));

    // Num live-out registers and padding to align to 4 byte.
    OS.emitInt16(0);
    OS.emitInt16(LiveOuts.size());

    for (const auto &LO : LiveOuts) {
      OS.emitInt16(LO.DwarfRegNum);
      OS.emitIntValue(0, 1);
      OS.emitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(Align(8));
  }
}

/// Serialize the stackmap data.
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnInfos.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
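  // (The concrete section depends on the object file format, e.g.
  // ".llvm_stackmaps" on ELF or "__llvm_stackmaps" in the __LLVM_STACKMAPS
  // segment on Mach-O.)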
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.switchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.emitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  LLVM_DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);
  OS.addBlankLine();

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}