//===---------------------------- StackMaps.cpp ---------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackMaps.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCSectionMachO.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOpcodes.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include <iterator>

using namespace llvm;

#define DEBUG_TYPE "stackmaps"

static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(1),
    cl::desc("Specify the stackmap encoding version (default = 1)"));

const char *StackMaps::WSMP = "Stack Maps: ";

StackMapOpers::StackMapOpers(const MachineInstr *MI)
  : MI(MI) {
  assert(getVarIdx() <= MI->getNumOperands() &&
         "invalid stackmap definition");
}

PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()),
      IsAnyReg(MI->getOperand(getMetaIdx(CCPos)).getImm() ==
               CallingConv::AnyReg) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}

unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber)
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}

StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 1)
    llvm_unreachable("Unsupported stackmap version!");
}

/// Go up the super-register chain until we hit a valid dwarf register number.
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum = TRI->getDwarfRegNum(Reg, false);
  for (MCSuperRegIterator SR(Reg, TRI); SR.isValid() && RegNum < 0; ++SR)
    RegNum = TRI->getDwarfRegNum(*SR, false);

  assert(RegNum >= 0 && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}

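/// Lower the operand (or multi-operand group starting) at MOI into a Location
/// record, or into LiveOuts if it is a register-liveout mask, and return an
/// iterator just past the operands that were consumed.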
MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) const {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      unsigned Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      unsigned Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers)
    if (MOI->isImplicit())
      return ++MOI;

    assert(TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");
    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    unsigned LLVMRegNum = TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, RC->getSize(), DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}

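/// Print a human-readable dump of the recorded callsites, their locations, and
/// their live-out registers to OS (used for debug output).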
void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << "  has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << TRI->getName(Loc.Reg);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << TRI->getName(Loc.Reg);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << TRI->getName(Loc.Reg);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte " << Loc.Size
         << ", .short " << Loc.Reg << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << TRI->getName(LO.Reg);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}

/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getMinimalPhysRegClass(Reg)->getSize();
  return LiveOutReg(Reg, DwarfRegNum, Size);
}

/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> Reg % 32) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  std::sort(LiveOuts.begin(), LiveOuts.end(),
            [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
              // Only sort by the dwarf register number.
              return LHS.DwarfRegNum < RHS.DwarfRegNum;
            });

  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  LiveOuts.erase(
      remove_if(LiveOuts, [](const LiveOutReg &LO) { return LO.Reg == 0; }),
      LiveOuts.end());

  return LiveOuts;
}

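/// Record the stack map data for a single stackmap, patchpoint, or statepoint:
/// emit a label at the instruction, lower the operands in [MOI, MOE) into
/// Location and LiveOut records, move large constants into the constant pool,
/// and remember the enclosing function's stack size.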
void StackMaps::recordStackMapOpers(const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCSymbol *MILabel = OutContext.createTempSymbol();
  AP.OutStreamer->EmitLabel(MILabel);

  LocationVec Locations;
  LiveOutVec LiveOuts;

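  // If requested, record the explicit return-value definition as the first
  // location (used for anyregcc patchpoints).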
  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()), Locations,
                 LiveOuts);
  }

  // Parse operands.
  while (MOI != MOE) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
  }

  // Move large constants into the constant pool.
  for (auto &Loc : Locations) {
    // Constants are encoded as sign-extended integers.
    // -1 is directly encoded as .long 0xFFFFFFFF with no constant pool.
    if (Loc.Type == Location::Constant && !isInt<32>(Loc.Offset)) {
      Loc.Type = Location::ConstantIndex;
      // ConstPool is intentionally a MapVector of 'uint64_t's (as
      // opposed to 'int64_t's).  We should never be in a situation
      // where we have to insert either the tombstone or the empty
      // keys into a map, and for a DenseMap<uint64_t, T> these are
      // (uint64_t)0 and (uint64_t)-1.  They can be and are
      // represented using 32 bit integers.
      assert((uint64_t)Loc.Offset != DenseMapInfo<uint64_t>::getEmptyKey() &&
             (uint64_t)Loc.Offset !=
                 DenseMapInfo<uint64_t>::getTombstoneKey() &&
             "empty and tombstone keys should fit in 32 bits!");
      auto Result = ConstPool.insert(std::make_pair(Loc.Offset, Loc.Offset));
      Loc.Offset = Result.first - ConstPool.begin();
    }
  }

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function.
  const MachineFrameInfo &MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
  bool HasDynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->needsStackRealignment(*(AP.MF));
  FnStackSize[AP.CurrentFnSym] =
      HasDynamicFrameSize ? UINT64_MAX : MFI.getStackSize();
}

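/// Record a STACKMAP instruction's ID and the locations of its live-variable
/// operands.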
void StackMaps::recordStackMap(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  StackMapOpers opers(&MI);
  const int64_t ID = MI.getOperand(PatchPointOpers::IDPos).getImm();
  recordStackMapOpers(MI, ID, std::next(MI.operands_begin(), opers.getVarIdx()),
                      MI.operands_end());
}

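/// Record the stackmap operands of a PATCHPOINT instruction; for anyregcc
/// patchpoints the explicit return-value register is recorded as well.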
void StackMaps::recordPatchPoint(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  const int64_t ID = opers.getMetaOper(PatchPointOpers::IDPos).getImm();

  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // verify anyregcc
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getMetaOper(PatchPointOpers::NArgPos).getImm();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}

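/// Record the locations of a STATEPOINT instruction's deopt and gc operands.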
void StackMaps::recordStatepoint(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  // Record all the deopt and gc operands (they're contiguous and run from the
  // initial index to the end of the operand list)
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}

/// Emit the stackmap header.
///
/// Header {
///   uint8  : Stack Map Version (currently 1)
///   uint8  : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.EmitIntValue(StackMapVersion, 1); // Version.
  OS.EmitIntValue(0, 1);               // Reserved.
  OS.EmitIntValue(0, 2);               // Reserved.

  // Num functions.
  DEBUG(dbgs() << WSMP << "#functions = " << FnStackSize.size() << '\n');
  OS.EmitIntValue(FnStackSize.size(), 4);
  // Num constants.
  DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.EmitIntValue(ConstPool.size(), 4);
  // Num callsites.
  DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.EmitIntValue(CSInfos.size(), 4);
}

/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnStackSize) {
    DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                 << " frame size: " << FR.second);
    OS.EmitSymbolValue(FR.first, 8);
    OS.EmitIntValue(FR.second, 8);
  }
}

/// Emit the constant pool.
///
/// int64  : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.EmitIntValue(ConstEntry.second, 8);
  }
}

/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Size in Bytes
///     uint16 : Dwarf RegNum
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 byte)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg                 (value in register)
///   0x2, Direct, Reg + Offset          (frame index)
///   0x3, Indirect, [Reg + Offset]      (spilled value)
///   0x4, Constant, Offset              (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.EmitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.EmitValue(CSI.CSOffsetExpr, 4);
      OS.EmitIntValue(0, 2); // Reserved.
      OS.EmitIntValue(0, 2); // 0 locations.
      OS.EmitIntValue(0, 2); // padding.
      OS.EmitIntValue(0, 2); // 0 live-out registers.
      OS.EmitIntValue(0, 4); // padding.
      continue;
    }

    OS.EmitIntValue(CSI.ID, 8);
    OS.EmitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.EmitIntValue(0, 2);
    OS.EmitIntValue(CSLocs.size(), 2);

    for (const auto &Loc : CSLocs) {
      OS.EmitIntValue(Loc.Type, 1);
      OS.EmitIntValue(Loc.Size, 1);
      OS.EmitIntValue(Loc.Reg, 2);
      OS.EmitIntValue(Loc.Offset, 4);
    }

    // Num live-out registers and padding to align to 4 byte.
    OS.EmitIntValue(0, 2);
    OS.EmitIntValue(LiveOuts.size(), 2);

    for (const auto &LO : LiveOuts) {
      OS.EmitIntValue(LO.DwarfRegNum, 2);
      OS.EmitIntValue(0, 1);
      OS.EmitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 byte.
    OS.EmitValueToAlignment(8);
  }
}

/// Serialize the stackmap data.
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnStackSize.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.SwitchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.EmitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);
  OS.AddBlankLine();

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}
559