//===---------------------------- StackMaps.cpp ---------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackMaps.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCSectionMachO.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOpcodes.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include <iterator>

using namespace llvm;

#define DEBUG_TYPE "stackmaps"

static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(2),
    cl::desc("Specify the stackmap encoding version (default = 2)"));

const char *StackMaps::WSMP = "Stack Maps: ";

StackMapOpers::StackMapOpers(const MachineInstr *MI)
    : MI(MI) {
  assert(getVarIdx() <= MI->getNumOperands() &&
         "invalid stackmap definition");
}

PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}

unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber)
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}

StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 2)
    llvm_unreachable("Unsupported stackmap version!");
}

/// Go up the super-register chain until we hit a valid dwarf register number.
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum = TRI->getDwarfRegNum(Reg, false);
  for (MCSuperRegIterator SR(Reg, TRI); SR.isValid() && RegNum < 0; ++SR)
    RegNum = TRI->getDwarfRegNum(*SR, false);

  assert(RegNum >= 0 && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}

MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) const {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      unsigned Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      unsigned Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers)
    if (MOI->isImplicit())
      return ++MOI;

    assert(TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");

    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    unsigned LLVMRegNum = TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, RC->getSize(), DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}

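// For reference: parseOperand() above consumes the operand tuples produced by
// stackmap/patchpoint lowering, roughly in these shapes (a sketch of the
// cases handled above, not an exhaustive specification):
//
//   <DirectMemRefOp, Register, Offset>          -> Location::Direct
//   <IndirectMemRefOp, Size, Register, Offset>  -> Location::Indirect
//   <ConstantOp, Immediate>                     -> Location::Constant
//
// A plain (explicit, physical) register operand becomes Location::Register,
// and a register-mask operand fills in the live-out vector.
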
void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << "  has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << TRI->getName(Loc.Reg);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << TRI->getName(Loc.Reg);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << TRI->getName(Loc.Reg);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte " << Loc.Size
         << ", .short " << Loc.Reg << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << TRI->getName(LO.Reg);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}

/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getMinimalPhysRegClass(Reg)->getSize();
  return LiveOutReg(Reg, DwarfRegNum, Size);
}

/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> Reg % 32) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  std::sort(LiveOuts.begin(), LiveOuts.end(),
            [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
              // Only sort by the dwarf register number.
              return LHS.DwarfRegNum < RHS.DwarfRegNum;
            });

  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  LiveOuts.erase(
      remove_if(LiveOuts, [](const LiveOutReg &LO) { return LO.Reg == 0; }),
      LiveOuts.end());

  return LiveOuts;
}

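// Illustrative example of the merge above, assuming a target where a register
// and its sub-registers share one DWARF number: if the live-out mask contains
// both a 4-byte sub-register and its 8-byte super-register, the two entries
// sort next to each other, collapse into a single record for that DWARF
// number, and keep the larger spill size (8 bytes).
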
void StackMaps::recordStackMapOpers(const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCSymbol *MILabel = OutContext.createTempSymbol();
  AP.OutStreamer->EmitLabel(MILabel);

  LocationVec Locations;
  LiveOutVec LiveOuts;

  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()), Locations,
                 LiveOuts);
  }

  // Parse operands.
  while (MOI != MOE) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
  }

  // Move large constants into the constant pool.
  for (auto &Loc : Locations) {
    // Constants are encoded as sign-extended integers.
    // -1 is directly encoded as .long 0xFFFFFFFF with no constant pool.
    if (Loc.Type == Location::Constant && !isInt<32>(Loc.Offset)) {
      Loc.Type = Location::ConstantIndex;
      // ConstPool is intentionally a MapVector of 'uint64_t's (as
      // opposed to 'int64_t's). We should never be in a situation
      // where we have to insert either the tombstone or the empty
      // keys into a map, and for a DenseMap<uint64_t, T> these are
      // (uint64_t)0 and (uint64_t)-1. They can be and are
      // represented using 32 bit integers.
      assert((uint64_t)Loc.Offset != DenseMapInfo<uint64_t>::getEmptyKey() &&
             (uint64_t)Loc.Offset !=
                 DenseMapInfo<uint64_t>::getTombstoneKey() &&
             "empty and tombstone keys should fit in 32 bits!");
      auto Result = ConstPool.insert(std::make_pair(Loc.Offset, Loc.Offset));
      Loc.Offset = Result.first - ConstPool.begin();
    }
  }

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function and update callsite count.
  const MachineFrameInfo &MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
  bool HasDynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->needsStackRealignment(*(AP.MF));
  uint64_t FrameSize = HasDynamicFrameSize ? UINT64_MAX : MFI.getStackSize();

  auto CurrentIt = FnInfos.find(AP.CurrentFnSym);
  if (CurrentIt != FnInfos.end())
    CurrentIt->second.RecordCount++;
  else
    FnInfos.insert(std::make_pair(AP.CurrentFnSym, FunctionInfo(FrameSize)));
}

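// Worked example for the constant handling above (hypothetical values): an
// immediate of 42 fits in 32 bits and stays a Location::Constant with
// Offset = 42, whereas 0x100000000 does not, so it is added to ConstPool and
// the location becomes Location::ConstantIndex with Offset set to the
// constant's index in the pool.
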
void StackMaps::recordStackMap(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  StackMapOpers opers(&MI);
  const int64_t ID = MI.getOperand(PatchPointOpers::IDPos).getImm();
  recordStackMapOpers(MI, ID, std::next(MI.operands_begin(), opers.getVarIdx()),
                      MI.operands_end());
}

void StackMaps::recordPatchPoint(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  const int64_t ID = opers.getID();
  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // verify anyregcc
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getNumCallArgs();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}

void StackMaps::recordStatepoint(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  // Record all the deopt and gc operands (they're contiguous and run from the
  // initial index to the end of the operand list)
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}

/// Emit the stackmap header.
///
/// Header {
///   uint8  : Stack Map Version (currently 2)
///   uint8  : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.EmitIntValue(StackMapVersion, 1); // Version.
  OS.EmitIntValue(0, 1);               // Reserved.
  OS.EmitIntValue(0, 2);               // Reserved.

  // Num functions.
  DEBUG(dbgs() << WSMP << "#functions = " << FnInfos.size() << '\n');
  OS.EmitIntValue(FnInfos.size(), 4);
  // Num constants.
  DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.EmitIntValue(ConstPool.size(), 4);
  // Num callsites.
  DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.EmitIntValue(CSInfos.size(), 4);
}

/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
///   uint64 : Record Count
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnInfos) {
    DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                 << " frame size: " << FR.second.StackSize
                 << " callsite count: " << FR.second.RecordCount << '\n');
    OS.EmitSymbolValue(FR.first, 8);
    OS.EmitIntValue(FR.second.StackSize, 8);
    OS.EmitIntValue(FR.second.RecordCount, 8);
  }
}

/// Emit the constant pool.
///
/// int64 : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.EmitIntValue(ConstEntry.second, 8);
  }
}

/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Size in Bytes
///     uint16 : Dwarf RegNum
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 byte)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg                 (value in register)
///   0x2, Direct, Reg + Offset          (frame index)
///   0x3, Indirect, [Reg + Offset]      (spilled value)
///   0x4, Constant, Offset              (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.EmitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.EmitValue(CSI.CSOffsetExpr, 4);
      OS.EmitIntValue(0, 2); // Reserved.
      OS.EmitIntValue(0, 2); // 0 locations.
      OS.EmitIntValue(0, 2); // padding.
      OS.EmitIntValue(0, 2); // 0 live-out registers.
      OS.EmitIntValue(0, 4); // padding.
      continue;
    }

    OS.EmitIntValue(CSI.ID, 8);
    OS.EmitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.EmitIntValue(0, 2);
    OS.EmitIntValue(CSLocs.size(), 2);

    for (const auto &Loc : CSLocs) {
      OS.EmitIntValue(Loc.Type, 1);
      OS.EmitIntValue(Loc.Size, 1);
      OS.EmitIntValue(Loc.Reg, 2);
      OS.EmitIntValue(Loc.Offset, 4);
    }

    // Num live-out registers and padding to align to 4 byte.
    OS.EmitIntValue(0, 2);
    OS.EmitIntValue(LiveOuts.size(), 2);

    for (const auto &LO : LiveOuts) {
      OS.EmitIntValue(LO.DwarfRegNum, 2);
      OS.EmitIntValue(0, 1);
      OS.EmitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 byte.
    OS.EmitValueToAlignment(8);
  }
}

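// Illustrative size arithmetic for the record layout emitted above: each
// record takes 16 bytes of header (ID, offset, flags, NumLocations), plus
// 8 bytes per location, plus 4 bytes for the padding/NumLiveOuts pair, plus
// 4 bytes per live-out register, padded up to the next 8-byte boundary. For
// example, a record with 2 locations and 1 live-out occupies
// 16 + 16 + 4 + 4 = 40 bytes, which is already 8-byte aligned.
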
/// Serialize the stackmap data.
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnInfos.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.SwitchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.EmitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);
  OS.AddBlankLine();

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}