1 //===- MIRYamlMapping.h - Describe mapping between MIR and YAML--*- C++ -*-===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // This file implements the mapping between various MIR data structures and 10 // their corresponding YAML representation. 11 // 12 //===----------------------------------------------------------------------===// 13 14 #ifndef LLVM_CODEGEN_MIRYAMLMAPPING_H 15 #define LLVM_CODEGEN_MIRYAMLMAPPING_H 16 17 #include "llvm/ADT/StringRef.h" 18 #include "llvm/CodeGen/MachineJumpTableInfo.h" 19 #include "llvm/CodeGen/TargetFrameLowering.h" 20 #include "llvm/Support/SMLoc.h" 21 #include "llvm/Support/YAMLTraits.h" 22 #include "llvm/Support/raw_ostream.h" 23 #include <algorithm> 24 #include <cstdint> 25 #include <optional> 26 #include <string> 27 #include <vector> 28 29 namespace llvm { 30 namespace yaml { 31 32 /// A wrapper around std::string which contains a source range that's being 33 /// set during parsing. 34 struct StringValue { 35 std::string Value; 36 SMRange SourceRange; 37 38 StringValue() = default; 39 StringValue(std::string Value) : Value(std::move(Value)) {} 40 StringValue(const char Val[]) : Value(Val) {} 41 42 bool operator==(const StringValue &Other) const { 43 return Value == Other.Value; 44 } 45 }; 46 47 template <> struct ScalarTraits<StringValue> { 48 static void output(const StringValue &S, void *, raw_ostream &OS) { 49 OS << S.Value; 50 } 51 52 static StringRef input(StringRef Scalar, void *Ctx, StringValue &S) { 53 S.Value = Scalar.str(); 54 if (const auto *Node = 55 reinterpret_cast<yaml::Input *>(Ctx)->getCurrentNode()) 56 S.SourceRange = Node->getSourceRange(); 57 return ""; 58 } 59 60 static QuotingType mustQuote(StringRef S) { return needsQuotes(S); } 61 }; 62 63 struct FlowStringValue : StringValue { 64 FlowStringValue() = default; 65 FlowStringValue(std::string Value) : StringValue(std::move(Value)) {} 66 }; 67 68 template <> struct ScalarTraits<FlowStringValue> { 69 static void output(const FlowStringValue &S, void *, raw_ostream &OS) { 70 return ScalarTraits<StringValue>::output(S, nullptr, OS); 71 } 72 73 static StringRef input(StringRef Scalar, void *Ctx, FlowStringValue &S) { 74 return ScalarTraits<StringValue>::input(Scalar, Ctx, S); 75 } 76 77 static QuotingType mustQuote(StringRef S) { return needsQuotes(S); } 78 }; 79 80 struct BlockStringValue { 81 StringValue Value; 82 83 bool operator==(const BlockStringValue &Other) const { 84 return Value == Other.Value; 85 } 86 }; 87 88 template <> struct BlockScalarTraits<BlockStringValue> { 89 static void output(const BlockStringValue &S, void *Ctx, raw_ostream &OS) { 90 return ScalarTraits<StringValue>::output(S.Value, Ctx, OS); 91 } 92 93 static StringRef input(StringRef Scalar, void *Ctx, BlockStringValue &S) { 94 return ScalarTraits<StringValue>::input(Scalar, Ctx, S.Value); 95 } 96 }; 97 98 /// A wrapper around unsigned which contains a source range that's being set 99 /// during parsing. 100 struct UnsignedValue { 101 unsigned Value = 0; 102 SMRange SourceRange; 103 104 UnsignedValue() = default; 105 UnsignedValue(unsigned Value) : Value(Value) {} 106 107 bool operator==(const UnsignedValue &Other) const { 108 return Value == Other.Value; 109 } 110 }; 111 112 template <> struct ScalarTraits<UnsignedValue> { 113 static void output(const UnsignedValue &Value, void *Ctx, raw_ostream &OS) { 114 return ScalarTraits<unsigned>::output(Value.Value, Ctx, OS); 115 } 116 117 static StringRef input(StringRef Scalar, void *Ctx, UnsignedValue &Value) { 118 if (const auto *Node = 119 reinterpret_cast<yaml::Input *>(Ctx)->getCurrentNode()) 120 Value.SourceRange = Node->getSourceRange(); 121 return ScalarTraits<unsigned>::input(Scalar, Ctx, Value.Value); 122 } 123 124 static QuotingType mustQuote(StringRef Scalar) { 125 return ScalarTraits<unsigned>::mustQuote(Scalar); 126 } 127 }; 128 129 template <> struct ScalarEnumerationTraits<MachineJumpTableInfo::JTEntryKind> { 130 static void enumeration(yaml::IO &IO, 131 MachineJumpTableInfo::JTEntryKind &EntryKind) { 132 IO.enumCase(EntryKind, "block-address", 133 MachineJumpTableInfo::EK_BlockAddress); 134 IO.enumCase(EntryKind, "gp-rel64-block-address", 135 MachineJumpTableInfo::EK_GPRel64BlockAddress); 136 IO.enumCase(EntryKind, "gp-rel32-block-address", 137 MachineJumpTableInfo::EK_GPRel32BlockAddress); 138 IO.enumCase(EntryKind, "label-difference32", 139 MachineJumpTableInfo::EK_LabelDifference32); 140 IO.enumCase(EntryKind, "label-difference64", 141 MachineJumpTableInfo::EK_LabelDifference64); 142 IO.enumCase(EntryKind, "inline", MachineJumpTableInfo::EK_Inline); 143 IO.enumCase(EntryKind, "custom32", MachineJumpTableInfo::EK_Custom32); 144 } 145 }; 146 147 template <> struct ScalarTraits<MaybeAlign> { 148 static void output(const MaybeAlign &Alignment, void *, 149 llvm::raw_ostream &out) { 150 out << uint64_t(Alignment ? Alignment->value() : 0U); 151 } 152 static StringRef input(StringRef Scalar, void *, MaybeAlign &Alignment) { 153 unsigned long long n; 154 if (getAsUnsignedInteger(Scalar, 10, n)) 155 return "invalid number"; 156 if (n > 0 && !isPowerOf2_64(n)) 157 return "must be 0 or a power of two"; 158 Alignment = MaybeAlign(n); 159 return StringRef(); 160 } 161 static QuotingType mustQuote(StringRef) { return QuotingType::None; } 162 }; 163 164 template <> struct ScalarTraits<Align> { 165 static void output(const Align &Alignment, void *, llvm::raw_ostream &OS) { 166 OS << Alignment.value(); 167 } 168 static StringRef input(StringRef Scalar, void *, Align &Alignment) { 169 unsigned long long N; 170 if (getAsUnsignedInteger(Scalar, 10, N)) 171 return "invalid number"; 172 if (!isPowerOf2_64(N)) 173 return "must be a power of two"; 174 Alignment = Align(N); 175 return StringRef(); 176 } 177 static QuotingType mustQuote(StringRef) { return QuotingType::None; } 178 }; 179 180 } // end namespace yaml 181 } // end namespace llvm 182 183 LLVM_YAML_IS_SEQUENCE_VECTOR(llvm::yaml::StringValue) 184 LLVM_YAML_IS_FLOW_SEQUENCE_VECTOR(llvm::yaml::FlowStringValue) 185 LLVM_YAML_IS_FLOW_SEQUENCE_VECTOR(llvm::yaml::UnsignedValue) 186 187 namespace llvm { 188 namespace yaml { 189 190 struct VirtualRegisterDefinition { 191 UnsignedValue ID; 192 StringValue Class; 193 StringValue PreferredRegister; 194 std::vector<FlowStringValue> RegisterFlags; 195 196 // TODO: Serialize the target specific register hints. 197 198 bool operator==(const VirtualRegisterDefinition &Other) const { 199 return ID == Other.ID && Class == Other.Class && 200 PreferredRegister == Other.PreferredRegister; 201 } 202 }; 203 204 template <> struct MappingTraits<VirtualRegisterDefinition> { 205 static void mapping(IO &YamlIO, VirtualRegisterDefinition &Reg) { 206 YamlIO.mapRequired("id", Reg.ID); 207 YamlIO.mapRequired("class", Reg.Class); 208 YamlIO.mapOptional("preferred-register", Reg.PreferredRegister, 209 StringValue()); // Don't print out when it's empty. 210 YamlIO.mapOptional("flags", Reg.RegisterFlags, 211 std::vector<FlowStringValue>()); 212 } 213 214 static const bool flow = true; 215 }; 216 217 struct MachineFunctionLiveIn { 218 StringValue Register; 219 StringValue VirtualRegister; 220 221 bool operator==(const MachineFunctionLiveIn &Other) const { 222 return Register == Other.Register && 223 VirtualRegister == Other.VirtualRegister; 224 } 225 }; 226 227 template <> struct MappingTraits<MachineFunctionLiveIn> { 228 static void mapping(IO &YamlIO, MachineFunctionLiveIn &LiveIn) { 229 YamlIO.mapRequired("reg", LiveIn.Register); 230 YamlIO.mapOptional( 231 "virtual-reg", LiveIn.VirtualRegister, 232 StringValue()); // Don't print the virtual register when it's empty. 233 } 234 235 static const bool flow = true; 236 }; 237 238 /// Serializable representation of stack object from the MachineFrameInfo class. 239 /// 240 /// The flags 'isImmutable' and 'isAliased' aren't serialized, as they are 241 /// determined by the object's type and frame information flags. 242 /// Dead stack objects aren't serialized. 243 /// 244 /// The 'isPreallocated' flag is determined by the local offset. 245 struct MachineStackObject { 246 enum ObjectType { DefaultType, SpillSlot, VariableSized }; 247 UnsignedValue ID; 248 StringValue Name; 249 // TODO: Serialize unnamed LLVM alloca reference. 250 ObjectType Type = DefaultType; 251 int64_t Offset = 0; 252 uint64_t Size = 0; 253 MaybeAlign Alignment = std::nullopt; 254 TargetStackID::Value StackID; 255 StringValue CalleeSavedRegister; 256 bool CalleeSavedRestored = true; 257 std::optional<int64_t> LocalOffset; 258 StringValue DebugVar; 259 StringValue DebugExpr; 260 StringValue DebugLoc; 261 262 bool operator==(const MachineStackObject &Other) const { 263 return ID == Other.ID && Name == Other.Name && Type == Other.Type && 264 Offset == Other.Offset && Size == Other.Size && 265 Alignment == Other.Alignment && 266 StackID == Other.StackID && 267 CalleeSavedRegister == Other.CalleeSavedRegister && 268 CalleeSavedRestored == Other.CalleeSavedRestored && 269 LocalOffset == Other.LocalOffset && DebugVar == Other.DebugVar && 270 DebugExpr == Other.DebugExpr && DebugLoc == Other.DebugLoc; 271 } 272 }; 273 274 template <> struct ScalarEnumerationTraits<MachineStackObject::ObjectType> { 275 static void enumeration(yaml::IO &IO, MachineStackObject::ObjectType &Type) { 276 IO.enumCase(Type, "default", MachineStackObject::DefaultType); 277 IO.enumCase(Type, "spill-slot", MachineStackObject::SpillSlot); 278 IO.enumCase(Type, "variable-sized", MachineStackObject::VariableSized); 279 } 280 }; 281 282 template <> struct MappingTraits<MachineStackObject> { 283 static void mapping(yaml::IO &YamlIO, MachineStackObject &Object) { 284 YamlIO.mapRequired("id", Object.ID); 285 YamlIO.mapOptional("name", Object.Name, 286 StringValue()); // Don't print out an empty name. 287 YamlIO.mapOptional( 288 "type", Object.Type, 289 MachineStackObject::DefaultType); // Don't print the default type. 290 YamlIO.mapOptional("offset", Object.Offset, (int64_t)0); 291 if (Object.Type != MachineStackObject::VariableSized) 292 YamlIO.mapRequired("size", Object.Size); 293 YamlIO.mapOptional("alignment", Object.Alignment, std::nullopt); 294 YamlIO.mapOptional("stack-id", Object.StackID, TargetStackID::Default); 295 YamlIO.mapOptional("callee-saved-register", Object.CalleeSavedRegister, 296 StringValue()); // Don't print it out when it's empty. 297 YamlIO.mapOptional("callee-saved-restored", Object.CalleeSavedRestored, 298 true); 299 YamlIO.mapOptional("local-offset", Object.LocalOffset, 300 std::optional<int64_t>()); 301 YamlIO.mapOptional("debug-info-variable", Object.DebugVar, 302 StringValue()); // Don't print it out when it's empty. 303 YamlIO.mapOptional("debug-info-expression", Object.DebugExpr, 304 StringValue()); // Don't print it out when it's empty. 305 YamlIO.mapOptional("debug-info-location", Object.DebugLoc, 306 StringValue()); // Don't print it out when it's empty. 307 } 308 309 static const bool flow = true; 310 }; 311 312 /// Serializable representation of the MCRegister variant of 313 /// MachineFunction::VariableDbgInfo. 314 struct EntryValueObject { 315 StringValue EntryValueRegister; 316 StringValue DebugVar; 317 StringValue DebugExpr; 318 StringValue DebugLoc; 319 bool operator==(const EntryValueObject &Other) const { 320 return EntryValueRegister == Other.EntryValueRegister && 321 DebugVar == Other.DebugVar && DebugExpr == Other.DebugExpr && 322 DebugLoc == Other.DebugLoc; 323 } 324 }; 325 326 template <> struct MappingTraits<EntryValueObject> { 327 static void mapping(yaml::IO &YamlIO, EntryValueObject &Object) { 328 YamlIO.mapRequired("entry-value-register", Object.EntryValueRegister); 329 YamlIO.mapRequired("debug-info-variable", Object.DebugVar); 330 YamlIO.mapRequired("debug-info-expression", Object.DebugExpr); 331 YamlIO.mapRequired("debug-info-location", Object.DebugLoc); 332 } 333 static const bool flow = true; 334 }; 335 336 /// Serializable representation of the fixed stack object from the 337 /// MachineFrameInfo class. 338 struct FixedMachineStackObject { 339 enum ObjectType { DefaultType, SpillSlot }; 340 UnsignedValue ID; 341 ObjectType Type = DefaultType; 342 int64_t Offset = 0; 343 uint64_t Size = 0; 344 MaybeAlign Alignment = std::nullopt; 345 TargetStackID::Value StackID; 346 bool IsImmutable = false; 347 bool IsAliased = false; 348 StringValue CalleeSavedRegister; 349 bool CalleeSavedRestored = true; 350 StringValue DebugVar; 351 StringValue DebugExpr; 352 StringValue DebugLoc; 353 354 bool operator==(const FixedMachineStackObject &Other) const { 355 return ID == Other.ID && Type == Other.Type && Offset == Other.Offset && 356 Size == Other.Size && Alignment == Other.Alignment && 357 StackID == Other.StackID && 358 IsImmutable == Other.IsImmutable && IsAliased == Other.IsAliased && 359 CalleeSavedRegister == Other.CalleeSavedRegister && 360 CalleeSavedRestored == Other.CalleeSavedRestored && 361 DebugVar == Other.DebugVar && DebugExpr == Other.DebugExpr 362 && DebugLoc == Other.DebugLoc; 363 } 364 }; 365 366 template <> 367 struct ScalarEnumerationTraits<FixedMachineStackObject::ObjectType> { 368 static void enumeration(yaml::IO &IO, 369 FixedMachineStackObject::ObjectType &Type) { 370 IO.enumCase(Type, "default", FixedMachineStackObject::DefaultType); 371 IO.enumCase(Type, "spill-slot", FixedMachineStackObject::SpillSlot); 372 } 373 }; 374 375 template <> 376 struct ScalarEnumerationTraits<TargetStackID::Value> { 377 static void enumeration(yaml::IO &IO, TargetStackID::Value &ID) { 378 IO.enumCase(ID, "default", TargetStackID::Default); 379 IO.enumCase(ID, "sgpr-spill", TargetStackID::SGPRSpill); 380 IO.enumCase(ID, "scalable-vector", TargetStackID::ScalableVector); 381 IO.enumCase(ID, "wasm-local", TargetStackID::WasmLocal); 382 IO.enumCase(ID, "noalloc", TargetStackID::NoAlloc); 383 } 384 }; 385 386 template <> struct MappingTraits<FixedMachineStackObject> { 387 static void mapping(yaml::IO &YamlIO, FixedMachineStackObject &Object) { 388 YamlIO.mapRequired("id", Object.ID); 389 YamlIO.mapOptional( 390 "type", Object.Type, 391 FixedMachineStackObject::DefaultType); // Don't print the default type. 392 YamlIO.mapOptional("offset", Object.Offset, (int64_t)0); 393 YamlIO.mapOptional("size", Object.Size, (uint64_t)0); 394 YamlIO.mapOptional("alignment", Object.Alignment, std::nullopt); 395 YamlIO.mapOptional("stack-id", Object.StackID, TargetStackID::Default); 396 if (Object.Type != FixedMachineStackObject::SpillSlot) { 397 YamlIO.mapOptional("isImmutable", Object.IsImmutable, false); 398 YamlIO.mapOptional("isAliased", Object.IsAliased, false); 399 } 400 YamlIO.mapOptional("callee-saved-register", Object.CalleeSavedRegister, 401 StringValue()); // Don't print it out when it's empty. 402 YamlIO.mapOptional("callee-saved-restored", Object.CalleeSavedRestored, 403 true); 404 YamlIO.mapOptional("debug-info-variable", Object.DebugVar, 405 StringValue()); // Don't print it out when it's empty. 406 YamlIO.mapOptional("debug-info-expression", Object.DebugExpr, 407 StringValue()); // Don't print it out when it's empty. 408 YamlIO.mapOptional("debug-info-location", Object.DebugLoc, 409 StringValue()); // Don't print it out when it's empty. 410 } 411 412 static const bool flow = true; 413 }; 414 415 /// A serializaable representation of a reference to a stack object or fixed 416 /// stack object. 417 struct FrameIndex { 418 // The frame index as printed. This is always a positive number, even for 419 // fixed objects. To obtain the real index, 420 // MachineFrameInfo::getObjectIndexBegin has to be added. 421 int FI; 422 bool IsFixed; 423 SMRange SourceRange; 424 425 FrameIndex() = default; 426 FrameIndex(int FI, const llvm::MachineFrameInfo &MFI); 427 428 Expected<int> getFI(const llvm::MachineFrameInfo &MFI) const; 429 }; 430 431 template <> struct ScalarTraits<FrameIndex> { 432 static void output(const FrameIndex &FI, void *, raw_ostream &OS) { 433 MachineOperand::printStackObjectReference(OS, FI.FI, FI.IsFixed, ""); 434 } 435 436 static StringRef input(StringRef Scalar, void *Ctx, FrameIndex &FI) { 437 FI.IsFixed = false; 438 StringRef Num; 439 if (Scalar.starts_with("%stack.")) { 440 Num = Scalar.substr(7); 441 } else if (Scalar.starts_with("%fixed-stack.")) { 442 Num = Scalar.substr(13); 443 FI.IsFixed = true; 444 } else { 445 return "Invalid frame index, needs to start with %stack. or " 446 "%fixed-stack."; 447 } 448 if (Num.consumeInteger(10, FI.FI)) 449 return "Invalid frame index, not a valid number"; 450 451 if (const auto *Node = 452 reinterpret_cast<yaml::Input *>(Ctx)->getCurrentNode()) 453 FI.SourceRange = Node->getSourceRange(); 454 return StringRef(); 455 } 456 457 static QuotingType mustQuote(StringRef S) { return needsQuotes(S); } 458 }; 459 460 /// Identifies call instruction location in machine function. 461 struct MachineInstrLoc { 462 unsigned BlockNum; 463 unsigned Offset; 464 465 bool operator==(const MachineInstrLoc &Other) const { 466 return BlockNum == Other.BlockNum && Offset == Other.Offset; 467 } 468 }; 469 470 /// Serializable representation of CallSiteInfo. 471 struct CallSiteInfo { 472 // Representation of call argument and register which is used to 473 // transfer it. 474 struct ArgRegPair { 475 StringValue Reg; 476 uint16_t ArgNo; 477 478 bool operator==(const ArgRegPair &Other) const { 479 return Reg == Other.Reg && ArgNo == Other.ArgNo; 480 } 481 }; 482 483 MachineInstrLoc CallLocation; 484 std::vector<ArgRegPair> ArgForwardingRegs; 485 486 bool operator==(const CallSiteInfo &Other) const { 487 return CallLocation.BlockNum == Other.CallLocation.BlockNum && 488 CallLocation.Offset == Other.CallLocation.Offset; 489 } 490 }; 491 492 template <> struct MappingTraits<CallSiteInfo::ArgRegPair> { 493 static void mapping(IO &YamlIO, CallSiteInfo::ArgRegPair &ArgReg) { 494 YamlIO.mapRequired("arg", ArgReg.ArgNo); 495 YamlIO.mapRequired("reg", ArgReg.Reg); 496 } 497 498 static const bool flow = true; 499 }; 500 } 501 } 502 503 LLVM_YAML_IS_SEQUENCE_VECTOR(llvm::yaml::CallSiteInfo::ArgRegPair) 504 505 namespace llvm { 506 namespace yaml { 507 508 template <> struct MappingTraits<CallSiteInfo> { 509 static void mapping(IO &YamlIO, CallSiteInfo &CSInfo) { 510 YamlIO.mapRequired("bb", CSInfo.CallLocation.BlockNum); 511 YamlIO.mapRequired("offset", CSInfo.CallLocation.Offset); 512 YamlIO.mapOptional("fwdArgRegs", CSInfo.ArgForwardingRegs, 513 std::vector<CallSiteInfo::ArgRegPair>()); 514 } 515 516 static const bool flow = true; 517 }; 518 519 /// Serializable representation of debug value substitutions. 520 struct DebugValueSubstitution { 521 unsigned SrcInst; 522 unsigned SrcOp; 523 unsigned DstInst; 524 unsigned DstOp; 525 unsigned Subreg; 526 527 bool operator==(const DebugValueSubstitution &Other) const { 528 return std::tie(SrcInst, SrcOp, DstInst, DstOp) == 529 std::tie(Other.SrcInst, Other.SrcOp, Other.DstInst, Other.DstOp); 530 } 531 }; 532 533 template <> struct MappingTraits<DebugValueSubstitution> { 534 static void mapping(IO &YamlIO, DebugValueSubstitution &Sub) { 535 YamlIO.mapRequired("srcinst", Sub.SrcInst); 536 YamlIO.mapRequired("srcop", Sub.SrcOp); 537 YamlIO.mapRequired("dstinst", Sub.DstInst); 538 YamlIO.mapRequired("dstop", Sub.DstOp); 539 YamlIO.mapRequired("subreg", Sub.Subreg); 540 } 541 542 static const bool flow = true; 543 }; 544 } // namespace yaml 545 } // namespace llvm 546 547 LLVM_YAML_IS_SEQUENCE_VECTOR(llvm::yaml::DebugValueSubstitution) 548 549 namespace llvm { 550 namespace yaml { 551 struct MachineConstantPoolValue { 552 UnsignedValue ID; 553 StringValue Value; 554 MaybeAlign Alignment = std::nullopt; 555 bool IsTargetSpecific = false; 556 557 bool operator==(const MachineConstantPoolValue &Other) const { 558 return ID == Other.ID && Value == Other.Value && 559 Alignment == Other.Alignment && 560 IsTargetSpecific == Other.IsTargetSpecific; 561 } 562 }; 563 564 template <> struct MappingTraits<MachineConstantPoolValue> { 565 static void mapping(IO &YamlIO, MachineConstantPoolValue &Constant) { 566 YamlIO.mapRequired("id", Constant.ID); 567 YamlIO.mapOptional("value", Constant.Value, StringValue()); 568 YamlIO.mapOptional("alignment", Constant.Alignment, std::nullopt); 569 YamlIO.mapOptional("isTargetSpecific", Constant.IsTargetSpecific, false); 570 } 571 }; 572 573 struct MachineJumpTable { 574 struct Entry { 575 UnsignedValue ID; 576 std::vector<FlowStringValue> Blocks; 577 578 bool operator==(const Entry &Other) const { 579 return ID == Other.ID && Blocks == Other.Blocks; 580 } 581 }; 582 583 MachineJumpTableInfo::JTEntryKind Kind = MachineJumpTableInfo::EK_Custom32; 584 std::vector<Entry> Entries; 585 586 bool operator==(const MachineJumpTable &Other) const { 587 return Kind == Other.Kind && Entries == Other.Entries; 588 } 589 }; 590 591 template <> struct MappingTraits<MachineJumpTable::Entry> { 592 static void mapping(IO &YamlIO, MachineJumpTable::Entry &Entry) { 593 YamlIO.mapRequired("id", Entry.ID); 594 YamlIO.mapOptional("blocks", Entry.Blocks, std::vector<FlowStringValue>()); 595 } 596 }; 597 598 struct CalledGlobal { 599 MachineInstrLoc CallSite; 600 StringValue Callee; 601 unsigned Flags; 602 603 bool operator==(const CalledGlobal &Other) const { 604 return CallSite == Other.CallSite && Callee == Other.Callee && 605 Flags == Other.Flags; 606 } 607 }; 608 609 template <> struct MappingTraits<CalledGlobal> { 610 static void mapping(IO &YamlIO, CalledGlobal &CG) { 611 YamlIO.mapRequired("bb", CG.CallSite.BlockNum); 612 YamlIO.mapRequired("offset", CG.CallSite.Offset); 613 YamlIO.mapRequired("callee", CG.Callee); 614 YamlIO.mapRequired("flags", CG.Flags); 615 } 616 }; 617 618 } // end namespace yaml 619 } // end namespace llvm 620 621 LLVM_YAML_IS_SEQUENCE_VECTOR(llvm::yaml::MachineFunctionLiveIn) 622 LLVM_YAML_IS_SEQUENCE_VECTOR(llvm::yaml::VirtualRegisterDefinition) 623 LLVM_YAML_IS_SEQUENCE_VECTOR(llvm::yaml::MachineStackObject) 624 LLVM_YAML_IS_SEQUENCE_VECTOR(llvm::yaml::EntryValueObject) 625 LLVM_YAML_IS_SEQUENCE_VECTOR(llvm::yaml::FixedMachineStackObject) 626 LLVM_YAML_IS_SEQUENCE_VECTOR(llvm::yaml::CallSiteInfo) 627 LLVM_YAML_IS_SEQUENCE_VECTOR(llvm::yaml::MachineConstantPoolValue) 628 LLVM_YAML_IS_SEQUENCE_VECTOR(llvm::yaml::MachineJumpTable::Entry) 629 LLVM_YAML_IS_SEQUENCE_VECTOR(llvm::yaml::CalledGlobal) 630 631 namespace llvm { 632 namespace yaml { 633 634 template <> struct MappingTraits<MachineJumpTable> { 635 static void mapping(IO &YamlIO, MachineJumpTable &JT) { 636 YamlIO.mapRequired("kind", JT.Kind); 637 YamlIO.mapOptional("entries", JT.Entries, 638 std::vector<MachineJumpTable::Entry>()); 639 } 640 }; 641 642 /// Serializable representation of MachineFrameInfo. 643 /// 644 /// Doesn't serialize attributes like 'StackAlignment', 'IsStackRealignable' and 645 /// 'RealignOption' as they are determined by the target and LLVM function 646 /// attributes. 647 /// It also doesn't serialize attributes like 'NumFixedObject' and 648 /// 'HasVarSizedObjects' as they are determined by the frame objects themselves. 649 struct MachineFrameInfo { 650 bool IsFrameAddressTaken = false; 651 bool IsReturnAddressTaken = false; 652 bool HasStackMap = false; 653 bool HasPatchPoint = false; 654 uint64_t StackSize = 0; 655 int OffsetAdjustment = 0; 656 unsigned MaxAlignment = 0; 657 bool AdjustsStack = false; 658 bool HasCalls = false; 659 StringValue StackProtector; 660 StringValue FunctionContext; 661 unsigned MaxCallFrameSize = ~0u; ///< ~0u means: not computed yet. 662 unsigned CVBytesOfCalleeSavedRegisters = 0; 663 bool HasOpaqueSPAdjustment = false; 664 bool HasVAStart = false; 665 bool HasMustTailInVarArgFunc = false; 666 bool HasTailCall = false; 667 bool IsCalleeSavedInfoValid = false; 668 unsigned LocalFrameSize = 0; 669 StringValue SavePoint; 670 StringValue RestorePoint; 671 672 bool operator==(const MachineFrameInfo &Other) const { 673 return IsFrameAddressTaken == Other.IsFrameAddressTaken && 674 IsReturnAddressTaken == Other.IsReturnAddressTaken && 675 HasStackMap == Other.HasStackMap && 676 HasPatchPoint == Other.HasPatchPoint && 677 StackSize == Other.StackSize && 678 OffsetAdjustment == Other.OffsetAdjustment && 679 MaxAlignment == Other.MaxAlignment && 680 AdjustsStack == Other.AdjustsStack && HasCalls == Other.HasCalls && 681 StackProtector == Other.StackProtector && 682 FunctionContext == Other.FunctionContext && 683 MaxCallFrameSize == Other.MaxCallFrameSize && 684 CVBytesOfCalleeSavedRegisters == 685 Other.CVBytesOfCalleeSavedRegisters && 686 HasOpaqueSPAdjustment == Other.HasOpaqueSPAdjustment && 687 HasVAStart == Other.HasVAStart && 688 HasMustTailInVarArgFunc == Other.HasMustTailInVarArgFunc && 689 HasTailCall == Other.HasTailCall && 690 LocalFrameSize == Other.LocalFrameSize && 691 SavePoint == Other.SavePoint && RestorePoint == Other.RestorePoint && 692 IsCalleeSavedInfoValid == Other.IsCalleeSavedInfoValid; 693 } 694 }; 695 696 template <> struct MappingTraits<MachineFrameInfo> { 697 static void mapping(IO &YamlIO, MachineFrameInfo &MFI) { 698 YamlIO.mapOptional("isFrameAddressTaken", MFI.IsFrameAddressTaken, false); 699 YamlIO.mapOptional("isReturnAddressTaken", MFI.IsReturnAddressTaken, false); 700 YamlIO.mapOptional("hasStackMap", MFI.HasStackMap, false); 701 YamlIO.mapOptional("hasPatchPoint", MFI.HasPatchPoint, false); 702 YamlIO.mapOptional("stackSize", MFI.StackSize, (uint64_t)0); 703 YamlIO.mapOptional("offsetAdjustment", MFI.OffsetAdjustment, (int)0); 704 YamlIO.mapOptional("maxAlignment", MFI.MaxAlignment, (unsigned)0); 705 YamlIO.mapOptional("adjustsStack", MFI.AdjustsStack, false); 706 YamlIO.mapOptional("hasCalls", MFI.HasCalls, false); 707 YamlIO.mapOptional("stackProtector", MFI.StackProtector, 708 StringValue()); // Don't print it out when it's empty. 709 YamlIO.mapOptional("functionContext", MFI.FunctionContext, 710 StringValue()); // Don't print it out when it's empty. 711 YamlIO.mapOptional("maxCallFrameSize", MFI.MaxCallFrameSize, (unsigned)~0); 712 YamlIO.mapOptional("cvBytesOfCalleeSavedRegisters", 713 MFI.CVBytesOfCalleeSavedRegisters, 0U); 714 YamlIO.mapOptional("hasOpaqueSPAdjustment", MFI.HasOpaqueSPAdjustment, 715 false); 716 YamlIO.mapOptional("hasVAStart", MFI.HasVAStart, false); 717 YamlIO.mapOptional("hasMustTailInVarArgFunc", MFI.HasMustTailInVarArgFunc, 718 false); 719 YamlIO.mapOptional("hasTailCall", MFI.HasTailCall, false); 720 YamlIO.mapOptional("isCalleeSavedInfoValid", MFI.IsCalleeSavedInfoValid, 721 false); 722 YamlIO.mapOptional("localFrameSize", MFI.LocalFrameSize, (unsigned)0); 723 YamlIO.mapOptional("savePoint", MFI.SavePoint, 724 StringValue()); // Don't print it out when it's empty. 725 YamlIO.mapOptional("restorePoint", MFI.RestorePoint, 726 StringValue()); // Don't print it out when it's empty. 727 } 728 }; 729 730 /// Targets should override this in a way that mirrors the implementation of 731 /// llvm::MachineFunctionInfo. 732 struct MachineFunctionInfo { 733 virtual ~MachineFunctionInfo() = default; 734 virtual void mappingImpl(IO &YamlIO) {} 735 }; 736 737 template <> struct MappingTraits<std::unique_ptr<MachineFunctionInfo>> { 738 static void mapping(IO &YamlIO, std::unique_ptr<MachineFunctionInfo> &MFI) { 739 if (MFI) 740 MFI->mappingImpl(YamlIO); 741 } 742 }; 743 744 struct MachineFunction { 745 StringRef Name; 746 MaybeAlign Alignment = std::nullopt; 747 bool ExposesReturnsTwice = false; 748 // GISel MachineFunctionProperties. 749 bool Legalized = false; 750 bool RegBankSelected = false; 751 bool Selected = false; 752 bool FailedISel = false; 753 // Register information 754 bool TracksRegLiveness = false; 755 bool HasWinCFI = false; 756 757 // Computed properties that should be overridable 758 std::optional<bool> NoPHIs; 759 std::optional<bool> IsSSA; 760 std::optional<bool> NoVRegs; 761 std::optional<bool> HasFakeUses; 762 763 bool CallsEHReturn = false; 764 bool CallsUnwindInit = false; 765 bool HasEHCatchret = false; 766 bool HasEHScopes = false; 767 bool HasEHFunclets = false; 768 bool IsOutlined = false; 769 770 bool FailsVerification = false; 771 bool TracksDebugUserValues = false; 772 bool UseDebugInstrRef = false; 773 std::vector<VirtualRegisterDefinition> VirtualRegisters; 774 std::vector<MachineFunctionLiveIn> LiveIns; 775 std::optional<std::vector<FlowStringValue>> CalleeSavedRegisters; 776 // TODO: Serialize the various register masks. 777 // Frame information 778 MachineFrameInfo FrameInfo; 779 std::vector<FixedMachineStackObject> FixedStackObjects; 780 std::vector<EntryValueObject> EntryValueObjects; 781 std::vector<MachineStackObject> StackObjects; 782 std::vector<MachineConstantPoolValue> Constants; /// Constant pool. 783 std::unique_ptr<MachineFunctionInfo> MachineFuncInfo; 784 std::vector<CallSiteInfo> CallSitesInfo; 785 std::vector<DebugValueSubstitution> DebugValueSubstitutions; 786 MachineJumpTable JumpTableInfo; 787 std::vector<StringValue> MachineMetadataNodes; 788 std::vector<CalledGlobal> CalledGlobals; 789 BlockStringValue Body; 790 }; 791 792 template <> struct MappingTraits<MachineFunction> { 793 static void mapping(IO &YamlIO, MachineFunction &MF) { 794 YamlIO.mapRequired("name", MF.Name); 795 YamlIO.mapOptional("alignment", MF.Alignment, std::nullopt); 796 YamlIO.mapOptional("exposesReturnsTwice", MF.ExposesReturnsTwice, false); 797 YamlIO.mapOptional("legalized", MF.Legalized, false); 798 YamlIO.mapOptional("regBankSelected", MF.RegBankSelected, false); 799 YamlIO.mapOptional("selected", MF.Selected, false); 800 YamlIO.mapOptional("failedISel", MF.FailedISel, false); 801 YamlIO.mapOptional("tracksRegLiveness", MF.TracksRegLiveness, false); 802 YamlIO.mapOptional("hasWinCFI", MF.HasWinCFI, false); 803 804 // PHIs must be not be capitalized, since it will clash with the MIR opcode 805 // leading to false-positive FileCheck hits with CHECK-NOT 806 YamlIO.mapOptional("noPhis", MF.NoPHIs, std::optional<bool>()); 807 YamlIO.mapOptional("isSSA", MF.IsSSA, std::optional<bool>()); 808 YamlIO.mapOptional("noVRegs", MF.NoVRegs, std::optional<bool>()); 809 YamlIO.mapOptional("hasFakeUses", MF.HasFakeUses, std::optional<bool>()); 810 811 YamlIO.mapOptional("callsEHReturn", MF.CallsEHReturn, false); 812 YamlIO.mapOptional("callsUnwindInit", MF.CallsUnwindInit, false); 813 YamlIO.mapOptional("hasEHCatchret", MF.HasEHCatchret, false); 814 YamlIO.mapOptional("hasEHScopes", MF.HasEHScopes, false); 815 YamlIO.mapOptional("hasEHFunclets", MF.HasEHFunclets, false); 816 YamlIO.mapOptional("isOutlined", MF.IsOutlined, false); 817 YamlIO.mapOptional("debugInstrRef", MF.UseDebugInstrRef, false); 818 819 YamlIO.mapOptional("failsVerification", MF.FailsVerification, false); 820 YamlIO.mapOptional("tracksDebugUserValues", MF.TracksDebugUserValues, 821 false); 822 YamlIO.mapOptional("registers", MF.VirtualRegisters, 823 std::vector<VirtualRegisterDefinition>()); 824 YamlIO.mapOptional("liveins", MF.LiveIns, 825 std::vector<MachineFunctionLiveIn>()); 826 YamlIO.mapOptional("calleeSavedRegisters", MF.CalleeSavedRegisters, 827 std::optional<std::vector<FlowStringValue>>()); 828 YamlIO.mapOptional("frameInfo", MF.FrameInfo, MachineFrameInfo()); 829 YamlIO.mapOptional("fixedStack", MF.FixedStackObjects, 830 std::vector<FixedMachineStackObject>()); 831 YamlIO.mapOptional("stack", MF.StackObjects, 832 std::vector<MachineStackObject>()); 833 YamlIO.mapOptional("entry_values", MF.EntryValueObjects, 834 std::vector<EntryValueObject>()); 835 YamlIO.mapOptional("callSites", MF.CallSitesInfo, 836 std::vector<CallSiteInfo>()); 837 YamlIO.mapOptional("debugValueSubstitutions", MF.DebugValueSubstitutions, 838 std::vector<DebugValueSubstitution>()); 839 YamlIO.mapOptional("constants", MF.Constants, 840 std::vector<MachineConstantPoolValue>()); 841 YamlIO.mapOptional("machineFunctionInfo", MF.MachineFuncInfo); 842 if (!YamlIO.outputting() || !MF.JumpTableInfo.Entries.empty()) 843 YamlIO.mapOptional("jumpTable", MF.JumpTableInfo, MachineJumpTable()); 844 if (!YamlIO.outputting() || !MF.MachineMetadataNodes.empty()) 845 YamlIO.mapOptional("machineMetadataNodes", MF.MachineMetadataNodes, 846 std::vector<StringValue>()); 847 if (!YamlIO.outputting() || !MF.CalledGlobals.empty()) 848 YamlIO.mapOptional("calledGlobals", MF.CalledGlobals, 849 std::vector<CalledGlobal>()); 850 YamlIO.mapOptional("body", MF.Body, BlockStringValue()); 851 } 852 }; 853 854 } // end namespace yaml 855 } // end namespace llvm 856 857 #endif // LLVM_CODEGEN_MIRYAMLMAPPING_H 858