1 //===- llvm/IR/Metadata.h - Metadata definitions ----------------*- C++ -*-===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 /// @file 10 /// This file contains the declarations for metadata subclasses. 11 /// They represent the different flavors of metadata that live in LLVM. 12 // 13 //===----------------------------------------------------------------------===// 14 15 #ifndef LLVM_IR_METADATA_H 16 #define LLVM_IR_METADATA_H 17 18 #include "llvm/ADT/ArrayRef.h" 19 #include "llvm/ADT/DenseMap.h" 20 #include "llvm/ADT/DenseMapInfo.h" 21 #include "llvm/ADT/PointerUnion.h" 22 #include "llvm/ADT/SmallVector.h" 23 #include "llvm/ADT/StringRef.h" 24 #include "llvm/ADT/ilist_node.h" 25 #include "llvm/ADT/iterator_range.h" 26 #include "llvm/IR/Constant.h" 27 #include "llvm/IR/LLVMContext.h" 28 #include "llvm/IR/Value.h" 29 #include "llvm/Support/CBindingWrapping.h" 30 #include "llvm/Support/Casting.h" 31 #include "llvm/Support/ErrorHandling.h" 32 #include <cassert> 33 #include <cstddef> 34 #include <cstdint> 35 #include <iterator> 36 #include <memory> 37 #include <string> 38 #include <type_traits> 39 #include <utility> 40 41 namespace llvm { 42 43 class Module; 44 class ModuleSlotTracker; 45 class raw_ostream; 46 class DbgVariableRecord; 47 template <typename T> class StringMapEntry; 48 template <typename ValueTy> class StringMapEntryStorage; 49 class Type; 50 51 enum LLVMConstants : uint32_t { 52 DEBUG_METADATA_VERSION = 3 // Current debug info version number. 53 }; 54 55 /// Magic number in the value profile metadata showing a target has been 56 /// promoted for the instruction and shouldn't be promoted again. 57 const uint64_t NOMORE_ICP_MAGICNUM = -1; 58 59 /// Root of the metadata hierarchy. 60 /// 61 /// This is a root class for typeless data in the IR. 62 class Metadata { 63 friend class ReplaceableMetadataImpl; 64 65 /// RTTI. 66 const unsigned char SubclassID; 67 68 protected: 69 /// Active type of storage. 70 enum StorageType { Uniqued, Distinct, Temporary }; 71 72 /// Storage flag for non-uniqued, otherwise unowned, metadata. 73 unsigned char Storage : 7; 74 75 unsigned char SubclassData1 : 1; 76 unsigned short SubclassData16 = 0; 77 unsigned SubclassData32 = 0; 78 79 public: 80 enum MetadataKind { 81 #define HANDLE_METADATA_LEAF(CLASS) CLASS##Kind, 82 #include "llvm/IR/Metadata.def" 83 }; 84 85 protected: 86 Metadata(unsigned ID, StorageType Storage) 87 : SubclassID(ID), Storage(Storage), SubclassData1(false) { 88 static_assert(sizeof(*this) == 8, "Metadata fields poorly packed"); 89 } 90 91 ~Metadata() = default; 92 93 /// Default handling of a changed operand, which asserts. 94 /// 95 /// If subclasses pass themselves in as owners to a tracking node reference, 96 /// they must provide an implementation of this method. 97 void handleChangedOperand(void *, Metadata *) { 98 llvm_unreachable("Unimplemented in Metadata subclass"); 99 } 100 101 public: 102 unsigned getMetadataID() const { return SubclassID; } 103 104 /// User-friendly dump. 105 /// 106 /// If \c M is provided, metadata nodes will be numbered canonically; 107 /// otherwise, pointer addresses are substituted. 108 /// 109 /// Note: this uses an explicit overload instead of default arguments so that 110 /// the nullptr version is easy to call from a debugger. 
111 /// 112 /// @{ 113 void dump() const; 114 void dump(const Module *M) const; 115 /// @} 116 117 /// Print. 118 /// 119 /// Prints definition of \c this. 120 /// 121 /// If \c M is provided, metadata nodes will be numbered canonically; 122 /// otherwise, pointer addresses are substituted. 123 /// @{ 124 void print(raw_ostream &OS, const Module *M = nullptr, 125 bool IsForDebug = false) const; 126 void print(raw_ostream &OS, ModuleSlotTracker &MST, const Module *M = nullptr, 127 bool IsForDebug = false) const; 128 /// @} 129 130 /// Print as operand. 131 /// 132 /// Prints reference of \c this. 133 /// 134 /// If \c M is provided, metadata nodes will be numbered canonically; 135 /// otherwise, pointer addresses are substituted. 136 /// @{ 137 void printAsOperand(raw_ostream &OS, const Module *M = nullptr) const; 138 void printAsOperand(raw_ostream &OS, ModuleSlotTracker &MST, 139 const Module *M = nullptr) const; 140 /// @} 141 }; 142 143 // Create wrappers for C Binding types (see CBindingWrapping.h). 144 DEFINE_ISA_CONVERSION_FUNCTIONS(Metadata, LLVMMetadataRef) 145 146 // Specialized opaque metadata conversions. 147 inline Metadata **unwrap(LLVMMetadataRef *MDs) { 148 return reinterpret_cast<Metadata**>(MDs); 149 } 150 151 #define HANDLE_METADATA(CLASS) class CLASS; 152 #include "llvm/IR/Metadata.def" 153 154 // Provide specializations of isa so that we don't need definitions of 155 // subclasses to see if the metadata is a subclass. 156 #define HANDLE_METADATA_LEAF(CLASS) \ 157 template <> struct isa_impl<CLASS, Metadata> { \ 158 static inline bool doit(const Metadata &MD) { \ 159 return MD.getMetadataID() == Metadata::CLASS##Kind; \ 160 } \ 161 }; 162 #include "llvm/IR/Metadata.def" 163 164 inline raw_ostream &operator<<(raw_ostream &OS, const Metadata &MD) { 165 MD.print(OS); 166 return OS; 167 } 168 169 /// Metadata wrapper in the Value hierarchy. 170 /// 171 /// A member of the \a Value hierarchy to represent a reference to metadata. 172 /// This allows, e.g., intrinsics to have metadata as operands. 173 /// 174 /// Notably, this is the only thing in either hierarchy that is allowed to 175 /// reference \a LocalAsMetadata. 176 class MetadataAsValue : public Value { 177 friend class ReplaceableMetadataImpl; 178 friend class LLVMContextImpl; 179 180 Metadata *MD; 181 182 MetadataAsValue(Type *Ty, Metadata *MD); 183 184 /// Drop use of metadata (during teardown). 185 void dropUse() { MD = nullptr; } 186 187 public: 188 ~MetadataAsValue(); 189 190 static MetadataAsValue *get(LLVMContext &Context, Metadata *MD); 191 static MetadataAsValue *getIfExists(LLVMContext &Context, Metadata *MD); 192 193 Metadata *getMetadata() const { return MD; } 194 195 static bool classof(const Value *V) { 196 return V->getValueID() == MetadataAsValueVal; 197 } 198 199 private: 200 void handleChangedMetadata(Metadata *MD); 201 void track(); 202 void untrack(); 203 }; 204 205 /// Base class for tracking ValueAsMetadata/DIArgLists with user lookups and 206 /// Owner callbacks outside of ValueAsMetadata. 207 /// 208 /// Currently only inherited by DbgVariableRecord; if other classes need to use 209 /// it, then a SubclassID will need to be added (either as a new field or by 210 /// making DebugValue into a PointerIntUnion) to discriminate between the 211 /// subclasses in lookup and callback handling. 212 class DebugValueUser { 213 protected: 214 // Capacity to store 3 debug values. 
215 // TODO: Not all DebugValueUser instances need all 3 elements, if we 216 // restructure the DbgVariableRecord class then we can template parameterize 217 // this array size. 218 std::array<Metadata *, 3> DebugValues; 219 220 ArrayRef<Metadata *> getDebugValues() const { return DebugValues; } 221 222 public: 223 DbgVariableRecord *getUser(); 224 const DbgVariableRecord *getUser() const; 225 /// To be called by ReplaceableMetadataImpl::replaceAllUsesWith, where `Old` 226 /// is a pointer to one of the pointers in `DebugValues` (so should be type 227 /// Metadata**), and `NewDebugValue` is the new Metadata* that is replacing 228 /// *Old. 229 /// For manually replacing elements of DebugValues, 230 /// `resetDebugValue(Idx, NewDebugValue)` should be used instead. 231 void handleChangedValue(void *Old, Metadata *NewDebugValue); 232 DebugValueUser() = default; 233 explicit DebugValueUser(std::array<Metadata *, 3> DebugValues) 234 : DebugValues(DebugValues) { 235 trackDebugValues(); 236 } 237 DebugValueUser(DebugValueUser &&X) { 238 DebugValues = X.DebugValues; 239 retrackDebugValues(X); 240 } 241 DebugValueUser(const DebugValueUser &X) { 242 DebugValues = X.DebugValues; 243 trackDebugValues(); 244 } 245 246 DebugValueUser &operator=(DebugValueUser &&X) { 247 if (&X == this) 248 return *this; 249 250 untrackDebugValues(); 251 DebugValues = X.DebugValues; 252 retrackDebugValues(X); 253 return *this; 254 } 255 256 DebugValueUser &operator=(const DebugValueUser &X) { 257 if (&X == this) 258 return *this; 259 260 untrackDebugValues(); 261 DebugValues = X.DebugValues; 262 trackDebugValues(); 263 return *this; 264 } 265 266 ~DebugValueUser() { untrackDebugValues(); } 267 268 void resetDebugValues() { 269 untrackDebugValues(); 270 DebugValues.fill(nullptr); 271 } 272 273 void resetDebugValue(size_t Idx, Metadata *DebugValue) { 274 assert(Idx < 3 && "Invalid debug value index."); 275 untrackDebugValue(Idx); 276 DebugValues[Idx] = DebugValue; 277 trackDebugValue(Idx); 278 } 279 280 bool operator==(const DebugValueUser &X) const { 281 return DebugValues == X.DebugValues; 282 } 283 bool operator!=(const DebugValueUser &X) const { 284 return DebugValues != X.DebugValues; 285 } 286 287 private: 288 void trackDebugValue(size_t Idx); 289 void trackDebugValues(); 290 291 void untrackDebugValue(size_t Idx); 292 void untrackDebugValues(); 293 294 void retrackDebugValues(DebugValueUser &X); 295 }; 296 297 /// API for tracking metadata references through RAUW and deletion. 298 /// 299 /// Shared API for updating \a Metadata pointers in subclasses that support 300 /// RAUW. 301 /// 302 /// This API is not meant to be used directly. See \a TrackingMDRef for a 303 /// user-friendly tracking reference. 304 class MetadataTracking { 305 public: 306 /// Track the reference to metadata. 307 /// 308 /// Register \c MD with \c *MD, if the subclass supports tracking. If \c *MD 309 /// gets RAUW'ed, \c MD will be updated to the new address. If \c *MD gets 310 /// deleted, \c MD will be set to \c nullptr. 311 /// 312 /// If tracking isn't supported, \c *MD will not change. 313 /// 314 /// \return true iff tracking is supported by \c MD. 315 static bool track(Metadata *&MD) { 316 return track(&MD, *MD, static_cast<Metadata *>(nullptr)); 317 } 318 319 /// Track the reference to metadata for \a Metadata. 320 /// 321 /// As \a track(Metadata*&), but with support for calling back to \c Owner to 322 /// tell it that its operand changed. This could trigger \c Owner being 323 /// re-uniqued. 
324 static bool track(void *Ref, Metadata &MD, Metadata &Owner) { 325 return track(Ref, MD, &Owner); 326 } 327 328 /// Track the reference to metadata for \a MetadataAsValue. 329 /// 330 /// As \a track(Metadata*&), but with support for calling back to \c Owner to 331 /// tell it that its operand changed. This could trigger \c Owner being 332 /// re-uniqued. 333 static bool track(void *Ref, Metadata &MD, MetadataAsValue &Owner) { 334 return track(Ref, MD, &Owner); 335 } 336 337 /// Track the reference to metadata for \a DebugValueUser. 338 /// 339 /// As \a track(Metadata*&), but with support for calling back to \c Owner to 340 /// tell it that its operand changed. This could trigger \c Owner being 341 /// re-uniqued. 342 static bool track(void *Ref, Metadata &MD, DebugValueUser &Owner) { 343 return track(Ref, MD, &Owner); 344 } 345 346 /// Stop tracking a reference to metadata. 347 /// 348 /// Stops \c *MD from tracking \c MD. 349 static void untrack(Metadata *&MD) { untrack(&MD, *MD); } 350 static void untrack(void *Ref, Metadata &MD); 351 352 /// Move tracking from one reference to another. 353 /// 354 /// Semantically equivalent to \c untrack(MD) followed by \c track(New), 355 /// except that ownership callbacks are maintained. 356 /// 357 /// Note: it is an error if \c *MD does not equal \c New. 358 /// 359 /// \return true iff tracking is supported by \c MD. 360 static bool retrack(Metadata *&MD, Metadata *&New) { 361 return retrack(&MD, *MD, &New); 362 } 363 static bool retrack(void *Ref, Metadata &MD, void *New); 364 365 /// Check whether metadata is replaceable. 366 static bool isReplaceable(const Metadata &MD); 367 368 using OwnerTy = PointerUnion<MetadataAsValue *, Metadata *, DebugValueUser *>; 369 370 private: 371 /// Track a reference to metadata for an owner. 372 /// 373 /// Generalized version of tracking. 374 static bool track(void *Ref, Metadata &MD, OwnerTy Owner); 375 }; 376 377 /// Shared implementation of use-lists for replaceable metadata. 378 /// 379 /// Most metadata cannot be RAUW'ed. This is a shared implementation of 380 /// use-lists and associated API for the three that support it ( 381 /// \a ValueAsMetadata, \a TempMDNode, and \a DIArgList). 382 class ReplaceableMetadataImpl { 383 friend class MetadataTracking; 384 385 public: 386 using OwnerTy = MetadataTracking::OwnerTy; 387 388 private: 389 LLVMContext &Context; 390 uint64_t NextIndex = 0; 391 SmallDenseMap<void *, std::pair<OwnerTy, uint64_t>, 4> UseMap; 392 393 public: 394 ReplaceableMetadataImpl(LLVMContext &Context) : Context(Context) {} 395 396 ~ReplaceableMetadataImpl() { 397 assert(UseMap.empty() && "Cannot destroy in-use replaceable metadata"); 398 } 399 400 LLVMContext &getContext() const { return Context; } 401 402 /// Replace all uses of this with MD. 403 /// 404 /// Replace all uses of this with \c MD, which is allowed to be null. 405 void replaceAllUsesWith(Metadata *MD); 406 /// Replace all uses of the constant with Undef in debug info metadata 407 static void SalvageDebugInfo(const Constant &C); 408 /// Returns the list of all DIArgList users of this. 409 SmallVector<Metadata *> getAllArgListUsers(); 410 /// Returns the list of all DbgVariableRecord users of this. 411 SmallVector<DbgVariableRecord *> getAllDbgVariableRecordUsers(); 412 413 /// Resolve all uses of this. 414 /// 415 /// Resolve all uses of this, turning off RAUW permanently. If \c 416 /// ResolveUsers, call \a MDNode::resolve() on any users whose last operand 417 /// is resolved. 
418 void resolveAllUses(bool ResolveUsers = true); 419 420 unsigned getNumUses() const { return UseMap.size(); } 421 422 private: 423 void addRef(void *Ref, OwnerTy Owner); 424 void dropRef(void *Ref); 425 void moveRef(void *Ref, void *New, const Metadata &MD); 426 427 /// Lazily construct RAUW support on MD. 428 /// 429 /// If this is an unresolved MDNode, RAUW support will be created on-demand. 430 /// ValueAsMetadata always has RAUW support. 431 static ReplaceableMetadataImpl *getOrCreate(Metadata &MD); 432 433 /// Get RAUW support on MD, if it exists. 434 static ReplaceableMetadataImpl *getIfExists(Metadata &MD); 435 436 /// Check whether this node will support RAUW. 437 /// 438 /// Returns \c true unless getOrCreate() would return null. 439 static bool isReplaceable(const Metadata &MD); 440 }; 441 442 /// Value wrapper in the Metadata hierarchy. 443 /// 444 /// This is a custom value handle that allows other metadata to refer to 445 /// classes in the Value hierarchy. 446 /// 447 /// Because of full uniquing support, each value is only wrapped by a single \a 448 /// ValueAsMetadata object, so the lookup maps are far more efficient than 449 /// those using ValueHandleBase. 450 class ValueAsMetadata : public Metadata, ReplaceableMetadataImpl { 451 friend class ReplaceableMetadataImpl; 452 friend class LLVMContextImpl; 453 454 Value *V; 455 456 /// Drop users without RAUW (during teardown). 457 void dropUsers() { 458 ReplaceableMetadataImpl::resolveAllUses(/* ResolveUsers */ false); 459 } 460 461 protected: 462 ValueAsMetadata(unsigned ID, Value *V) 463 : Metadata(ID, Uniqued), ReplaceableMetadataImpl(V->getContext()), V(V) { 464 assert(V && "Expected valid value"); 465 } 466 467 ~ValueAsMetadata() = default; 468 469 public: 470 static ValueAsMetadata *get(Value *V); 471 472 static ConstantAsMetadata *getConstant(Value *C) { 473 return cast<ConstantAsMetadata>(get(C)); 474 } 475 476 static LocalAsMetadata *getLocal(Value *Local) { 477 return cast<LocalAsMetadata>(get(Local)); 478 } 479 480 static ValueAsMetadata *getIfExists(Value *V); 481 482 static ConstantAsMetadata *getConstantIfExists(Value *C) { 483 return cast_or_null<ConstantAsMetadata>(getIfExists(C)); 484 } 485 486 static LocalAsMetadata *getLocalIfExists(Value *Local) { 487 return cast_or_null<LocalAsMetadata>(getIfExists(Local)); 488 } 489 490 Value *getValue() const { return V; } 491 Type *getType() const { return V->getType(); } 492 LLVMContext &getContext() const { return V->getContext(); } 493 494 SmallVector<Metadata *> getAllArgListUsers() { 495 return ReplaceableMetadataImpl::getAllArgListUsers(); 496 } 497 SmallVector<DbgVariableRecord *> getAllDbgVariableRecordUsers() { 498 return ReplaceableMetadataImpl::getAllDbgVariableRecordUsers(); 499 } 500 501 static void handleDeletion(Value *V); 502 static void handleRAUW(Value *From, Value *To); 503 504 protected: 505 /// Handle collisions after \a Value::replaceAllUsesWith(). 506 /// 507 /// RAUW isn't supported directly for \a ValueAsMetadata, but if the wrapped 508 /// \a Value gets RAUW'ed and the target already exists, this is used to 509 /// merge the two metadata nodes. 
  void replaceAllUsesWith(Metadata *MD) {
    ReplaceableMetadataImpl::replaceAllUsesWith(MD);
  }

public:
  static bool classof(const Metadata *MD) {
    return MD->getMetadataID() == LocalAsMetadataKind ||
           MD->getMetadataID() == ConstantAsMetadataKind;
  }
};

class ConstantAsMetadata : public ValueAsMetadata {
  friend class ValueAsMetadata;

  ConstantAsMetadata(Constant *C)
      : ValueAsMetadata(ConstantAsMetadataKind, C) {}

public:
  static ConstantAsMetadata *get(Constant *C) {
    return ValueAsMetadata::getConstant(C);
  }

  static ConstantAsMetadata *getIfExists(Constant *C) {
    return ValueAsMetadata::getConstantIfExists(C);
  }

  Constant *getValue() const {
    return cast<Constant>(ValueAsMetadata::getValue());
  }

  static bool classof(const Metadata *MD) {
    return MD->getMetadataID() == ConstantAsMetadataKind;
  }
};

class LocalAsMetadata : public ValueAsMetadata {
  friend class ValueAsMetadata;

  LocalAsMetadata(Value *Local)
      : ValueAsMetadata(LocalAsMetadataKind, Local) {
    assert(!isa<Constant>(Local) && "Expected local value");
  }

public:
  static LocalAsMetadata *get(Value *Local) {
    return ValueAsMetadata::getLocal(Local);
  }

  static LocalAsMetadata *getIfExists(Value *Local) {
    return ValueAsMetadata::getLocalIfExists(Local);
  }

  static bool classof(const Metadata *MD) {
    return MD->getMetadataID() == LocalAsMetadataKind;
  }
};

/// Transitional API for extracting constants from Metadata.
///
/// This namespace contains transitional functions for metadata that points to
/// \a Constants.
///
/// In prehistory -- when metadata was a subclass of \a Value -- \a MDNode
/// operands could refer to any \a Value. There was a lot of code like this:
///
/// \code
///   MDNode *N = ...;
///   auto *CI = dyn_cast<ConstantInt>(N->getOperand(2));
/// \endcode
///
/// Now that \a Value and \a Metadata are in separate hierarchies, maintaining
/// the semantics for \a isa(), \a cast(), \a dyn_cast() (etc.) requires three
/// steps: cast in the \a Metadata hierarchy, extraction of the \a Value, and
/// cast in the \a Value hierarchy. Besides creating boiler-plate, this
/// requires subtle control flow changes.
///
/// The end-goal is to create a new type of metadata, called (e.g.) \a MDInt,
/// so that metadata can refer to numbers without traversing a bridge to the \a
/// Value hierarchy. In this final state, the code above would look like this:
///
/// \code
///   MDNode *N = ...;
///   auto *MI = dyn_cast<MDInt>(N->getOperand(2));
/// \endcode
///
/// The API in this namespace supports the transition. \a MDInt doesn't exist
/// yet, and even once it does, changing each metadata schema to use it is its
/// own mini-project. In the meantime this API prevents us from introducing
/// complex and bug-prone control flow that will disappear in the end. In
/// particular, the above code looks like this:
///
/// \code
///   MDNode *N = ...;
///   auto *CI = mdconst::dyn_extract<ConstantInt>(N->getOperand(2));
/// \endcode
///
/// The full set of provided functions includes:
///
///   mdconst::hasa                 <=> isa
///   mdconst::extract              <=> cast
///   mdconst::extract_or_null      <=> cast_or_null
///   mdconst::dyn_extract          <=> dyn_cast
///   mdconst::dyn_extract_or_null  <=> dyn_cast_or_null
///
/// The target of the cast must be a subclass of \a Constant.
namespace mdconst {

namespace detail {

template <class T> T &make();
template <class T, class Result> struct HasDereference {
  using Yes = char[1];
  using No = char[2];
  template <size_t N> struct SFINAE {};

  template <class U, class V>
  static Yes &hasDereference(SFINAE<sizeof(static_cast<V>(*make<U>()))> * = 0);
  template <class U, class V> static No &hasDereference(...);

  static const bool value =
      sizeof(hasDereference<T, Result>(nullptr)) == sizeof(Yes);
};
template <class V, class M> struct IsValidPointer {
  static const bool value = std::is_base_of<Constant, V>::value &&
                            HasDereference<M, const Metadata &>::value;
};
template <class V, class M> struct IsValidReference {
  static const bool value = std::is_base_of<Constant, V>::value &&
                            std::is_convertible<M, const Metadata &>::value;
};

} // end namespace detail

/// Check whether Metadata has a Value.
///
/// As an analogue to \a isa(), check whether \c MD has a \a Value inside of
/// type \c X.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, bool>
hasa(Y &&MD) {
  assert(MD && "Null pointer sent into hasa");
  if (auto *V = dyn_cast<ConstantAsMetadata>(MD))
    return isa<X>(V->getValue());
  return false;
}
template <class X, class Y>
inline std::enable_if_t<detail::IsValidReference<X, Y &>::value, bool>
hasa(Y &MD) {
  return hasa(&MD);
}

/// Extract a Value from Metadata.
///
/// As an analogue to \a cast(), extract the \a Value subclass \c X from \c MD.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
extract(Y &&MD) {
  return cast<X>(cast<ConstantAsMetadata>(MD)->getValue());
}
template <class X, class Y>
inline std::enable_if_t<detail::IsValidReference<X, Y &>::value, X *>
extract(Y &MD) {
  return extract(&MD);
}

/// Extract a Value from Metadata, allowing null.
///
/// As an analogue to \a cast_or_null(), extract the \a Value subclass \c X
/// from \c MD, allowing \c MD to be null.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
extract_or_null(Y &&MD) {
  if (auto *V = cast_or_null<ConstantAsMetadata>(MD))
    return cast<X>(V->getValue());
  return nullptr;
}

/// Extract a Value from Metadata, if any.
///
/// As an analogue to \a dyn_cast(), extract the \a Value subclass \c X
/// from \c MD, return null if \c MD doesn't contain a \a Value or if the \a
/// Value it does contain is of the wrong subclass.
692 template <class X, class Y> 693 inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *> 694 dyn_extract(Y &&MD) { 695 if (auto *V = dyn_cast<ConstantAsMetadata>(MD)) 696 return dyn_cast<X>(V->getValue()); 697 return nullptr; 698 } 699 700 /// Extract a Value from Metadata, if any, allowing null. 701 /// 702 /// As an analogue to \a dyn_cast_or_null(), extract the \a Value subclass \c X 703 /// from \c MD, return null if \c MD doesn't contain a \a Value or if the \a 704 /// Value it does contain is of the wrong subclass, allowing \c MD to be null. 705 template <class X, class Y> 706 inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *> 707 dyn_extract_or_null(Y &&MD) { 708 if (auto *V = dyn_cast_or_null<ConstantAsMetadata>(MD)) 709 return dyn_cast<X>(V->getValue()); 710 return nullptr; 711 } 712 713 } // end namespace mdconst 714 715 //===----------------------------------------------------------------------===// 716 /// A single uniqued string. 717 /// 718 /// These are used to efficiently contain a byte sequence for metadata. 719 /// MDString is always unnamed. 720 class MDString : public Metadata { 721 friend class StringMapEntryStorage<MDString>; 722 723 StringMapEntry<MDString> *Entry = nullptr; 724 725 MDString() : Metadata(MDStringKind, Uniqued) {} 726 727 public: 728 MDString(const MDString &) = delete; 729 MDString &operator=(MDString &&) = delete; 730 MDString &operator=(const MDString &) = delete; 731 732 static MDString *get(LLVMContext &Context, StringRef Str); 733 static MDString *get(LLVMContext &Context, const char *Str) { 734 return get(Context, Str ? StringRef(Str) : StringRef()); 735 } 736 737 StringRef getString() const; 738 739 unsigned getLength() const { return (unsigned)getString().size(); } 740 741 using iterator = StringRef::iterator; 742 743 /// Pointer to the first byte of the string. 744 iterator begin() const { return getString().begin(); } 745 746 /// Pointer to one byte past the end of the string. 747 iterator end() const { return getString().end(); } 748 749 const unsigned char *bytes_begin() const { return getString().bytes_begin(); } 750 const unsigned char *bytes_end() const { return getString().bytes_end(); } 751 752 /// Methods for support type inquiry through isa, cast, and dyn_cast. 753 static bool classof(const Metadata *MD) { 754 return MD->getMetadataID() == MDStringKind; 755 } 756 }; 757 758 /// A collection of metadata nodes that might be associated with a 759 /// memory access used by the alias-analysis infrastructure. 760 struct AAMDNodes { 761 explicit AAMDNodes() = default; 762 explicit AAMDNodes(MDNode *T, MDNode *TS, MDNode *S, MDNode *N) 763 : TBAA(T), TBAAStruct(TS), Scope(S), NoAlias(N) {} 764 765 bool operator==(const AAMDNodes &A) const { 766 return TBAA == A.TBAA && TBAAStruct == A.TBAAStruct && Scope == A.Scope && 767 NoAlias == A.NoAlias; 768 } 769 770 bool operator!=(const AAMDNodes &A) const { return !(*this == A); } 771 772 explicit operator bool() const { 773 return TBAA || TBAAStruct || Scope || NoAlias; 774 } 775 776 /// The tag for type-based alias analysis. 777 MDNode *TBAA = nullptr; 778 779 /// The tag for type-based alias analysis (tbaa struct). 780 MDNode *TBAAStruct = nullptr; 781 782 /// The tag for alias scope specification (used with noalias). 783 MDNode *Scope = nullptr; 784 785 /// The tag specifying the noalias scope. 
786 MDNode *NoAlias = nullptr; 787 788 // Shift tbaa Metadata node to start off bytes later 789 static MDNode *shiftTBAA(MDNode *M, size_t off); 790 791 // Shift tbaa.struct Metadata node to start off bytes later 792 static MDNode *shiftTBAAStruct(MDNode *M, size_t off); 793 794 // Extend tbaa Metadata node to apply to a series of bytes of length len. 795 // A size of -1 denotes an unknown size. 796 static MDNode *extendToTBAA(MDNode *TBAA, ssize_t len); 797 798 /// Given two sets of AAMDNodes that apply to the same pointer, 799 /// give the best AAMDNodes that are compatible with both (i.e. a set of 800 /// nodes whose allowable aliasing conclusions are a subset of those 801 /// allowable by both of the inputs). However, for efficiency 802 /// reasons, do not create any new MDNodes. 803 AAMDNodes intersect(const AAMDNodes &Other) const { 804 AAMDNodes Result; 805 Result.TBAA = Other.TBAA == TBAA ? TBAA : nullptr; 806 Result.TBAAStruct = Other.TBAAStruct == TBAAStruct ? TBAAStruct : nullptr; 807 Result.Scope = Other.Scope == Scope ? Scope : nullptr; 808 Result.NoAlias = Other.NoAlias == NoAlias ? NoAlias : nullptr; 809 return Result; 810 } 811 812 /// Create a new AAMDNode that describes this AAMDNode after applying a 813 /// constant offset to the start of the pointer. 814 AAMDNodes shift(size_t Offset) const { 815 AAMDNodes Result; 816 Result.TBAA = TBAA ? shiftTBAA(TBAA, Offset) : nullptr; 817 Result.TBAAStruct = 818 TBAAStruct ? shiftTBAAStruct(TBAAStruct, Offset) : nullptr; 819 Result.Scope = Scope; 820 Result.NoAlias = NoAlias; 821 return Result; 822 } 823 824 /// Create a new AAMDNode that describes this AAMDNode after extending it to 825 /// apply to a series of bytes of length Len. A size of -1 denotes an unknown 826 /// size. 827 AAMDNodes extendTo(ssize_t Len) const { 828 AAMDNodes Result; 829 Result.TBAA = TBAA ? extendToTBAA(TBAA, Len) : nullptr; 830 // tbaa.struct contains (offset, size, type) triples. Extending the length 831 // of the tbaa.struct doesn't require changing this (though more information 832 // could be provided by adding more triples at subsequent lengths). 833 Result.TBAAStruct = TBAAStruct; 834 Result.Scope = Scope; 835 Result.NoAlias = NoAlias; 836 return Result; 837 } 838 839 /// Given two sets of AAMDNodes applying to potentially different locations, 840 /// determine the best AAMDNodes that apply to both. 841 AAMDNodes merge(const AAMDNodes &Other) const; 842 843 /// Determine the best AAMDNodes after concatenating two different locations 844 /// together. Different from `merge`, where different locations should 845 /// overlap each other, `concat` puts non-overlapping locations together. 846 AAMDNodes concat(const AAMDNodes &Other) const; 847 848 /// Create a new AAMDNode for accessing \p AccessSize bytes of this AAMDNode. 849 /// If this AAMDNode has !tbaa.struct and \p AccessSize matches the size of 850 /// the field at offset 0, get the TBAA tag describing the accessed field. 851 /// If such an AAMDNode already embeds !tbaa, the existing one is retrieved. 852 /// Finally, !tbaa.struct is zeroed out. 853 AAMDNodes adjustForAccess(unsigned AccessSize); 854 AAMDNodes adjustForAccess(size_t Offset, Type *AccessTy, 855 const DataLayout &DL); 856 AAMDNodes adjustForAccess(size_t Offset, unsigned AccessSize); 857 }; 858 859 // Specialize DenseMapInfo for AAMDNodes. 
860 template<> 861 struct DenseMapInfo<AAMDNodes> { 862 static inline AAMDNodes getEmptyKey() { 863 return AAMDNodes(DenseMapInfo<MDNode *>::getEmptyKey(), 864 nullptr, nullptr, nullptr); 865 } 866 867 static inline AAMDNodes getTombstoneKey() { 868 return AAMDNodes(DenseMapInfo<MDNode *>::getTombstoneKey(), 869 nullptr, nullptr, nullptr); 870 } 871 872 static unsigned getHashValue(const AAMDNodes &Val) { 873 return DenseMapInfo<MDNode *>::getHashValue(Val.TBAA) ^ 874 DenseMapInfo<MDNode *>::getHashValue(Val.TBAAStruct) ^ 875 DenseMapInfo<MDNode *>::getHashValue(Val.Scope) ^ 876 DenseMapInfo<MDNode *>::getHashValue(Val.NoAlias); 877 } 878 879 static bool isEqual(const AAMDNodes &LHS, const AAMDNodes &RHS) { 880 return LHS == RHS; 881 } 882 }; 883 884 /// Tracking metadata reference owned by Metadata. 885 /// 886 /// Similar to \a TrackingMDRef, but it's expected to be owned by an instance 887 /// of \a Metadata, which has the option of registering itself for callbacks to 888 /// re-unique itself. 889 /// 890 /// In particular, this is used by \a MDNode. 891 class MDOperand { 892 Metadata *MD = nullptr; 893 894 public: 895 MDOperand() = default; 896 MDOperand(const MDOperand &) = delete; 897 MDOperand(MDOperand &&Op) { 898 MD = Op.MD; 899 if (MD) 900 (void)MetadataTracking::retrack(Op.MD, MD); 901 Op.MD = nullptr; 902 } 903 MDOperand &operator=(const MDOperand &) = delete; 904 MDOperand &operator=(MDOperand &&Op) { 905 MD = Op.MD; 906 if (MD) 907 (void)MetadataTracking::retrack(Op.MD, MD); 908 Op.MD = nullptr; 909 return *this; 910 } 911 912 // Check if MDOperand is of type MDString and equals `Str`. 913 bool equalsStr(StringRef Str) const { 914 return isa<MDString>(this->get()) && 915 cast<MDString>(this->get())->getString() == Str; 916 } 917 918 ~MDOperand() { untrack(); } 919 920 Metadata *get() const { return MD; } 921 operator Metadata *() const { return get(); } 922 Metadata *operator->() const { return get(); } 923 Metadata &operator*() const { return *get(); } 924 925 void reset() { 926 untrack(); 927 MD = nullptr; 928 } 929 void reset(Metadata *MD, Metadata *Owner) { 930 untrack(); 931 this->MD = MD; 932 track(Owner); 933 } 934 935 private: 936 void track(Metadata *Owner) { 937 if (MD) { 938 if (Owner) 939 MetadataTracking::track(this, *MD, *Owner); 940 else 941 MetadataTracking::track(MD); 942 } 943 } 944 945 void untrack() { 946 assert(static_cast<void *>(this) == &MD && "Expected same address"); 947 if (MD) 948 MetadataTracking::untrack(MD); 949 } 950 }; 951 952 template <> struct simplify_type<MDOperand> { 953 using SimpleType = Metadata *; 954 955 static SimpleType getSimplifiedValue(MDOperand &MD) { return MD.get(); } 956 }; 957 958 template <> struct simplify_type<const MDOperand> { 959 using SimpleType = Metadata *; 960 961 static SimpleType getSimplifiedValue(const MDOperand &MD) { return MD.get(); } 962 }; 963 964 /// Pointer to the context, with optional RAUW support. 965 /// 966 /// Either a raw (non-null) pointer to the \a LLVMContext, or an owned pointer 967 /// to \a ReplaceableMetadataImpl (which has a reference to \a LLVMContext). 
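///
/// A hedged sketch of the state transitions this class supports (Ctx stands
/// for an assumed LLVMContext; names are illustrative, not normative):
///
/// \code
///   ContextAndReplaceableUses CRU(Ctx);    // starts as a bare context pointer
///   CRU.getOrCreateReplaceableUses();      // lazily attaches RAUW support
///   auto Uses = CRU.takeReplaceableUses(); // cede RAUW support; back to
///                                          // holding only the context
/// \endcode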
968 class ContextAndReplaceableUses { 969 PointerUnion<LLVMContext *, ReplaceableMetadataImpl *> Ptr; 970 971 public: 972 ContextAndReplaceableUses(LLVMContext &Context) : Ptr(&Context) {} 973 ContextAndReplaceableUses( 974 std::unique_ptr<ReplaceableMetadataImpl> ReplaceableUses) 975 : Ptr(ReplaceableUses.release()) { 976 assert(getReplaceableUses() && "Expected non-null replaceable uses"); 977 } 978 ContextAndReplaceableUses() = delete; 979 ContextAndReplaceableUses(ContextAndReplaceableUses &&) = delete; 980 ContextAndReplaceableUses(const ContextAndReplaceableUses &) = delete; 981 ContextAndReplaceableUses &operator=(ContextAndReplaceableUses &&) = delete; 982 ContextAndReplaceableUses & 983 operator=(const ContextAndReplaceableUses &) = delete; 984 ~ContextAndReplaceableUses() { delete getReplaceableUses(); } 985 986 operator LLVMContext &() { return getContext(); } 987 988 /// Whether this contains RAUW support. 989 bool hasReplaceableUses() const { 990 return isa<ReplaceableMetadataImpl *>(Ptr); 991 } 992 993 LLVMContext &getContext() const { 994 if (hasReplaceableUses()) 995 return getReplaceableUses()->getContext(); 996 return *cast<LLVMContext *>(Ptr); 997 } 998 999 ReplaceableMetadataImpl *getReplaceableUses() const { 1000 if (hasReplaceableUses()) 1001 return cast<ReplaceableMetadataImpl *>(Ptr); 1002 return nullptr; 1003 } 1004 1005 /// Ensure that this has RAUW support, and then return it. 1006 ReplaceableMetadataImpl *getOrCreateReplaceableUses() { 1007 if (!hasReplaceableUses()) 1008 makeReplaceable(std::make_unique<ReplaceableMetadataImpl>(getContext())); 1009 return getReplaceableUses(); 1010 } 1011 1012 /// Assign RAUW support to this. 1013 /// 1014 /// Make this replaceable, taking ownership of \c ReplaceableUses (which must 1015 /// not be null). 1016 void 1017 makeReplaceable(std::unique_ptr<ReplaceableMetadataImpl> ReplaceableUses) { 1018 assert(ReplaceableUses && "Expected non-null replaceable uses"); 1019 assert(&ReplaceableUses->getContext() == &getContext() && 1020 "Expected same context"); 1021 delete getReplaceableUses(); 1022 Ptr = ReplaceableUses.release(); 1023 } 1024 1025 /// Drop RAUW support. 1026 /// 1027 /// Cede ownership of RAUW support, returning it. 1028 std::unique_ptr<ReplaceableMetadataImpl> takeReplaceableUses() { 1029 assert(hasReplaceableUses() && "Expected to own replaceable uses"); 1030 std::unique_ptr<ReplaceableMetadataImpl> ReplaceableUses( 1031 getReplaceableUses()); 1032 Ptr = &ReplaceableUses->getContext(); 1033 return ReplaceableUses; 1034 } 1035 }; 1036 1037 struct TempMDNodeDeleter { 1038 inline void operator()(MDNode *Node) const; 1039 }; 1040 1041 #define HANDLE_MDNODE_LEAF(CLASS) \ 1042 using Temp##CLASS = std::unique_ptr<CLASS, TempMDNodeDeleter>; 1043 #define HANDLE_MDNODE_BRANCH(CLASS) HANDLE_MDNODE_LEAF(CLASS) 1044 #include "llvm/IR/Metadata.def" 1045 1046 /// Metadata node. 1047 /// 1048 /// Metadata nodes can be uniqued, like constants, or distinct. Temporary 1049 /// metadata nodes (with full support for RAUW) can be used to delay uniquing 1050 /// until forward references are known. The basic metadata node is an \a 1051 /// MDTuple. 1052 /// 1053 /// There is limited support for RAUW at construction time. At construction 1054 /// time, if any operand is a temporary node (or an unresolved uniqued node, 1055 /// which indicates a transitive temporary operand), the node itself will be 1056 /// unresolved. As soon as all operands become resolved, it will drop RAUW 1057 /// support permanently. 
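///
/// A hedged sketch of the usual forward-reference pattern (Ctx stands for an
/// assumed LLVMContext; the names are illustrative only):
///
/// \code
///   // Stand-in for a node that does not exist yet.
///   TempMDTuple Temp = MDTuple::getTemporary(Ctx, {});
///   MDNode *User = MDNode::get(Ctx, {Temp.get()}); // unresolved for now
///   // ... later, once the real node exists ...
///   MDNode *Real = MDNode::get(Ctx, {});
///   Temp->replaceAllUsesWith(Real);                // User can now resolve
///   // Temp is freed by TempMDNodeDeleter when it goes out of scope.
/// \endcode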
///
/// If an unresolved node is part of a cycle, \a resolveCycles() needs
/// to be called on some member of the cycle once all temporary nodes have been
/// replaced.
///
/// MDNodes can be large or small, as well as resizable or non-resizable.
/// Large MDNodes' operands are allocated in a separate storage vector,
/// whereas small MDNodes' operands are co-allocated. Distinct and temporary
/// MDNodes are resizable, but only MDTuples support this capability.
///
/// Clients can add operands to resizable MDNodes using push_back().
class MDNode : public Metadata {
  friend class ReplaceableMetadataImpl;
  friend class LLVMContextImpl;
  friend class DIAssignID;

  /// The header that is co-allocated with an MDNode along with its "small"
  /// operands. It is located immediately before the main body of the node.
  /// The operands are in turn located immediately before the header.
  /// For resizable MDNodes, the space for the storage vector is also allocated
  /// immediately before the header, overlapping with the operands.
  /// Explicitly set alignment because bitfields by default have an
  /// alignment of 1 on z/OS.
  struct alignas(alignof(size_t)) Header {
    bool IsResizable : 1;
    bool IsLarge : 1;
    size_t SmallSize : 4;
    size_t SmallNumOps : 4;
    size_t : sizeof(size_t) * CHAR_BIT - 10;

    unsigned NumUnresolved = 0;
    using LargeStorageVector = SmallVector<MDOperand, 0>;

    static constexpr size_t NumOpsFitInVector =
        sizeof(LargeStorageVector) / sizeof(MDOperand);
    static_assert(
        NumOpsFitInVector * sizeof(MDOperand) == sizeof(LargeStorageVector),
        "sizeof(LargeStorageVector) must be a multiple of sizeof(MDOperand)");

    static constexpr size_t MaxSmallSize = 15;

    static constexpr size_t getOpSize(unsigned NumOps) {
      return sizeof(MDOperand) * NumOps;
    }
    /// Returns the number of operands the node has space for based on its
    /// allocation characteristics.
    static size_t getSmallSize(size_t NumOps, bool IsResizable, bool IsLarge) {
      return IsLarge ? NumOpsFitInVector
                     : std::max(NumOps, NumOpsFitInVector * IsResizable);
    }
    /// Returns the number of bytes allocated for operands and header.
    static size_t getAllocSize(StorageType Storage, size_t NumOps) {
      return getOpSize(
                 getSmallSize(NumOps, isResizable(Storage), isLarge(NumOps))) +
             sizeof(Header);
    }

    /// Only temporary and distinct nodes are resizable.
1116 static bool isResizable(StorageType Storage) { return Storage != Uniqued; } 1117 static bool isLarge(size_t NumOps) { return NumOps > MaxSmallSize; } 1118 1119 size_t getAllocSize() const { 1120 return getOpSize(SmallSize) + sizeof(Header); 1121 } 1122 void *getAllocation() { 1123 return reinterpret_cast<char *>(this + 1) - 1124 alignTo(getAllocSize(), alignof(uint64_t)); 1125 } 1126 1127 void *getLargePtr() const { 1128 static_assert(alignof(LargeStorageVector) <= alignof(Header), 1129 "LargeStorageVector too strongly aligned"); 1130 return reinterpret_cast<char *>(const_cast<Header *>(this)) - 1131 sizeof(LargeStorageVector); 1132 } 1133 1134 void *getSmallPtr(); 1135 1136 LargeStorageVector &getLarge() { 1137 assert(IsLarge); 1138 return *reinterpret_cast<LargeStorageVector *>(getLargePtr()); 1139 } 1140 1141 const LargeStorageVector &getLarge() const { 1142 assert(IsLarge); 1143 return *reinterpret_cast<const LargeStorageVector *>(getLargePtr()); 1144 } 1145 1146 void resizeSmall(size_t NumOps); 1147 void resizeSmallToLarge(size_t NumOps); 1148 void resize(size_t NumOps); 1149 1150 explicit Header(size_t NumOps, StorageType Storage); 1151 ~Header(); 1152 1153 MutableArrayRef<MDOperand> operands() { 1154 if (IsLarge) 1155 return getLarge(); 1156 return MutableArrayRef( 1157 reinterpret_cast<MDOperand *>(this) - SmallSize, SmallNumOps); 1158 } 1159 1160 ArrayRef<MDOperand> operands() const { 1161 if (IsLarge) 1162 return getLarge(); 1163 return ArrayRef(reinterpret_cast<const MDOperand *>(this) - SmallSize, 1164 SmallNumOps); 1165 } 1166 1167 unsigned getNumOperands() const { 1168 if (!IsLarge) 1169 return SmallNumOps; 1170 return getLarge().size(); 1171 } 1172 }; 1173 1174 Header &getHeader() { return *(reinterpret_cast<Header *>(this) - 1); } 1175 1176 const Header &getHeader() const { 1177 return *(reinterpret_cast<const Header *>(this) - 1); 1178 } 1179 1180 ContextAndReplaceableUses Context; 1181 1182 protected: 1183 MDNode(LLVMContext &Context, unsigned ID, StorageType Storage, 1184 ArrayRef<Metadata *> Ops1, ArrayRef<Metadata *> Ops2 = std::nullopt); 1185 ~MDNode() = default; 1186 1187 void *operator new(size_t Size, size_t NumOps, StorageType Storage); 1188 void operator delete(void *Mem); 1189 1190 /// Required by std, but never called. 1191 void operator delete(void *, unsigned) { 1192 llvm_unreachable("Constructor throws?"); 1193 } 1194 1195 /// Required by std, but never called. 1196 void operator delete(void *, unsigned, bool) { 1197 llvm_unreachable("Constructor throws?"); 1198 } 1199 1200 void dropAllReferences(); 1201 1202 MDOperand *mutable_begin() { return getHeader().operands().begin(); } 1203 MDOperand *mutable_end() { return getHeader().operands().end(); } 1204 1205 using mutable_op_range = iterator_range<MDOperand *>; 1206 1207 mutable_op_range mutable_operands() { 1208 return mutable_op_range(mutable_begin(), mutable_end()); 1209 } 1210 1211 public: 1212 MDNode(const MDNode &) = delete; 1213 void operator=(const MDNode &) = delete; 1214 void *operator new(size_t) = delete; 1215 1216 static inline MDTuple *get(LLVMContext &Context, ArrayRef<Metadata *> MDs); 1217 static inline MDTuple *getIfExists(LLVMContext &Context, 1218 ArrayRef<Metadata *> MDs); 1219 static inline MDTuple *getDistinct(LLVMContext &Context, 1220 ArrayRef<Metadata *> MDs); 1221 static inline TempMDTuple getTemporary(LLVMContext &Context, 1222 ArrayRef<Metadata *> MDs); 1223 1224 /// Create a (temporary) clone of this. 
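///
/// A hedged sketch of the usual clone-and-update pattern (N and NewOp are
/// assumed, illustrative names):
///
/// \code
///   TempMDNode Copy = N->clone();        // temporary, fully RAUW-able
///   Copy->replaceOperandWith(0, NewOp);  // mutate the copy, not N
///   MDNode *Updated = MDNode::replaceWithUniqued(std::move(Copy));
/// \endcode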
1225 TempMDNode clone() const; 1226 1227 /// Deallocate a node created by getTemporary. 1228 /// 1229 /// Calls \c replaceAllUsesWith(nullptr) before deleting, so any remaining 1230 /// references will be reset. 1231 static void deleteTemporary(MDNode *N); 1232 1233 LLVMContext &getContext() const { return Context.getContext(); } 1234 1235 /// Replace a specific operand. 1236 void replaceOperandWith(unsigned I, Metadata *New); 1237 1238 /// Check if node is fully resolved. 1239 /// 1240 /// If \a isTemporary(), this always returns \c false; if \a isDistinct(), 1241 /// this always returns \c true. 1242 /// 1243 /// If \a isUniqued(), returns \c true if this has already dropped RAUW 1244 /// support (because all operands are resolved). 1245 /// 1246 /// As forward declarations are resolved, their containers should get 1247 /// resolved automatically. However, if this (or one of its operands) is 1248 /// involved in a cycle, \a resolveCycles() needs to be called explicitly. 1249 bool isResolved() const { return !isTemporary() && !getNumUnresolved(); } 1250 1251 bool isUniqued() const { return Storage == Uniqued; } 1252 bool isDistinct() const { return Storage == Distinct; } 1253 bool isTemporary() const { return Storage == Temporary; } 1254 1255 bool isReplaceable() const { return isTemporary() || isAlwaysReplaceable(); } 1256 bool isAlwaysReplaceable() const { return getMetadataID() == DIAssignIDKind; } 1257 1258 unsigned getNumTemporaryUses() const { 1259 assert(isTemporary() && "Only for temporaries"); 1260 return Context.getReplaceableUses()->getNumUses(); 1261 } 1262 1263 /// RAUW a temporary. 1264 /// 1265 /// \pre \a isTemporary() must be \c true. 1266 void replaceAllUsesWith(Metadata *MD) { 1267 assert(isReplaceable() && "Expected temporary/replaceable node"); 1268 if (Context.hasReplaceableUses()) 1269 Context.getReplaceableUses()->replaceAllUsesWith(MD); 1270 } 1271 1272 /// Resolve cycles. 1273 /// 1274 /// Once all forward declarations have been resolved, force cycles to be 1275 /// resolved. 1276 /// 1277 /// \pre No operands (or operands' operands, etc.) have \a isTemporary(). 1278 void resolveCycles(); 1279 1280 /// Resolve a unique, unresolved node. 1281 void resolve(); 1282 1283 /// Replace a temporary node with a permanent one. 1284 /// 1285 /// Try to create a uniqued version of \c N -- in place, if possible -- and 1286 /// return it. If \c N cannot be uniqued, return a distinct node instead. 1287 template <class T> 1288 static std::enable_if_t<std::is_base_of<MDNode, T>::value, T *> 1289 replaceWithPermanent(std::unique_ptr<T, TempMDNodeDeleter> N) { 1290 return cast<T>(N.release()->replaceWithPermanentImpl()); 1291 } 1292 1293 /// Replace a temporary node with a uniqued one. 1294 /// 1295 /// Create a uniqued version of \c N -- in place, if possible -- and return 1296 /// it. Takes ownership of the temporary node. 1297 /// 1298 /// \pre N does not self-reference. 1299 template <class T> 1300 static std::enable_if_t<std::is_base_of<MDNode, T>::value, T *> 1301 replaceWithUniqued(std::unique_ptr<T, TempMDNodeDeleter> N) { 1302 return cast<T>(N.release()->replaceWithUniquedImpl()); 1303 } 1304 1305 /// Replace a temporary node with a distinct one. 1306 /// 1307 /// Create a distinct version of \c N -- in place, if possible -- and return 1308 /// it. Takes ownership of the temporary node. 
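///
/// A hedged usage sketch (Ctx and Ops are assumed, illustrative names):
///
/// \code
///   TempMDTuple Temp = MDTuple::getTemporary(Ctx, Ops);
///   MDTuple *Node = MDNode::replaceWithDistinct(std::move(Temp));
///   assert(Node->isDistinct());
/// \endcode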
1309 template <class T> 1310 static std::enable_if_t<std::is_base_of<MDNode, T>::value, T *> 1311 replaceWithDistinct(std::unique_ptr<T, TempMDNodeDeleter> N) { 1312 return cast<T>(N.release()->replaceWithDistinctImpl()); 1313 } 1314 1315 /// Print in tree shape. 1316 /// 1317 /// Prints definition of \c this in tree shape. 1318 /// 1319 /// If \c M is provided, metadata nodes will be numbered canonically; 1320 /// otherwise, pointer addresses are substituted. 1321 /// @{ 1322 void printTree(raw_ostream &OS, const Module *M = nullptr) const; 1323 void printTree(raw_ostream &OS, ModuleSlotTracker &MST, 1324 const Module *M = nullptr) const; 1325 /// @} 1326 1327 /// User-friendly dump in tree shape. 1328 /// 1329 /// If \c M is provided, metadata nodes will be numbered canonically; 1330 /// otherwise, pointer addresses are substituted. 1331 /// 1332 /// Note: this uses an explicit overload instead of default arguments so that 1333 /// the nullptr version is easy to call from a debugger. 1334 /// 1335 /// @{ 1336 void dumpTree() const; 1337 void dumpTree(const Module *M) const; 1338 /// @} 1339 1340 private: 1341 MDNode *replaceWithPermanentImpl(); 1342 MDNode *replaceWithUniquedImpl(); 1343 MDNode *replaceWithDistinctImpl(); 1344 1345 protected: 1346 /// Set an operand. 1347 /// 1348 /// Sets the operand directly, without worrying about uniquing. 1349 void setOperand(unsigned I, Metadata *New); 1350 1351 unsigned getNumUnresolved() const { return getHeader().NumUnresolved; } 1352 1353 void setNumUnresolved(unsigned N) { getHeader().NumUnresolved = N; } 1354 void storeDistinctInContext(); 1355 template <class T, class StoreT> 1356 static T *storeImpl(T *N, StorageType Storage, StoreT &Store); 1357 template <class T> static T *storeImpl(T *N, StorageType Storage); 1358 1359 /// Resize the node to hold \a NumOps operands. 1360 /// 1361 /// \pre \a isTemporary() or \a isDistinct() 1362 /// \pre MetadataID == MDTupleKind 1363 void resize(size_t NumOps) { 1364 assert(!isUniqued() && "Resizing is not supported for uniqued nodes"); 1365 assert(getMetadataID() == MDTupleKind && 1366 "Resizing is not supported for this node kind"); 1367 getHeader().resize(NumOps); 1368 } 1369 1370 private: 1371 void handleChangedOperand(void *Ref, Metadata *New); 1372 1373 /// Drop RAUW support, if any. 1374 void dropReplaceableUses(); 1375 1376 void resolveAfterOperandChange(Metadata *Old, Metadata *New); 1377 void decrementUnresolvedOperandCount(); 1378 void countUnresolvedOperands(); 1379 1380 /// Mutate this to be "uniqued". 1381 /// 1382 /// Mutate this so that \a isUniqued(). 1383 /// \pre \a isTemporary(). 1384 /// \pre already added to uniquing set. 1385 void makeUniqued(); 1386 1387 /// Mutate this to be "distinct". 1388 /// 1389 /// Mutate this so that \a isDistinct(). 1390 /// \pre \a isTemporary(). 1391 void makeDistinct(); 1392 1393 void deleteAsSubclass(); 1394 MDNode *uniquify(); 1395 void eraseFromStore(); 1396 1397 template <class NodeTy> struct HasCachedHash; 1398 template <class NodeTy> 1399 static void dispatchRecalculateHash(NodeTy *N, std::true_type) { 1400 N->recalculateHash(); 1401 } 1402 template <class NodeTy> 1403 static void dispatchRecalculateHash(NodeTy *, std::false_type) {} 1404 template <class NodeTy> 1405 static void dispatchResetHash(NodeTy *N, std::true_type) { 1406 N->setHash(0); 1407 } 1408 template <class NodeTy> 1409 static void dispatchResetHash(NodeTy *, std::false_type) {} 1410 1411 /// Merge branch weights from two direct callsites. 
1412 static MDNode *mergeDirectCallProfMetadata(MDNode *A, MDNode *B, 1413 const Instruction *AInstr, 1414 const Instruction *BInstr); 1415 1416 public: 1417 using op_iterator = const MDOperand *; 1418 using op_range = iterator_range<op_iterator>; 1419 1420 op_iterator op_begin() const { 1421 return const_cast<MDNode *>(this)->mutable_begin(); 1422 } 1423 1424 op_iterator op_end() const { 1425 return const_cast<MDNode *>(this)->mutable_end(); 1426 } 1427 1428 ArrayRef<MDOperand> operands() const { return getHeader().operands(); } 1429 1430 const MDOperand &getOperand(unsigned I) const { 1431 assert(I < getNumOperands() && "Out of range"); 1432 return getHeader().operands()[I]; 1433 } 1434 1435 /// Return number of MDNode operands. 1436 unsigned getNumOperands() const { return getHeader().getNumOperands(); } 1437 1438 /// Methods for support type inquiry through isa, cast, and dyn_cast: 1439 static bool classof(const Metadata *MD) { 1440 switch (MD->getMetadataID()) { 1441 default: 1442 return false; 1443 #define HANDLE_MDNODE_LEAF(CLASS) \ 1444 case CLASS##Kind: \ 1445 return true; 1446 #include "llvm/IR/Metadata.def" 1447 } 1448 } 1449 1450 /// Check whether MDNode is a vtable access. 1451 bool isTBAAVtableAccess() const; 1452 1453 /// Methods for metadata merging. 1454 static MDNode *concatenate(MDNode *A, MDNode *B); 1455 static MDNode *intersect(MDNode *A, MDNode *B); 1456 static MDNode *getMostGenericTBAA(MDNode *A, MDNode *B); 1457 static MDNode *getMostGenericFPMath(MDNode *A, MDNode *B); 1458 static MDNode *getMostGenericRange(MDNode *A, MDNode *B); 1459 static MDNode *getMostGenericAliasScope(MDNode *A, MDNode *B); 1460 static MDNode *getMostGenericAlignmentOrDereferenceable(MDNode *A, MDNode *B); 1461 /// Merge !prof metadata from two instructions. 1462 /// Currently only implemented with direct callsites with branch weights. 1463 static MDNode *getMergedProfMetadata(MDNode *A, MDNode *B, 1464 const Instruction *AInstr, 1465 const Instruction *BInstr); 1466 }; 1467 1468 /// Tuple of metadata. 1469 /// 1470 /// This is the simple \a MDNode arbitrary tuple. Nodes are uniqued by 1471 /// default based on their operands. 1472 class MDTuple : public MDNode { 1473 friend class LLVMContextImpl; 1474 friend class MDNode; 1475 1476 MDTuple(LLVMContext &C, StorageType Storage, unsigned Hash, 1477 ArrayRef<Metadata *> Vals) 1478 : MDNode(C, MDTupleKind, Storage, Vals) { 1479 setHash(Hash); 1480 } 1481 1482 ~MDTuple() { dropAllReferences(); } 1483 1484 void setHash(unsigned Hash) { SubclassData32 = Hash; } 1485 void recalculateHash(); 1486 1487 static MDTuple *getImpl(LLVMContext &Context, ArrayRef<Metadata *> MDs, 1488 StorageType Storage, bool ShouldCreate = true); 1489 1490 TempMDTuple cloneImpl() const { 1491 ArrayRef<MDOperand> Operands = operands(); 1492 return getTemporary(getContext(), SmallVector<Metadata *, 4>( 1493 Operands.begin(), Operands.end())); 1494 } 1495 1496 public: 1497 /// Get the hash, if any. 1498 unsigned getHash() const { return SubclassData32; } 1499 1500 static MDTuple *get(LLVMContext &Context, ArrayRef<Metadata *> MDs) { 1501 return getImpl(Context, MDs, Uniqued); 1502 } 1503 1504 static MDTuple *getIfExists(LLVMContext &Context, ArrayRef<Metadata *> MDs) { 1505 return getImpl(Context, MDs, Uniqued, /* ShouldCreate */ false); 1506 } 1507 1508 /// Return a distinct node. 1509 /// 1510 /// Return a distinct node -- i.e., a node that is not uniqued. 
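///
/// A hedged sketch of how this differs from \a get() (Ctx stands for an
/// assumed LLVMContext):
///
/// \code
///   MDTuple *A = MDTuple::getDistinct(Ctx, {});
///   MDTuple *B = MDTuple::getDistinct(Ctx, {});
///   assert(A != B);                                         // never uniqued
///   assert(MDTuple::get(Ctx, {}) == MDTuple::get(Ctx, {})); // uniqued
/// \endcode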
1511 static MDTuple *getDistinct(LLVMContext &Context, ArrayRef<Metadata *> MDs) { 1512 return getImpl(Context, MDs, Distinct); 1513 } 1514 1515 /// Return a temporary node. 1516 /// 1517 /// For use in constructing cyclic MDNode structures. A temporary MDNode is 1518 /// not uniqued, may be RAUW'd, and must be manually deleted with 1519 /// deleteTemporary. 1520 static TempMDTuple getTemporary(LLVMContext &Context, 1521 ArrayRef<Metadata *> MDs) { 1522 return TempMDTuple(getImpl(Context, MDs, Temporary)); 1523 } 1524 1525 /// Return a (temporary) clone of this. 1526 TempMDTuple clone() const { return cloneImpl(); } 1527 1528 /// Append an element to the tuple. This will resize the node. 1529 void push_back(Metadata *MD) { 1530 size_t NumOps = getNumOperands(); 1531 resize(NumOps + 1); 1532 setOperand(NumOps, MD); 1533 } 1534 1535 /// Shrink the operands by 1. 1536 void pop_back() { resize(getNumOperands() - 1); } 1537 1538 static bool classof(const Metadata *MD) { 1539 return MD->getMetadataID() == MDTupleKind; 1540 } 1541 }; 1542 1543 MDTuple *MDNode::get(LLVMContext &Context, ArrayRef<Metadata *> MDs) { 1544 return MDTuple::get(Context, MDs); 1545 } 1546 1547 MDTuple *MDNode::getIfExists(LLVMContext &Context, ArrayRef<Metadata *> MDs) { 1548 return MDTuple::getIfExists(Context, MDs); 1549 } 1550 1551 MDTuple *MDNode::getDistinct(LLVMContext &Context, ArrayRef<Metadata *> MDs) { 1552 return MDTuple::getDistinct(Context, MDs); 1553 } 1554 1555 TempMDTuple MDNode::getTemporary(LLVMContext &Context, 1556 ArrayRef<Metadata *> MDs) { 1557 return MDTuple::getTemporary(Context, MDs); 1558 } 1559 1560 void TempMDNodeDeleter::operator()(MDNode *Node) const { 1561 MDNode::deleteTemporary(Node); 1562 } 1563 1564 /// This is a simple wrapper around an MDNode which provides a higher-level 1565 /// interface by hiding the details of how alias analysis information is encoded 1566 /// in its operands. 1567 class AliasScopeNode { 1568 const MDNode *Node = nullptr; 1569 1570 public: 1571 AliasScopeNode() = default; 1572 explicit AliasScopeNode(const MDNode *N) : Node(N) {} 1573 1574 /// Get the MDNode for this AliasScopeNode. 1575 const MDNode *getNode() const { return Node; } 1576 1577 /// Get the MDNode for this AliasScopeNode's domain. 1578 const MDNode *getDomain() const { 1579 if (Node->getNumOperands() < 2) 1580 return nullptr; 1581 return dyn_cast_or_null<MDNode>(Node->getOperand(1)); 1582 } 1583 StringRef getName() const { 1584 if (Node->getNumOperands() > 2) 1585 if (MDString *N = dyn_cast_or_null<MDString>(Node->getOperand(2))) 1586 return N->getString(); 1587 return StringRef(); 1588 } 1589 }; 1590 1591 /// Typed iterator through MDNode operands. 1592 /// 1593 /// An iterator that transforms an \a MDNode::iterator into an iterator over a 1594 /// particular Metadata subclass. 
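///
/// A hedged sketch (Tuple is an assumed MDNode whose operands happen to be
/// MDStrings, purely for illustration):
///
/// \code
///   TypedMDOperandIterator<MDString> I(Tuple->op_begin()), E(Tuple->op_end());
///   for (; I != E; ++I)
///     if (MDString *S = *I)
///       (void)S->getString(); // null operands come through as nullptr
/// \endcode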
template <class T> class TypedMDOperandIterator {
  MDNode::op_iterator I = nullptr;

public:
  using iterator_category = std::input_iterator_tag;
  using value_type = T *;
  using difference_type = std::ptrdiff_t;
  using pointer = void;
  using reference = T *;

  TypedMDOperandIterator() = default;
  explicit TypedMDOperandIterator(MDNode::op_iterator I) : I(I) {}

  T *operator*() const { return cast_or_null<T>(*I); }

  TypedMDOperandIterator &operator++() {
    ++I;
    return *this;
  }

  TypedMDOperandIterator operator++(int) {
    TypedMDOperandIterator Temp(*this);
    ++I;
    return Temp;
  }

  bool operator==(const TypedMDOperandIterator &X) const { return I == X.I; }
  bool operator!=(const TypedMDOperandIterator &X) const { return I != X.I; }
};

/// Typed, array-like tuple of metadata.
///
/// This is a wrapper for \a MDTuple that makes it act like an array holding a
/// particular type of metadata.
template <class T> class MDTupleTypedArrayWrapper {
  const MDTuple *N = nullptr;

public:
  MDTupleTypedArrayWrapper() = default;
  MDTupleTypedArrayWrapper(const MDTuple *N) : N(N) {}

  template <class U>
  MDTupleTypedArrayWrapper(
      const MDTupleTypedArrayWrapper<U> &Other,
      std::enable_if_t<std::is_convertible<U *, T *>::value> * = nullptr)
      : N(Other.get()) {}

  template <class U>
  explicit MDTupleTypedArrayWrapper(
      const MDTupleTypedArrayWrapper<U> &Other,
      std::enable_if_t<!std::is_convertible<U *, T *>::value> * = nullptr)
      : N(Other.get()) {}

  explicit operator bool() const { return get(); }
  explicit operator MDTuple *() const { return get(); }

  MDTuple *get() const { return const_cast<MDTuple *>(N); }
  MDTuple *operator->() const { return get(); }
  MDTuple &operator*() const { return *get(); }

  // FIXME: Fix callers and remove condition on N.
  unsigned size() const { return N ? N->getNumOperands() : 0u; }
  bool empty() const { return N ? N->getNumOperands() == 0 : true; }
  T *operator[](unsigned I) const { return cast_or_null<T>(N->getOperand(I)); }

  // FIXME: Fix callers and remove condition on N.
  using iterator = TypedMDOperandIterator<T>;

  iterator begin() const { return N ? iterator(N->op_begin()) : iterator(); }
  iterator end() const { return N ? iterator(N->op_end()) : iterator(); }
};

#define HANDLE_METADATA(CLASS)                                                 \
  using CLASS##Array = MDTupleTypedArrayWrapper<CLASS>;
#include "llvm/IR/Metadata.def"

/// Placeholder metadata for operands of distinct MDNodes.
///
/// This is a lightweight placeholder for an operand of a distinct node. Its
/// purpose is to help track forward references when creating a distinct node.
/// This allows distinct nodes involved in a cycle to be constructed before
/// their operands without requiring a heavyweight temporary node with
/// full-blown RAUW support.
///
/// Each placeholder supports only a single MDNode user. Clients should pass
/// an ID, retrieved via \a getID(), to indicate the "real" operand that this
/// should be replaced with.
///
/// While it would be possible to implement move operators, they would be
/// fairly expensive. Leave them unimplemented to discourage their use
/// (clients can use std::deque, std::list, BumpPtrAllocator, etc.).
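///
/// A hedged usage sketch (Ctx and RealOperand are assumed, illustrative
/// names):
///
/// \code
///   DistinctMDOperandPlaceholder PH(/*ID=*/0);
///   MDNode *N = MDNode::getDistinct(Ctx, {&PH}); // forward reference
///   // ... later, once the operand identified by ID 0 has been created ...
///   PH.replaceUseWith(RealOperand);              // patches N's operand
/// \endcode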
class DistinctMDOperandPlaceholder : public Metadata {
  friend class MetadataTracking;

  Metadata **Use = nullptr;

public:
  explicit DistinctMDOperandPlaceholder(unsigned ID)
      : Metadata(DistinctMDOperandPlaceholderKind, Distinct) {
    SubclassData32 = ID;
  }

  DistinctMDOperandPlaceholder() = delete;
  DistinctMDOperandPlaceholder(DistinctMDOperandPlaceholder &&) = delete;
  DistinctMDOperandPlaceholder(const DistinctMDOperandPlaceholder &) = delete;

  ~DistinctMDOperandPlaceholder() {
    if (Use)
      *Use = nullptr;
  }

  unsigned getID() const { return SubclassData32; }

  /// Replace the use of this with MD.
  void replaceUseWith(Metadata *MD) {
    if (!Use)
      return;
    *Use = MD;

    if (*Use)
      MetadataTracking::track(*Use);

    Metadata *T = cast<Metadata>(this);
    MetadataTracking::untrack(T);
    assert(!Use && "Use is still being tracked despite being untracked!");
  }
};

//===----------------------------------------------------------------------===//
/// A tuple of MDNodes.
///
/// Despite its name, a NamedMDNode isn't itself an MDNode.
///
/// NamedMDNodes are named module-level entities that contain lists of MDNodes.
///
/// It is illegal for a NamedMDNode to appear as an operand of an MDNode.
class NamedMDNode : public ilist_node<NamedMDNode> {
  friend class LLVMContextImpl;
  friend class Module;

  std::string Name;
  Module *Parent = nullptr;
  void *Operands; // SmallVector<TrackingMDRef, 4>

  void setParent(Module *M) { Parent = M; }

  explicit NamedMDNode(const Twine &N);

  template <class T1> class op_iterator_impl {
    friend class NamedMDNode;

    const NamedMDNode *Node = nullptr;
    unsigned Idx = 0;

    op_iterator_impl(const NamedMDNode *N, unsigned i) : Node(N), Idx(i) {}

  public:
    using iterator_category = std::bidirectional_iterator_tag;
    using value_type = T1;
    using difference_type = std::ptrdiff_t;
    using pointer = value_type *;
    using reference = value_type;

    op_iterator_impl() = default;

    bool operator==(const op_iterator_impl &o) const { return Idx == o.Idx; }
    bool operator!=(const op_iterator_impl &o) const { return Idx != o.Idx; }

    op_iterator_impl &operator++() {
      ++Idx;
      return *this;
    }

    op_iterator_impl operator++(int) {
      op_iterator_impl tmp(*this);
      operator++();
      return tmp;
    }

    op_iterator_impl &operator--() {
      --Idx;
      return *this;
    }

    op_iterator_impl operator--(int) {
      op_iterator_impl tmp(*this);
      operator--();
      return tmp;
    }

    T1 operator*() const { return Node->getOperand(Idx); }
  };

public:
  NamedMDNode(const NamedMDNode &) = delete;
  ~NamedMDNode();

  /// Drop all references and remove the node from parent module.
  void eraseFromParent();

  /// Remove all uses and clear node vector.
  void dropAllReferences() { clearOperands(); }
  /// Drop all references to this node's operands.
  void clearOperands();

  /// Get the module that holds this named metadata collection.
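  ///
  /// A minimal sketch (hypothetical name "my.info"; assumes a Module \c M and
  /// uses \c Module::getOrInsertNamedMetadata):
  /// \code
  ///   NamedMDNode *NMD = M.getOrInsertNamedMetadata("my.info");
  ///   assert(NMD->getParent() == &M);
  ///   NMD->addOperand(MDNode::get(M.getContext(), {}));
  /// \endcode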
  inline Module *getParent() { return Parent; }
  inline const Module *getParent() const { return Parent; }

  MDNode *getOperand(unsigned i) const;
  unsigned getNumOperands() const;
  void addOperand(MDNode *M);
  void setOperand(unsigned I, MDNode *New);
  StringRef getName() const;
  void print(raw_ostream &ROS, bool IsForDebug = false) const;
  void print(raw_ostream &ROS, ModuleSlotTracker &MST,
             bool IsForDebug = false) const;
  void dump() const;

  // -------------------------------------------------------------------------
  // Operand Iterator interface...
  //
  using op_iterator = op_iterator_impl<MDNode *>;

  op_iterator op_begin() { return op_iterator(this, 0); }
  op_iterator op_end() { return op_iterator(this, getNumOperands()); }

  using const_op_iterator = op_iterator_impl<const MDNode *>;

  const_op_iterator op_begin() const { return const_op_iterator(this, 0); }
  const_op_iterator op_end() const {
    return const_op_iterator(this, getNumOperands());
  }

  inline iterator_range<op_iterator> operands() {
    return make_range(op_begin(), op_end());
  }
  inline iterator_range<const_op_iterator> operands() const {
    return make_range(op_begin(), op_end());
  }
};

// Create wrappers for C Binding types (see CBindingWrapping.h).
DEFINE_ISA_CONVERSION_FUNCTIONS(NamedMDNode, LLVMNamedMDNodeRef)

} // end namespace llvm

#endif // LLVM_IR_METADATA_H