//===- llvm/IR/Metadata.h - Metadata definitions ----------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// @file
/// This file contains the declarations for metadata subclasses.
/// They represent the different flavors of metadata that live in LLVM.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_IR_METADATA_H
#define LLVM_IR_METADATA_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/PointerUnion.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/ilist_node.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/CBindingWrapping.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <memory>
#include <string>
#include <type_traits>
#include <utility>

namespace llvm {

class Module;
class ModuleSlotTracker;
class raw_ostream;
class DbgVariableRecord;
template <typename T> class StringMapEntry;
template <typename ValueTy> class StringMapEntryStorage;
class Type;

enum LLVMConstants : uint32_t {
  DEBUG_METADATA_VERSION = 3 // Current debug info version number.
};

/// Magic number in the value profile metadata showing a target has been
/// promoted for the instruction and shouldn't be promoted again.
/// Note: -1 wraps around to the maximum uint64_t value.
const uint64_t NOMORE_ICP_MAGICNUM = -1;

/// Root of the metadata hierarchy.
///
/// This is a root class for typeless data in the IR.
class Metadata {
  friend class ReplaceableMetadataImpl;

  /// RTTI.
  const unsigned char SubclassID;

protected:
  /// Active type of storage.
  enum StorageType { Uniqued, Distinct, Temporary };

  /// Storage flag for non-uniqued, otherwise unowned, metadata.
  unsigned char Storage : 7;

  // Spare data bits available to subclasses; declared here so the whole
  // object packs into 8 bytes (enforced by the static_assert below).
  unsigned char SubclassData1 : 1;
  unsigned short SubclassData16 = 0;
  unsigned SubclassData32 = 0;

public:
  /// One enumerator per leaf class, generated from Metadata.def.
  enum MetadataKind {
#define HANDLE_METADATA_LEAF(CLASS) CLASS##Kind,
#include "llvm/IR/Metadata.def"
  };

protected:
  Metadata(unsigned ID, StorageType Storage)
      : SubclassID(ID), Storage(Storage), SubclassData1(false) {
    static_assert(sizeof(*this) == 8, "Metadata fields poorly packed");
  }

  // Non-virtual; deletion always goes through the concrete subclass.
  ~Metadata() = default;

  /// Default handling of a changed operand, which asserts.
  ///
  /// If subclasses pass themselves in as owners to a tracking node reference,
  /// they must provide an implementation of this method.
  void handleChangedOperand(void *, Metadata *) {
    llvm_unreachable("Unimplemented in Metadata subclass");
  }

public:
  /// Kind tag consumed by the isa_impl specializations generated below.
  unsigned getMetadataID() const { return SubclassID; }

  /// User-friendly dump.
  ///
  /// If \c M is provided, metadata nodes will be numbered canonically;
  /// otherwise, pointer addresses are substituted.
  ///
  /// Note: this uses an explicit overload instead of default arguments so that
  /// the nullptr version is easy to call from a debugger.
  ///
  /// @{
  void dump() const;
  void dump(const Module *M) const;
  /// @}

  /// Print.
  ///
  /// Prints definition of \c this.
  ///
  /// If \c M is provided, metadata nodes will be numbered canonically;
  /// otherwise, pointer addresses are substituted.
  /// @{
  void print(raw_ostream &OS, const Module *M = nullptr,
             bool IsForDebug = false) const;
  void print(raw_ostream &OS, ModuleSlotTracker &MST, const Module *M = nullptr,
             bool IsForDebug = false) const;
  /// @}

  /// Print as operand.
  ///
  /// Prints reference of \c this.
  ///
  /// If \c M is provided, metadata nodes will be numbered canonically;
  /// otherwise, pointer addresses are substituted.
  /// @{
  void printAsOperand(raw_ostream &OS, const Module *M = nullptr) const;
  void printAsOperand(raw_ostream &OS, ModuleSlotTracker &MST,
                      const Module *M = nullptr) const;
  /// @}

  /// Metadata IDs that may generate poison.
  constexpr static const unsigned PoisonGeneratingIDs[] = {
      LLVMContext::MD_range, LLVMContext::MD_nonnull, LLVMContext::MD_align};
};

// Create wrappers for C Binding types (see CBindingWrapping.h).
DEFINE_ISA_CONVERSION_FUNCTIONS(Metadata, LLVMMetadataRef)

// Specialized opaque metadata conversions.
inline Metadata **unwrap(LLVMMetadataRef *MDs) {
  return reinterpret_cast<Metadata**>(MDs);
}

// Forward-declare every metadata class.
#define HANDLE_METADATA(CLASS) class CLASS;
#include "llvm/IR/Metadata.def"

// Provide specializations of isa so that we don't need definitions of
// subclasses to see if the metadata is a subclass.
#define HANDLE_METADATA_LEAF(CLASS)                                            \
  template <> struct isa_impl<CLASS, Metadata> {                               \
    static inline bool doit(const Metadata &MD) {                              \
      return MD.getMetadataID() == Metadata::CLASS##Kind;                      \
    }                                                                          \
  };
#include "llvm/IR/Metadata.def"

inline raw_ostream &operator<<(raw_ostream &OS, const Metadata &MD) {
  MD.print(OS);
  return OS;
}

/// Metadata wrapper in the Value hierarchy.
///
/// A member of the \a Value hierarchy to represent a reference to metadata.
/// This allows, e.g., intrinsics to have metadata as operands.
///
/// Notably, this is the only thing in either hierarchy that is allowed to
/// reference \a LocalAsMetadata.
class MetadataAsValue : public Value {
  friend class ReplaceableMetadataImpl;
  friend class LLVMContextImpl;

  /// The wrapped metadata; nulled out only via dropUse() during teardown.
  Metadata *MD;

  MetadataAsValue(Type *Ty, Metadata *MD);

  /// Drop use of metadata (during teardown).
  void dropUse() { MD = nullptr; }

public:
  ~MetadataAsValue();

  static MetadataAsValue *get(LLVMContext &Context, Metadata *MD);
  static MetadataAsValue *getIfExists(LLVMContext &Context, Metadata *MD);

  Metadata *getMetadata() const { return MD; }

  /// Support for isa/cast/dyn_cast in the Value hierarchy.
  static bool classof(const Value *V) {
    return V->getValueID() == MetadataAsValueVal;
  }

private:
  void handleChangedMetadata(Metadata *MD);
  void track();
  void untrack();
};

/// Base class for tracking ValueAsMetadata/DIArgLists with user lookups and
/// Owner callbacks outside of ValueAsMetadata.
///
/// Currently only inherited by DbgVariableRecord; if other classes need to use
/// it, then a SubclassID will need to be added (either as a new field or by
/// making DebugValue into a PointerIntUnion) to discriminate between the
/// subclasses in lookup and callback handling.
class DebugValueUser {
protected:
  // Capacity to store 3 debug values.
  // TODO: Not all DebugValueUser instances need all 3 elements, if we
  // restructure the DbgVariableRecord class then we can template parameterize
  // this array size.
  std::array<Metadata *, 3> DebugValues;

  ArrayRef<Metadata *> getDebugValues() const { return DebugValues; }

public:
  DbgVariableRecord *getUser();
  const DbgVariableRecord *getUser() const;

  /// To be called by ReplaceableMetadataImpl::replaceAllUsesWith, where `Old`
  /// is a pointer to one of the pointers in `DebugValues` (so should be type
  /// Metadata**), and `NewDebugValue` is the new Metadata* that is replacing
  /// *Old.
  /// For manually replacing elements of DebugValues,
  /// `resetDebugValue(Idx, NewDebugValue)` should be used instead.
  void handleChangedValue(void *Old, Metadata *NewDebugValue);

  DebugValueUser() = default;

  /// Wrap \p DebugValues and register this user with each tracked value.
  explicit DebugValueUser(std::array<Metadata *, 3> DebugValues)
      : DebugValues(DebugValues) {
    trackDebugValues();
  }

  /// Move: copies the pointers, then moves the tracked references from \p X
  /// to this (see retrackDebugValues()).
  DebugValueUser(DebugValueUser &&X) {
    DebugValues = X.DebugValues;
    retrackDebugValues(X);
  }

  /// Copy: copies the pointers and registers fresh tracking for this.
  DebugValueUser(const DebugValueUser &X) {
    DebugValues = X.DebugValues;
    trackDebugValues();
  }

  DebugValueUser &operator=(DebugValueUser &&X) {
    if (&X == this)
      return *this;

    // Untrack our current values before overwriting them.
    untrackDebugValues();
    DebugValues = X.DebugValues;
    retrackDebugValues(X);
    return *this;
  }

  DebugValueUser &operator=(const DebugValueUser &X) {
    if (&X == this)
      return *this;

    untrackDebugValues();
    DebugValues = X.DebugValues;
    trackDebugValues();
    return *this;
  }

  ~DebugValueUser() { untrackDebugValues(); }

  /// Untrack everything and leave all three slots null.
  void resetDebugValues() {
    untrackDebugValues();
    DebugValues.fill(nullptr);
  }

  /// Replace the value at \p Idx, updating tracking for just that slot.
  void resetDebugValue(size_t Idx, Metadata *DebugValue) {
    assert(Idx < 3 && "Invalid debug value index.");
    untrackDebugValue(Idx);
    DebugValues[Idx] = DebugValue;
    trackDebugValue(Idx);
  }

  bool operator==(const DebugValueUser &X) const {
    return DebugValues == X.DebugValues;
  }
  bool operator!=(const DebugValueUser &X) const {
    return DebugValues != X.DebugValues;
  }

private:
  void trackDebugValue(size_t Idx);
  void trackDebugValues();

  void untrackDebugValue(size_t Idx);
  void untrackDebugValues();

  void retrackDebugValues(DebugValueUser &X);
};

/// API for tracking metadata references through RAUW and deletion.
///
/// Shared API for updating \a Metadata pointers in subclasses that support
/// RAUW.
///
/// This API is not meant to be used directly. See \a TrackingMDRef for a
/// user-friendly tracking reference.
class MetadataTracking {
public:
  /// Track the reference to metadata.
  ///
  /// Register \c MD with \c *MD, if the subclass supports tracking. If \c *MD
  /// gets RAUW'ed, \c MD will be updated to the new address. If \c *MD gets
  /// deleted, \c MD will be set to \c nullptr.
  ///
  /// If tracking isn't supported, \c *MD will not change.
  ///
  /// \return true iff tracking is supported by \c MD.
  static bool track(Metadata *&MD) {
    return track(&MD, *MD, static_cast<Metadata *>(nullptr));
  }

  /// Track the reference to metadata for \a Metadata.
  ///
  /// As \a track(Metadata*&), but with support for calling back to \c Owner to
  /// tell it that its operand changed. This could trigger \c Owner being
  /// re-uniqued.
  static bool track(void *Ref, Metadata &MD, Metadata &Owner) {
    return track(Ref, MD, &Owner);
  }

  /// Track the reference to metadata for \a MetadataAsValue.
  ///
  /// As \a track(Metadata*&), but with support for calling back to \c Owner to
  /// tell it that its operand changed. This could trigger \c Owner being
  /// re-uniqued.
  static bool track(void *Ref, Metadata &MD, MetadataAsValue &Owner) {
    return track(Ref, MD, &Owner);
  }

  /// Track the reference to metadata for \a DebugValueUser.
  ///
  /// As \a track(Metadata*&), but with support for calling back to \c Owner to
  /// tell it that its operand changed. This could trigger \c Owner being
  /// re-uniqued.
  static bool track(void *Ref, Metadata &MD, DebugValueUser &Owner) {
    return track(Ref, MD, &Owner);
  }

  /// Stop tracking a reference to metadata.
  ///
  /// Stops \c *MD from tracking \c MD.
  static void untrack(Metadata *&MD) { untrack(&MD, *MD); }
  static void untrack(void *Ref, Metadata &MD);

  /// Move tracking from one reference to another.
  ///
  /// Semantically equivalent to \c untrack(MD) followed by \c track(New),
  /// except that ownership callbacks are maintained.
  ///
  /// Note: it is an error if \c *MD does not equal \c New.
  ///
  /// \return true iff tracking is supported by \c MD.
  static bool retrack(Metadata *&MD, Metadata *&New) {
    return retrack(&MD, *MD, &New);
  }
  static bool retrack(void *Ref, Metadata &MD, void *New);

  /// Check whether metadata is replaceable.
  static bool isReplaceable(const Metadata &MD);

  /// Discriminated owner pointer passed through to the change callbacks.
  using OwnerTy = PointerUnion<MetadataAsValue *, Metadata *, DebugValueUser *>;

private:
  /// Track a reference to metadata for an owner.
  ///
  /// Generalized version of tracking.
  static bool track(void *Ref, Metadata &MD, OwnerTy Owner);
};

/// Shared implementation of use-lists for replaceable metadata.
///
/// Most metadata cannot be RAUW'ed. This is a shared implementation of
/// use-lists and associated API for the three that support it (
/// \a ValueAsMetadata, \a TempMDNode, and \a DIArgList).
class ReplaceableMetadataImpl {
  friend class MetadataTracking;

public:
  using OwnerTy = MetadataTracking::OwnerTy;

private:
  LLVMContext &Context;
  // Counter handed out to newly registered uses; presumably gives uses a
  // stable, deterministic ordering -- confirm against Metadata.cpp.
  uint64_t NextIndex = 0;
  // Maps each tracked reference (the address of the tracked Metadata*) to its
  // owner and the index it was registered at.
  SmallDenseMap<void *, std::pair<OwnerTy, uint64_t>, 4> UseMap;

public:
  ReplaceableMetadataImpl(LLVMContext &Context) : Context(Context) {}

  ~ReplaceableMetadataImpl() {
    assert(UseMap.empty() && "Cannot destroy in-use replaceable metadata");
  }

  LLVMContext &getContext() const { return Context; }

  /// Replace all uses of this with MD.
  ///
  /// Replace all uses of this with \c MD, which is allowed to be null.
  void replaceAllUsesWith(Metadata *MD);

  /// Replace all uses of the constant with Undef in debug info metadata
  static void SalvageDebugInfo(const Constant &C);

  /// Returns the list of all DIArgList users of this.
  SmallVector<Metadata *> getAllArgListUsers();

  /// Returns the list of all DbgVariableRecord users of this.
  SmallVector<DbgVariableRecord *> getAllDbgVariableRecordUsers();

  /// Resolve all uses of this.
  ///
  /// Resolve all uses of this, turning off RAUW permanently. If \c
  /// ResolveUsers, call \a MDNode::resolve() on any users whose last operand
  /// is resolved.
  void resolveAllUses(bool ResolveUsers = true);

  unsigned getNumUses() const { return UseMap.size(); }

private:
  void addRef(void *Ref, OwnerTy Owner);
  void dropRef(void *Ref);
  void moveRef(void *Ref, void *New, const Metadata &MD);

  /// Lazily construct RAUW support on MD.
  ///
  /// If this is an unresolved MDNode, RAUW support will be created on-demand.
  /// ValueAsMetadata always has RAUW support.
  static ReplaceableMetadataImpl *getOrCreate(Metadata &MD);

  /// Get RAUW support on MD, if it exists.
  static ReplaceableMetadataImpl *getIfExists(Metadata &MD);

  /// Check whether this node will support RAUW.
  ///
  /// Returns \c true unless getOrCreate() would return null.
  static bool isReplaceable(const Metadata &MD);
};

/// Value wrapper in the Metadata hierarchy.
///
/// This is a custom value handle that allows other metadata to refer to
/// classes in the Value hierarchy.
///
/// Because of full uniquing support, each value is only wrapped by a single \a
/// ValueAsMetadata object, so the lookup maps are far more efficient than
/// those using ValueHandleBase.
class ValueAsMetadata : public Metadata, ReplaceableMetadataImpl {
  friend class ReplaceableMetadataImpl;
  friend class LLVMContextImpl;

  /// The wrapped value (never null; see the constructor assert).
  Value *V;

  /// Drop users without RAUW (during teardown).
  void dropUsers() {
    ReplaceableMetadataImpl::resolveAllUses(/* ResolveUsers */ false);
  }

protected:
  ValueAsMetadata(unsigned ID, Value *V)
      : Metadata(ID, Uniqued), ReplaceableMetadataImpl(V->getContext()), V(V) {
    assert(V && "Expected valid value");
  }

  ~ValueAsMetadata() = default;

public:
  static ValueAsMetadata *get(Value *V);

  static ConstantAsMetadata *getConstant(Value *C) {
    return cast<ConstantAsMetadata>(get(C));
  }

  static LocalAsMetadata *getLocal(Value *Local) {
    return cast<LocalAsMetadata>(get(Local));
  }

  static ValueAsMetadata *getIfExists(Value *V);

  static ConstantAsMetadata *getConstantIfExists(Value *C) {
    return cast_or_null<ConstantAsMetadata>(getIfExists(C));
  }

  static LocalAsMetadata *getLocalIfExists(Value *Local) {
    return cast_or_null<LocalAsMetadata>(getIfExists(Local));
  }

  Value *getValue() const { return V; }
  Type *getType() const { return V->getType(); }
  LLVMContext &getContext() const { return V->getContext(); }

  SmallVector<Metadata *> getAllArgListUsers() {
    return ReplaceableMetadataImpl::getAllArgListUsers();
  }
  SmallVector<DbgVariableRecord *> getAllDbgVariableRecordUsers() {
    return ReplaceableMetadataImpl::getAllDbgVariableRecordUsers();
  }

  static void handleDeletion(Value *V);
  static void handleRAUW(Value *From, Value *To);

protected:
  /// Handle collisions after \a Value::replaceAllUsesWith().
  ///
  /// RAUW isn't supported directly for \a ValueAsMetadata, but if the wrapped
  /// \a Value gets RAUW'ed and the target already exists, this is used to
  /// merge the two metadata nodes.
  void replaceAllUsesWith(Metadata *MD) {
    ReplaceableMetadataImpl::replaceAllUsesWith(MD);
  }

public:
  static bool classof(const Metadata *MD) {
    return MD->getMetadataID() == LocalAsMetadataKind ||
           MD->getMetadataID() == ConstantAsMetadataKind;
  }
};

/// \a ValueAsMetadata wrapping a \a Constant.
class ConstantAsMetadata : public ValueAsMetadata {
  friend class ValueAsMetadata;

  ConstantAsMetadata(Constant *C)
      : ValueAsMetadata(ConstantAsMetadataKind, C) {}

public:
  static ConstantAsMetadata *get(Constant *C) {
    return ValueAsMetadata::getConstant(C);
  }

  static ConstantAsMetadata *getIfExists(Constant *C) {
    return ValueAsMetadata::getConstantIfExists(C);
  }

  Constant *getValue() const {
    return cast<Constant>(ValueAsMetadata::getValue());
  }

  static bool classof(const Metadata *MD) {
    return MD->getMetadataID() == ConstantAsMetadataKind;
  }
};

/// \a ValueAsMetadata wrapping a non-constant (function-local) value.
class LocalAsMetadata : public ValueAsMetadata {
  friend class ValueAsMetadata;

  LocalAsMetadata(Value *Local)
      : ValueAsMetadata(LocalAsMetadataKind, Local) {
    assert(!isa<Constant>(Local) && "Expected local value");
  }

public:
  static LocalAsMetadata *get(Value *Local) {
    return ValueAsMetadata::getLocal(Local);
  }

  static LocalAsMetadata *getIfExists(Value *Local) {
    return ValueAsMetadata::getLocalIfExists(Local);
  }

  static bool classof(const Metadata *MD) {
    return MD->getMetadataID() == LocalAsMetadataKind;
  }
};

/// Transitional API for extracting constants from Metadata.
///
/// This namespace contains transitional functions for metadata that points to
/// \a Constants.
///
/// In prehistory -- when metadata was a subclass of \a Value -- \a MDNode
/// operands could refer to any \a Value. There was a lot of code like this:
///
/// \code
///   MDNode *N = ...;
///   auto *CI = dyn_cast<ConstantInt>(N->getOperand(2));
/// \endcode
///
/// Now that \a Value and \a Metadata are in separate hierarchies, maintaining
/// the semantics for \a isa(), \a cast(), \a dyn_cast() (etc.) requires three
/// steps: cast in the \a Metadata hierarchy, extraction of the \a Value, and
/// cast in the \a Value hierarchy. Besides creating boiler-plate, this
/// requires subtle control flow changes.
///
/// The end-goal is to create a new type of metadata, called (e.g.) \a MDInt,
/// so that metadata can refer to numbers without traversing a bridge to the \a
/// Value hierarchy. In this final state, the code above would look like this:
///
/// \code
///   MDNode *N = ...;
///   auto *MI = dyn_cast<MDInt>(N->getOperand(2));
/// \endcode
///
/// The API in this namespace supports the transition. \a MDInt doesn't exist
/// yet, and even once it does, changing each metadata schema to use it is its
/// own mini-project. In the meantime this API prevents us from introducing
/// complex and bug-prone control flow that will disappear in the end.
/// In particular, the above code looks like this:
///
/// \code
///   MDNode *N = ...;
///   auto *CI = mdconst::dyn_extract<ConstantInt>(N->getOperand(2));
/// \endcode
///
/// The full set of provided functions includes:
///
///   mdconst::hasa                <=> isa
///   mdconst::extract             <=> cast
///   mdconst::extract_or_null     <=> cast_or_null
///   mdconst::dyn_extract         <=> dyn_cast
///   mdconst::dyn_extract_or_null <=> dyn_cast_or_null
///
/// The target of the cast must be a subclass of \a Constant.
namespace mdconst {

namespace detail {

// Never defined; used only inside unevaluated contexts below.
template <class T> T &make();

/// SFINAE detector: value is true iff dereferencing an M yields something
/// convertible to Result (i.e. M behaves like a pointer to Metadata).
template <class T, class Result> struct HasDereference {
  using Yes = char[1];
  using No = char[2];
  template <size_t N> struct SFINAE {};

  template <class U, class V>
  static Yes &hasDereference(SFINAE<sizeof(static_cast<V>(*make<U>()))> * = 0);
  template <class U, class V> static No &hasDereference(...);

  static const bool value =
      sizeof(hasDereference<T, Result>(nullptr)) == sizeof(Yes);
};
/// Constrains the pointer-taking overloads: V must derive from Constant and
/// M must dereference to Metadata.
template <class V, class M> struct IsValidPointer {
  static const bool value = std::is_base_of<Constant, V>::value &&
                            HasDereference<M, const Metadata &>::value;
};
/// Constrains the reference-taking overloads: V must derive from Constant and
/// M must convert to const Metadata &.
template <class V, class M> struct IsValidReference {
  static const bool value = std::is_base_of<Constant, V>::value &&
                            std::is_convertible<M, const Metadata &>::value;
};

} // end namespace detail

/// Check whether Metadata has a Value.
///
/// As an analogue to \a isa(), check whether \c MD has an \a Value inside of
/// type \c X.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, bool>
hasa(Y &&MD) {
  assert(MD && "Null pointer sent into hasa");
  if (auto *V = dyn_cast<ConstantAsMetadata>(MD))
    return isa<X>(V->getValue());
  return false;
}
template <class X, class Y>
inline std::enable_if_t<detail::IsValidReference<X, Y &>::value, bool>
hasa(Y &MD) {
  return hasa(&MD);
}

/// Extract a Value from Metadata.
///
/// As an analogue to \a cast(), extract the \a Value subclass \c X from \c MD.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
extract(Y &&MD) {
  return cast<X>(cast<ConstantAsMetadata>(MD)->getValue());
}
template <class X, class Y>
inline std::enable_if_t<detail::IsValidReference<X, Y &>::value, X *>
extract(Y &MD) {
  return extract(&MD);
}

/// Extract a Value from Metadata, allowing null.
///
/// As an analogue to \a cast_or_null(), extract the \a Value subclass \c X
/// from \c MD, allowing \c MD to be null.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
extract_or_null(Y &&MD) {
  if (auto *V = cast_or_null<ConstantAsMetadata>(MD))
    return cast<X>(V->getValue());
  return nullptr;
}

/// Extract a Value from Metadata, if any.
///
/// As an analogue to \a dyn_cast(), extract the \a Value subclass \c X
/// from \c MD, return null if \c MD doesn't contain a \a Value or if the \a
/// Value it does contain is of the wrong subclass.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
dyn_extract(Y &&MD) {
  if (auto *V = dyn_cast<ConstantAsMetadata>(MD))
    return dyn_cast<X>(V->getValue());
  return nullptr;
}

/// Extract a Value from Metadata, if any, allowing null.
///
/// As an analogue to \a dyn_cast_or_null(), extract the \a Value subclass \c X
/// from \c MD, return null if \c MD doesn't contain a \a Value or if the \a
/// Value it does contain is of the wrong subclass, allowing \c MD to be null.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
dyn_extract_or_null(Y &&MD) {
  if (auto *V = dyn_cast_or_null<ConstantAsMetadata>(MD))
    return dyn_cast<X>(V->getValue());
  return nullptr;
}

} // end namespace mdconst

//===----------------------------------------------------------------------===//
/// A single uniqued string.
///
/// These are used to efficiently contain a byte sequence for metadata.
/// MDString is always unnamed.
class MDString : public Metadata {
  friend class StringMapEntryStorage<MDString>;

  /// Back-pointer to the uniquing map entry that owns the bytes.
  StringMapEntry<MDString> *Entry = nullptr;

  MDString() : Metadata(MDStringKind, Uniqued) {}

public:
  MDString(const MDString &) = delete;
  MDString &operator=(MDString &&) = delete;
  MDString &operator=(const MDString &) = delete;

  static MDString *get(LLVMContext &Context, StringRef Str);
  static MDString *get(LLVMContext &Context, const char *Str) {
    // A null C string is treated as the empty string.
    return get(Context, Str ? StringRef(Str) : StringRef());
  }

  StringRef getString() const;

  unsigned getLength() const { return (unsigned)getString().size(); }

  using iterator = StringRef::iterator;

  /// Pointer to the first byte of the string.
  iterator begin() const { return getString().begin(); }

  /// Pointer to one byte past the end of the string.
  iterator end() const { return getString().end(); }

  const unsigned char *bytes_begin() const { return getString().bytes_begin(); }
  const unsigned char *bytes_end() const { return getString().bytes_end(); }

  /// Methods for support type inquiry through isa, cast, and dyn_cast.
757 static bool classof(const Metadata *MD) { 758 return MD->getMetadataID() == MDStringKind; 759 } 760 }; 761 762 /// A collection of metadata nodes that might be associated with a 763 /// memory access used by the alias-analysis infrastructure. 764 struct AAMDNodes { 765 explicit AAMDNodes() = default; 766 explicit AAMDNodes(MDNode *T, MDNode *TS, MDNode *S, MDNode *N) 767 : TBAA(T), TBAAStruct(TS), Scope(S), NoAlias(N) {} 768 769 bool operator==(const AAMDNodes &A) const { 770 return TBAA == A.TBAA && TBAAStruct == A.TBAAStruct && Scope == A.Scope && 771 NoAlias == A.NoAlias; 772 } 773 774 bool operator!=(const AAMDNodes &A) const { return !(*this == A); } 775 776 explicit operator bool() const { 777 return TBAA || TBAAStruct || Scope || NoAlias; 778 } 779 780 /// The tag for type-based alias analysis. 781 MDNode *TBAA = nullptr; 782 783 /// The tag for type-based alias analysis (tbaa struct). 784 MDNode *TBAAStruct = nullptr; 785 786 /// The tag for alias scope specification (used with noalias). 787 MDNode *Scope = nullptr; 788 789 /// The tag specifying the noalias scope. 790 MDNode *NoAlias = nullptr; 791 792 // Shift tbaa Metadata node to start off bytes later 793 static MDNode *shiftTBAA(MDNode *M, size_t off); 794 795 // Shift tbaa.struct Metadata node to start off bytes later 796 static MDNode *shiftTBAAStruct(MDNode *M, size_t off); 797 798 // Extend tbaa Metadata node to apply to a series of bytes of length len. 799 // A size of -1 denotes an unknown size. 800 static MDNode *extendToTBAA(MDNode *TBAA, ssize_t len); 801 802 /// Given two sets of AAMDNodes that apply to the same pointer, 803 /// give the best AAMDNodes that are compatible with both (i.e. a set of 804 /// nodes whose allowable aliasing conclusions are a subset of those 805 /// allowable by both of the inputs). However, for efficiency 806 /// reasons, do not create any new MDNodes. 
807 AAMDNodes intersect(const AAMDNodes &Other) const { 808 AAMDNodes Result; 809 Result.TBAA = Other.TBAA == TBAA ? TBAA : nullptr; 810 Result.TBAAStruct = Other.TBAAStruct == TBAAStruct ? TBAAStruct : nullptr; 811 Result.Scope = Other.Scope == Scope ? Scope : nullptr; 812 Result.NoAlias = Other.NoAlias == NoAlias ? NoAlias : nullptr; 813 return Result; 814 } 815 816 /// Create a new AAMDNode that describes this AAMDNode after applying a 817 /// constant offset to the start of the pointer. 818 AAMDNodes shift(size_t Offset) const { 819 AAMDNodes Result; 820 Result.TBAA = TBAA ? shiftTBAA(TBAA, Offset) : nullptr; 821 Result.TBAAStruct = 822 TBAAStruct ? shiftTBAAStruct(TBAAStruct, Offset) : nullptr; 823 Result.Scope = Scope; 824 Result.NoAlias = NoAlias; 825 return Result; 826 } 827 828 /// Create a new AAMDNode that describes this AAMDNode after extending it to 829 /// apply to a series of bytes of length Len. A size of -1 denotes an unknown 830 /// size. 831 AAMDNodes extendTo(ssize_t Len) const { 832 AAMDNodes Result; 833 Result.TBAA = TBAA ? extendToTBAA(TBAA, Len) : nullptr; 834 // tbaa.struct contains (offset, size, type) triples. Extending the length 835 // of the tbaa.struct doesn't require changing this (though more information 836 // could be provided by adding more triples at subsequent lengths). 837 Result.TBAAStruct = TBAAStruct; 838 Result.Scope = Scope; 839 Result.NoAlias = NoAlias; 840 return Result; 841 } 842 843 /// Given two sets of AAMDNodes applying to potentially different locations, 844 /// determine the best AAMDNodes that apply to both. 845 AAMDNodes merge(const AAMDNodes &Other) const; 846 847 /// Determine the best AAMDNodes after concatenating two different locations 848 /// together. Different from `merge`, where different locations should 849 /// overlap each other, `concat` puts non-overlapping locations together. 
850 AAMDNodes concat(const AAMDNodes &Other) const; 851 852 /// Create a new AAMDNode for accessing \p AccessSize bytes of this AAMDNode. 853 /// If this AAMDNode has !tbaa.struct and \p AccessSize matches the size of 854 /// the field at offset 0, get the TBAA tag describing the accessed field. 855 /// If such an AAMDNode already embeds !tbaa, the existing one is retrieved. 856 /// Finally, !tbaa.struct is zeroed out. 857 AAMDNodes adjustForAccess(unsigned AccessSize); 858 AAMDNodes adjustForAccess(size_t Offset, Type *AccessTy, 859 const DataLayout &DL); 860 AAMDNodes adjustForAccess(size_t Offset, unsigned AccessSize); 861 }; 862 863 // Specialize DenseMapInfo for AAMDNodes. 864 template<> 865 struct DenseMapInfo<AAMDNodes> { 866 static inline AAMDNodes getEmptyKey() { 867 return AAMDNodes(DenseMapInfo<MDNode *>::getEmptyKey(), 868 nullptr, nullptr, nullptr); 869 } 870 871 static inline AAMDNodes getTombstoneKey() { 872 return AAMDNodes(DenseMapInfo<MDNode *>::getTombstoneKey(), 873 nullptr, nullptr, nullptr); 874 } 875 876 static unsigned getHashValue(const AAMDNodes &Val) { 877 return DenseMapInfo<MDNode *>::getHashValue(Val.TBAA) ^ 878 DenseMapInfo<MDNode *>::getHashValue(Val.TBAAStruct) ^ 879 DenseMapInfo<MDNode *>::getHashValue(Val.Scope) ^ 880 DenseMapInfo<MDNode *>::getHashValue(Val.NoAlias); 881 } 882 883 static bool isEqual(const AAMDNodes &LHS, const AAMDNodes &RHS) { 884 return LHS == RHS; 885 } 886 }; 887 888 /// Tracking metadata reference owned by Metadata. 889 /// 890 /// Similar to \a TrackingMDRef, but it's expected to be owned by an instance 891 /// of \a Metadata, which has the option of registering itself for callbacks to 892 /// re-unique itself. 893 /// 894 /// In particular, this is used by \a MDNode. 
895 class MDOperand { 896 Metadata *MD = nullptr; 897 898 public: 899 MDOperand() = default; 900 MDOperand(const MDOperand &) = delete; 901 MDOperand(MDOperand &&Op) { 902 MD = Op.MD; 903 if (MD) 904 (void)MetadataTracking::retrack(Op.MD, MD); 905 Op.MD = nullptr; 906 } 907 MDOperand &operator=(const MDOperand &) = delete; 908 MDOperand &operator=(MDOperand &&Op) { 909 MD = Op.MD; 910 if (MD) 911 (void)MetadataTracking::retrack(Op.MD, MD); 912 Op.MD = nullptr; 913 return *this; 914 } 915 916 // Check if MDOperand is of type MDString and equals `Str`. 917 bool equalsStr(StringRef Str) const { 918 return isa<MDString>(this->get()) && 919 cast<MDString>(this->get())->getString() == Str; 920 } 921 922 ~MDOperand() { untrack(); } 923 924 Metadata *get() const { return MD; } 925 operator Metadata *() const { return get(); } 926 Metadata *operator->() const { return get(); } 927 Metadata &operator*() const { return *get(); } 928 929 void reset() { 930 untrack(); 931 MD = nullptr; 932 } 933 void reset(Metadata *MD, Metadata *Owner) { 934 untrack(); 935 this->MD = MD; 936 track(Owner); 937 } 938 939 private: 940 void track(Metadata *Owner) { 941 if (MD) { 942 if (Owner) 943 MetadataTracking::track(this, *MD, *Owner); 944 else 945 MetadataTracking::track(MD); 946 } 947 } 948 949 void untrack() { 950 assert(static_cast<void *>(this) == &MD && "Expected same address"); 951 if (MD) 952 MetadataTracking::untrack(MD); 953 } 954 }; 955 956 template <> struct simplify_type<MDOperand> { 957 using SimpleType = Metadata *; 958 959 static SimpleType getSimplifiedValue(MDOperand &MD) { return MD.get(); } 960 }; 961 962 template <> struct simplify_type<const MDOperand> { 963 using SimpleType = Metadata *; 964 965 static SimpleType getSimplifiedValue(const MDOperand &MD) { return MD.get(); } 966 }; 967 968 /// Pointer to the context, with optional RAUW support. 
///
/// Either a raw (non-null) pointer to the \a LLVMContext, or an owned pointer
/// to \a ReplaceableMetadataImpl (which has a reference to \a LLVMContext).
class ContextAndReplaceableUses {
  PointerUnion<LLVMContext *, ReplaceableMetadataImpl *> Ptr;

public:
  ContextAndReplaceableUses(LLVMContext &Context) : Ptr(&Context) {}
  ContextAndReplaceableUses(
      std::unique_ptr<ReplaceableMetadataImpl> ReplaceableUses)
      : Ptr(ReplaceableUses.release()) {
    assert(getReplaceableUses() && "Expected non-null replaceable uses");
  }
  ContextAndReplaceableUses() = delete;
  ContextAndReplaceableUses(ContextAndReplaceableUses &&) = delete;
  ContextAndReplaceableUses(const ContextAndReplaceableUses &) = delete;
  ContextAndReplaceableUses &operator=(ContextAndReplaceableUses &&) = delete;
  ContextAndReplaceableUses &
  operator=(const ContextAndReplaceableUses &) = delete;
  // Owns the ReplaceableMetadataImpl, if any; in the raw-context state
  // getReplaceableUses() is null and this delete is a no-op.
  ~ContextAndReplaceableUses() { delete getReplaceableUses(); }

  operator LLVMContext &() { return getContext(); }

  /// Whether this contains RAUW support.
  bool hasReplaceableUses() const {
    return isa<ReplaceableMetadataImpl *>(Ptr);
  }

  LLVMContext &getContext() const {
    if (hasReplaceableUses())
      return getReplaceableUses()->getContext();
    return *cast<LLVMContext *>(Ptr);
  }

  /// Return the owned RAUW support, or null when in the raw-context state.
  ReplaceableMetadataImpl *getReplaceableUses() const {
    if (hasReplaceableUses())
      return cast<ReplaceableMetadataImpl *>(Ptr);
    return nullptr;
  }

  /// Ensure that this has RAUW support, and then return it.
  ReplaceableMetadataImpl *getOrCreateReplaceableUses() {
    if (!hasReplaceableUses())
      makeReplaceable(std::make_unique<ReplaceableMetadataImpl>(getContext()));
    return getReplaceableUses();
  }

  /// Assign RAUW support to this.
  ///
  /// Make this replaceable, taking ownership of \c ReplaceableUses (which must
  /// not be null).
  void
  makeReplaceable(std::unique_ptr<ReplaceableMetadataImpl> ReplaceableUses) {
    assert(ReplaceableUses && "Expected non-null replaceable uses");
    assert(&ReplaceableUses->getContext() == &getContext() &&
           "Expected same context");
    // Free any previously-owned impl before taking ownership of the new one.
    delete getReplaceableUses();
    Ptr = ReplaceableUses.release();
  }

  /// Drop RAUW support.
  ///
  /// Cede ownership of RAUW support, returning it.
  std::unique_ptr<ReplaceableMetadataImpl> takeReplaceableUses() {
    assert(hasReplaceableUses() && "Expected to own replaceable uses");
    std::unique_ptr<ReplaceableMetadataImpl> ReplaceableUses(
        getReplaceableUses());
    // Fall back to the raw-context state, recovering the context from the
    // impl before it leaves our ownership.
    Ptr = &ReplaceableUses->getContext();
    return ReplaceableUses;
  }
};

/// Deleter for temporary MDNodes; forwards to MDNode::deleteTemporary()
/// (defined inline after MDNode below).
struct TempMDNodeDeleter {
  inline void operator()(MDNode *Node) const;
};

// Declare a TempCLASS unique_ptr alias for every metadata node class.
#define HANDLE_MDNODE_LEAF(CLASS)                                              \
  using Temp##CLASS = std::unique_ptr<CLASS, TempMDNodeDeleter>;
#define HANDLE_MDNODE_BRANCH(CLASS) HANDLE_MDNODE_LEAF(CLASS)
#include "llvm/IR/Metadata.def"

/// Metadata node.
///
/// Metadata nodes can be uniqued, like constants, or distinct. Temporary
/// metadata nodes (with full support for RAUW) can be used to delay uniquing
/// until forward references are known. The basic metadata node is an \a
/// MDTuple.
///
/// There is limited support for RAUW at construction time. At construction
/// time, if any operand is a temporary node (or an unresolved uniqued node,
/// which indicates a transitive temporary operand), the node itself will be
/// unresolved. As soon as all operands become resolved, it will drop RAUW
/// support permanently.
///
/// If an unresolved node is part of a cycle, \a resolveCycles() needs
/// to be called on some member of the cycle once all temporary nodes have been
/// replaced.
///
/// MDNodes can be large or small, as well as resizable or non-resizable.
/// Large MDNodes' operands are allocated in a separate storage vector,
/// whereas small MDNodes' operands are co-allocated. Distinct and temporary
/// MDnodes are resizable, but only MDTuples support this capability.
///
/// Clients can add operands to resizable MDNodes using push_back().
class MDNode : public Metadata {
  friend class ReplaceableMetadataImpl;
  friend class LLVMContextImpl;
  friend class DIAssignID;

  /// The header that is coallocated with an MDNode along with its "small"
  /// operands. It is located immediately before the main body of the node.
  /// The operands are in turn located immediately before the header.
  /// For resizable MDNodes, the space for the storage vector is also allocated
  /// immediately before the header, overlapping with the operands.
  /// Explicity set alignment because bitfields by default have an
  /// alignment of 1 on z/OS.
  struct alignas(alignof(size_t)) Header {
    bool IsResizable : 1;
    bool IsLarge : 1;
    // 4-bit fields; values are bounded by MaxSmallSize below.
    size_t SmallSize : 4;
    size_t SmallNumOps : 4;
    size_t : sizeof(size_t) * CHAR_BIT - 10; // Anonymous padding to one word.

    /// Count of operands not yet resolved; drives MDNode::isResolved().
    unsigned NumUnresolved = 0;
    using LargeStorageVector = SmallVector<MDOperand, 0>;

    static constexpr size_t NumOpsFitInVector =
        sizeof(LargeStorageVector) / sizeof(MDOperand);
    static_assert(
        NumOpsFitInVector * sizeof(MDOperand) == sizeof(LargeStorageVector),
        "sizeof(LargeStorageVector) must be a multiple of sizeof(MDOperand)");

    // Largest operand count representable in the 4-bit SmallSize field.
    static constexpr size_t MaxSmallSize = 15;

    static constexpr size_t getOpSize(unsigned NumOps) {
      return sizeof(MDOperand) * NumOps;
    }
    /// Returns the number of operands the node has space for based on its
    /// allocation characteristics.
    static size_t getSmallSize(size_t NumOps, bool IsResizable, bool IsLarge) {
      // Resizable small nodes reserve at least enough co-allocated space to
      // overlay the large storage vector later (bool promotes to 0/1 here).
      return IsLarge ? NumOpsFitInVector
                     : std::max(NumOps, NumOpsFitInVector * IsResizable);
    }
    /// Returns the number of bytes allocated for operands and header.
    static size_t getAllocSize(StorageType Storage, size_t NumOps) {
      return getOpSize(
                 getSmallSize(NumOps, isResizable(Storage), isLarge(NumOps))) +
             sizeof(Header);
    }

    /// Only temporary and distinct nodes are resizable.
    static bool isResizable(StorageType Storage) { return Storage != Uniqued; }
    static bool isLarge(size_t NumOps) { return NumOps > MaxSmallSize; }

    size_t getAllocSize() const {
      return getOpSize(SmallSize) + sizeof(Header);
    }
    // Start of the co-allocation: operands (or vector) and header precede the
    // node body, so step back from the end of the header.
    void *getAllocation() {
      return reinterpret_cast<char *>(this + 1) -
             alignTo(getAllocSize(), alignof(uint64_t));
    }

    void *getLargePtr() const {
      static_assert(alignof(LargeStorageVector) <= alignof(Header),
                    "LargeStorageVector too strongly aligned");
      return reinterpret_cast<char *>(const_cast<Header *>(this)) -
             sizeof(LargeStorageVector);
    }

    void *getSmallPtr();

    LargeStorageVector &getLarge() {
      assert(IsLarge);
      return *reinterpret_cast<LargeStorageVector *>(getLargePtr());
    }

    const LargeStorageVector &getLarge() const {
      assert(IsLarge);
      return *reinterpret_cast<const LargeStorageVector *>(getLargePtr());
    }

    void resizeSmall(size_t NumOps);
    void resizeSmallToLarge(size_t NumOps);
    void resize(size_t NumOps);

    explicit Header(size_t NumOps, StorageType Storage);
    ~Header();

    // Small operands live immediately before the header, so index backwards.
    MutableArrayRef<MDOperand> operands() {
      if (IsLarge)
        return getLarge();
      return MutableArrayRef(
          reinterpret_cast<MDOperand *>(this) - SmallSize, SmallNumOps);
    }

    ArrayRef<MDOperand> operands() const {
      if (IsLarge)
        return getLarge();
      return ArrayRef(reinterpret_cast<const MDOperand *>(this) - SmallSize,
                      SmallNumOps);
    }

    unsigned getNumOperands() const {
      if (!IsLarge)
        return SmallNumOps;
      return getLarge().size();
    }
  };

  // The header is co-allocated immediately before the node body.
  Header &getHeader() { return *(reinterpret_cast<Header *>(this) - 1); }

  const Header &getHeader() const {
    return *(reinterpret_cast<const Header *>(this) - 1);
  }

  ContextAndReplaceableUses Context;

protected:
  MDNode(LLVMContext &Context, unsigned ID, StorageType Storage,
         ArrayRef<Metadata *> Ops1, ArrayRef<Metadata *> Ops2 = {});
  ~MDNode() = default;

  void *operator new(size_t Size, size_t NumOps, StorageType Storage);
  void operator delete(void *Mem);

  /// Required by std, but never called.
  void operator delete(void *, unsigned) {
    llvm_unreachable("Constructor throws?");
  }

  /// Required by std, but never called.
  void operator delete(void *, unsigned, bool) {
    llvm_unreachable("Constructor throws?");
  }

  void dropAllReferences();

  MDOperand *mutable_begin() { return getHeader().operands().begin(); }
  MDOperand *mutable_end() { return getHeader().operands().end(); }

  using mutable_op_range = iterator_range<MDOperand *>;

  mutable_op_range mutable_operands() {
    return mutable_op_range(mutable_begin(), mutable_end());
  }

public:
  MDNode(const MDNode &) = delete;
  void operator=(const MDNode &) = delete;
  void *operator new(size_t) = delete;

  // Convenience factories; defined inline after MDTuple, to which they
  // forward.
  static inline MDTuple *get(LLVMContext &Context, ArrayRef<Metadata *> MDs);
  static inline MDTuple *getIfExists(LLVMContext &Context,
                                     ArrayRef<Metadata *> MDs);
  static inline MDTuple *getDistinct(LLVMContext &Context,
                                     ArrayRef<Metadata *> MDs);
  static inline TempMDTuple getTemporary(LLVMContext &Context,
                                         ArrayRef<Metadata *> MDs);

  /// Create a (temporary) clone of this.
  TempMDNode clone() const;

  /// Deallocate a node created by getTemporary.
  ///
  /// Calls \c replaceAllUsesWith(nullptr) before deleting, so any remaining
  /// references will be reset.
  static void deleteTemporary(MDNode *N);

  LLVMContext &getContext() const { return Context.getContext(); }

  /// Replace a specific operand.
  void replaceOperandWith(unsigned I, Metadata *New);

  /// Check if node is fully resolved.
  ///
  /// If \a isTemporary(), this always returns \c false; if \a isDistinct(),
  /// this always returns \c true.
  ///
  /// If \a isUniqued(), returns \c true if this has already dropped RAUW
  /// support (because all operands are resolved).
  ///
  /// As forward declarations are resolved, their containers should get
  /// resolved automatically. However, if this (or one of its operands) is
  /// involved in a cycle, \a resolveCycles() needs to be called explicitly.
  bool isResolved() const { return !isTemporary() && !getNumUnresolved(); }

  bool isUniqued() const { return Storage == Uniqued; }
  bool isDistinct() const { return Storage == Distinct; }
  bool isTemporary() const { return Storage == Temporary; }

  bool isReplaceable() const { return isTemporary() || isAlwaysReplaceable(); }
  // DIAssignID nodes stay replaceable even once distinct.
  bool isAlwaysReplaceable() const { return getMetadataID() == DIAssignIDKind; }

  unsigned getNumTemporaryUses() const {
    assert(isTemporary() && "Only for temporaries");
    return Context.getReplaceableUses()->getNumUses();
  }

  /// RAUW a temporary.
  ///
  /// \pre \a isTemporary() must be \c true.
  void replaceAllUsesWith(Metadata *MD) {
    assert(isReplaceable() && "Expected temporary/replaceable node");
    if (Context.hasReplaceableUses())
      Context.getReplaceableUses()->replaceAllUsesWith(MD);
  }

  /// Resolve cycles.
  ///
  /// Once all forward declarations have been resolved, force cycles to be
  /// resolved.
  ///
  /// \pre No operands (or operands' operands, etc.) have \a isTemporary().
  void resolveCycles();

  /// Resolve a unique, unresolved node.
  void resolve();

  /// Replace a temporary node with a permanent one.
  ///
  /// Try to create a uniqued version of \c N -- in place, if possible -- and
  /// return it. If \c N cannot be uniqued, return a distinct node instead.
  template <class T>
  static std::enable_if_t<std::is_base_of<MDNode, T>::value, T *>
  replaceWithPermanent(std::unique_ptr<T, TempMDNodeDeleter> N) {
    return cast<T>(N.release()->replaceWithPermanentImpl());
  }

  /// Replace a temporary node with a uniqued one.
  ///
  /// Create a uniqued version of \c N -- in place, if possible -- and return
  /// it. Takes ownership of the temporary node.
  ///
  /// \pre N does not self-reference.
  template <class T>
  static std::enable_if_t<std::is_base_of<MDNode, T>::value, T *>
  replaceWithUniqued(std::unique_ptr<T, TempMDNodeDeleter> N) {
    return cast<T>(N.release()->replaceWithUniquedImpl());
  }

  /// Replace a temporary node with a distinct one.
  ///
  /// Create a distinct version of \c N -- in place, if possible -- and return
  /// it. Takes ownership of the temporary node.
  template <class T>
  static std::enable_if_t<std::is_base_of<MDNode, T>::value, T *>
  replaceWithDistinct(std::unique_ptr<T, TempMDNodeDeleter> N) {
    return cast<T>(N.release()->replaceWithDistinctImpl());
  }

  /// Print in tree shape.
  ///
  /// Prints definition of \c this in tree shape.
  ///
  /// If \c M is provided, metadata nodes will be numbered canonically;
  /// otherwise, pointer addresses are substituted.
  /// @{
  void printTree(raw_ostream &OS, const Module *M = nullptr) const;
  void printTree(raw_ostream &OS, ModuleSlotTracker &MST,
                 const Module *M = nullptr) const;
  /// @}

  /// User-friendly dump in tree shape.
  ///
  /// If \c M is provided, metadata nodes will be numbered canonically;
  /// otherwise, pointer addresses are substituted.
  ///
  /// Note: this uses an explicit overload instead of default arguments so that
  /// the nullptr version is easy to call from a debugger.
  ///
  /// @{
  void dumpTree() const;
  void dumpTree(const Module *M) const;
  /// @}

private:
  MDNode *replaceWithPermanentImpl();
  MDNode *replaceWithUniquedImpl();
  MDNode *replaceWithDistinctImpl();

protected:
  /// Set an operand.
  ///
  /// Sets the operand directly, without worrying about uniquing.
  void setOperand(unsigned I, Metadata *New);

  unsigned getNumUnresolved() const { return getHeader().NumUnresolved; }

  void setNumUnresolved(unsigned N) { getHeader().NumUnresolved = N; }
  void storeDistinctInContext();
  template <class T, class StoreT>
  static T *storeImpl(T *N, StorageType Storage, StoreT &Store);
  template <class T> static T *storeImpl(T *N, StorageType Storage);

  /// Resize the node to hold \a NumOps operands.
  ///
  /// \pre \a isTemporary() or \a isDistinct()
  /// \pre MetadataID == MDTupleKind
  void resize(size_t NumOps) {
    assert(!isUniqued() && "Resizing is not supported for uniqued nodes");
    assert(getMetadataID() == MDTupleKind &&
           "Resizing is not supported for this node kind");
    getHeader().resize(NumOps);
  }

private:
  void handleChangedOperand(void *Ref, Metadata *New);

  /// Drop RAUW support, if any.
  void dropReplaceableUses();

  void resolveAfterOperandChange(Metadata *Old, Metadata *New);
  void decrementUnresolvedOperandCount();
  void countUnresolvedOperands();

  /// Mutate this to be "uniqued".
  ///
  /// Mutate this so that \a isUniqued().
  /// \pre \a isTemporary().
  /// \pre already added to uniquing set.
  void makeUniqued();

  /// Mutate this to be "distinct".
  ///
  /// Mutate this so that \a isDistinct().
  /// \pre \a isTemporary().
  void makeDistinct();

  void deleteAsSubclass();
  MDNode *uniquify();
  void eraseFromStore();

  template <class NodeTy> struct HasCachedHash;
  // Tag-dispatch helpers: touch the hash only for node types that actually
  // cache one (selected via HasCachedHash).
  template <class NodeTy>
  static void dispatchRecalculateHash(NodeTy *N, std::true_type) {
    N->recalculateHash();
  }
  template <class NodeTy>
  static void dispatchRecalculateHash(NodeTy *, std::false_type) {}
  template <class NodeTy>
  static void dispatchResetHash(NodeTy *N, std::true_type) {
    N->setHash(0);
  }
  template <class NodeTy>
  static void dispatchResetHash(NodeTy *, std::false_type) {}

  /// Merge branch weights from two direct callsites.
  static MDNode *mergeDirectCallProfMetadata(MDNode *A, MDNode *B,
                                             const Instruction *AInstr,
                                             const Instruction *BInstr);

public:
  using op_iterator = const MDOperand *;
  using op_range = iterator_range<op_iterator>;

  op_iterator op_begin() const {
    return const_cast<MDNode *>(this)->mutable_begin();
  }

  op_iterator op_end() const {
    return const_cast<MDNode *>(this)->mutable_end();
  }

  ArrayRef<MDOperand> operands() const { return getHeader().operands(); }

  const MDOperand &getOperand(unsigned I) const {
    assert(I < getNumOperands() && "Out of range");
    return getHeader().operands()[I];
  }

  /// Return number of MDNode operands.
  unsigned getNumOperands() const { return getHeader().getNumOperands(); }

  /// Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Metadata *MD) {
    switch (MD->getMetadataID()) {
    default:
      return false;
#define HANDLE_MDNODE_LEAF(CLASS)                                              \
  case CLASS##Kind:                                                            \
    return true;
#include "llvm/IR/Metadata.def"
    }
  }

  /// Check whether MDNode is a vtable access.
  bool isTBAAVtableAccess() const;

  /// Methods for metadata merging.
  static MDNode *concatenate(MDNode *A, MDNode *B);
  static MDNode *intersect(MDNode *A, MDNode *B);
  static MDNode *getMostGenericTBAA(MDNode *A, MDNode *B);
  static MDNode *getMostGenericFPMath(MDNode *A, MDNode *B);
  static MDNode *getMostGenericRange(MDNode *A, MDNode *B);
  static MDNode *getMostGenericNoaliasAddrspace(MDNode *A, MDNode *B);
  static MDNode *getMostGenericAliasScope(MDNode *A, MDNode *B);
  static MDNode *getMostGenericAlignmentOrDereferenceable(MDNode *A, MDNode *B);
  /// Merge !prof metadata from two instructions.
  /// Currently only implemented with direct callsites with branch weights.
  static MDNode *getMergedProfMetadata(MDNode *A, MDNode *B,
                                       const Instruction *AInstr,
                                       const Instruction *BInstr);
  static MDNode *getMergedMemProfMetadata(MDNode *A, MDNode *B);
  static MDNode *getMergedCallsiteMetadata(MDNode *A, MDNode *B);
};

/// Tuple of metadata.
///
/// This is the simple \a MDNode arbitrary tuple. Nodes are uniqued by
/// default based on their operands.
class MDTuple : public MDNode {
  friend class LLVMContextImpl;
  friend class MDNode;

  MDTuple(LLVMContext &C, StorageType Storage, unsigned Hash,
          ArrayRef<Metadata *> Vals)
      : MDNode(C, MDTupleKind, Storage, Vals) {
    setHash(Hash);
  }

  ~MDTuple() { dropAllReferences(); }

  // The operand hash is cached in Metadata's spare SubclassData32 field.
  void setHash(unsigned Hash) { SubclassData32 = Hash; }
  void recalculateHash();

  static MDTuple *getImpl(LLVMContext &Context, ArrayRef<Metadata *> MDs,
                          StorageType Storage, bool ShouldCreate = true);

  TempMDTuple cloneImpl() const {
    ArrayRef<MDOperand> Operands = operands();
    return getTemporary(getContext(), SmallVector<Metadata *, 4>(Operands));
  }

public:
  /// Get the hash, if any.
  unsigned getHash() const { return SubclassData32; }

  static MDTuple *get(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
    return getImpl(Context, MDs, Uniqued);
  }

  static MDTuple *getIfExists(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
    return getImpl(Context, MDs, Uniqued, /* ShouldCreate */ false);
  }

  /// Return a distinct node.
  ///
  /// Return a distinct node -- i.e., a node that is not uniqued.
  static MDTuple *getDistinct(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
    return getImpl(Context, MDs, Distinct);
  }

  /// Return a temporary node.
  ///
  /// For use in constructing cyclic MDNode structures. A temporary MDNode is
  /// not uniqued, may be RAUW'd, and must be manually deleted with
  /// deleteTemporary.
  static TempMDTuple getTemporary(LLVMContext &Context,
                                  ArrayRef<Metadata *> MDs) {
    return TempMDTuple(getImpl(Context, MDs, Temporary));
  }

  /// Return a (temporary) clone of this.
  TempMDTuple clone() const { return cloneImpl(); }

  /// Append an element to the tuple. This will resize the node.
  void push_back(Metadata *MD) {
    size_t NumOps = getNumOperands();
    resize(NumOps + 1);
    setOperand(NumOps, MD);
  }

  /// Shrink the operands by 1.
  void pop_back() { resize(getNumOperands() - 1); }

  static bool classof(const Metadata *MD) {
    return MD->getMetadataID() == MDTupleKind;
  }
};

// Definitions of MDNode's convenience factories, declared inline above; they
// simply forward to the MDTuple equivalents.
MDTuple *MDNode::get(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
  return MDTuple::get(Context, MDs);
}

MDTuple *MDNode::getIfExists(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
  return MDTuple::getIfExists(Context, MDs);
}

MDTuple *MDNode::getDistinct(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
  return MDTuple::getDistinct(Context, MDs);
}

TempMDTuple MDNode::getTemporary(LLVMContext &Context,
                                 ArrayRef<Metadata *> MDs) {
  return MDTuple::getTemporary(Context, MDs);
}

void TempMDNodeDeleter::operator()(MDNode *Node) const {
  MDNode::deleteTemporary(Node);
}

/// This is a simple wrapper around an MDNode which provides a higher-level
/// interface by hiding the details of how alias analysis information is encoded
/// in its operands.
class AliasScopeNode {
  const MDNode *Node = nullptr;

public:
  AliasScopeNode() = default;
  explicit AliasScopeNode(const MDNode *N) : Node(N) {}

  /// Get the MDNode for this AliasScopeNode.
  const MDNode *getNode() const { return Node; }

  /// Get the MDNode for this AliasScopeNode's domain.
  const MDNode *getDomain() const {
    // Operand 1 holds the domain; missing or non-MDNode operands yield null.
    if (Node->getNumOperands() < 2)
      return nullptr;
    return dyn_cast_or_null<MDNode>(Node->getOperand(1));
  }
  /// Get the scope's name (operand 2, if present), or the empty string.
  StringRef getName() const {
    if (Node->getNumOperands() > 2)
      if (MDString *N = dyn_cast_or_null<MDString>(Node->getOperand(2)))
        return N->getString();
    return StringRef();
  }
};

/// Typed iterator through MDNode operands.
1598 /// 1599 /// An iterator that transforms an \a MDNode::iterator into an iterator over a 1600 /// particular Metadata subclass. 1601 template <class T> class TypedMDOperandIterator { 1602 MDNode::op_iterator I = nullptr; 1603 1604 public: 1605 using iterator_category = std::input_iterator_tag; 1606 using value_type = T *; 1607 using difference_type = std::ptrdiff_t; 1608 using pointer = void; 1609 using reference = T *; 1610 1611 TypedMDOperandIterator() = default; 1612 explicit TypedMDOperandIterator(MDNode::op_iterator I) : I(I) {} 1613 1614 T *operator*() const { return cast_or_null<T>(*I); } 1615 1616 TypedMDOperandIterator &operator++() { 1617 ++I; 1618 return *this; 1619 } 1620 1621 TypedMDOperandIterator operator++(int) { 1622 TypedMDOperandIterator Temp(*this); 1623 ++I; 1624 return Temp; 1625 } 1626 1627 bool operator==(const TypedMDOperandIterator &X) const { return I == X.I; } 1628 bool operator!=(const TypedMDOperandIterator &X) const { return I != X.I; } 1629 }; 1630 1631 /// Typed, array-like tuple of metadata. 1632 /// 1633 /// This is a wrapper for \a MDTuple that makes it act like an array holding a 1634 /// particular type of metadata. 
template <class T> class MDTupleTypedArrayWrapper {
  const MDTuple *N = nullptr;

public:
  MDTupleTypedArrayWrapper() = default;
  MDTupleTypedArrayWrapper(const MDTuple *N) : N(N) {}

  // Implicit conversion from a wrapper over U is allowed when U* converts to
  // T* ...
  template <class U>
  MDTupleTypedArrayWrapper(
      const MDTupleTypedArrayWrapper<U> &Other,
      std::enable_if_t<std::is_convertible<U *, T *>::value> * = nullptr)
      : N(Other.get()) {}

  // ... otherwise the conversion must be explicit.
  template <class U>
  explicit MDTupleTypedArrayWrapper(
      const MDTupleTypedArrayWrapper<U> &Other,
      std::enable_if_t<!std::is_convertible<U *, T *>::value> * = nullptr)
      : N(Other.get()) {}

  explicit operator bool() const { return get(); }
  explicit operator MDTuple *() const { return get(); }

  MDTuple *get() const { return const_cast<MDTuple *>(N); }
  MDTuple *operator->() const { return get(); }
  MDTuple &operator*() const { return *get(); }

  // FIXME: Fix callers and remove condition on N.
  unsigned size() const { return N ? N->getNumOperands() : 0u; }
  bool empty() const { return N ? N->getNumOperands() == 0 : true; }
  // NOTE(review): unlike size()/empty(), no null-N guard here -- callers must
  // check emptiness before indexing.
  T *operator[](unsigned I) const { return cast_or_null<T>(N->getOperand(I)); }

  // FIXME: Fix callers and remove condition on N.
  using iterator = TypedMDOperandIterator<T>;

  iterator begin() const { return N ? iterator(N->op_begin()) : iterator(); }
  iterator end() const { return N ? iterator(N->op_end()) : iterator(); }
};

// Declare a CLASSArray typed-wrapper alias for every metadata class.
#define HANDLE_METADATA(CLASS)                                                 \
  using CLASS##Array = MDTupleTypedArrayWrapper<CLASS>;
#include "llvm/IR/Metadata.def"

/// Placeholder metadata for operands of distinct MDNodes.
///
/// This is a lightweight placeholder for an operand of a distinct node. It's
/// purpose is to help track forward references when creating a distinct node.
/// This allows distinct nodes involved in a cycle to be constructed before
/// their operands without requiring a heavyweight temporary node with
/// full-blown RAUW support.
///
/// Each placeholder supports only a single MDNode user. Clients should pass
/// an ID, retrieved via \a getID(), to indicate the "real" operand that this
/// should be replaced with.
///
/// While it would be possible to implement move operators, they would be
/// fairly expensive. Leave them unimplemented to discourage their use
/// (clients can use std::deque, std::list, BumpPtrAllocator, etc.).
class DistinctMDOperandPlaceholder : public Metadata {
  friend class MetadataTracking;

  // Back-pointer to the single tracked use slot (maintained by
  // MetadataTracking); null while no user references this placeholder.
  Metadata **Use = nullptr;

public:
  explicit DistinctMDOperandPlaceholder(unsigned ID)
      : Metadata(DistinctMDOperandPlaceholderKind, Distinct) {
    // Stash the client-provided ID in Metadata's spare 32 bits.
    SubclassData32 = ID;
  }

  DistinctMDOperandPlaceholder() = delete;
  DistinctMDOperandPlaceholder(DistinctMDOperandPlaceholder &&) = delete;
  DistinctMDOperandPlaceholder(const DistinctMDOperandPlaceholder &) = delete;

  ~DistinctMDOperandPlaceholder() {
    // Zero out the user's operand slot so it does not dangle.
    if (Use)
      *Use = nullptr;
  }

  unsigned getID() const { return SubclassData32; }

  /// Replace the use of this with MD.
  void replaceUseWith(Metadata *MD) {
    if (!Use)
      return;
    *Use = MD;

    // Register the replacement with the tracking machinery...
    if (*Use)
      MetadataTracking::track(*Use);

    // ...then untrack this placeholder, which clears Use via the tracking
    // callbacks (checked by the assertion below).
    Metadata *T = cast<Metadata>(this);
    MetadataTracking::untrack(T);
    assert(!Use && "Use is still being tracked despite being untracked!");
  }
};

//===----------------------------------------------------------------------===//
/// A tuple of MDNodes.
///
/// Despite its name, a NamedMDNode isn't itself an MDNode.
///
/// NamedMDNodes are named module-level entities that contain lists of MDNodes.
///
/// It is illegal for a NamedMDNode to appear as an operand of an MDNode.
class NamedMDNode : public ilist_node<NamedMDNode> {
  friend class LLVMContextImpl;
  friend class Module;

  std::string Name;
  Module *Parent = nullptr;
  // Type-erased operand list; keeps TrackingMDRef out of this header.
  void *Operands; // SmallVector<TrackingMDRef, 4>

  void setParent(Module *M) { Parent = M; }

  explicit NamedMDNode(const Twine &N);

  /// Index-based bidirectional iterator over the node's operands; shared
  /// implementation for the const and non-const iterator types.
  template <class T1> class op_iterator_impl {
    friend class NamedMDNode;

    const NamedMDNode *Node = nullptr;
    unsigned Idx = 0;

    op_iterator_impl(const NamedMDNode *N, unsigned i) : Node(N), Idx(i) {}

  public:
    using iterator_category = std::bidirectional_iterator_tag;
    using value_type = T1;
    using difference_type = std::ptrdiff_t;
    using pointer = value_type *;
    using reference = value_type;

    op_iterator_impl() = default;

    // NOTE(review): only Idx is compared, not Node; iterators from different
    // NamedMDNodes with equal indices compare equal -- callers must not mix
    // iterators across nodes.
    bool operator==(const op_iterator_impl &o) const { return Idx == o.Idx; }
    bool operator!=(const op_iterator_impl &o) const { return Idx != o.Idx; }

    op_iterator_impl &operator++() {
      ++Idx;
      return *this;
    }

    op_iterator_impl operator++(int) {
      op_iterator_impl tmp(*this);
      operator++();
      return tmp;
    }

    op_iterator_impl &operator--() {
      --Idx;
      return *this;
    }

    op_iterator_impl operator--(int) {
      op_iterator_impl tmp(*this);
      operator--();
      return tmp;
    }

    T1 operator*() const { return Node->getOperand(Idx); }
  };

public:
  NamedMDNode(const NamedMDNode &) = delete;
  ~NamedMDNode();

  /// Drop all references and remove the node from parent module.
  void eraseFromParent();

  /// Remove all uses and clear node vector.
  void dropAllReferences() { clearOperands(); }
  /// Drop all references to this node's operands.
  void clearOperands();

  /// Get the module that holds this named metadata collection.
  inline Module *getParent() { return Parent; }
  inline const Module *getParent() const { return Parent; }

  MDNode *getOperand(unsigned i) const;
  unsigned getNumOperands() const;
  void addOperand(MDNode *M);
  void setOperand(unsigned I, MDNode *New);
  StringRef getName() const;
  void print(raw_ostream &ROS, bool IsForDebug = false) const;
  void print(raw_ostream &ROS, ModuleSlotTracker &MST,
             bool IsForDebug = false) const;
  void dump() const;

  // ---------------------------------------------------------------------------
  // Operand Iterator interface...
  //
  using op_iterator = op_iterator_impl<MDNode *>;

  op_iterator op_begin() { return op_iterator(this, 0); }
  op_iterator op_end() { return op_iterator(this, getNumOperands()); }

  using const_op_iterator = op_iterator_impl<const MDNode *>;

  const_op_iterator op_begin() const { return const_op_iterator(this, 0); }
  const_op_iterator op_end() const {
    return const_op_iterator(this, getNumOperands());
  }

  inline iterator_range<op_iterator> operands() {
    return make_range(op_begin(), op_end());
  }
  inline iterator_range<const_op_iterator> operands() const {
    return make_range(op_begin(), op_end());
  }
};

// Create wrappers for C Binding types (see CBindingWrapping.h).
DEFINE_ISA_CONVERSION_FUNCTIONS(NamedMDNode, LLVMNamedMDNodeRef)

} // end namespace llvm

#endif // LLVM_IR_METADATA_H