//===- MemorySSA.cpp - Memory SSA Builder ---------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the MemorySSA class.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/MemorySSA.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/Hashing.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/iterator.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CFGPrinter.h"
#include "llvm/Analysis/IteratedDominanceFrontier.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/AssemblyAnnotationWriter.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/Use.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/AtomicOrdering.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/FormattedStream.h"
#include "llvm/Support/GraphWriter.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <iterator>
#include <memory>
#include <utility>

using namespace llvm;
#define DEBUG_TYPE "memoryssa"

static cl::opt<std::string>
    DotCFGMSSA("dot-cfg-mssa",
               cl::value_desc("file name for generated dot file"),
               cl::desc("file name for generated dot file"), cl::init(""));

INITIALIZE_PASS_BEGIN(MemorySSAWrapperPass, "memoryssa", "Memory SSA", false,
                      true)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_END(MemorySSAWrapperPass, "memoryssa", "Memory SSA", false,
                    true)

static cl::opt<unsigned> MaxCheckLimit(
    "memssa-check-limit", cl::Hidden, cl::init(100),
    cl::desc("The maximum number of stores/phis MemorySSA "
             "will consider trying to walk past (default = 100)"));

// Always verify MemorySSA if expensive checking is enabled.
#ifdef EXPENSIVE_CHECKS
bool llvm::VerifyMemorySSA = true;
#else
bool llvm::VerifyMemorySSA = false;
#endif

static cl::opt<bool, true>
    VerifyMemorySSAX("verify-memoryssa", cl::location(VerifyMemorySSA),
                     cl::Hidden, cl::desc("Enable verification of MemorySSA."));

const static char LiveOnEntryStr[] = "liveOnEntry";

namespace {

/// An assembly annotator class to print Memory SSA information in
/// comments.
class MemorySSAAnnotatedWriter : public AssemblyAnnotationWriter {
  const MemorySSA *MSSA;

public:
  MemorySSAAnnotatedWriter(const MemorySSA *M) : MSSA(M) {}

  void emitBasicBlockStartAnnot(const BasicBlock *BB,
                                formatted_raw_ostream &OS) override {
    if (MemoryAccess *MA = MSSA->getMemoryAccess(BB))
      OS << "; " << *MA << "\n";
  }

  void emitInstructionAnnot(const Instruction *I,
                            formatted_raw_ostream &OS) override {
    if (MemoryAccess *MA = MSSA->getMemoryAccess(I))
      OS << "; " << *MA << "\n";
  }
};
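// For illustration only: with the annotator above, printed IR carries each
// block's and instruction's MemoryAccess as a comment. A hypothetical
// two-instruction block (names invented, not from any test) would print as:
//
//   ; 1 = MemoryDef(liveOnEntry)
//   store i32 0, ptr %p
//   ; MemoryUse(1)
//   %v = load i32, ptr %p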
/// An assembly annotator class to print Memory SSA information in
/// comments, along with the clobbering access the walker computes for
/// each access.
class MemorySSAWalkerAnnotatedWriter : public AssemblyAnnotationWriter {
  MemorySSA *MSSA;
  MemorySSAWalker *Walker;
  BatchAAResults BAA;

public:
  MemorySSAWalkerAnnotatedWriter(MemorySSA *M)
      : MSSA(M), Walker(M->getWalker()), BAA(M->getAA()) {}

  void emitBasicBlockStartAnnot(const BasicBlock *BB,
                                formatted_raw_ostream &OS) override {
    if (MemoryAccess *MA = MSSA->getMemoryAccess(BB))
      OS << "; " << *MA << "\n";
  }

  void emitInstructionAnnot(const Instruction *I,
                            formatted_raw_ostream &OS) override {
    if (MemoryAccess *MA = MSSA->getMemoryAccess(I)) {
      MemoryAccess *Clobber = Walker->getClobberingMemoryAccess(MA, BAA);
      OS << "; " << *MA;
      if (Clobber) {
        OS << " - clobbered by ";
        if (MSSA->isLiveOnEntryDef(Clobber))
          OS << LiveOnEntryStr;
        else
          OS << *Clobber;
      }
      OS << "\n";
    }
  }
};

} // namespace

namespace {

/// Our current alias analysis API differentiates heavily between calls and
/// non-calls, and functions called on one usually assert on the other.
/// This class encapsulates the distinction to simplify other code that wants
/// "Memory affecting instructions and related data" to use as a key.
/// For example, this class is used as a densemap key in the use optimizer.
class MemoryLocOrCall {
public:
  bool IsCall = false;

  MemoryLocOrCall(MemoryUseOrDef *MUD)
      : MemoryLocOrCall(MUD->getMemoryInst()) {}
  MemoryLocOrCall(const MemoryUseOrDef *MUD)
      : MemoryLocOrCall(MUD->getMemoryInst()) {}

  MemoryLocOrCall(Instruction *Inst) {
    if (auto *C = dyn_cast<CallBase>(Inst)) {
      IsCall = true;
      Call = C;
    } else {
      IsCall = false;
      // There is no such thing as a memorylocation for a fence inst, and it is
      // unique in that regard.
      if (!isa<FenceInst>(Inst))
        Loc = MemoryLocation::get(Inst);
    }
  }

  explicit MemoryLocOrCall(const MemoryLocation &Loc) : Loc(Loc) {}

  const CallBase *getCall() const {
    assert(IsCall);
    return Call;
  }

  MemoryLocation getLoc() const {
    assert(!IsCall);
    return Loc;
  }

  bool operator==(const MemoryLocOrCall &Other) const {
    if (IsCall != Other.IsCall)
      return false;

    if (!IsCall)
      return Loc == Other.Loc;

    if (Call->getCalledOperand() != Other.Call->getCalledOperand())
      return false;

    return Call->arg_size() == Other.Call->arg_size() &&
           std::equal(Call->arg_begin(), Call->arg_end(),
                      Other.Call->arg_begin());
  }

private:
  union {
    const CallBase *Call;
    MemoryLocation Loc;
  };
};

} // end anonymous namespace

namespace llvm {

template <> struct DenseMapInfo<MemoryLocOrCall> {
  static inline MemoryLocOrCall getEmptyKey() {
    return MemoryLocOrCall(DenseMapInfo<MemoryLocation>::getEmptyKey());
  }

  static inline MemoryLocOrCall getTombstoneKey() {
    return MemoryLocOrCall(DenseMapInfo<MemoryLocation>::getTombstoneKey());
  }

  static unsigned getHashValue(const MemoryLocOrCall &MLOC) {
    if (!MLOC.IsCall)
      return hash_combine(
          MLOC.IsCall,
          DenseMapInfo<MemoryLocation>::getHashValue(MLOC.getLoc()));

    hash_code hash =
        hash_combine(MLOC.IsCall, DenseMapInfo<const Value *>::getHashValue(
                                      MLOC.getCall()->getCalledOperand()));

    for (const Value *Arg : MLOC.getCall()->args())
      hash = hash_combine(hash, DenseMapInfo<const Value *>::getHashValue(Arg));
    return hash;
  }

  static bool isEqual(const MemoryLocOrCall &LHS, const MemoryLocOrCall &RHS) {
    return LHS == RHS;
  }
};

} // end namespace llvm
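// Usage sketch (hypothetical variable names, for exposition only): the
// DenseMapInfo specialization above is what lets the use optimizer key a
// cache on "what memory this instruction touches", along the lines of:
//
//   DenseMap<MemoryLocOrCall, MemoryAccess *> LastKnownClobber;
//   LastKnownClobber[MemoryLocOrCall(MU)] = SomeDef;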
/// This does one-way checks to see if Use could theoretically be hoisted above
/// MayClobber. This will not check the other way around.
///
/// This assumes that, for the purposes of MemorySSA, Use comes directly after
/// MayClobber, with no potentially clobbering operations in between them.
/// (Where potentially clobbering ops are memory barriers, aliased stores, etc.)
static bool areLoadsReorderable(const LoadInst *Use,
                                const LoadInst *MayClobber) {
  bool VolatileUse = Use->isVolatile();
  bool VolatileClobber = MayClobber->isVolatile();
  // Volatile operations may never be reordered with other volatile operations.
  if (VolatileUse && VolatileClobber)
    return false;
  // Otherwise, volatile doesn't matter here. From the language reference:
  // 'optimizers may change the order of volatile operations relative to
  // non-volatile operations.'

  // If a load is seq_cst, it cannot be moved above other loads. If its ordering
  // is weaker, it can be moved above other loads. We just need to be sure that
  // MayClobber isn't an acquire load, because loads can't be moved above
  // acquire loads.
  //
  // Note that this explicitly *does* allow the free reordering of monotonic (or
  // weaker) loads of the same address.
  bool SeqCstUse = Use->getOrdering() == AtomicOrdering::SequentiallyConsistent;
  bool MayClobberIsAcquire = isAtLeastOrStrongerThan(MayClobber->getOrdering(),
                                                     AtomicOrdering::Acquire);
  return !(SeqCstUse || MayClobberIsAcquire);
}

template <typename AliasAnalysisType>
static bool
instructionClobbersQuery(const MemoryDef *MD, const MemoryLocation &UseLoc,
                         const Instruction *UseInst, AliasAnalysisType &AA) {
  Instruction *DefInst = MD->getMemoryInst();
  assert(DefInst && "Defining instruction not actually an instruction");

  if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(DefInst)) {
    // These intrinsics will show up as affecting memory, but they are just
    // markers, mostly.
    //
    // FIXME: We probably don't actually want MemorySSA to model these at all
    // (including creating MemoryAccesses for them): we just end up inventing
    // clobbers where they don't really exist at all. Please see D43269 for
    // context.
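    // Illustrative IR (hypothetical, not from any test): a marker call such
    // as
    //
    //   call void @llvm.assume(i1 %cond)
    //
    // is modeled as touching memory, yet treating it as a clobber would only
    // invent dependencies, so the cases below report "no clobber".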
    switch (II->getIntrinsicID()) {
    case Intrinsic::allow_runtime_check:
    case Intrinsic::allow_ubsan_check:
    case Intrinsic::invariant_start:
    case Intrinsic::invariant_end:
    case Intrinsic::assume:
    case Intrinsic::experimental_noalias_scope_decl:
    case Intrinsic::pseudoprobe:
      return false;
    case Intrinsic::dbg_declare:
    case Intrinsic::dbg_label:
    case Intrinsic::dbg_value:
      llvm_unreachable("debuginfo shouldn't have associated defs!");
    default:
      break;
    }
  }

  if (auto *CB = dyn_cast_or_null<CallBase>(UseInst)) {
    ModRefInfo I = AA.getModRefInfo(DefInst, CB);
    return isModOrRefSet(I);
  }

  if (auto *DefLoad = dyn_cast<LoadInst>(DefInst))
    if (auto *UseLoad = dyn_cast_or_null<LoadInst>(UseInst))
      return !areLoadsReorderable(UseLoad, DefLoad);

  ModRefInfo I = AA.getModRefInfo(DefInst, UseLoc);
  return isModSet(I);
}

template <typename AliasAnalysisType>
static bool instructionClobbersQuery(MemoryDef *MD, const MemoryUseOrDef *MU,
                                     const MemoryLocOrCall &UseMLOC,
                                     AliasAnalysisType &AA) {
  // FIXME: This is a temporary hack to allow a single instructionClobbersQuery
  // to exist while MemoryLocOrCall is pushed through places.
  if (UseMLOC.IsCall)
    return instructionClobbersQuery(MD, MemoryLocation(), MU->getMemoryInst(),
                                    AA);
  return instructionClobbersQuery(MD, UseMLOC.getLoc(), MU->getMemoryInst(),
                                  AA);
}

// Return true when MD may alias MU, return false otherwise.
bool MemorySSAUtil::defClobbersUseOrDef(MemoryDef *MD, const MemoryUseOrDef *MU,
                                        AliasAnalysis &AA) {
  return instructionClobbersQuery(MD, MU, MemoryLocOrCall(MU), AA);
}

namespace {

struct UpwardsMemoryQuery {
  // True if our original query started off as a call
  bool IsCall = false;
  // The pointer location we started the query with. This will be empty if
  // IsCall is true.
  MemoryLocation StartingLoc;
  // This is the instruction we were querying about.
  const Instruction *Inst = nullptr;
  // The MemoryAccess we actually got called with, used to test local domination
  const MemoryAccess *OriginalAccess = nullptr;
  bool SkipSelfAccess = false;

  UpwardsMemoryQuery() = default;

  UpwardsMemoryQuery(const Instruction *Inst, const MemoryAccess *Access)
      : IsCall(isa<CallBase>(Inst)), Inst(Inst), OriginalAccess(Access) {
    if (!IsCall)
      StartingLoc = MemoryLocation::get(Inst);
  }
};

} // end anonymous namespace

template <typename AliasAnalysisType>
static bool isUseTriviallyOptimizableToLiveOnEntry(AliasAnalysisType &AA,
                                                   const Instruction *I) {
  // If the memory can't be changed, then loads of the memory can't be
  // clobbered.
  if (auto *LI = dyn_cast<LoadInst>(I)) {
    return I->hasMetadata(LLVMContext::MD_invariant_load) ||
           !isModSet(AA.getModRefInfoMask(MemoryLocation::get(LI)));
  }
  return false;
}
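// Illustrative IR (hypothetical): a load tagged with !invariant.load reads
// memory that is guaranteed never to change, so the check above lets its
// defining access be set straight to liveOnEntry:
//
//   %v = load i32, ptr %p, !invariant.load !0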
/// Verifies that `Start` is clobbered by `ClobberAt`, and that nothing
/// in between `Start` and `ClobberAt` clobbers `Start`.
///
/// This is meant to be as simple and self-contained as possible. Because it
/// uses no cache, etc., it can be relatively expensive.
///
/// \param Start The MemoryAccess that we want to walk from.
/// \param ClobberAt A clobber for Start.
/// \param StartLoc The MemoryLocation for Start.
/// \param MSSA The MemorySSA instance that Start and ClobberAt belong to.
/// \param Query The UpwardsMemoryQuery we used for our search.
/// \param AA The AliasAnalysis we used for our search.
/// \param AllowImpreciseClobber Always false, unless we do relaxed verify.

LLVM_ATTRIBUTE_UNUSED static void
checkClobberSanity(const MemoryAccess *Start, MemoryAccess *ClobberAt,
                   const MemoryLocation &StartLoc, const MemorySSA &MSSA,
                   const UpwardsMemoryQuery &Query, BatchAAResults &AA,
                   bool AllowImpreciseClobber = false) {
  assert(MSSA.dominates(ClobberAt, Start) && "Clobber doesn't dominate start?");

  if (MSSA.isLiveOnEntryDef(Start)) {
    assert(MSSA.isLiveOnEntryDef(ClobberAt) &&
           "liveOnEntry must clobber itself");
    return;
  }

  bool FoundClobber = false;
  DenseSet<ConstMemoryAccessPair> VisitedPhis;
  SmallVector<ConstMemoryAccessPair, 8> Worklist;
  Worklist.emplace_back(Start, StartLoc);
  // Walk all paths from Start to ClobberAt, while looking for clobbers. If one
  // is found, complain.
  while (!Worklist.empty()) {
    auto MAP = Worklist.pop_back_val();
    // All we care about is that nothing from Start to ClobberAt clobbers Start.
    // We learn nothing from revisiting nodes.
    if (!VisitedPhis.insert(MAP).second)
      continue;

    for (const auto *MA : def_chain(MAP.first)) {
      if (MA == ClobberAt) {
        if (const auto *MD = dyn_cast<MemoryDef>(MA)) {
          // instructionClobbersQuery isn't essentially free, so don't use `|=`,
          // since it won't let us short-circuit.
          //
          // Also, note that this can't be hoisted out of the `Worklist` loop,
          // since MD may only act as a clobber for 1 of N MemoryLocations.
          FoundClobber = FoundClobber || MSSA.isLiveOnEntryDef(MD);
          if (!FoundClobber) {
            if (instructionClobbersQuery(MD, MAP.second, Query.Inst, AA))
              FoundClobber = true;
          }
        }
        break;
      }

      // We should never hit liveOnEntry, unless it's the clobber.
      assert(!MSSA.isLiveOnEntryDef(MA) && "Hit liveOnEntry before clobber?");

      if (const auto *MD = dyn_cast<MemoryDef>(MA)) {
        // If Start is a Def, skip self.
        if (MD == Start)
          continue;

        assert(!instructionClobbersQuery(MD, MAP.second, Query.Inst, AA) &&
               "Found clobber before reaching ClobberAt!");
        continue;
      }

      if (const auto *MU = dyn_cast<MemoryUse>(MA)) {
        (void)MU;
        assert(MU == Start &&
               "Can only find use in def chain if Start is a use");
        continue;
      }

      assert(isa<MemoryPhi>(MA));

      // Add reachable phi predecessors
      for (auto ItB = upward_defs_begin(
                    {const_cast<MemoryAccess *>(MA), MAP.second},
                    MSSA.getDomTree()),
                ItE = upward_defs_end();
           ItB != ItE; ++ItB)
        if (MSSA.getDomTree().isReachableFromEntry(ItB.getPhiArgBlock()))
          Worklist.emplace_back(*ItB);
    }
  }

  // If the verify is done following an optimization, it's possible that
  // ClobberAt was a conservative clobbering, that we can now infer is not a
  // true clobbering access. Don't fail the verify if that's the case.
  // We do have accesses that claim they're optimized, but could be optimized
  // further. Updating all these can be expensive, so allow it for now (FIXME).
  if (AllowImpreciseClobber)
    return;

  // If ClobberAt is a MemoryPhi, we can assume something above it acted as a
  // clobber. Otherwise, `ClobberAt` should've acted as a clobber at some point.
  assert((isa<MemoryPhi>(ClobberAt) || FoundClobber) &&
         "ClobberAt never acted as a clobber");
}

namespace {

/// Our algorithm for walking (and trying to optimize) clobbers, all wrapped up
/// in one class.
class ClobberWalker {
  /// Save a few bytes by using unsigned instead of size_t.
  using ListIndex = unsigned;

  /// Represents a span of contiguous MemoryDefs, potentially ending in a
  /// MemoryPhi.
  struct DefPath {
    MemoryLocation Loc;
    // Note that, because we always walk in reverse, Last will always dominate
    // First. Also note that First and Last are inclusive.
    MemoryAccess *First;
    MemoryAccess *Last;
    std::optional<ListIndex> Previous;

    DefPath(const MemoryLocation &Loc, MemoryAccess *First, MemoryAccess *Last,
            std::optional<ListIndex> Previous)
        : Loc(Loc), First(First), Last(Last), Previous(Previous) {}

    DefPath(const MemoryLocation &Loc, MemoryAccess *Init,
            std::optional<ListIndex> Previous)
        : DefPath(Loc, Init, Init, Previous) {}
  };

  const MemorySSA &MSSA;
  DominatorTree &DT;
  BatchAAResults *AA;
  UpwardsMemoryQuery *Query;
  unsigned *UpwardWalkLimit;

  // Phi optimization bookkeeping:
  // List of DefPath to process during the current phi optimization walk.
  SmallVector<DefPath, 32> Paths;
  // List of visited <Access, Location> pairs; we can skip paths already
  // visited with the same memory location.
  DenseSet<ConstMemoryAccessPair> VisitedPhis;

  /// Find the nearest def or phi that `From` can legally be optimized to.
  const MemoryAccess *getWalkTarget(const MemoryPhi *From) const {
    assert(From->getNumOperands() && "Phi with no operands?");

    BasicBlock *BB = From->getBlock();
    MemoryAccess *Result = MSSA.getLiveOnEntryDef();
    DomTreeNode *Node = DT.getNode(BB);
    while ((Node = Node->getIDom())) {
      auto *Defs = MSSA.getBlockDefs(Node->getBlock());
      if (Defs)
        return &*Defs->rbegin();
    }
    return Result;
  }

  /// Result of calling walkToPhiOrClobber.
  struct UpwardsWalkResult {
    /// The "Result" of the walk. Either a clobber, the last thing we walked, or
    /// both. Include alias info when clobber found.
    MemoryAccess *Result;
    bool IsKnownClobber;
  };
  /// Walk to the next Phi or Clobber in the def chain starting at Desc.Last.
  /// This will update Desc.Last as it walks. It will (optionally) also stop at
  /// StopAt.
  ///
  /// This does not test for whether StopAt is a clobber.
  UpwardsWalkResult
  walkToPhiOrClobber(DefPath &Desc, const MemoryAccess *StopAt = nullptr,
                     const MemoryAccess *SkipStopAt = nullptr) const {
    assert(!isa<MemoryUse>(Desc.Last) && "Uses don't exist in my world");
    assert(UpwardWalkLimit && "Need a valid walk limit");
    bool LimitAlreadyReached = false;
    // (*UpwardWalkLimit) may be 0 here, due to the loop in tryOptimizePhi. Set
    // it to 1. This will not do any alias() calls. It either returns in the
    // first iteration in the loop below, or is set back to 0 if all def chains
    // are free of MemoryDefs.
    if (!*UpwardWalkLimit) {
      *UpwardWalkLimit = 1;
      LimitAlreadyReached = true;
    }

    for (MemoryAccess *Current : def_chain(Desc.Last)) {
      Desc.Last = Current;
      if (Current == StopAt || Current == SkipStopAt)
        return {Current, false};

      if (auto *MD = dyn_cast<MemoryDef>(Current)) {
        if (MSSA.isLiveOnEntryDef(MD))
          return {MD, true};

        if (!--*UpwardWalkLimit)
          return {Current, true};

        if (instructionClobbersQuery(MD, Desc.Loc, Query->Inst, *AA))
          return {MD, true};
      }
    }

    if (LimitAlreadyReached)
      *UpwardWalkLimit = 0;

    assert(isa<MemoryPhi>(Desc.Last) &&
           "Ended at a non-clobber that's not a phi?");
    return {Desc.Last, false};
  }
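  // Walk sketch (illustrative numbering, not tied to any test): starting at
  // Desc.Last and following defining accesses upward,
  //
  //   3 = MemoryDef(2) --> 2 = MemoryDef(1) --> 1 = MemoryDef(liveOnEntry)
  //
  // the walk above ends at the first def that clobbers Desc.Loc, at
  // StopAt/SkipStopAt, at liveOnEntry, or at a MemoryPhi (in which case
  // IsKnownClobber is false and phi optimization takes over).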
  void addSearches(MemoryPhi *Phi, SmallVectorImpl<ListIndex> &PausedSearches,
                   ListIndex PriorNode) {
    auto UpwardDefsBegin = upward_defs_begin({Phi, Paths[PriorNode].Loc}, DT);
    auto UpwardDefs = make_range(UpwardDefsBegin, upward_defs_end());
    for (const MemoryAccessPair &P : UpwardDefs) {
      PausedSearches.push_back(Paths.size());
      Paths.emplace_back(P.second, P.first, PriorNode);
    }
  }

  /// Represents a search that terminated after finding a clobber. This clobber
  /// may or may not be present in the path of defs from LastNode..SearchStart,
  /// since it may have been retrieved from cache.
  struct TerminatedPath {
    MemoryAccess *Clobber;
    ListIndex LastNode;
  };

  /// Get an access that keeps us from optimizing to the given phi.
  ///
  /// PausedSearches is an array of indices into the Paths array. Its incoming
  /// value is the indices of searches that stopped at the last phi optimization
  /// target. It's left in an unspecified state.
  ///
  /// If this returns std::nullopt, NewPaused is a vector of searches that
  /// terminated at StopWhere. Otherwise, NewPaused is left in an unspecified
  /// state.
  std::optional<TerminatedPath>
  getBlockingAccess(const MemoryAccess *StopWhere,
                    SmallVectorImpl<ListIndex> &PausedSearches,
                    SmallVectorImpl<ListIndex> &NewPaused,
                    SmallVectorImpl<TerminatedPath> &Terminated) {
    assert(!PausedSearches.empty() && "No searches to continue?");

    // BFS vs DFS really doesn't make a difference here, so just do a DFS with
    // PausedSearches as our stack.
    while (!PausedSearches.empty()) {
      ListIndex PathIndex = PausedSearches.pop_back_val();
      DefPath &Node = Paths[PathIndex];

      // If we've already visited this path with this MemoryLocation, we don't
      // need to do so again.
      //
      // NOTE: That we just drop these paths on the ground makes caching
      // behavior sporadic. e.g. given a diamond:
      //  A
      // B C
      //  D
      //
      // ...If we walk D, B, A, C, we'll only cache the result of phi
      // optimization for A, B, and D; C will be skipped because it dies here.
      // This arguably isn't the worst thing ever, since:
      //   - We generally query things in a top-down order, so if we got below D
      //     without needing cache entries for {C, MemLoc}, then chances are
      //     that those cache entries would end up ultimately unused.
      //   - We still cache things for A, so C only needs to walk up a bit.
      // If this behavior becomes problematic, we can fix without a ton of extra
      // work.
      if (!VisitedPhis.insert({Node.Last, Node.Loc}).second)
        continue;

      const MemoryAccess *SkipStopWhere = nullptr;
      if (Query->SkipSelfAccess && Node.Loc == Query->StartingLoc) {
        assert(isa<MemoryDef>(Query->OriginalAccess));
        SkipStopWhere = Query->OriginalAccess;
      }

      UpwardsWalkResult Res = walkToPhiOrClobber(Node,
                                                 /*StopAt=*/StopWhere,
                                                 /*SkipStopAt=*/SkipStopWhere);
      if (Res.IsKnownClobber) {
        assert(Res.Result != StopWhere && Res.Result != SkipStopWhere);

        // If this wasn't a cache hit, we hit a clobber when walking. That's a
        // failure.
        TerminatedPath Term{Res.Result, PathIndex};
        if (!MSSA.dominates(Res.Result, StopWhere))
          return Term;

        // Otherwise, it's a valid thing to potentially optimize to.
        Terminated.push_back(Term);
        continue;
      }

      if (Res.Result == StopWhere || Res.Result == SkipStopWhere) {
        // We've hit our target. Save this path off for if we want to continue
        // walking. If we are in the mode of skipping the OriginalAccess, and
        // we've reached back to the OriginalAccess, do not save path, we've
        // just looped back to self.
        if (Res.Result != SkipStopWhere)
          NewPaused.push_back(PathIndex);
        continue;
      }

      assert(!MSSA.isLiveOnEntryDef(Res.Result) && "liveOnEntry is a clobber");
      addSearches(cast<MemoryPhi>(Res.Result), PausedSearches, PathIndex);
    }

    return std::nullopt;
  }

  template <typename T, typename Walker>
  struct generic_def_path_iterator
      : public iterator_facade_base<generic_def_path_iterator<T, Walker>,
                                    std::forward_iterator_tag, T *> {
    generic_def_path_iterator() = default;
    generic_def_path_iterator(Walker *W, ListIndex N) : W(W), N(N) {}

    T &operator*() const { return curNode(); }

    generic_def_path_iterator &operator++() {
      N = curNode().Previous;
      return *this;
    }

    bool operator==(const generic_def_path_iterator &O) const {
      if (N.has_value() != O.N.has_value())
        return false;
      return !N || *N == *O.N;
    }

  private:
    T &curNode() const { return W->Paths[*N]; }

    Walker *W = nullptr;
    std::optional<ListIndex> N;
  };

  using def_path_iterator = generic_def_path_iterator<DefPath, ClobberWalker>;
  using const_def_path_iterator =
      generic_def_path_iterator<const DefPath, const ClobberWalker>;

  iterator_range<def_path_iterator> def_path(ListIndex From) {
    return make_range(def_path_iterator(this, From), def_path_iterator());
  }

  iterator_range<const_def_path_iterator> const_def_path(ListIndex From) const {
    return make_range(const_def_path_iterator(this, From),
                      const_def_path_iterator());
  }
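  // Usage sketch (Idx is a hypothetical index into Paths): these ranges walk
  // a path backwards through its Previous links, e.g.
  //
  //   auto It = find_if(def_path(Idx), [&](const DefPath &N) { ... });
  //
  // which is how tryOptimizePhi below recovers the node a blocked search
  // started from.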
  struct OptznResult {
    /// The path that contains our result.
    TerminatedPath PrimaryClobber;
    /// The paths that we can legally cache back from, but that aren't
    /// necessarily the result of the Phi optimization.
    SmallVector<TerminatedPath, 4> OtherClobbers;
  };

  ListIndex defPathIndex(const DefPath &N) const {
    // The assert looks nicer if we don't need to do &N
    const DefPath *NP = &N;
    assert(!Paths.empty() && NP >= &Paths.front() && NP <= &Paths.back() &&
           "Out of bounds DefPath!");
    return NP - &Paths.front();
  }

  /// Try to optimize a phi as best as we can. Returns a SmallVector of Paths
  /// that act as legal clobbers. Note that this won't return *all* clobbers.
  ///
  /// Phi optimization algorithm tl;dr:
  ///   - Find the earliest def/phi, A, we can optimize to
  ///   - Find if all paths from the starting memory access ultimately reach A
  ///     - If not, optimization isn't possible.
  ///     - Otherwise, walk from A to another clobber or phi, A'.
  ///       - If A' is a def, we're done.
  ///       - If A' is a phi, try to optimize it.
  ///
  /// A path is a series of {MemoryAccess, MemoryLocation} pairs. A path
  /// terminates when a MemoryAccess that clobbers said MemoryLocation is found.
  OptznResult tryOptimizePhi(MemoryPhi *Phi, MemoryAccess *Start,
                             const MemoryLocation &Loc) {
    assert(Paths.empty() && VisitedPhis.empty() &&
           "Reset the optimization state.");

    Paths.emplace_back(Loc, Start, Phi, std::nullopt);
    // Stores how many "valid" optimization nodes we had prior to calling
    // addSearches/getBlockingAccess. Necessary for caching if we had a blocker.
    auto PriorPathsSize = Paths.size();

    SmallVector<ListIndex, 16> PausedSearches;
    SmallVector<ListIndex, 8> NewPaused;
    SmallVector<TerminatedPath, 4> TerminatedPaths;

    addSearches(Phi, PausedSearches, 0);
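    // Rough picture of what follows (hypothetical CFG, for intuition only):
    // given a diamond
    //
    //        A
    //       / \
    //      B   C
    //       \ /
    //        D
    //
    // with the phi at D, the searches just seeded walk the B and C arms. If
    // every arm reaches the walk target above A without hitting a clobber,
    // the phi can be optimized past; any blocking access ends the attempt.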
    // Moves the TerminatedPath with the "most dominated" Clobber to the end of
    // Paths.
    auto MoveDominatedPathToEnd = [&](SmallVectorImpl<TerminatedPath> &Paths) {
      assert(!Paths.empty() && "Need a path to move");
      auto Dom = Paths.begin();
      for (auto I = std::next(Dom), E = Paths.end(); I != E; ++I)
        if (!MSSA.dominates(I->Clobber, Dom->Clobber))
          Dom = I;
      auto Last = Paths.end() - 1;
      if (Last != Dom)
        std::iter_swap(Last, Dom);
    };

    MemoryPhi *Current = Phi;
    while (true) {
      assert(!MSSA.isLiveOnEntryDef(Current) &&
             "liveOnEntry wasn't treated as a clobber?");

      const auto *Target = getWalkTarget(Current);
      // If a TerminatedPath doesn't dominate Target, then it wasn't a legal
      // optimization for the prior phi.
      assert(all_of(TerminatedPaths, [&](const TerminatedPath &P) {
        return MSSA.dominates(P.Clobber, Target);
      }));

      // FIXME: This is broken, because the Blocker may be reported to be
      // liveOnEntry, and we'll happily wait for that to disappear (read: never)
      // For the moment, this is fine, since we do nothing with blocker info.
      if (std::optional<TerminatedPath> Blocker = getBlockingAccess(
              Target, PausedSearches, NewPaused, TerminatedPaths)) {

        // Find the node we started at. We can't search based on N->Last, since
        // we may have gone around a loop with a different MemoryLocation.
        auto Iter = find_if(def_path(Blocker->LastNode), [&](const DefPath &N) {
          return defPathIndex(N) < PriorPathsSize;
        });
        assert(Iter != def_path_iterator());

        DefPath &CurNode = *Iter;
        assert(CurNode.Last == Current);

        // Two things:
        // A. We can't reliably cache all of NewPaused back. Consider a case
        //    where we have two paths in NewPaused; one of which can't optimize
        //    above this phi, whereas the other can. If we cache the second path
        //    back, we'll end up with suboptimal cache entries. We can handle
        //    cases like this a bit better when we either try to find all
        //    clobbers that block phi optimization, or when our cache starts
        //    supporting unfinished searches.
        // B. We can't reliably cache TerminatedPaths back here without doing
        //    extra checks; consider a case like:
        //       T
        //      / \
        //     D   C
        //      \ /
        //       S
        //    Where T is our target, C is a node with a clobber on it, D is a
        //    diamond (with a clobber *only* on the left or right node, N), and
        //    S is our start. Say we walk to D, through the node opposite N
        //    (read: ignoring the clobber), and see a cache entry in the top
        //    node of D. That cache entry gets put into TerminatedPaths. We then
        //    walk up to C (N is later in our worklist), find the clobber, and
        //    quit. If we append TerminatedPaths to OtherClobbers, we'll cache
        //    the bottom part of D to the cached clobber, ignoring the clobber
        //    in N. Again, this problem goes away if we start tracking all
        //    blockers for a given phi optimization.
        TerminatedPath Result{CurNode.Last, defPathIndex(CurNode)};
        return {Result, {}};
      }

      // If there's nothing left to search, then all paths led to valid clobbers
      // that we got from our cache; pick the nearest to the start, and allow
      // the rest to be cached back.
      if (NewPaused.empty()) {
        MoveDominatedPathToEnd(TerminatedPaths);
        TerminatedPath Result = TerminatedPaths.pop_back_val();
        return {Result, std::move(TerminatedPaths)};
      }

      MemoryAccess *DefChainEnd = nullptr;
      SmallVector<TerminatedPath, 4> Clobbers;
      for (ListIndex Paused : NewPaused) {
        UpwardsWalkResult WR = walkToPhiOrClobber(Paths[Paused]);
        if (WR.IsKnownClobber)
          Clobbers.push_back({WR.Result, Paused});
        else
          // Micro-opt: If we hit the end of the chain, save it.
          DefChainEnd = WR.Result;
      }

      if (!TerminatedPaths.empty()) {
        // If we couldn't find the dominating phi/liveOnEntry in the above loop,
        // do it now.
        if (!DefChainEnd)
          for (auto *MA : def_chain(const_cast<MemoryAccess *>(Target)))
            DefChainEnd = MA;
        assert(DefChainEnd && "Failed to find dominating phi/liveOnEntry");

        // If any of the terminated paths don't dominate the phi we'll try to
        // optimize, we need to figure out what they are and quit.
        const BasicBlock *ChainBB = DefChainEnd->getBlock();
        for (const TerminatedPath &TP : TerminatedPaths) {
          // Because we know that DefChainEnd is as "high" as we can go, we
          // don't need local dominance checks; BB dominance is sufficient.
          if (DT.dominates(ChainBB, TP.Clobber->getBlock()))
            Clobbers.push_back(TP);
        }
      }

      // If we have clobbers in the def chain, find the one closest to Current
      // and quit.
      if (!Clobbers.empty()) {
        MoveDominatedPathToEnd(Clobbers);
        TerminatedPath Result = Clobbers.pop_back_val();
        return {Result, std::move(Clobbers)};
      }

      assert(all_of(NewPaused,
                    [&](ListIndex I) { return Paths[I].Last == DefChainEnd; }));

      // Because liveOnEntry is a clobber, this must be a phi.
      auto *DefChainPhi = cast<MemoryPhi>(DefChainEnd);

      PriorPathsSize = Paths.size();
      PausedSearches.clear();
      for (ListIndex I : NewPaused)
        addSearches(DefChainPhi, PausedSearches, I);
      NewPaused.clear();

      Current = DefChainPhi;
    }
  }

  void verifyOptResult(const OptznResult &R) const {
    assert(all_of(R.OtherClobbers, [&](const TerminatedPath &P) {
      return MSSA.dominates(P.Clobber, R.PrimaryClobber.Clobber);
    }));
  }

  void resetPhiOptznState() {
    Paths.clear();
    VisitedPhis.clear();
  }

public:
  ClobberWalker(const MemorySSA &MSSA, DominatorTree &DT)
      : MSSA(MSSA), DT(DT) {}
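  // Caller's-eye sketch (hypothetical values): the walkers defined further
  // below drive this entry point roughly as
  //
  //   UpwardsMemoryQuery Q(I, StartAccess);
  //   unsigned Limit = MaxCheckLimit;
  //   MemoryAccess *Clobber = findClobber(BAA, StartAccess, Q, Limit);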
  /// Finds the nearest clobber for the given query, optimizing phis if
  /// possible.
  MemoryAccess *findClobber(BatchAAResults &BAA, MemoryAccess *Start,
                            UpwardsMemoryQuery &Q, unsigned &UpWalkLimit) {
    AA = &BAA;
    Query = &Q;
    UpwardWalkLimit = &UpWalkLimit;
    // Starting limit must be > 0.
    if (!UpWalkLimit)
      UpWalkLimit++;

    MemoryAccess *Current = Start;
    // This walker pretends uses don't exist. If we're handed one, silently grab
    // its def. (This has the nice side-effect of ensuring we never cache uses)
    if (auto *MU = dyn_cast<MemoryUse>(Start))
      Current = MU->getDefiningAccess();

    DefPath FirstDesc(Q.StartingLoc, Current, Current, std::nullopt);
    // Fast path for the overly-common case (no crazy phi optimization
    // necessary)
    UpwardsWalkResult WalkResult = walkToPhiOrClobber(FirstDesc);
    MemoryAccess *Result;
    if (WalkResult.IsKnownClobber) {
      Result = WalkResult.Result;
    } else {
      OptznResult OptRes = tryOptimizePhi(cast<MemoryPhi>(FirstDesc.Last),
                                          Current, Q.StartingLoc);
      verifyOptResult(OptRes);
      resetPhiOptznState();
      Result = OptRes.PrimaryClobber.Clobber;
    }

#ifdef EXPENSIVE_CHECKS
    if (!Q.SkipSelfAccess && *UpwardWalkLimit > 0)
      checkClobberSanity(Current, Result, Q.StartingLoc, MSSA, Q, BAA);
#endif
    return Result;
  }
};

struct RenamePassData {
  DomTreeNode *DTN;
  DomTreeNode::const_iterator ChildIt;
  MemoryAccess *IncomingVal;

  RenamePassData(DomTreeNode *D, DomTreeNode::const_iterator It,
                 MemoryAccess *M)
      : DTN(D), ChildIt(It), IncomingVal(M) {}

  void swap(RenamePassData &RHS) {
    std::swap(DTN, RHS.DTN);
    std::swap(ChildIt, RHS.ChildIt);
    std::swap(IncomingVal, RHS.IncomingVal);
  }
};

} // end anonymous namespace

namespace llvm {

class MemorySSA::ClobberWalkerBase {
  ClobberWalker Walker;
  MemorySSA *MSSA;

public:
  ClobberWalkerBase(MemorySSA *M, DominatorTree *D) : Walker(*M, *D), MSSA(M) {}

  MemoryAccess *getClobberingMemoryAccessBase(MemoryAccess *,
                                              const MemoryLocation &,
                                              BatchAAResults &, unsigned &);
  // Third argument (bool), defines whether the clobber search should skip the
  // original queried access. If true, there will be a follow-up query searching
  // for a clobber access past "self". Note that the Optimized access is not
  // updated if a new clobber is found by this SkipSelf search. If this
  // additional query becomes heavily used we may decide to cache the result.
10030b57cec5SDimitry Andric // Walker instantiations will decide how to set the SkipSelf bool. 1004bdd1243dSDimitry Andric MemoryAccess *getClobberingMemoryAccessBase(MemoryAccess *, BatchAAResults &, 1005bdd1243dSDimitry Andric unsigned &, bool, 1006349cc55cSDimitry Andric bool UseInvariantGroup = true); 10070b57cec5SDimitry Andric }; 10080b57cec5SDimitry Andric 10090b57cec5SDimitry Andric /// A MemorySSAWalker that does AA walks to disambiguate accesses. It no 10100b57cec5SDimitry Andric /// longer does caching on its own, but the name has been retained for the 10110b57cec5SDimitry Andric /// moment. 10120b57cec5SDimitry Andric class MemorySSA::CachingWalker final : public MemorySSAWalker { 1013bdd1243dSDimitry Andric ClobberWalkerBase *Walker; 10140b57cec5SDimitry Andric 10150b57cec5SDimitry Andric public: 1016bdd1243dSDimitry Andric CachingWalker(MemorySSA *M, ClobberWalkerBase *W) 10170b57cec5SDimitry Andric : MemorySSAWalker(M), Walker(W) {} 10180b57cec5SDimitry Andric ~CachingWalker() override = default; 10190b57cec5SDimitry Andric 10200b57cec5SDimitry Andric using MemorySSAWalker::getClobberingMemoryAccess; 10210b57cec5SDimitry Andric 1022bdd1243dSDimitry Andric MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA, BatchAAResults &BAA, 1023bdd1243dSDimitry Andric unsigned &UWL) { 1024bdd1243dSDimitry Andric return Walker->getClobberingMemoryAccessBase(MA, BAA, UWL, false); 10250b57cec5SDimitry Andric } 10260b57cec5SDimitry Andric MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA, 10270b57cec5SDimitry Andric const MemoryLocation &Loc, 1028bdd1243dSDimitry Andric BatchAAResults &BAA, unsigned &UWL) { 1029bdd1243dSDimitry Andric return Walker->getClobberingMemoryAccessBase(MA, Loc, BAA, UWL); 10300b57cec5SDimitry Andric } 1031349cc55cSDimitry Andric // This method is not accessible outside of this file. 
1032bdd1243dSDimitry Andric MemoryAccess *getClobberingMemoryAccessWithoutInvariantGroup( 1033bdd1243dSDimitry Andric MemoryAccess *MA, BatchAAResults &BAA, unsigned &UWL) { 1034bdd1243dSDimitry Andric return Walker->getClobberingMemoryAccessBase(MA, BAA, UWL, false, false); 1035349cc55cSDimitry Andric } 10360b57cec5SDimitry Andric 1037bdd1243dSDimitry Andric MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA, 1038bdd1243dSDimitry Andric BatchAAResults &BAA) override { 10390b57cec5SDimitry Andric unsigned UpwardWalkLimit = MaxCheckLimit; 1040bdd1243dSDimitry Andric return getClobberingMemoryAccess(MA, BAA, UpwardWalkLimit); 10410b57cec5SDimitry Andric } 10420b57cec5SDimitry Andric MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA, 1043bdd1243dSDimitry Andric const MemoryLocation &Loc, 1044bdd1243dSDimitry Andric BatchAAResults &BAA) override { 10450b57cec5SDimitry Andric unsigned UpwardWalkLimit = MaxCheckLimit; 1046bdd1243dSDimitry Andric return getClobberingMemoryAccess(MA, Loc, BAA, UpwardWalkLimit); 10470b57cec5SDimitry Andric } 10480b57cec5SDimitry Andric 10490b57cec5SDimitry Andric void invalidateInfo(MemoryAccess *MA) override { 10500b57cec5SDimitry Andric if (auto *MUD = dyn_cast<MemoryUseOrDef>(MA)) 10510b57cec5SDimitry Andric MUD->resetOptimized(); 10520b57cec5SDimitry Andric } 10530b57cec5SDimitry Andric }; 10540b57cec5SDimitry Andric 10550b57cec5SDimitry Andric class MemorySSA::SkipSelfWalker final : public MemorySSAWalker { 1056bdd1243dSDimitry Andric ClobberWalkerBase *Walker; 10570b57cec5SDimitry Andric 10580b57cec5SDimitry Andric public: 1059bdd1243dSDimitry Andric SkipSelfWalker(MemorySSA *M, ClobberWalkerBase *W) 10600b57cec5SDimitry Andric : MemorySSAWalker(M), Walker(W) {} 10610b57cec5SDimitry Andric ~SkipSelfWalker() override = default; 10620b57cec5SDimitry Andric 10630b57cec5SDimitry Andric using MemorySSAWalker::getClobberingMemoryAccess; 10640b57cec5SDimitry Andric 1065bdd1243dSDimitry Andric MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA, BatchAAResults &BAA, 1066bdd1243dSDimitry Andric unsigned &UWL) { 1067bdd1243dSDimitry Andric return Walker->getClobberingMemoryAccessBase(MA, BAA, UWL, true); 10680b57cec5SDimitry Andric } 10690b57cec5SDimitry Andric MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA, 10700b57cec5SDimitry Andric const MemoryLocation &Loc, 1071bdd1243dSDimitry Andric BatchAAResults &BAA, unsigned &UWL) { 1072bdd1243dSDimitry Andric return Walker->getClobberingMemoryAccessBase(MA, Loc, BAA, UWL); 10730b57cec5SDimitry Andric } 10740b57cec5SDimitry Andric 1075bdd1243dSDimitry Andric MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA, 1076bdd1243dSDimitry Andric BatchAAResults &BAA) override { 10770b57cec5SDimitry Andric unsigned UpwardWalkLimit = MaxCheckLimit; 1078bdd1243dSDimitry Andric return getClobberingMemoryAccess(MA, BAA, UpwardWalkLimit); 10790b57cec5SDimitry Andric } 10800b57cec5SDimitry Andric MemoryAccess *getClobberingMemoryAccess(MemoryAccess *MA, 1081bdd1243dSDimitry Andric const MemoryLocation &Loc, 1082bdd1243dSDimitry Andric BatchAAResults &BAA) override { 10830b57cec5SDimitry Andric unsigned UpwardWalkLimit = MaxCheckLimit; 1084bdd1243dSDimitry Andric return getClobberingMemoryAccess(MA, Loc, BAA, UpwardWalkLimit); 10850b57cec5SDimitry Andric } 10860b57cec5SDimitry Andric 10870b57cec5SDimitry Andric void invalidateInfo(MemoryAccess *MA) override { 10880b57cec5SDimitry Andric if (auto *MUD = dyn_cast<MemoryUseOrDef>(MA)) 10890b57cec5SDimitry Andric MUD->resetOptimized(); 10900b57cec5SDimitry Andric 
} 10910b57cec5SDimitry Andric }; 10920b57cec5SDimitry Andric 10930b57cec5SDimitry Andric } // end namespace llvm 10940b57cec5SDimitry Andric 10950b57cec5SDimitry Andric void MemorySSA::renameSuccessorPhis(BasicBlock *BB, MemoryAccess *IncomingVal, 10960b57cec5SDimitry Andric bool RenameAllUses) { 10970b57cec5SDimitry Andric // Pass through values to our successors 10980b57cec5SDimitry Andric for (const BasicBlock *S : successors(BB)) { 10990b57cec5SDimitry Andric auto It = PerBlockAccesses.find(S); 11000b57cec5SDimitry Andric // Rename the phi nodes in our successor block 11010b57cec5SDimitry Andric if (It == PerBlockAccesses.end() || !isa<MemoryPhi>(It->second->front())) 11020b57cec5SDimitry Andric continue; 11030b57cec5SDimitry Andric AccessList *Accesses = It->second.get(); 11040b57cec5SDimitry Andric auto *Phi = cast<MemoryPhi>(&Accesses->front()); 11050b57cec5SDimitry Andric if (RenameAllUses) { 11068bcb0991SDimitry Andric bool ReplacementDone = false; 11078bcb0991SDimitry Andric for (unsigned I = 0, E = Phi->getNumIncomingValues(); I != E; ++I) 11088bcb0991SDimitry Andric if (Phi->getIncomingBlock(I) == BB) { 11098bcb0991SDimitry Andric Phi->setIncomingValue(I, IncomingVal); 11108bcb0991SDimitry Andric ReplacementDone = true; 11118bcb0991SDimitry Andric } 11128bcb0991SDimitry Andric (void) ReplacementDone; 11138bcb0991SDimitry Andric assert(ReplacementDone && "Incomplete phi during partial rename"); 11140b57cec5SDimitry Andric } else 11150b57cec5SDimitry Andric Phi->addIncoming(IncomingVal, BB); 11160b57cec5SDimitry Andric } 11170b57cec5SDimitry Andric } 11180b57cec5SDimitry Andric 11190b57cec5SDimitry Andric /// Rename a single basic block into MemorySSA form. 11200b57cec5SDimitry Andric /// Uses the standard SSA renaming algorithm. 11210b57cec5SDimitry Andric /// \returns The new incoming value. 11220b57cec5SDimitry Andric MemoryAccess *MemorySSA::renameBlock(BasicBlock *BB, MemoryAccess *IncomingVal, 11230b57cec5SDimitry Andric bool RenameAllUses) { 11240b57cec5SDimitry Andric auto It = PerBlockAccesses.find(BB); 11250b57cec5SDimitry Andric // Skip most processing if the list is empty. 11260b57cec5SDimitry Andric if (It != PerBlockAccesses.end()) { 11270b57cec5SDimitry Andric AccessList *Accesses = It->second.get(); 11280b57cec5SDimitry Andric for (MemoryAccess &L : *Accesses) { 11290b57cec5SDimitry Andric if (MemoryUseOrDef *MUD = dyn_cast<MemoryUseOrDef>(&L)) { 11300b57cec5SDimitry Andric if (MUD->getDefiningAccess() == nullptr || RenameAllUses) 11310b57cec5SDimitry Andric MUD->setDefiningAccess(IncomingVal); 11320b57cec5SDimitry Andric if (isa<MemoryDef>(&L)) 11330b57cec5SDimitry Andric IncomingVal = &L; 11340b57cec5SDimitry Andric } else { 11350b57cec5SDimitry Andric IncomingVal = &L; 11360b57cec5SDimitry Andric } 11370b57cec5SDimitry Andric } 11380b57cec5SDimitry Andric } 11390b57cec5SDimitry Andric return IncomingVal; 11400b57cec5SDimitry Andric } 11410b57cec5SDimitry Andric 11420b57cec5SDimitry Andric /// This is the standard SSA renaming algorithm. 11430b57cec5SDimitry Andric /// 11440b57cec5SDimitry Andric /// We walk the dominator tree in preorder, renaming accesses, and then filling 11450b57cec5SDimitry Andric /// in phi nodes in our successors. 
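///
/// As an illustrative sketch (hypothetical IR, not from this file): for a
/// diamond CFG A -> {B, C} -> D where A and B each contain a store, renaming
/// yields roughly
///   A: 1 = MemoryDef(liveOnEntry)   ; store in A
///   B: 2 = MemoryDef(1)             ; store in B
///   C: no accesses, so the incoming value remains 1
///   D: 3 = MemoryPhi({B, 2}, {C, 1})
/// The access numbering here is illustrative; actual IDs depend on creation
/// order.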
11460b57cec5SDimitry Andric void MemorySSA::renamePass(DomTreeNode *Root, MemoryAccess *IncomingVal,
11470b57cec5SDimitry Andric SmallPtrSetImpl<BasicBlock *> &Visited,
11480b57cec5SDimitry Andric bool SkipVisited, bool RenameAllUses) {
11490b57cec5SDimitry Andric assert(Root && "Trying to rename accesses in an unreachable block");
11500b57cec5SDimitry Andric
11510b57cec5SDimitry Andric SmallVector<RenamePassData, 32> WorkStack;
11520b57cec5SDimitry Andric // Skip everything if we already renamed this block and we are skipping.
11530b57cec5SDimitry Andric // Note: You can't sink this into the if, because we need it to occur
11540b57cec5SDimitry Andric // regardless of whether we skip blocks or not.
11550b57cec5SDimitry Andric bool AlreadyVisited = !Visited.insert(Root->getBlock()).second;
11560b57cec5SDimitry Andric if (SkipVisited && AlreadyVisited)
11570b57cec5SDimitry Andric return;
11580b57cec5SDimitry Andric
11590b57cec5SDimitry Andric IncomingVal = renameBlock(Root->getBlock(), IncomingVal, RenameAllUses);
11600b57cec5SDimitry Andric renameSuccessorPhis(Root->getBlock(), IncomingVal, RenameAllUses);
11610b57cec5SDimitry Andric WorkStack.push_back({Root, Root->begin(), IncomingVal});
11620b57cec5SDimitry Andric
11630b57cec5SDimitry Andric while (!WorkStack.empty()) {
11640b57cec5SDimitry Andric DomTreeNode *Node = WorkStack.back().DTN;
11650b57cec5SDimitry Andric DomTreeNode::const_iterator ChildIt = WorkStack.back().ChildIt;
11660b57cec5SDimitry Andric IncomingVal = WorkStack.back().IncomingVal;
11670b57cec5SDimitry Andric
11680b57cec5SDimitry Andric if (ChildIt == Node->end()) {
11690b57cec5SDimitry Andric WorkStack.pop_back();
11700b57cec5SDimitry Andric } else {
11710b57cec5SDimitry Andric DomTreeNode *Child = *ChildIt;
11720b57cec5SDimitry Andric ++WorkStack.back().ChildIt;
11730b57cec5SDimitry Andric BasicBlock *BB = Child->getBlock();
11740b57cec5SDimitry Andric // Note: You can't sink this into the if, because we need it to occur
11750b57cec5SDimitry Andric // regardless of whether we skip blocks or not.
11760b57cec5SDimitry Andric AlreadyVisited = !Visited.insert(BB).second;
11770b57cec5SDimitry Andric if (SkipVisited && AlreadyVisited) {
11780b57cec5SDimitry Andric // We already visited this during our renaming, which can happen when
11790b57cec5SDimitry Andric // being asked to rename multiple blocks. Figure out the incoming val,
11800b57cec5SDimitry Andric // which is the last def.
11810b57cec5SDimitry Andric // Incoming value can only change if there is a block def, and in that
11820b57cec5SDimitry Andric // case, it's the last block def in the list.
11830b57cec5SDimitry Andric if (auto *BlockDefs = getWritableBlockDefs(BB))
11840b57cec5SDimitry Andric IncomingVal = &*BlockDefs->rbegin();
11850b57cec5SDimitry Andric } else
11860b57cec5SDimitry Andric IncomingVal = renameBlock(BB, IncomingVal, RenameAllUses);
11870b57cec5SDimitry Andric renameSuccessorPhis(BB, IncomingVal, RenameAllUses);
11880b57cec5SDimitry Andric WorkStack.push_back({Child, Child->begin(), IncomingVal});
11890b57cec5SDimitry Andric }
11900b57cec5SDimitry Andric }
11910b57cec5SDimitry Andric }
11920b57cec5SDimitry Andric
11930b57cec5SDimitry Andric /// This handles unreachable block accesses by deleting phi nodes in
11940b57cec5SDimitry Andric /// unreachable blocks, and marking all other unreachable MemoryAccesses as
11950b57cec5SDimitry Andric /// being uses of the live on entry definition.
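///
/// Sketch (hypothetical CFG, not from this file): if unreachable block U
/// branches to reachable block R, the code below gives R's MemoryPhi a
/// liveOnEntry incoming value for the U edge, and every MemoryUse/MemoryDef
/// inside U is pointed at liveOnEntry, so nothing is left with a null
/// defining access.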
11960b57cec5SDimitry Andric void MemorySSA::markUnreachableAsLiveOnEntry(BasicBlock *BB) { 11970b57cec5SDimitry Andric assert(!DT->isReachableFromEntry(BB) && 11980b57cec5SDimitry Andric "Reachable block found while handling unreachable blocks"); 11990b57cec5SDimitry Andric 12000b57cec5SDimitry Andric // Make sure phi nodes in our reachable successors end up with a 12010b57cec5SDimitry Andric // LiveOnEntryDef for our incoming edge, even though our block is forward 12020b57cec5SDimitry Andric // unreachable. We could just disconnect these blocks from the CFG fully, 12030b57cec5SDimitry Andric // but we do not right now. 12040b57cec5SDimitry Andric for (const BasicBlock *S : successors(BB)) { 12050b57cec5SDimitry Andric if (!DT->isReachableFromEntry(S)) 12060b57cec5SDimitry Andric continue; 12070b57cec5SDimitry Andric auto It = PerBlockAccesses.find(S); 12080b57cec5SDimitry Andric // Rename the phi nodes in our successor block 12090b57cec5SDimitry Andric if (It == PerBlockAccesses.end() || !isa<MemoryPhi>(It->second->front())) 12100b57cec5SDimitry Andric continue; 12110b57cec5SDimitry Andric AccessList *Accesses = It->second.get(); 12120b57cec5SDimitry Andric auto *Phi = cast<MemoryPhi>(&Accesses->front()); 12130b57cec5SDimitry Andric Phi->addIncoming(LiveOnEntryDef.get(), BB); 12140b57cec5SDimitry Andric } 12150b57cec5SDimitry Andric 12160b57cec5SDimitry Andric auto It = PerBlockAccesses.find(BB); 12170b57cec5SDimitry Andric if (It == PerBlockAccesses.end()) 12180b57cec5SDimitry Andric return; 12190b57cec5SDimitry Andric 12200b57cec5SDimitry Andric auto &Accesses = It->second; 12210b57cec5SDimitry Andric for (auto AI = Accesses->begin(), AE = Accesses->end(); AI != AE;) { 12220b57cec5SDimitry Andric auto Next = std::next(AI); 12230b57cec5SDimitry Andric // If we have a phi, just remove it. We are going to replace all 12240b57cec5SDimitry Andric // users with live on entry. 12250b57cec5SDimitry Andric if (auto *UseOrDef = dyn_cast<MemoryUseOrDef>(AI)) 12260b57cec5SDimitry Andric UseOrDef->setDefiningAccess(LiveOnEntryDef.get()); 12270b57cec5SDimitry Andric else 12280b57cec5SDimitry Andric Accesses->erase(AI); 12290b57cec5SDimitry Andric AI = Next; 12300b57cec5SDimitry Andric } 12310b57cec5SDimitry Andric } 12320b57cec5SDimitry Andric 12330b57cec5SDimitry Andric MemorySSA::MemorySSA(Function &Func, AliasAnalysis *AA, DominatorTree *DT) 1234*0fca6ea1SDimitry Andric : DT(DT), F(&Func), LiveOnEntryDef(nullptr), Walker(nullptr), 123504eeddc0SDimitry Andric SkipWalker(nullptr) { 12360b57cec5SDimitry Andric // Build MemorySSA using a batch alias analysis. This reuses the internal 12370b57cec5SDimitry Andric // state that AA collects during an alias()/getModRefInfo() call. This is 12380b57cec5SDimitry Andric // safe because there are no CFG changes while building MemorySSA and can 12390b57cec5SDimitry Andric // significantly reduce the time spent by the compiler in AA, because we will 12400b57cec5SDimitry Andric // make queries about all the instructions in the Function. 1241480093f4SDimitry Andric assert(AA && "No alias analysis?"); 12420b57cec5SDimitry Andric BatchAAResults BatchAA(*AA); 1243*0fca6ea1SDimitry Andric buildMemorySSA(BatchAA, iterator_range(F->begin(), F->end())); 1244*0fca6ea1SDimitry Andric // Intentionally leave AA to nullptr while building so we don't accidentally 1245*0fca6ea1SDimitry Andric // use non-batch AliasAnalysis. 1246*0fca6ea1SDimitry Andric this->AA = AA; 1247*0fca6ea1SDimitry Andric // Also create the walker here. 
1248*0fca6ea1SDimitry Andric getWalker(); 1249*0fca6ea1SDimitry Andric } 1250*0fca6ea1SDimitry Andric 1251*0fca6ea1SDimitry Andric MemorySSA::MemorySSA(Loop &L, AliasAnalysis *AA, DominatorTree *DT) 1252*0fca6ea1SDimitry Andric : DT(DT), L(&L), LiveOnEntryDef(nullptr), Walker(nullptr), 1253*0fca6ea1SDimitry Andric SkipWalker(nullptr) { 1254*0fca6ea1SDimitry Andric // Build MemorySSA using a batch alias analysis. This reuses the internal 1255*0fca6ea1SDimitry Andric // state that AA collects during an alias()/getModRefInfo() call. This is 1256*0fca6ea1SDimitry Andric // safe because there are no CFG changes while building MemorySSA and can 1257*0fca6ea1SDimitry Andric // significantly reduce the time spent by the compiler in AA, because we will 1258*0fca6ea1SDimitry Andric // make queries about all the instructions in the Function. 1259*0fca6ea1SDimitry Andric assert(AA && "No alias analysis?"); 1260*0fca6ea1SDimitry Andric BatchAAResults BatchAA(*AA); 1261*0fca6ea1SDimitry Andric buildMemorySSA( 1262*0fca6ea1SDimitry Andric BatchAA, map_range(L.blocks(), [](const BasicBlock *BB) -> BasicBlock & { 1263*0fca6ea1SDimitry Andric return *const_cast<BasicBlock *>(BB); 1264*0fca6ea1SDimitry Andric })); 1265*0fca6ea1SDimitry Andric // Intentionally leave AA to nullptr while building so we don't accidentally 12660b57cec5SDimitry Andric // use non-batch AliasAnalysis. 12670b57cec5SDimitry Andric this->AA = AA; 12680b57cec5SDimitry Andric // Also create the walker here. 12690b57cec5SDimitry Andric getWalker(); 12700b57cec5SDimitry Andric } 12710b57cec5SDimitry Andric 12720b57cec5SDimitry Andric MemorySSA::~MemorySSA() { 12730b57cec5SDimitry Andric // Drop all our references 12740b57cec5SDimitry Andric for (const auto &Pair : PerBlockAccesses) 12750b57cec5SDimitry Andric for (MemoryAccess &MA : *Pair.second) 12760b57cec5SDimitry Andric MA.dropAllReferences(); 12770b57cec5SDimitry Andric } 12780b57cec5SDimitry Andric 12790b57cec5SDimitry Andric MemorySSA::AccessList *MemorySSA::getOrCreateAccessList(const BasicBlock *BB) { 12800b57cec5SDimitry Andric auto Res = PerBlockAccesses.insert(std::make_pair(BB, nullptr)); 12810b57cec5SDimitry Andric 12820b57cec5SDimitry Andric if (Res.second) 12838bcb0991SDimitry Andric Res.first->second = std::make_unique<AccessList>(); 12840b57cec5SDimitry Andric return Res.first->second.get(); 12850b57cec5SDimitry Andric } 12860b57cec5SDimitry Andric 12870b57cec5SDimitry Andric MemorySSA::DefsList *MemorySSA::getOrCreateDefsList(const BasicBlock *BB) { 12880b57cec5SDimitry Andric auto Res = PerBlockDefs.insert(std::make_pair(BB, nullptr)); 12890b57cec5SDimitry Andric 12900b57cec5SDimitry Andric if (Res.second) 12918bcb0991SDimitry Andric Res.first->second = std::make_unique<DefsList>(); 12920b57cec5SDimitry Andric return Res.first->second.get(); 12930b57cec5SDimitry Andric } 12940b57cec5SDimitry Andric 12950b57cec5SDimitry Andric namespace llvm { 12960b57cec5SDimitry Andric 12970b57cec5SDimitry Andric /// This class is a batch walker of all MemoryUse's in the program, and points 12980b57cec5SDimitry Andric /// their defining access at the thing that actually clobbers them. Because it 12990b57cec5SDimitry Andric /// is a batch walker that touches everything, it does not operate like the 13000b57cec5SDimitry Andric /// other walkers. This walker is basically performing a top-down SSA renaming 13010b57cec5SDimitry Andric /// pass, where the version stack is used as the cache. 
This enables it to be 13020b57cec5SDimitry Andric /// significantly more time and memory efficient than using the regular walker, 13030b57cec5SDimitry Andric /// which is walking bottom-up. 13040b57cec5SDimitry Andric class MemorySSA::OptimizeUses { 13050b57cec5SDimitry Andric public: 1306bdd1243dSDimitry Andric OptimizeUses(MemorySSA *MSSA, CachingWalker *Walker, BatchAAResults *BAA, 1307bdd1243dSDimitry Andric DominatorTree *DT) 13080b57cec5SDimitry Andric : MSSA(MSSA), Walker(Walker), AA(BAA), DT(DT) {} 13090b57cec5SDimitry Andric 13100b57cec5SDimitry Andric void optimizeUses(); 13110b57cec5SDimitry Andric 13120b57cec5SDimitry Andric private: 13130b57cec5SDimitry Andric /// This represents where a given memorylocation is in the stack. 13140b57cec5SDimitry Andric struct MemlocStackInfo { 13150b57cec5SDimitry Andric // This essentially is keeping track of versions of the stack. Whenever 13160b57cec5SDimitry Andric // the stack changes due to pushes or pops, these versions increase. 13170b57cec5SDimitry Andric unsigned long StackEpoch; 13180b57cec5SDimitry Andric unsigned long PopEpoch; 13190b57cec5SDimitry Andric // This is the lower bound of places on the stack to check. It is equal to 13200b57cec5SDimitry Andric // the place the last stack walk ended. 13210b57cec5SDimitry Andric // Note: Correctness depends on this being initialized to 0, which densemap 13220b57cec5SDimitry Andric // does 13230b57cec5SDimitry Andric unsigned long LowerBound; 13240b57cec5SDimitry Andric const BasicBlock *LowerBoundBlock; 13250b57cec5SDimitry Andric // This is where the last walk for this memory location ended. 13260b57cec5SDimitry Andric unsigned long LastKill; 13270b57cec5SDimitry Andric bool LastKillValid; 13280b57cec5SDimitry Andric }; 13290b57cec5SDimitry Andric 13300b57cec5SDimitry Andric void optimizeUsesInBlock(const BasicBlock *, unsigned long &, unsigned long &, 13310b57cec5SDimitry Andric SmallVectorImpl<MemoryAccess *> &, 13320b57cec5SDimitry Andric DenseMap<MemoryLocOrCall, MemlocStackInfo> &); 13330b57cec5SDimitry Andric 13340b57cec5SDimitry Andric MemorySSA *MSSA; 1335bdd1243dSDimitry Andric CachingWalker *Walker; 13360b57cec5SDimitry Andric BatchAAResults *AA; 13370b57cec5SDimitry Andric DominatorTree *DT; 13380b57cec5SDimitry Andric }; 13390b57cec5SDimitry Andric 13400b57cec5SDimitry Andric } // end namespace llvm 13410b57cec5SDimitry Andric 13420b57cec5SDimitry Andric /// Optimize the uses in a given block This is basically the SSA renaming 13430b57cec5SDimitry Andric /// algorithm, with one caveat: We are able to use a single stack for all 13440b57cec5SDimitry Andric /// MemoryUses. This is because the set of *possible* reaching MemoryDefs is 13450b57cec5SDimitry Andric /// the same for every MemoryUse. The *actual* clobbering MemoryDef is just 13460b57cec5SDimitry Andric /// going to be some position in that stack of possible ones. 13470b57cec5SDimitry Andric /// 13480b57cec5SDimitry Andric /// We track the stack positions that each MemoryLocation needs 13490b57cec5SDimitry Andric /// to check, and last ended at. This is because we only want to check the 13500b57cec5SDimitry Andric /// things that changed since last time. 
The same MemoryLocation should
13510b57cec5SDimitry Andric /// get clobbered by the same store (getModRefInfo does not use invariantness
13520b57cec5SDimitry Andric /// or things like this, and if it ever starts to, we can modify
13530b57cec5SDimitry Andric /// MemoryLocOrCall to include the relevant data).
13540b57cec5SDimitry Andric void MemorySSA::OptimizeUses::optimizeUsesInBlock(
13550b57cec5SDimitry Andric const BasicBlock *BB, unsigned long &StackEpoch, unsigned long &PopEpoch,
13560b57cec5SDimitry Andric SmallVectorImpl<MemoryAccess *> &VersionStack,
13570b57cec5SDimitry Andric DenseMap<MemoryLocOrCall, MemlocStackInfo> &LocStackInfo) {
13580b57cec5SDimitry Andric
13590b57cec5SDimitry Andric /// If no accesses, nothing to do.
13600b57cec5SDimitry Andric MemorySSA::AccessList *Accesses = MSSA->getWritableBlockAccesses(BB);
13610b57cec5SDimitry Andric if (Accesses == nullptr)
13620b57cec5SDimitry Andric return;
13630b57cec5SDimitry Andric
13640b57cec5SDimitry Andric // Pop everything that doesn't dominate the current block off the stack, and
13650b57cec5SDimitry Andric // increment the PopEpoch to account for this.
13660b57cec5SDimitry Andric while (true) {
13670b57cec5SDimitry Andric assert(
13680b57cec5SDimitry Andric !VersionStack.empty() &&
13690b57cec5SDimitry Andric "Version stack should have liveOnEntry sentinel dominating everything");
13700b57cec5SDimitry Andric BasicBlock *BackBlock = VersionStack.back()->getBlock();
13710b57cec5SDimitry Andric if (DT->dominates(BackBlock, BB))
13720b57cec5SDimitry Andric break;
13730b57cec5SDimitry Andric while (VersionStack.back()->getBlock() == BackBlock)
13740b57cec5SDimitry Andric VersionStack.pop_back();
13750b57cec5SDimitry Andric ++PopEpoch;
13760b57cec5SDimitry Andric }
13770b57cec5SDimitry Andric
13780b57cec5SDimitry Andric for (MemoryAccess &MA : *Accesses) {
13790b57cec5SDimitry Andric auto *MU = dyn_cast<MemoryUse>(&MA);
13800b57cec5SDimitry Andric if (!MU) {
13810b57cec5SDimitry Andric VersionStack.push_back(&MA);
13820b57cec5SDimitry Andric ++StackEpoch;
13830b57cec5SDimitry Andric continue;
13840b57cec5SDimitry Andric }
13850b57cec5SDimitry Andric
138681ad6265SDimitry Andric if (MU->isOptimized())
138781ad6265SDimitry Andric continue;
138881ad6265SDimitry Andric
13890b57cec5SDimitry Andric MemoryLocOrCall UseMLOC(MU);
13900b57cec5SDimitry Andric auto &LocInfo = LocStackInfo[UseMLOC];
13910b57cec5SDimitry Andric // If the pop epoch changed, it means we've removed stuff from top of
13920b57cec5SDimitry Andric // stack due to changing blocks. We may have to reset the lower bound or
13930b57cec5SDimitry Andric // last kill info.
13940b57cec5SDimitry Andric if (LocInfo.PopEpoch != PopEpoch) {
13950b57cec5SDimitry Andric LocInfo.PopEpoch = PopEpoch;
13960b57cec5SDimitry Andric LocInfo.StackEpoch = StackEpoch;
13970b57cec5SDimitry Andric // If the lower bound was in something that no longer dominates us, we
13980b57cec5SDimitry Andric // have to reset it.
13990b57cec5SDimitry Andric // We can't simply track stack size, because the stack may have had
14000b57cec5SDimitry Andric // pushes/pops in the meantime.
14010b57cec5SDimitry Andric // XXX: This is non-optimal, but is only slower in cases with heavily
14020b57cec5SDimitry Andric // branching dominator trees. To get the optimal number of queries, we
14030b57cec5SDimitry Andric // would make LowerBound and LastKill a per-loc stack, and pop it until
14040b57cec5SDimitry Andric // the top of that stack dominates us. This does not seem worth it ATM.
14050b57cec5SDimitry Andric // A much cheaper optimization would be to always explore the deepest 14060b57cec5SDimitry Andric // branch of the dominator tree first. This will guarantee this resets on 14070b57cec5SDimitry Andric // the smallest set of blocks. 14080b57cec5SDimitry Andric if (LocInfo.LowerBoundBlock && LocInfo.LowerBoundBlock != BB && 14090b57cec5SDimitry Andric !DT->dominates(LocInfo.LowerBoundBlock, BB)) { 14100b57cec5SDimitry Andric // Reset the lower bound of things to check. 14110b57cec5SDimitry Andric // TODO: Some day we should be able to reset to last kill, rather than 14120b57cec5SDimitry Andric // 0. 14130b57cec5SDimitry Andric LocInfo.LowerBound = 0; 14140b57cec5SDimitry Andric LocInfo.LowerBoundBlock = VersionStack[0]->getBlock(); 14150b57cec5SDimitry Andric LocInfo.LastKillValid = false; 14160b57cec5SDimitry Andric } 14170b57cec5SDimitry Andric } else if (LocInfo.StackEpoch != StackEpoch) { 14180b57cec5SDimitry Andric // If all that has changed is the StackEpoch, we only have to check the 14190b57cec5SDimitry Andric // new things on the stack, because we've checked everything before. In 14200b57cec5SDimitry Andric // this case, the lower bound of things to check remains the same. 14210b57cec5SDimitry Andric LocInfo.PopEpoch = PopEpoch; 14220b57cec5SDimitry Andric LocInfo.StackEpoch = StackEpoch; 14230b57cec5SDimitry Andric } 14240b57cec5SDimitry Andric if (!LocInfo.LastKillValid) { 14250b57cec5SDimitry Andric LocInfo.LastKill = VersionStack.size() - 1; 14260b57cec5SDimitry Andric LocInfo.LastKillValid = true; 14270b57cec5SDimitry Andric } 14280b57cec5SDimitry Andric 14290b57cec5SDimitry Andric // At this point, we should have corrected last kill and LowerBound to be 14300b57cec5SDimitry Andric // in bounds. 14310b57cec5SDimitry Andric assert(LocInfo.LowerBound < VersionStack.size() && 14320b57cec5SDimitry Andric "Lower bound out of range"); 14330b57cec5SDimitry Andric assert(LocInfo.LastKill < VersionStack.size() && 14340b57cec5SDimitry Andric "Last kill info out of range"); 14350b57cec5SDimitry Andric // In any case, the new upper bound is the top of the stack. 14360b57cec5SDimitry Andric unsigned long UpperBound = VersionStack.size() - 1; 14370b57cec5SDimitry Andric 14380b57cec5SDimitry Andric if (UpperBound - LocInfo.LowerBound > MaxCheckLimit) { 14390b57cec5SDimitry Andric LLVM_DEBUG(dbgs() << "MemorySSA skipping optimization of " << *MU << " (" 14400b57cec5SDimitry Andric << *(MU->getMemoryInst()) << ")" 14410b57cec5SDimitry Andric << " because there are " 14420b57cec5SDimitry Andric << UpperBound - LocInfo.LowerBound 14430b57cec5SDimitry Andric << " stores to disambiguate\n"); 14440b57cec5SDimitry Andric // Because we did not walk, LastKill is no longer valid, as this may 14450b57cec5SDimitry Andric // have been a kill. 14460b57cec5SDimitry Andric LocInfo.LastKillValid = false; 14470b57cec5SDimitry Andric continue; 14480b57cec5SDimitry Andric } 14490b57cec5SDimitry Andric bool FoundClobberResult = false; 14500b57cec5SDimitry Andric unsigned UpwardWalkLimit = MaxCheckLimit; 14510b57cec5SDimitry Andric while (UpperBound > LocInfo.LowerBound) { 14520b57cec5SDimitry Andric if (isa<MemoryPhi>(VersionStack[UpperBound])) { 1453349cc55cSDimitry Andric // For phis, use the walker, see where we ended up, go there. 1454349cc55cSDimitry Andric // The invariant.group handling in MemorySSA is ad-hoc and doesn't 1455349cc55cSDimitry Andric // support updates, so don't use it to optimize uses. 
14560b57cec5SDimitry Andric MemoryAccess *Result =
1457349cc55cSDimitry Andric Walker->getClobberingMemoryAccessWithoutInvariantGroup(
1458bdd1243dSDimitry Andric MU, *AA, UpwardWalkLimit);
1459349cc55cSDimitry Andric // We are guaranteed to find it or something is wrong.
14600b57cec5SDimitry Andric while (VersionStack[UpperBound] != Result) {
14610b57cec5SDimitry Andric assert(UpperBound != 0);
14620b57cec5SDimitry Andric --UpperBound;
14630b57cec5SDimitry Andric }
14640b57cec5SDimitry Andric FoundClobberResult = true;
14650b57cec5SDimitry Andric break;
14660b57cec5SDimitry Andric }
14670b57cec5SDimitry Andric
14680b57cec5SDimitry Andric MemoryDef *MD = cast<MemoryDef>(VersionStack[UpperBound]);
1469bdd1243dSDimitry Andric if (instructionClobbersQuery(MD, MU, UseMLOC, *AA)) {
14700b57cec5SDimitry Andric FoundClobberResult = true;
14710b57cec5SDimitry Andric break;
14720b57cec5SDimitry Andric }
14730b57cec5SDimitry Andric --UpperBound;
14740b57cec5SDimitry Andric }
14750b57cec5SDimitry Andric
14760b57cec5SDimitry Andric // At the end of this loop, UpperBound is either a clobber or the lower
14770b57cec5SDimitry Andric // bound. PHI walking may cause it to be < LowerBound, and in fact, < LastKill.
14780b57cec5SDimitry Andric if (FoundClobberResult || UpperBound < LocInfo.LastKill) {
1479bdd1243dSDimitry Andric MU->setDefiningAccess(VersionStack[UpperBound], true);
14800b57cec5SDimitry Andric LocInfo.LastKill = UpperBound;
14810b57cec5SDimitry Andric } else {
14820b57cec5SDimitry Andric // Otherwise, we checked all the new ones, and now we know we can get to
14830b57cec5SDimitry Andric // LastKill.
1484bdd1243dSDimitry Andric MU->setDefiningAccess(VersionStack[LocInfo.LastKill], true);
14850b57cec5SDimitry Andric }
14860b57cec5SDimitry Andric LocInfo.LowerBound = VersionStack.size() - 1;
14870b57cec5SDimitry Andric LocInfo.LowerBoundBlock = BB;
14880b57cec5SDimitry Andric }
14890b57cec5SDimitry Andric }
14900b57cec5SDimitry Andric
14910b57cec5SDimitry Andric /// Optimize uses to point to their actual clobbering definitions.
14920b57cec5SDimitry Andric void MemorySSA::OptimizeUses::optimizeUses() {
14930b57cec5SDimitry Andric SmallVector<MemoryAccess *, 16> VersionStack;
14940b57cec5SDimitry Andric DenseMap<MemoryLocOrCall, MemlocStackInfo> LocStackInfo;
14950b57cec5SDimitry Andric VersionStack.push_back(MSSA->getLiveOnEntryDef());
14960b57cec5SDimitry Andric
14970b57cec5SDimitry Andric unsigned long StackEpoch = 1;
14980b57cec5SDimitry Andric unsigned long PopEpoch = 1;
14990b57cec5SDimitry Andric // We perform a non-recursive top-down dominator tree walk.
15000b57cec5SDimitry Andric for (const auto *DomNode : depth_first(DT->getRootNode()))
15010b57cec5SDimitry Andric optimizeUsesInBlock(DomNode->getBlock(), StackEpoch, PopEpoch, VersionStack,
15020b57cec5SDimitry Andric LocStackInfo);
15030b57cec5SDimitry Andric }
15040b57cec5SDimitry Andric
15050b57cec5SDimitry Andric void MemorySSA::placePHINodes(
15060b57cec5SDimitry Andric const SmallPtrSetImpl<BasicBlock *> &DefiningBlocks) {
15070b57cec5SDimitry Andric // Determine where our MemoryPhis should go.
15080b57cec5SDimitry Andric ForwardIDFCalculator IDFs(*DT);
15090b57cec5SDimitry Andric IDFs.setDefiningBlocks(DefiningBlocks);
15100b57cec5SDimitry Andric SmallVector<BasicBlock *, 32> IDFBlocks;
15110b57cec5SDimitry Andric IDFs.calculate(IDFBlocks);
15120b57cec5SDimitry Andric
15130b57cec5SDimitry Andric // Now place MemoryPhi nodes.
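// Illustrative sketch (hypothetical CFG, not from this file): with defining
// blocks {B} in a diamond A -> {B, C} -> D, the iterated dominance frontier
// of {B} is {D}, so the loop below creates exactly one MemoryPhi, in D.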
15140b57cec5SDimitry Andric for (auto &BB : IDFBlocks) 15150b57cec5SDimitry Andric createMemoryPhi(BB); 15160b57cec5SDimitry Andric } 15170b57cec5SDimitry Andric 1518*0fca6ea1SDimitry Andric template <typename IterT> 1519*0fca6ea1SDimitry Andric void MemorySSA::buildMemorySSA(BatchAAResults &BAA, IterT Blocks) { 15200b57cec5SDimitry Andric // We create an access to represent "live on entry", for things like 15210b57cec5SDimitry Andric // arguments or users of globals, where the memory they use is defined before 15220b57cec5SDimitry Andric // the beginning of the function. We do not actually insert it into the IR. 15230b57cec5SDimitry Andric // We do not define a live on exit for the immediate uses, and thus our 15240b57cec5SDimitry Andric // semantics do *not* imply that something with no immediate uses can simply 15250b57cec5SDimitry Andric // be removed. 1526*0fca6ea1SDimitry Andric BasicBlock &StartingPoint = *Blocks.begin(); 1527*0fca6ea1SDimitry Andric LiveOnEntryDef.reset(new MemoryDef(StartingPoint.getContext(), nullptr, 1528*0fca6ea1SDimitry Andric nullptr, &StartingPoint, NextID++)); 15290b57cec5SDimitry Andric 15300b57cec5SDimitry Andric // We maintain lists of memory accesses per-block, trading memory for time. We 15310b57cec5SDimitry Andric // could just look up the memory access for every possible instruction in the 15320b57cec5SDimitry Andric // stream. 15330b57cec5SDimitry Andric SmallPtrSet<BasicBlock *, 32> DefiningBlocks; 15340b57cec5SDimitry Andric // Go through each block, figure out where defs occur, and chain together all 15350b57cec5SDimitry Andric // the accesses. 1536*0fca6ea1SDimitry Andric for (BasicBlock &B : Blocks) { 15370b57cec5SDimitry Andric bool InsertIntoDef = false; 15380b57cec5SDimitry Andric AccessList *Accesses = nullptr; 15390b57cec5SDimitry Andric DefsList *Defs = nullptr; 15400b57cec5SDimitry Andric for (Instruction &I : B) { 15410b57cec5SDimitry Andric MemoryUseOrDef *MUD = createNewAccess(&I, &BAA); 15420b57cec5SDimitry Andric if (!MUD) 15430b57cec5SDimitry Andric continue; 15440b57cec5SDimitry Andric 15450b57cec5SDimitry Andric if (!Accesses) 15460b57cec5SDimitry Andric Accesses = getOrCreateAccessList(&B); 15470b57cec5SDimitry Andric Accesses->push_back(MUD); 15480b57cec5SDimitry Andric if (isa<MemoryDef>(MUD)) { 15490b57cec5SDimitry Andric InsertIntoDef = true; 15500b57cec5SDimitry Andric if (!Defs) 15510b57cec5SDimitry Andric Defs = getOrCreateDefsList(&B); 15520b57cec5SDimitry Andric Defs->push_back(*MUD); 15530b57cec5SDimitry Andric } 15540b57cec5SDimitry Andric } 15550b57cec5SDimitry Andric if (InsertIntoDef) 15560b57cec5SDimitry Andric DefiningBlocks.insert(&B); 15570b57cec5SDimitry Andric } 15580b57cec5SDimitry Andric placePHINodes(DefiningBlocks); 15590b57cec5SDimitry Andric 15600b57cec5SDimitry Andric // Now do regular SSA renaming on the MemoryDef/MemoryUse. Visited will get 15610b57cec5SDimitry Andric // filled in with all blocks. 15620b57cec5SDimitry Andric SmallPtrSet<BasicBlock *, 16> Visited; 1563*0fca6ea1SDimitry Andric if (L) { 1564*0fca6ea1SDimitry Andric // Only building MemorySSA for a single loop. placePHINodes may have 1565*0fca6ea1SDimitry Andric // inserted a MemoryPhi in the loop's preheader. As this is outside the 1566*0fca6ea1SDimitry Andric // scope of the loop, set them to LiveOnEntry. 
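// Sketch of the fix-up below (hypothetical access name): if the preheader
// holds "P = MemoryPhi(...)", P lies outside the modeled region, so each use
// of P is redirected to liveOnEntry and P itself is removed from the lists.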
1567*0fca6ea1SDimitry Andric if (auto *P = getMemoryAccess(L->getLoopPreheader())) { 1568*0fca6ea1SDimitry Andric for (Use &U : make_early_inc_range(P->uses())) 1569*0fca6ea1SDimitry Andric U.set(LiveOnEntryDef.get()); 1570*0fca6ea1SDimitry Andric removeFromLists(P); 1571*0fca6ea1SDimitry Andric } 1572*0fca6ea1SDimitry Andric // Now rename accesses in the loop. Populate Visited with the exit blocks of 1573*0fca6ea1SDimitry Andric // the loop, to limit the scope of the renaming. 1574*0fca6ea1SDimitry Andric SmallVector<BasicBlock *> ExitBlocks; 1575*0fca6ea1SDimitry Andric L->getExitBlocks(ExitBlocks); 1576*0fca6ea1SDimitry Andric Visited.insert(ExitBlocks.begin(), ExitBlocks.end()); 1577*0fca6ea1SDimitry Andric renamePass(DT->getNode(L->getLoopPreheader()), LiveOnEntryDef.get(), 1578*0fca6ea1SDimitry Andric Visited); 1579*0fca6ea1SDimitry Andric } else { 15800b57cec5SDimitry Andric renamePass(DT->getRootNode(), LiveOnEntryDef.get(), Visited); 1581*0fca6ea1SDimitry Andric } 15820b57cec5SDimitry Andric 15830b57cec5SDimitry Andric // Mark the uses in unreachable blocks as live on entry, so that they go 15840b57cec5SDimitry Andric // somewhere. 1585*0fca6ea1SDimitry Andric for (auto &BB : Blocks) 15860b57cec5SDimitry Andric if (!Visited.count(&BB)) 15870b57cec5SDimitry Andric markUnreachableAsLiveOnEntry(&BB); 15880b57cec5SDimitry Andric } 15890b57cec5SDimitry Andric 15900b57cec5SDimitry Andric MemorySSAWalker *MemorySSA::getWalker() { return getWalkerImpl(); } 15910b57cec5SDimitry Andric 1592bdd1243dSDimitry Andric MemorySSA::CachingWalker *MemorySSA::getWalkerImpl() { 15930b57cec5SDimitry Andric if (Walker) 15940b57cec5SDimitry Andric return Walker.get(); 15950b57cec5SDimitry Andric 15960b57cec5SDimitry Andric if (!WalkerBase) 1597bdd1243dSDimitry Andric WalkerBase = std::make_unique<ClobberWalkerBase>(this, DT); 15980b57cec5SDimitry Andric 1599bdd1243dSDimitry Andric Walker = std::make_unique<CachingWalker>(this, WalkerBase.get()); 16000b57cec5SDimitry Andric return Walker.get(); 16010b57cec5SDimitry Andric } 16020b57cec5SDimitry Andric 16030b57cec5SDimitry Andric MemorySSAWalker *MemorySSA::getSkipSelfWalker() { 16040b57cec5SDimitry Andric if (SkipWalker) 16050b57cec5SDimitry Andric return SkipWalker.get(); 16060b57cec5SDimitry Andric 16070b57cec5SDimitry Andric if (!WalkerBase) 1608bdd1243dSDimitry Andric WalkerBase = std::make_unique<ClobberWalkerBase>(this, DT); 16090b57cec5SDimitry Andric 1610bdd1243dSDimitry Andric SkipWalker = std::make_unique<SkipSelfWalker>(this, WalkerBase.get()); 16110b57cec5SDimitry Andric return SkipWalker.get(); 16120b57cec5SDimitry Andric } 16130b57cec5SDimitry Andric 16140b57cec5SDimitry Andric 16150b57cec5SDimitry Andric // This is a helper function used by the creation routines. It places NewAccess 16160b57cec5SDimitry Andric // into the access and defs lists for a given basic block, at the given 16170b57cec5SDimitry Andric // insertion point. 16180b57cec5SDimitry Andric void MemorySSA::insertIntoListsForBlock(MemoryAccess *NewAccess, 16190b57cec5SDimitry Andric const BasicBlock *BB, 16200b57cec5SDimitry Andric InsertionPlace Point) { 16210b57cec5SDimitry Andric auto *Accesses = getOrCreateAccessList(BB); 16220b57cec5SDimitry Andric if (Point == Beginning) { 16230b57cec5SDimitry Andric // If it's a phi node, it goes first, otherwise, it goes after any phi 16240b57cec5SDimitry Andric // nodes. 
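// E.g. (illustrative): for an access list [MemoryPhi, MemoryUse, MemoryDef],
// inserting a new non-phi access at "Beginning" must land after the
// MemoryPhi, which is what the find_if_not scan below arranges.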
16250b57cec5SDimitry Andric if (isa<MemoryPhi>(NewAccess)) { 16260b57cec5SDimitry Andric Accesses->push_front(NewAccess); 16270b57cec5SDimitry Andric auto *Defs = getOrCreateDefsList(BB); 16280b57cec5SDimitry Andric Defs->push_front(*NewAccess); 16290b57cec5SDimitry Andric } else { 16300b57cec5SDimitry Andric auto AI = find_if_not( 16310b57cec5SDimitry Andric *Accesses, [](const MemoryAccess &MA) { return isa<MemoryPhi>(MA); }); 16320b57cec5SDimitry Andric Accesses->insert(AI, NewAccess); 16330b57cec5SDimitry Andric if (!isa<MemoryUse>(NewAccess)) { 16340b57cec5SDimitry Andric auto *Defs = getOrCreateDefsList(BB); 16350b57cec5SDimitry Andric auto DI = find_if_not( 16360b57cec5SDimitry Andric *Defs, [](const MemoryAccess &MA) { return isa<MemoryPhi>(MA); }); 16370b57cec5SDimitry Andric Defs->insert(DI, *NewAccess); 16380b57cec5SDimitry Andric } 16390b57cec5SDimitry Andric } 16400b57cec5SDimitry Andric } else { 16410b57cec5SDimitry Andric Accesses->push_back(NewAccess); 16420b57cec5SDimitry Andric if (!isa<MemoryUse>(NewAccess)) { 16430b57cec5SDimitry Andric auto *Defs = getOrCreateDefsList(BB); 16440b57cec5SDimitry Andric Defs->push_back(*NewAccess); 16450b57cec5SDimitry Andric } 16460b57cec5SDimitry Andric } 16470b57cec5SDimitry Andric BlockNumberingValid.erase(BB); 16480b57cec5SDimitry Andric } 16490b57cec5SDimitry Andric 16500b57cec5SDimitry Andric void MemorySSA::insertIntoListsBefore(MemoryAccess *What, const BasicBlock *BB, 16510b57cec5SDimitry Andric AccessList::iterator InsertPt) { 16520b57cec5SDimitry Andric auto *Accesses = getWritableBlockAccesses(BB); 16530b57cec5SDimitry Andric bool WasEnd = InsertPt == Accesses->end(); 16540b57cec5SDimitry Andric Accesses->insert(AccessList::iterator(InsertPt), What); 16550b57cec5SDimitry Andric if (!isa<MemoryUse>(What)) { 16560b57cec5SDimitry Andric auto *Defs = getOrCreateDefsList(BB); 16570b57cec5SDimitry Andric // If we got asked to insert at the end, we have an easy job, just shove it 16580b57cec5SDimitry Andric // at the end. If we got asked to insert before an existing def, we also get 16590b57cec5SDimitry Andric // an iterator. If we got asked to insert before a use, we have to hunt for 16600b57cec5SDimitry Andric // the next def. 
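// Sketch (assumed list contents): given accesses [Phi, Def1, Use1, Use2] and
// an insertion point at Use1, the loop below scans forward from Use1 looking
// for the next MemoryDef; finding none before the end of the list, it
// appends What to the defs list instead.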
16610b57cec5SDimitry Andric if (WasEnd) { 16620b57cec5SDimitry Andric Defs->push_back(*What); 16630b57cec5SDimitry Andric } else if (isa<MemoryDef>(InsertPt)) { 16640b57cec5SDimitry Andric Defs->insert(InsertPt->getDefsIterator(), *What); 16650b57cec5SDimitry Andric } else { 16660b57cec5SDimitry Andric while (InsertPt != Accesses->end() && !isa<MemoryDef>(InsertPt)) 16670b57cec5SDimitry Andric ++InsertPt; 16680b57cec5SDimitry Andric // Either we found a def, or we are inserting at the end 16690b57cec5SDimitry Andric if (InsertPt == Accesses->end()) 16700b57cec5SDimitry Andric Defs->push_back(*What); 16710b57cec5SDimitry Andric else 16720b57cec5SDimitry Andric Defs->insert(InsertPt->getDefsIterator(), *What); 16730b57cec5SDimitry Andric } 16740b57cec5SDimitry Andric } 16750b57cec5SDimitry Andric BlockNumberingValid.erase(BB); 16760b57cec5SDimitry Andric } 16770b57cec5SDimitry Andric 16780b57cec5SDimitry Andric void MemorySSA::prepareForMoveTo(MemoryAccess *What, BasicBlock *BB) { 16790b57cec5SDimitry Andric // Keep it in the lookup tables, remove from the lists 16800b57cec5SDimitry Andric removeFromLists(What, false); 16810b57cec5SDimitry Andric 16820b57cec5SDimitry Andric // Note that moving should implicitly invalidate the optimized state of a 16830b57cec5SDimitry Andric // MemoryUse (and Phis can't be optimized). However, it doesn't do so for a 16840b57cec5SDimitry Andric // MemoryDef. 16850b57cec5SDimitry Andric if (auto *MD = dyn_cast<MemoryDef>(What)) 16860b57cec5SDimitry Andric MD->resetOptimized(); 16870b57cec5SDimitry Andric What->setBlock(BB); 16880b57cec5SDimitry Andric } 16890b57cec5SDimitry Andric 16900b57cec5SDimitry Andric // Move What before Where in the IR. The end result is that What will belong to 16910b57cec5SDimitry Andric // the right lists and have the right Block set, but will not otherwise be 16920b57cec5SDimitry Andric // correct. It will not have the right defining access, and if it is a def, 16930b57cec5SDimitry Andric // things below it will not properly be updated. 
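// Callers are expected to restore those invariants themselves afterwards;
// in-tree updates normally go through MemorySSAUpdater (e.g. its moveBefore
// and moveAfter helpers), which performs the def-use fix-ups around a move.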
16940b57cec5SDimitry Andric void MemorySSA::moveTo(MemoryUseOrDef *What, BasicBlock *BB, 16950b57cec5SDimitry Andric AccessList::iterator Where) { 16960b57cec5SDimitry Andric prepareForMoveTo(What, BB); 16970b57cec5SDimitry Andric insertIntoListsBefore(What, BB, Where); 16980b57cec5SDimitry Andric } 16990b57cec5SDimitry Andric 17000b57cec5SDimitry Andric void MemorySSA::moveTo(MemoryAccess *What, BasicBlock *BB, 17010b57cec5SDimitry Andric InsertionPlace Point) { 17020b57cec5SDimitry Andric if (isa<MemoryPhi>(What)) { 17030b57cec5SDimitry Andric assert(Point == Beginning && 17040b57cec5SDimitry Andric "Can only move a Phi at the beginning of the block"); 17050b57cec5SDimitry Andric // Update lookup table entry 17060b57cec5SDimitry Andric ValueToMemoryAccess.erase(What->getBlock()); 17070b57cec5SDimitry Andric bool Inserted = ValueToMemoryAccess.insert({BB, What}).second; 17080b57cec5SDimitry Andric (void)Inserted; 17090b57cec5SDimitry Andric assert(Inserted && "Cannot move a Phi to a block that already has one"); 17100b57cec5SDimitry Andric } 17110b57cec5SDimitry Andric 17120b57cec5SDimitry Andric prepareForMoveTo(What, BB); 17130b57cec5SDimitry Andric insertIntoListsForBlock(What, BB, Point); 17140b57cec5SDimitry Andric } 17150b57cec5SDimitry Andric 17160b57cec5SDimitry Andric MemoryPhi *MemorySSA::createMemoryPhi(BasicBlock *BB) { 17170b57cec5SDimitry Andric assert(!getMemoryAccess(BB) && "MemoryPhi already exists for this BB"); 17180b57cec5SDimitry Andric MemoryPhi *Phi = new MemoryPhi(BB->getContext(), BB, NextID++); 17190b57cec5SDimitry Andric // Phi's always are placed at the front of the block. 17200b57cec5SDimitry Andric insertIntoListsForBlock(Phi, BB, Beginning); 17210b57cec5SDimitry Andric ValueToMemoryAccess[BB] = Phi; 17220b57cec5SDimitry Andric return Phi; 17230b57cec5SDimitry Andric } 17240b57cec5SDimitry Andric 17250b57cec5SDimitry Andric MemoryUseOrDef *MemorySSA::createDefinedAccess(Instruction *I, 17260b57cec5SDimitry Andric MemoryAccess *Definition, 17278bcb0991SDimitry Andric const MemoryUseOrDef *Template, 17288bcb0991SDimitry Andric bool CreationMustSucceed) { 17290b57cec5SDimitry Andric assert(!isa<PHINode>(I) && "Cannot create a defined access for a PHI"); 17300b57cec5SDimitry Andric MemoryUseOrDef *NewAccess = createNewAccess(I, AA, Template); 17318bcb0991SDimitry Andric if (CreationMustSucceed) 17328bcb0991SDimitry Andric assert(NewAccess != nullptr && "Tried to create a memory access for a " 17338bcb0991SDimitry Andric "non-memory touching instruction"); 1734e8d8bef9SDimitry Andric if (NewAccess) { 1735e8d8bef9SDimitry Andric assert((!Definition || !isa<MemoryUse>(Definition)) && 1736e8d8bef9SDimitry Andric "A use cannot be a defining access"); 17370b57cec5SDimitry Andric NewAccess->setDefiningAccess(Definition); 1738e8d8bef9SDimitry Andric } 17390b57cec5SDimitry Andric return NewAccess; 17400b57cec5SDimitry Andric } 17410b57cec5SDimitry Andric 17420b57cec5SDimitry Andric // Return true if the instruction has ordering constraints. 17430b57cec5SDimitry Andric // Note specifically that this only considers stores and loads 17440b57cec5SDimitry Andric // because others are still considered ModRef by getModRefInfo. 
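// For example, an atomic store with release ordering and any volatile load
// are not "unordered", so isOrdered() returns true for them and
// createNewAccess models them as MemoryDefs rather than plain MemoryUses.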
17450b57cec5SDimitry Andric static inline bool isOrdered(const Instruction *I) {
17460b57cec5SDimitry Andric if (auto *SI = dyn_cast<StoreInst>(I)) {
17470b57cec5SDimitry Andric if (!SI->isUnordered())
17480b57cec5SDimitry Andric return true;
17490b57cec5SDimitry Andric } else if (auto *LI = dyn_cast<LoadInst>(I)) {
17500b57cec5SDimitry Andric if (!LI->isUnordered())
17510b57cec5SDimitry Andric return true;
17520b57cec5SDimitry Andric }
17530b57cec5SDimitry Andric return false;
17540b57cec5SDimitry Andric }
17550b57cec5SDimitry Andric
17560b57cec5SDimitry Andric /// Helper function to create new memory accesses.
17570b57cec5SDimitry Andric template <typename AliasAnalysisType>
17580b57cec5SDimitry Andric MemoryUseOrDef *MemorySSA::createNewAccess(Instruction *I,
17590b57cec5SDimitry Andric AliasAnalysisType *AAP,
17600b57cec5SDimitry Andric const MemoryUseOrDef *Template) {
17610b57cec5SDimitry Andric // The assume intrinsic has a control dependency which we model by claiming
17628bcb0991SDimitry Andric // that it writes arbitrarily. Debuginfo intrinsics may be considered
17638bcb0991SDimitry Andric // clobbers when we have a nonstandard AA pipeline. Ignore these fake memory
17648bcb0991SDimitry Andric // dependencies here.
17650b57cec5SDimitry Andric // FIXME: Replace this special casing with a more accurate modelling of
17660b57cec5SDimitry Andric // assume's control dependency.
1767e8d8bef9SDimitry Andric if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
1768e8d8bef9SDimitry Andric switch (II->getIntrinsicID()) {
1769e8d8bef9SDimitry Andric default:
1770e8d8bef9SDimitry Andric break;
1771*0fca6ea1SDimitry Andric case Intrinsic::allow_runtime_check:
1772*0fca6ea1SDimitry Andric case Intrinsic::allow_ubsan_check:
1773e8d8bef9SDimitry Andric case Intrinsic::assume:
1774e8d8bef9SDimitry Andric case Intrinsic::experimental_noalias_scope_decl:
1775349cc55cSDimitry Andric case Intrinsic::pseudoprobe:
17760b57cec5SDimitry Andric return nullptr;
1777e8d8bef9SDimitry Andric }
1778e8d8bef9SDimitry Andric }
17790b57cec5SDimitry Andric
17808bcb0991SDimitry Andric // Using a nonstandard AA pipeline might leave us with unexpected modref
17818bcb0991SDimitry Andric // results for I, so add a check to not model instructions that may not read
17828bcb0991SDimitry Andric // from or write to memory. This is necessary for correctness.
17838bcb0991SDimitry Andric if (!I->mayReadFromMemory() && !I->mayWriteToMemory())
17848bcb0991SDimitry Andric return nullptr;
17858bcb0991SDimitry Andric
17860b57cec5SDimitry Andric bool Def, Use;
17870b57cec5SDimitry Andric if (Template) {
1788e8d8bef9SDimitry Andric Def = isa<MemoryDef>(Template);
1789e8d8bef9SDimitry Andric Use = isa<MemoryUse>(Template);
17900b57cec5SDimitry Andric #if !defined(NDEBUG)
1791bdd1243dSDimitry Andric ModRefInfo ModRef = AAP->getModRefInfo(I, std::nullopt);
17920b57cec5SDimitry Andric bool DefCheck, UseCheck;
17930b57cec5SDimitry Andric DefCheck = isModSet(ModRef) || isOrdered(I);
17940b57cec5SDimitry Andric UseCheck = isRefSet(ModRef);
1795bdd1243dSDimitry Andric // Memory accesses should only be reduced and cannot be increased, since AA
1796bdd1243dSDimitry Andric // might just return better results as a result of some transformations.
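// For example (hypothetical): a call that was modeled as a MemoryDef in the
// template may legitimately be re-derived as read-only, or as no access at
// all, once AA has better information; the checks below only reject the
// opposite direction, where AA demands more than the template provides.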
1797bdd1243dSDimitry Andric assert((Def == DefCheck || !DefCheck) &&
1798bdd1243dSDimitry Andric "Memory accesses should only be reduced");
1799bdd1243dSDimitry Andric if (!Def && Use != UseCheck) {
1800bdd1243dSDimitry Andric // New Access should not have more power than template access
1801bdd1243dSDimitry Andric assert(!UseCheck && "Invalid template");
1802bdd1243dSDimitry Andric }
18030b57cec5SDimitry Andric #endif
18040b57cec5SDimitry Andric } else {
18050b57cec5SDimitry Andric // Find out what effect this instruction has on memory.
1806bdd1243dSDimitry Andric ModRefInfo ModRef = AAP->getModRefInfo(I, std::nullopt);
18070b57cec5SDimitry Andric // The isOrdered check is used to ensure that volatiles end up as defs
18080b57cec5SDimitry Andric // (atomics end up as ModRef right now anyway). Until we separate the
18090b57cec5SDimitry Andric // ordering chain from the memory chain, this enables people to see at least
18100b57cec5SDimitry Andric // some relative ordering to volatiles. Note that getClobberingMemoryAccess
18110b57cec5SDimitry Andric // will still give an answer that bypasses other volatile loads. TODO:
18120b57cec5SDimitry Andric // Separate memory aliasing and ordering into two different chains so that
18130b57cec5SDimitry Andric // we can precisely represent both "what memory will this read/write/is
18140b57cec5SDimitry Andric // clobbered by" and "what instructions can I move this past".
18150b57cec5SDimitry Andric Def = isModSet(ModRef) || isOrdered(I);
18160b57cec5SDimitry Andric Use = isRefSet(ModRef);
18170b57cec5SDimitry Andric }
18180b57cec5SDimitry Andric
18190b57cec5SDimitry Andric // It's possible for an instruction to not modify memory at all. During
18200b57cec5SDimitry Andric // construction, we ignore such instructions.
18210b57cec5SDimitry Andric if (!Def && !Use)
18220b57cec5SDimitry Andric return nullptr;
18230b57cec5SDimitry Andric
18240b57cec5SDimitry Andric MemoryUseOrDef *MUD;
182506c3fb27SDimitry Andric if (Def) {
18260b57cec5SDimitry Andric MUD = new MemoryDef(I->getContext(), nullptr, I, I->getParent(), NextID++);
182706c3fb27SDimitry Andric } else {
18280b57cec5SDimitry Andric MUD = new MemoryUse(I->getContext(), nullptr, I, I->getParent());
182906c3fb27SDimitry Andric if (isUseTriviallyOptimizableToLiveOnEntry(*AAP, I)) {
183006c3fb27SDimitry Andric MemoryAccess *LiveOnEntry = getLiveOnEntryDef();
183106c3fb27SDimitry Andric MUD->setOptimized(LiveOnEntry);
183206c3fb27SDimitry Andric }
183306c3fb27SDimitry Andric }
18340b57cec5SDimitry Andric ValueToMemoryAccess[I] = MUD;
18350b57cec5SDimitry Andric return MUD;
18360b57cec5SDimitry Andric }
18370b57cec5SDimitry Andric
18380b57cec5SDimitry Andric /// Properly remove \p MA from all of MemorySSA's lookup tables.
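///
/// Sketch of the expected call sequence (hypothetical caller): all uses of
/// \p MA are replaced first, since this function asserts use_empty();
/// removal from the per-block lists is done separately by removeFromLists().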
18390b57cec5SDimitry Andric void MemorySSA::removeFromLookups(MemoryAccess *MA) { 18400b57cec5SDimitry Andric assert(MA->use_empty() && 18410b57cec5SDimitry Andric "Trying to remove memory access that still has uses"); 18420b57cec5SDimitry Andric BlockNumbering.erase(MA); 18430b57cec5SDimitry Andric if (auto *MUD = dyn_cast<MemoryUseOrDef>(MA)) 18440b57cec5SDimitry Andric MUD->setDefiningAccess(nullptr); 18450b57cec5SDimitry Andric // Invalidate our walker's cache if necessary 18460b57cec5SDimitry Andric if (!isa<MemoryUse>(MA)) 18470b57cec5SDimitry Andric getWalker()->invalidateInfo(MA); 18480b57cec5SDimitry Andric 18490b57cec5SDimitry Andric Value *MemoryInst; 18500b57cec5SDimitry Andric if (const auto *MUD = dyn_cast<MemoryUseOrDef>(MA)) 18510b57cec5SDimitry Andric MemoryInst = MUD->getMemoryInst(); 18520b57cec5SDimitry Andric else 18530b57cec5SDimitry Andric MemoryInst = MA->getBlock(); 18540b57cec5SDimitry Andric 18550b57cec5SDimitry Andric auto VMA = ValueToMemoryAccess.find(MemoryInst); 18560b57cec5SDimitry Andric if (VMA->second == MA) 18570b57cec5SDimitry Andric ValueToMemoryAccess.erase(VMA); 18580b57cec5SDimitry Andric } 18590b57cec5SDimitry Andric 18600b57cec5SDimitry Andric /// Properly remove \p MA from all of MemorySSA's lists. 18610b57cec5SDimitry Andric /// 18620b57cec5SDimitry Andric /// Because of the way the intrusive list and use lists work, it is important to 18630b57cec5SDimitry Andric /// do removal in the right order. 18640b57cec5SDimitry Andric /// ShouldDelete defaults to true, and will cause the memory access to also be 18650b57cec5SDimitry Andric /// deleted, not just removed. 18660b57cec5SDimitry Andric void MemorySSA::removeFromLists(MemoryAccess *MA, bool ShouldDelete) { 18670b57cec5SDimitry Andric BasicBlock *BB = MA->getBlock(); 18680b57cec5SDimitry Andric // The access list owns the reference, so we erase it from the non-owning list 18690b57cec5SDimitry Andric // first. 18700b57cec5SDimitry Andric if (!isa<MemoryUse>(MA)) { 18710b57cec5SDimitry Andric auto DefsIt = PerBlockDefs.find(BB); 18720b57cec5SDimitry Andric std::unique_ptr<DefsList> &Defs = DefsIt->second; 18730b57cec5SDimitry Andric Defs->remove(*MA); 18740b57cec5SDimitry Andric if (Defs->empty()) 18750b57cec5SDimitry Andric PerBlockDefs.erase(DefsIt); 18760b57cec5SDimitry Andric } 18770b57cec5SDimitry Andric 18780b57cec5SDimitry Andric // The erase call here will delete it. If we don't want it deleted, we call 18790b57cec5SDimitry Andric // remove instead. 
18800b57cec5SDimitry Andric auto AccessIt = PerBlockAccesses.find(BB);
18810b57cec5SDimitry Andric std::unique_ptr<AccessList> &Accesses = AccessIt->second;
18820b57cec5SDimitry Andric if (ShouldDelete)
18830b57cec5SDimitry Andric Accesses->erase(MA);
18840b57cec5SDimitry Andric else
18850b57cec5SDimitry Andric Accesses->remove(MA);
18860b57cec5SDimitry Andric
18870b57cec5SDimitry Andric if (Accesses->empty()) {
18880b57cec5SDimitry Andric PerBlockAccesses.erase(AccessIt);
18890b57cec5SDimitry Andric BlockNumberingValid.erase(BB);
18900b57cec5SDimitry Andric }
18910b57cec5SDimitry Andric }
18920b57cec5SDimitry Andric
18930b57cec5SDimitry Andric void MemorySSA::print(raw_ostream &OS) const {
18940b57cec5SDimitry Andric MemorySSAAnnotatedWriter Writer(this);
1895*0fca6ea1SDimitry Andric Function *F = this->F;
1896*0fca6ea1SDimitry Andric if (L)
1897*0fca6ea1SDimitry Andric F = L->getHeader()->getParent();
1898*0fca6ea1SDimitry Andric F->print(OS, &Writer);
18990b57cec5SDimitry Andric }
19000b57cec5SDimitry Andric
19010b57cec5SDimitry Andric #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
19020b57cec5SDimitry Andric LLVM_DUMP_METHOD void MemorySSA::dump() const { print(dbgs()); }
19030b57cec5SDimitry Andric #endif
19040b57cec5SDimitry Andric
1905349cc55cSDimitry Andric void MemorySSA::verifyMemorySSA(VerificationLevel VL) const {
1906349cc55cSDimitry Andric #if !defined(NDEBUG) && defined(EXPENSIVE_CHECKS)
1907349cc55cSDimitry Andric VL = VerificationLevel::Full;
1908349cc55cSDimitry Andric #endif
1909349cc55cSDimitry Andric
1910349cc55cSDimitry Andric #ifndef NDEBUG
1911*0fca6ea1SDimitry Andric if (F) {
1912*0fca6ea1SDimitry Andric auto Blocks = iterator_range(F->begin(), F->end());
1913*0fca6ea1SDimitry Andric verifyOrderingDominationAndDefUses(Blocks, VL);
1914*0fca6ea1SDimitry Andric verifyDominationNumbers(Blocks);
1915349cc55cSDimitry Andric if (VL == VerificationLevel::Full)
1916*0fca6ea1SDimitry Andric verifyPrevDefInPhis(Blocks);
1917*0fca6ea1SDimitry Andric } else {
1918*0fca6ea1SDimitry Andric assert(L && "must either have loop or function");
1919*0fca6ea1SDimitry Andric auto Blocks =
1920*0fca6ea1SDimitry Andric map_range(L->blocks(), [](const BasicBlock *BB) -> BasicBlock & {
1921*0fca6ea1SDimitry Andric return *const_cast<BasicBlock *>(BB);
1922*0fca6ea1SDimitry Andric });
1923*0fca6ea1SDimitry Andric verifyOrderingDominationAndDefUses(Blocks, VL);
1924*0fca6ea1SDimitry Andric verifyDominationNumbers(Blocks);
1925*0fca6ea1SDimitry Andric if (VL == VerificationLevel::Full)
1926*0fca6ea1SDimitry Andric verifyPrevDefInPhis(Blocks);
1927*0fca6ea1SDimitry Andric }
1928349cc55cSDimitry Andric #endif
19290b57cec5SDimitry Andric // Previously, the verification used to also verify that the clobberingAccess
19300b57cec5SDimitry Andric // cached by MemorySSA is the same as the clobberingAccess found at a later
19310b57cec5SDimitry Andric // query to AA. This does not hold true in general due to the current
19320b57cec5SDimitry Andric // fragility of BasicAA, which has arbitrary caps on the things it analyzes
19330b57cec5SDimitry Andric // before giving up. As a result, transformations that are correct will lead
19340b57cec5SDimitry Andric // to BasicAA returning different Alias answers before and after that transformation.
19350b57cec5SDimitry Andric   // Invalidating MemorySSA is not an option, as the results in BasicAA can be
19360b57cec5SDimitry Andric   // so random that, in the worst case, we'd need to rebuild MemorySSA from
19370b57cec5SDimitry Andric   // scratch after every transformation, which defeats the purpose of using it.
19380b57cec5SDimitry Andric   // For such an example, see test4 added in D51960.
19390b57cec5SDimitry Andric }
19400b57cec5SDimitry Andric 
1941*0fca6ea1SDimitry Andric template <typename IterT>
1942*0fca6ea1SDimitry Andric void MemorySSA::verifyPrevDefInPhis(IterT Blocks) const {
1943*0fca6ea1SDimitry Andric   for (const BasicBlock &BB : Blocks) {
19448bcb0991SDimitry Andric     if (MemoryPhi *Phi = getMemoryAccess(&BB)) {
19458bcb0991SDimitry Andric       for (unsigned I = 0, E = Phi->getNumIncomingValues(); I != E; ++I) {
19468bcb0991SDimitry Andric         auto *Pred = Phi->getIncomingBlock(I);
19478bcb0991SDimitry Andric         auto *IncAcc = Phi->getIncomingValue(I);
19488bcb0991SDimitry Andric         // If Pred has no unreachable predecessors, get the last def looking at
19498bcb0991SDimitry Andric         // IDoms. If, while walking IDoms, any of these has an unreachable
19508bcb0991SDimitry Andric         // predecessor, then the incoming def can be any access.
19518bcb0991SDimitry Andric         if (auto *DTNode = DT->getNode(Pred)) {
19528bcb0991SDimitry Andric           while (DTNode) {
19538bcb0991SDimitry Andric             if (auto *DefList = getBlockDefs(DTNode->getBlock())) {
19548bcb0991SDimitry Andric               auto *LastAcc = &*(--DefList->end());
19558bcb0991SDimitry Andric               assert(LastAcc == IncAcc &&
19568bcb0991SDimitry Andric                      "Incorrect incoming access into phi.");
1957349cc55cSDimitry Andric               (void)IncAcc;
1958349cc55cSDimitry Andric               (void)LastAcc;
19598bcb0991SDimitry Andric               break;
19608bcb0991SDimitry Andric             }
19618bcb0991SDimitry Andric             DTNode = DTNode->getIDom();
19628bcb0991SDimitry Andric           }
19638bcb0991SDimitry Andric         } else {
19648bcb0991SDimitry Andric           // If Pred has unreachable predecessors, but has at least a Def, the
19658bcb0991SDimitry Andric           // incoming access can be the last Def in Pred, or it could have been
19668bcb0991SDimitry Andric           // optimized to LoE. After an update, though, the LoE may have been
19678bcb0991SDimitry Andric           // replaced by another access, so IncAcc may be any access.
19688bcb0991SDimitry Andric           // If Pred has unreachable predecessors and no Defs, the incoming
19698bcb0991SDimitry Andric           // access should be LoE; however, after an update, it may be any access.
19708bcb0991SDimitry Andric         }
19718bcb0991SDimitry Andric       }
19728bcb0991SDimitry Andric     }
19738bcb0991SDimitry Andric   }
19748bcb0991SDimitry Andric }
19758bcb0991SDimitry Andric 
19760b57cec5SDimitry Andric /// Verify that all of the blocks we believe to have valid domination numbers
19770b57cec5SDimitry Andric /// actually have valid domination numbers.
1978*0fca6ea1SDimitry Andric template <typename IterT>
1979*0fca6ea1SDimitry Andric void MemorySSA::verifyDominationNumbers(IterT Blocks) const {
19800b57cec5SDimitry Andric   if (BlockNumberingValid.empty())
19810b57cec5SDimitry Andric     return;
19820b57cec5SDimitry Andric 
19830b57cec5SDimitry Andric   SmallPtrSet<const BasicBlock *, 16> ValidBlocks = BlockNumberingValid;
1984*0fca6ea1SDimitry Andric   for (const BasicBlock &BB : Blocks) {
19850b57cec5SDimitry Andric     if (!ValidBlocks.count(&BB))
19860b57cec5SDimitry Andric       continue;
19870b57cec5SDimitry Andric 
19880b57cec5SDimitry Andric     ValidBlocks.erase(&BB);
19890b57cec5SDimitry Andric 
19900b57cec5SDimitry Andric     const AccessList *Accesses = getBlockAccesses(&BB);
19910b57cec5SDimitry Andric     // It's correct to say an empty block has valid numbering.
19920b57cec5SDimitry Andric     if (!Accesses)
19930b57cec5SDimitry Andric       continue;
19940b57cec5SDimitry Andric 
19950b57cec5SDimitry Andric     // Block numbering starts at 1.
19960b57cec5SDimitry Andric     unsigned long LastNumber = 0;
19970b57cec5SDimitry Andric     for (const MemoryAccess &MA : *Accesses) {
19980b57cec5SDimitry Andric       auto ThisNumberIter = BlockNumbering.find(&MA);
19990b57cec5SDimitry Andric       assert(ThisNumberIter != BlockNumbering.end() &&
20000b57cec5SDimitry Andric              "MemoryAccess has no domination number in a valid block!");
20010b57cec5SDimitry Andric 
20020b57cec5SDimitry Andric       unsigned long ThisNumber = ThisNumberIter->second;
20030b57cec5SDimitry Andric       assert(ThisNumber > LastNumber &&
20040b57cec5SDimitry Andric              "Domination numbers should be strictly increasing!");
2005349cc55cSDimitry Andric       (void)LastNumber;
20060b57cec5SDimitry Andric       LastNumber = ThisNumber;
20070b57cec5SDimitry Andric     }
20080b57cec5SDimitry Andric   }
20090b57cec5SDimitry Andric 
20100b57cec5SDimitry Andric   assert(ValidBlocks.empty() &&
20110b57cec5SDimitry Andric          "All valid BasicBlocks should exist in F -- dangling pointers?");
20120b57cec5SDimitry Andric }
20130b57cec5SDimitry Andric 
2014480093f4SDimitry Andric /// Verify ordering: the order and existence of MemoryAccesses matches the
20150b57cec5SDimitry Andric /// order and existence of memory-affecting instructions.
2016480093f4SDimitry Andric /// Verify domination: each definition dominates all of its uses.
2017480093f4SDimitry Andric /// Verify def-uses: the immediate use information - walk all the memory
2018480093f4SDimitry Andric /// accesses and verify that, for each use, it appears in the appropriate
2019480093f4SDimitry Andric /// def's use list.
2020*0fca6ea1SDimitry Andric template <typename IterT>
2021*0fca6ea1SDimitry Andric void MemorySSA::verifyOrderingDominationAndDefUses(IterT Blocks,
2022349cc55cSDimitry Andric                                                    VerificationLevel VL) const {
20230b57cec5SDimitry Andric   // Walk all the blocks, comparing what the lookups think and what the access
20240b57cec5SDimitry Andric   // lists think, as well as the order in the blocks vs the order in the access
20250b57cec5SDimitry Andric   // lists.
20260b57cec5SDimitry Andric   SmallVector<MemoryAccess *, 32> ActualAccesses;
20270b57cec5SDimitry Andric   SmallVector<MemoryAccess *, 32> ActualDefs;
2028*0fca6ea1SDimitry Andric   for (BasicBlock &B : Blocks) {
20290b57cec5SDimitry Andric     const AccessList *AL = getBlockAccesses(&B);
20300b57cec5SDimitry Andric     const auto *DL = getBlockDefs(&B);
2031480093f4SDimitry Andric     MemoryPhi *Phi = getMemoryAccess(&B);
20320b57cec5SDimitry Andric     if (Phi) {
2033480093f4SDimitry Andric       // Verify ordering.
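      // A MemoryPhi, when present, is always the first access in its block's
      // access list, so record it before any instruction-based accesses below.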
20340b57cec5SDimitry Andric       ActualAccesses.push_back(Phi);
20350b57cec5SDimitry Andric       ActualDefs.push_back(Phi);
2036480093f4SDimitry Andric       // Verify domination
2037349cc55cSDimitry Andric       for (const Use &U : Phi->uses()) {
2038480093f4SDimitry Andric         assert(dominates(Phi, U) && "Memory PHI does not dominate its uses");
2039349cc55cSDimitry Andric         (void)U;
2040349cc55cSDimitry Andric       }
2041349cc55cSDimitry Andric       // Verify def-uses for full verify.
2042349cc55cSDimitry Andric       if (VL == VerificationLevel::Full) {
2043*0fca6ea1SDimitry Andric         assert(Phi->getNumOperands() == pred_size(&B) &&
2044480093f4SDimitry Andric                "Incomplete MemoryPhi Node");
2045480093f4SDimitry Andric         for (unsigned I = 0, E = Phi->getNumIncomingValues(); I != E; ++I) {
2046480093f4SDimitry Andric           verifyUseInDefs(Phi->getIncomingValue(I), Phi);
2047e8d8bef9SDimitry Andric           assert(is_contained(predecessors(&B), Phi->getIncomingBlock(I)) &&
2048480093f4SDimitry Andric                  "Incoming phi block not a block predecessor");
2049480093f4SDimitry Andric         }
2050349cc55cSDimitry Andric       }
20510b57cec5SDimitry Andric     }
20520b57cec5SDimitry Andric 
20530b57cec5SDimitry Andric     for (Instruction &I : B) {
2054480093f4SDimitry Andric       MemoryUseOrDef *MA = getMemoryAccess(&I);
20550b57cec5SDimitry Andric       assert((!MA || (AL && (isa<MemoryUse>(MA) || DL))) &&
20560b57cec5SDimitry Andric              "We have memory affecting instructions "
20570b57cec5SDimitry Andric              "in this block but they are not in the "
20580b57cec5SDimitry Andric              "access list or defs list");
20590b57cec5SDimitry Andric       if (MA) {
2060480093f4SDimitry Andric         // Verify ordering.
20610b57cec5SDimitry Andric         ActualAccesses.push_back(MA);
2062480093f4SDimitry Andric         if (MemoryAccess *MD = dyn_cast<MemoryDef>(MA)) {
2063480093f4SDimitry Andric           // Verify ordering.
20640b57cec5SDimitry Andric           ActualDefs.push_back(MA);
2065480093f4SDimitry Andric           // Verify domination.
2066349cc55cSDimitry Andric           for (const Use &U : MD->uses()) {
2067480093f4SDimitry Andric             assert(dominates(MD, U) &&
2068480093f4SDimitry Andric                    "Memory Def does not dominate its uses");
2069349cc55cSDimitry Andric             (void)U;
2070480093f4SDimitry Andric           }
2071349cc55cSDimitry Andric         }
2072349cc55cSDimitry Andric         // Verify def-uses for full verify.
2073349cc55cSDimitry Andric         if (VL == VerificationLevel::Full)
2074480093f4SDimitry Andric           verifyUseInDefs(MA->getDefiningAccess(), MA);
20750b57cec5SDimitry Andric       }
20760b57cec5SDimitry Andric     }
20770b57cec5SDimitry Andric     // Either we hit the assert, really have no accesses, or we have both
2078480093f4SDimitry Andric     // accesses and an access list. Same with defs.
20790b57cec5SDimitry Andric     if (!AL && !DL)
20800b57cec5SDimitry Andric       continue;
2081480093f4SDimitry Andric     // Verify ordering.
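    // Cross-check the collected accesses/defs against the stored lists: the
    // counts must match, and the elements must appear in the same order.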
20820b57cec5SDimitry Andric     assert(AL->size() == ActualAccesses.size() &&
20830b57cec5SDimitry Andric            "We don't have the same number of accesses in the block as on the "
20840b57cec5SDimitry Andric            "access list");
20850b57cec5SDimitry Andric     assert((DL || ActualDefs.size() == 0) &&
20860b57cec5SDimitry Andric            "Either we should have a defs list, or we should have no defs");
20870b57cec5SDimitry Andric     assert((!DL || DL->size() == ActualDefs.size()) &&
20880b57cec5SDimitry Andric            "We don't have the same number of defs in the block as on the "
20890b57cec5SDimitry Andric            "def list");
20900b57cec5SDimitry Andric     auto ALI = AL->begin();
20910b57cec5SDimitry Andric     auto AAI = ActualAccesses.begin();
20920b57cec5SDimitry Andric     while (ALI != AL->end() && AAI != ActualAccesses.end()) {
20930b57cec5SDimitry Andric       assert(&*ALI == *AAI && "Not the same accesses in the same order");
20940b57cec5SDimitry Andric       ++ALI;
20950b57cec5SDimitry Andric       ++AAI;
20960b57cec5SDimitry Andric     }
20970b57cec5SDimitry Andric     ActualAccesses.clear();
20980b57cec5SDimitry Andric     if (DL) {
20990b57cec5SDimitry Andric       auto DLI = DL->begin();
21000b57cec5SDimitry Andric       auto ADI = ActualDefs.begin();
21010b57cec5SDimitry Andric       while (DLI != DL->end() && ADI != ActualDefs.end()) {
21020b57cec5SDimitry Andric         assert(&*DLI == *ADI && "Not the same defs in the same order");
21030b57cec5SDimitry Andric         ++DLI;
21040b57cec5SDimitry Andric         ++ADI;
21050b57cec5SDimitry Andric       }
21060b57cec5SDimitry Andric     }
21070b57cec5SDimitry Andric     ActualDefs.clear();
21080b57cec5SDimitry Andric   }
21090b57cec5SDimitry Andric }
21100b57cec5SDimitry Andric 
21110b57cec5SDimitry Andric /// Verify the def-use lists in MemorySSA, by verifying that \p Use
21120b57cec5SDimitry Andric /// appears in the use list of \p Def.
21130b57cec5SDimitry Andric void MemorySSA::verifyUseInDefs(MemoryAccess *Def, MemoryAccess *Use) const {
21140b57cec5SDimitry Andric   // The live-on-entry use may cause us to get a null def here.
21150b57cec5SDimitry Andric   if (!Def)
21160b57cec5SDimitry Andric     assert(isLiveOnEntryDef(Use) &&
21170b57cec5SDimitry Andric            "Null def but use does not point to live on entry def");
21180b57cec5SDimitry Andric   else
21190b57cec5SDimitry Andric     assert(is_contained(Def->users(), Use) &&
21200b57cec5SDimitry Andric            "Did not find use in def's use list");
21210b57cec5SDimitry Andric }
21220b57cec5SDimitry Andric 
21230b57cec5SDimitry Andric /// Perform a local numbering on blocks so that instruction ordering can be
21240b57cec5SDimitry Andric /// determined in constant time.
21250b57cec5SDimitry Andric /// TODO: We currently just number in order. If we numbered by N, we could
21260b57cec5SDimitry Andric /// allow at least N-1 sequences of insertBefore or insertAfter (and at least
21270b57cec5SDimitry Andric /// log2(N) sequences of mixed before and after) without needing to invalidate
21280b57cec5SDimitry Andric /// the numbering.
21290b57cec5SDimitry Andric void MemorySSA::renumberBlock(const BasicBlock *B) const {
21300b57cec5SDimitry Andric   // The pre-increment ensures the numbers really start at 1.
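  // Illustrative example: a block whose access list is [MemoryPhi, MemoryDef,
  // MemoryUse] gets the numbering {Phi: 1, Def: 2, Use: 3}, which lets
  // locallyDominates() below answer ordering queries with a single integer
  // comparison.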
21310b57cec5SDimitry Andric   unsigned long CurrentNumber = 0;
21320b57cec5SDimitry Andric   const AccessList *AL = getBlockAccesses(B);
21330b57cec5SDimitry Andric   assert(AL != nullptr && "Asking to renumber an empty block");
21340b57cec5SDimitry Andric   for (const auto &I : *AL)
21350b57cec5SDimitry Andric     BlockNumbering[&I] = ++CurrentNumber;
21360b57cec5SDimitry Andric   BlockNumberingValid.insert(B);
21370b57cec5SDimitry Andric }
21380b57cec5SDimitry Andric 
21390b57cec5SDimitry Andric /// Determine, for two memory accesses in the same block,
21400b57cec5SDimitry Andric /// whether \p Dominator dominates \p Dominatee.
21410b57cec5SDimitry Andric /// \returns True if \p Dominator dominates \p Dominatee.
21420b57cec5SDimitry Andric bool MemorySSA::locallyDominates(const MemoryAccess *Dominator,
21430b57cec5SDimitry Andric                                  const MemoryAccess *Dominatee) const {
21440b57cec5SDimitry Andric   const BasicBlock *DominatorBlock = Dominator->getBlock();
21450b57cec5SDimitry Andric 
21460b57cec5SDimitry Andric   assert((DominatorBlock == Dominatee->getBlock()) &&
21470b57cec5SDimitry Andric          "Asking for local domination when accesses are in different blocks!");
21480b57cec5SDimitry Andric   // A node dominates itself.
21490b57cec5SDimitry Andric   if (Dominatee == Dominator)
21500b57cec5SDimitry Andric     return true;
21510b57cec5SDimitry Andric 
21520b57cec5SDimitry Andric   // When Dominatee is defined on function entry, it is not dominated by another
21530b57cec5SDimitry Andric   // memory access.
21540b57cec5SDimitry Andric   if (isLiveOnEntryDef(Dominatee))
21550b57cec5SDimitry Andric     return false;
21560b57cec5SDimitry Andric 
21570b57cec5SDimitry Andric   // When Dominator is defined on function entry, it dominates the other memory
21580b57cec5SDimitry Andric   // access.
21590b57cec5SDimitry Andric   if (isLiveOnEntryDef(Dominator))
21600b57cec5SDimitry Andric     return true;
21610b57cec5SDimitry Andric 
21620b57cec5SDimitry Andric   if (!BlockNumberingValid.count(DominatorBlock))
21630b57cec5SDimitry Andric     renumberBlock(DominatorBlock);
21640b57cec5SDimitry Andric 
21650b57cec5SDimitry Andric   unsigned long DominatorNum = BlockNumbering.lookup(Dominator);
21660b57cec5SDimitry Andric   // All numbers start with 1.
21670b57cec5SDimitry Andric   assert(DominatorNum != 0 && "Block was not numbered properly");
21680b57cec5SDimitry Andric   unsigned long DominateeNum = BlockNumbering.lookup(Dominatee);
21690b57cec5SDimitry Andric   assert(DominateeNum != 0 && "Block was not numbered properly");
21700b57cec5SDimitry Andric   return DominatorNum < DominateeNum;
21710b57cec5SDimitry Andric }
21720b57cec5SDimitry Andric 
21730b57cec5SDimitry Andric bool MemorySSA::dominates(const MemoryAccess *Dominator,
21740b57cec5SDimitry Andric                           const MemoryAccess *Dominatee) const {
21750b57cec5SDimitry Andric   if (Dominator == Dominatee)
21760b57cec5SDimitry Andric     return true;
21770b57cec5SDimitry Andric 
21780b57cec5SDimitry Andric   if (isLiveOnEntryDef(Dominatee))
21790b57cec5SDimitry Andric     return false;
21800b57cec5SDimitry Andric 
21810b57cec5SDimitry Andric   if (Dominator->getBlock() != Dominatee->getBlock())
21820b57cec5SDimitry Andric     return DT->dominates(Dominator->getBlock(), Dominatee->getBlock());
21830b57cec5SDimitry Andric   return locallyDominates(Dominator, Dominatee);
21840b57cec5SDimitry Andric }
21850b57cec5SDimitry Andric 
21860b57cec5SDimitry Andric bool MemorySSA::dominates(const MemoryAccess *Dominator,
21870b57cec5SDimitry Andric                           const Use &Dominatee) const {
21880b57cec5SDimitry Andric   if (MemoryPhi *MP = dyn_cast<MemoryPhi>(Dominatee.getUser())) {
21890b57cec5SDimitry Andric     BasicBlock *UseBB = MP->getIncomingBlock(Dominatee);
21900b57cec5SDimitry Andric     // The def must dominate the incoming block of the phi.
21910b57cec5SDimitry Andric     if (UseBB != Dominator->getBlock())
21920b57cec5SDimitry Andric       return DT->dominates(Dominator->getBlock(), UseBB);
21930b57cec5SDimitry Andric     // If the UseBB and the DefBB are the same, compare locally.
21940b57cec5SDimitry Andric     return locallyDominates(Dominator, cast<MemoryAccess>(Dominatee));
21950b57cec5SDimitry Andric   }
21960b57cec5SDimitry Andric   // If it's not a PHI node use, the normal dominates can already handle it.
21970b57cec5SDimitry Andric   return dominates(Dominator, cast<MemoryAccess>(Dominatee.getUser()));
21980b57cec5SDimitry Andric }
21990b57cec5SDimitry Andric 
220081ad6265SDimitry Andric void MemorySSA::ensureOptimizedUses() {
220181ad6265SDimitry Andric   if (IsOptimized)
220281ad6265SDimitry Andric     return;
220381ad6265SDimitry Andric 
220481ad6265SDimitry Andric   BatchAAResults BatchAA(*AA);
2205bdd1243dSDimitry Andric   ClobberWalkerBase WalkerBase(this, DT);
2206bdd1243dSDimitry Andric   CachingWalker WalkerLocal(this, &WalkerBase);
220781ad6265SDimitry Andric   OptimizeUses(this, &WalkerLocal, &BatchAA, DT).optimizeUses();
220881ad6265SDimitry Andric   IsOptimized = true;
220981ad6265SDimitry Andric }
221081ad6265SDimitry Andric 
22110b57cec5SDimitry Andric void MemoryAccess::print(raw_ostream &OS) const {
22120b57cec5SDimitry Andric   switch (getValueID()) {
22130b57cec5SDimitry Andric   case MemoryPhiVal: return static_cast<const MemoryPhi *>(this)->print(OS);
22140b57cec5SDimitry Andric   case MemoryDefVal: return static_cast<const MemoryDef *>(this)->print(OS);
22150b57cec5SDimitry Andric   case MemoryUseVal: return static_cast<const MemoryUse *>(this)->print(OS);
22160b57cec5SDimitry Andric   }
22170b57cec5SDimitry Andric   llvm_unreachable("invalid value id");
22180b57cec5SDimitry Andric }
22190b57cec5SDimitry Andric 
22200b57cec5SDimitry Andric void MemoryDef::print(raw_ostream &OS) const {
22210b57cec5SDimitry Andric   MemoryAccess *UO = getDefiningAccess();
22220b57cec5SDimitry Andric 
22230b57cec5SDimitry Andric   auto printID = [&OS](MemoryAccess *A) {
22240b57cec5SDimitry Andric     if (A && A->getID())
22250b57cec5SDimitry Andric       OS << A->getID();
22260b57cec5SDimitry Andric     else
22270b57cec5SDimitry Andric       OS << LiveOnEntryStr;
22280b57cec5SDimitry Andric   };
22290b57cec5SDimitry Andric 
22300b57cec5SDimitry Andric   OS << getID() << " = MemoryDef(";
22310b57cec5SDimitry Andric   printID(UO);
22320b57cec5SDimitry Andric   OS << ")";
22330b57cec5SDimitry Andric 
22340b57cec5SDimitry Andric   if (isOptimized()) {
22350b57cec5SDimitry Andric     OS << "->";
22360b57cec5SDimitry Andric     printID(getOptimized());
22370b57cec5SDimitry Andric   }
22380b57cec5SDimitry Andric }
22390b57cec5SDimitry Andric 
22400b57cec5SDimitry Andric void MemoryPhi::print(raw_ostream &OS) const {
2241fe6060f1SDimitry Andric   ListSeparator LS(",");
22420b57cec5SDimitry Andric   OS << getID() << " = MemoryPhi(";
22430b57cec5SDimitry Andric   for (const auto &Op : operands()) {
22440b57cec5SDimitry Andric     BasicBlock *BB = getIncomingBlock(Op);
22450b57cec5SDimitry Andric     MemoryAccess *MA = cast<MemoryAccess>(Op);
22460b57cec5SDimitry Andric 
2247fe6060f1SDimitry Andric     OS << LS << '{';
22480b57cec5SDimitry Andric     if (BB->hasName())
22490b57cec5SDimitry Andric       OS << BB->getName();
22500b57cec5SDimitry Andric     else
22510b57cec5SDimitry Andric       BB->printAsOperand(OS, false);
22520b57cec5SDimitry Andric     OS << ',';
22530b57cec5SDimitry Andric     if (unsigned ID = MA->getID())
22540b57cec5SDimitry Andric       OS << ID;
22550b57cec5SDimitry Andric     else
22560b57cec5SDimitry Andric       OS << LiveOnEntryStr;
22570b57cec5SDimitry Andric     OS << '}';
22580b57cec5SDimitry Andric   }
22590b57cec5SDimitry Andric   OS << ')';
22600b57cec5SDimitry Andric }
22610b57cec5SDimitry Andric 
22620b57cec5SDimitry Andric void MemoryUse::print(raw_ostream &OS) const {
22630b57cec5SDimitry Andric   MemoryAccess *UO = getDefiningAccess();
22640b57cec5SDimitry Andric   OS << "MemoryUse(";
22650b57cec5SDimitry Andric   if (UO && UO->getID())
22660b57cec5SDimitry Andric     OS << UO->getID();
22670b57cec5SDimitry Andric   else
22680b57cec5SDimitry Andric     OS << LiveOnEntryStr;
22690b57cec5SDimitry Andric   OS << ')';
22700b57cec5SDimitry Andric }
22710b57cec5SDimitry Andric 
22720b57cec5SDimitry Andric void MemoryAccess::dump() const {
22730b57cec5SDimitry Andric // Cannot completely remove virtual function even in release mode.
22740b57cec5SDimitry Andric #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
22750b57cec5SDimitry Andric   print(dbgs());
22760b57cec5SDimitry Andric   dbgs() << "\n";
22770b57cec5SDimitry Andric #endif
22780b57cec5SDimitry Andric }
22790b57cec5SDimitry Andric 
2280e8d8bef9SDimitry Andric class DOTFuncMSSAInfo {
2281e8d8bef9SDimitry Andric private:
2282e8d8bef9SDimitry Andric   const Function &F;
2283e8d8bef9SDimitry Andric   MemorySSAAnnotatedWriter MSSAWriter;
2284e8d8bef9SDimitry Andric 
2285e8d8bef9SDimitry Andric public:
2286e8d8bef9SDimitry Andric   DOTFuncMSSAInfo(const Function &F, MemorySSA &MSSA)
2287e8d8bef9SDimitry Andric       : F(F), MSSAWriter(&MSSA) {}
2288e8d8bef9SDimitry Andric 
2289e8d8bef9SDimitry Andric   const Function *getFunction() { return &F; }
2290e8d8bef9SDimitry Andric   MemorySSAAnnotatedWriter &getWriter() { return MSSAWriter; }
2291e8d8bef9SDimitry Andric };
2292e8d8bef9SDimitry Andric 
2293e8d8bef9SDimitry Andric namespace llvm {
2294e8d8bef9SDimitry Andric 
2295e8d8bef9SDimitry Andric template <>
2296e8d8bef9SDimitry Andric struct GraphTraits<DOTFuncMSSAInfo *> : public GraphTraits<const BasicBlock *> {
2297e8d8bef9SDimitry Andric   static NodeRef getEntryNode(DOTFuncMSSAInfo *CFGInfo) {
2298e8d8bef9SDimitry Andric     return &(CFGInfo->getFunction()->getEntryBlock());
2299e8d8bef9SDimitry Andric   }
2300e8d8bef9SDimitry Andric 
2301e8d8bef9SDimitry Andric   // nodes_iterator/begin/end - Allow iteration over all nodes in the graph.
2302e8d8bef9SDimitry Andric   using nodes_iterator = pointer_iterator<Function::const_iterator>;
2303e8d8bef9SDimitry Andric 
2304e8d8bef9SDimitry Andric   static nodes_iterator nodes_begin(DOTFuncMSSAInfo *CFGInfo) {
2305e8d8bef9SDimitry Andric     return nodes_iterator(CFGInfo->getFunction()->begin());
2306e8d8bef9SDimitry Andric   }
2307e8d8bef9SDimitry Andric 
2308e8d8bef9SDimitry Andric   static nodes_iterator nodes_end(DOTFuncMSSAInfo *CFGInfo) {
2309e8d8bef9SDimitry Andric     return nodes_iterator(CFGInfo->getFunction()->end());
2310e8d8bef9SDimitry Andric   }
2311e8d8bef9SDimitry Andric 
2312e8d8bef9SDimitry Andric   static size_t size(DOTFuncMSSAInfo *CFGInfo) {
2313e8d8bef9SDimitry Andric     return CFGInfo->getFunction()->size();
2314e8d8bef9SDimitry Andric   }
2315e8d8bef9SDimitry Andric };
2316e8d8bef9SDimitry Andric 
2317e8d8bef9SDimitry Andric template <>
2318e8d8bef9SDimitry Andric struct DOTGraphTraits<DOTFuncMSSAInfo *> : public DefaultDOTGraphTraits {
2319e8d8bef9SDimitry Andric 
2320e8d8bef9SDimitry Andric   DOTGraphTraits(bool IsSimple = false) : DefaultDOTGraphTraits(IsSimple) {}
2321e8d8bef9SDimitry Andric 
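  // These traits feed WriteGraph() in MemorySSAPrinterPass::run() below: each
  // CFG node is labeled with the block's IR as annotated by
  // MemorySSAAnnotatedWriter. Illustrative invocation (a sketch; the flag
  // name is assumed from the DotCFGMSSA option declared earlier in this
  // file):
  //
  //   opt -passes='print<memoryssa>' -dot-cfg-mssa=out.dot -disable-output in.ll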
2322e8d8bef9SDimitry Andric   static std::string getGraphName(DOTFuncMSSAInfo *CFGInfo) {
2323e8d8bef9SDimitry Andric     return "MSSA CFG for '" + CFGInfo->getFunction()->getName().str() +
2324e8d8bef9SDimitry Andric            "' function";
2325e8d8bef9SDimitry Andric   }
2326e8d8bef9SDimitry Andric 
2327e8d8bef9SDimitry Andric   std::string getNodeLabel(const BasicBlock *Node, DOTFuncMSSAInfo *CFGInfo) {
2328e8d8bef9SDimitry Andric     return DOTGraphTraits<DOTFuncInfo *>::getCompleteNodeLabel(
2329e8d8bef9SDimitry Andric         Node, nullptr,
2330e8d8bef9SDimitry Andric         [CFGInfo](raw_string_ostream &OS, const BasicBlock &BB) -> void {
2331e8d8bef9SDimitry Andric           BB.print(OS, &CFGInfo->getWriter(), true, true);
2332e8d8bef9SDimitry Andric         },
2333e8d8bef9SDimitry Andric         [](std::string &S, unsigned &I, unsigned Idx) -> void {
2334e8d8bef9SDimitry Andric           std::string Str = S.substr(I, Idx - I);
2335e8d8bef9SDimitry Andric           StringRef SR = Str;
2336e8d8bef9SDimitry Andric           if (SR.count(" = MemoryDef(") || SR.count(" = MemoryPhi(") ||
2337e8d8bef9SDimitry Andric               SR.count("MemoryUse("))
2338e8d8bef9SDimitry Andric             return;
2339e8d8bef9SDimitry Andric           DOTGraphTraits<DOTFuncInfo *>::eraseComment(S, I, Idx);
2340e8d8bef9SDimitry Andric         });
2341e8d8bef9SDimitry Andric   }
2342e8d8bef9SDimitry Andric 
2343e8d8bef9SDimitry Andric   static std::string getEdgeSourceLabel(const BasicBlock *Node,
2344e8d8bef9SDimitry Andric                                         const_succ_iterator I) {
2345e8d8bef9SDimitry Andric     return DOTGraphTraits<DOTFuncInfo *>::getEdgeSourceLabel(Node, I);
2346e8d8bef9SDimitry Andric   }
2347e8d8bef9SDimitry Andric 
2348e8d8bef9SDimitry Andric   /// Display the raw branch weights from PGO.
2349e8d8bef9SDimitry Andric   std::string getEdgeAttributes(const BasicBlock *Node, const_succ_iterator I,
2350e8d8bef9SDimitry Andric                                 DOTFuncMSSAInfo *CFGInfo) {
2351e8d8bef9SDimitry Andric     return "";
2352e8d8bef9SDimitry Andric   }
2353e8d8bef9SDimitry Andric 
2354e8d8bef9SDimitry Andric   std::string getNodeAttributes(const BasicBlock *Node,
2355e8d8bef9SDimitry Andric                                 DOTFuncMSSAInfo *CFGInfo) {
2356e8d8bef9SDimitry Andric     return getNodeLabel(Node, CFGInfo).find(';') != std::string::npos
2357e8d8bef9SDimitry Andric                ? "style=filled, fillcolor=lightpink"
"style=filled, fillcolor=lightpink" 2358e8d8bef9SDimitry Andric : ""; 2359e8d8bef9SDimitry Andric } 2360e8d8bef9SDimitry Andric }; 2361e8d8bef9SDimitry Andric 2362e8d8bef9SDimitry Andric } // namespace llvm 2363e8d8bef9SDimitry Andric 23640b57cec5SDimitry Andric AnalysisKey MemorySSAAnalysis::Key; 23650b57cec5SDimitry Andric 23660b57cec5SDimitry Andric MemorySSAAnalysis::Result MemorySSAAnalysis::run(Function &F, 23670b57cec5SDimitry Andric FunctionAnalysisManager &AM) { 23680b57cec5SDimitry Andric auto &DT = AM.getResult<DominatorTreeAnalysis>(F); 23690b57cec5SDimitry Andric auto &AA = AM.getResult<AAManager>(F); 23708bcb0991SDimitry Andric return MemorySSAAnalysis::Result(std::make_unique<MemorySSA>(F, &AA, &DT)); 23710b57cec5SDimitry Andric } 23720b57cec5SDimitry Andric 23730b57cec5SDimitry Andric bool MemorySSAAnalysis::Result::invalidate( 23740b57cec5SDimitry Andric Function &F, const PreservedAnalyses &PA, 23750b57cec5SDimitry Andric FunctionAnalysisManager::Invalidator &Inv) { 23760b57cec5SDimitry Andric auto PAC = PA.getChecker<MemorySSAAnalysis>(); 23770b57cec5SDimitry Andric return !(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Function>>()) || 23780b57cec5SDimitry Andric Inv.invalidate<AAManager>(F, PA) || 23790b57cec5SDimitry Andric Inv.invalidate<DominatorTreeAnalysis>(F, PA); 23800b57cec5SDimitry Andric } 23810b57cec5SDimitry Andric 23820b57cec5SDimitry Andric PreservedAnalyses MemorySSAPrinterPass::run(Function &F, 23830b57cec5SDimitry Andric FunctionAnalysisManager &AM) { 2384e8d8bef9SDimitry Andric auto &MSSA = AM.getResult<MemorySSAAnalysis>(F).getMSSA(); 238506c3fb27SDimitry Andric if (EnsureOptimizedUses) 238681ad6265SDimitry Andric MSSA.ensureOptimizedUses(); 2387e8d8bef9SDimitry Andric if (DotCFGMSSA != "") { 2388e8d8bef9SDimitry Andric DOTFuncMSSAInfo CFGInfo(F, MSSA); 2389e8d8bef9SDimitry Andric WriteGraph(&CFGInfo, "", false, "MSSA", DotCFGMSSA); 2390e8d8bef9SDimitry Andric } else { 23910b57cec5SDimitry Andric OS << "MemorySSA for function: " << F.getName() << "\n"; 2392e8d8bef9SDimitry Andric MSSA.print(OS); 2393e8d8bef9SDimitry Andric } 23940b57cec5SDimitry Andric 23950b57cec5SDimitry Andric return PreservedAnalyses::all(); 23960b57cec5SDimitry Andric } 23970b57cec5SDimitry Andric 2398349cc55cSDimitry Andric PreservedAnalyses MemorySSAWalkerPrinterPass::run(Function &F, 2399349cc55cSDimitry Andric FunctionAnalysisManager &AM) { 2400349cc55cSDimitry Andric auto &MSSA = AM.getResult<MemorySSAAnalysis>(F).getMSSA(); 2401349cc55cSDimitry Andric OS << "MemorySSA (walker) for function: " << F.getName() << "\n"; 2402349cc55cSDimitry Andric MemorySSAWalkerAnnotatedWriter Writer(&MSSA); 2403349cc55cSDimitry Andric F.print(OS, &Writer); 2404349cc55cSDimitry Andric 2405349cc55cSDimitry Andric return PreservedAnalyses::all(); 2406349cc55cSDimitry Andric } 2407349cc55cSDimitry Andric 24080b57cec5SDimitry Andric PreservedAnalyses MemorySSAVerifierPass::run(Function &F, 24090b57cec5SDimitry Andric FunctionAnalysisManager &AM) { 24100b57cec5SDimitry Andric AM.getResult<MemorySSAAnalysis>(F).getMSSA().verifyMemorySSA(); 24110b57cec5SDimitry Andric 24120b57cec5SDimitry Andric return PreservedAnalyses::all(); 24130b57cec5SDimitry Andric } 24140b57cec5SDimitry Andric 24150b57cec5SDimitry Andric char MemorySSAWrapperPass::ID = 0; 24160b57cec5SDimitry Andric 24170b57cec5SDimitry Andric MemorySSAWrapperPass::MemorySSAWrapperPass() : FunctionPass(ID) { 24180b57cec5SDimitry Andric initializeMemorySSAWrapperPassPass(*PassRegistry::getPassRegistry()); 24190b57cec5SDimitry Andric } 
24200b57cec5SDimitry Andric 
24210b57cec5SDimitry Andric void MemorySSAWrapperPass::releaseMemory() { MSSA.reset(); }
24220b57cec5SDimitry Andric 
24230b57cec5SDimitry Andric void MemorySSAWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
24240b57cec5SDimitry Andric   AU.setPreservesAll();
24250b57cec5SDimitry Andric   AU.addRequiredTransitive<DominatorTreeWrapperPass>();
24260b57cec5SDimitry Andric   AU.addRequiredTransitive<AAResultsWrapperPass>();
24270b57cec5SDimitry Andric }
24280b57cec5SDimitry Andric 
24290b57cec5SDimitry Andric bool MemorySSAWrapperPass::runOnFunction(Function &F) {
24300b57cec5SDimitry Andric   auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
24310b57cec5SDimitry Andric   auto &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
24320b57cec5SDimitry Andric   MSSA.reset(new MemorySSA(F, &AA, &DT));
24330b57cec5SDimitry Andric   return false;
24340b57cec5SDimitry Andric }
24350b57cec5SDimitry Andric 
243647395794SDimitry Andric void MemorySSAWrapperPass::verifyAnalysis() const {
243747395794SDimitry Andric   if (VerifyMemorySSA)
243847395794SDimitry Andric     MSSA->verifyMemorySSA();
243947395794SDimitry Andric }
24400b57cec5SDimitry Andric 
24410b57cec5SDimitry Andric void MemorySSAWrapperPass::print(raw_ostream &OS, const Module *M) const {
24420b57cec5SDimitry Andric   MSSA->print(OS);
24430b57cec5SDimitry Andric }
24440b57cec5SDimitry Andric 
24450b57cec5SDimitry Andric MemorySSAWalker::MemorySSAWalker(MemorySSA *M) : MSSA(M) {}
24460b57cec5SDimitry Andric 
24470b57cec5SDimitry Andric /// Walk the use-def chains starting at \p StartingAccess and find
24480b57cec5SDimitry Andric /// the MemoryAccess that actually clobbers Loc.
24490b57cec5SDimitry Andric ///
24500b57cec5SDimitry Andric /// \returns our clobbering memory access
2451bdd1243dSDimitry Andric MemoryAccess *MemorySSA::ClobberWalkerBase::getClobberingMemoryAccessBase(
24520b57cec5SDimitry Andric     MemoryAccess *StartingAccess, const MemoryLocation &Loc,
2453bdd1243dSDimitry Andric     BatchAAResults &BAA, unsigned &UpwardWalkLimit) {
2454fe6060f1SDimitry Andric   assert(!isa<MemoryUse>(StartingAccess) && "Use cannot be defining access");
24550b57cec5SDimitry Andric 
24565f757f3fSDimitry Andric   // If location is undefined, conservatively return starting access.
24575f757f3fSDimitry Andric   if (Loc.Ptr == nullptr)
24585f757f3fSDimitry Andric     return StartingAccess;
24595f757f3fSDimitry Andric 
2460fe6060f1SDimitry Andric   Instruction *I = nullptr;
2461fe6060f1SDimitry Andric   if (auto *StartingUseOrDef = dyn_cast<MemoryUseOrDef>(StartingAccess)) {
24620b57cec5SDimitry Andric     if (MSSA->isLiveOnEntryDef(StartingUseOrDef))
24630b57cec5SDimitry Andric       return StartingUseOrDef;
24640b57cec5SDimitry Andric 
2465fe6060f1SDimitry Andric     I = StartingUseOrDef->getMemoryInst();
24660b57cec5SDimitry Andric 
2467fe6060f1SDimitry Andric     // Conservatively, fences are always clobbers, so don't perform the walk if
2468fe6060f1SDimitry Andric     // we hit a fence.
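    // For instance (hypothetical IR), at `fence seq_cst` there is no pointer
    // operand to feed the alias queries, so the walk is skipped and the
    // fence's own access is returned below.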
24690b57cec5SDimitry Andric     if (!isa<CallBase>(I) && I->isFenceLike())
24700b57cec5SDimitry Andric       return StartingUseOrDef;
2471fe6060f1SDimitry Andric   }
24720b57cec5SDimitry Andric 
24730b57cec5SDimitry Andric   UpwardsMemoryQuery Q;
2474fe6060f1SDimitry Andric   Q.OriginalAccess = StartingAccess;
24750b57cec5SDimitry Andric   Q.StartingLoc = Loc;
2476e8d8bef9SDimitry Andric   Q.Inst = nullptr;
24770b57cec5SDimitry Andric   Q.IsCall = false;
24780b57cec5SDimitry Andric 
24790b57cec5SDimitry Andric   // Unlike the other function, do not walk to the def of a def, because we are
24800b57cec5SDimitry Andric   // handed something we already believe is the clobbering access.
24810b57cec5SDimitry Andric   // We never set SkipSelf to true in Q in this method.
24820b57cec5SDimitry Andric   MemoryAccess *Clobber =
2483bdd1243dSDimitry Andric       Walker.findClobber(BAA, StartingAccess, Q, UpwardWalkLimit);
2484fe6060f1SDimitry Andric   LLVM_DEBUG({
2485fe6060f1SDimitry Andric     dbgs() << "Clobber starting at access " << *StartingAccess << "\n";
2486fe6060f1SDimitry Andric     if (I)
2487fe6060f1SDimitry Andric       dbgs() << " for instruction " << *I << "\n";
2488fe6060f1SDimitry Andric     dbgs() << " is " << *Clobber << "\n";
2489fe6060f1SDimitry Andric   });
24900b57cec5SDimitry Andric   return Clobber;
24910b57cec5SDimitry Andric }
24920b57cec5SDimitry Andric 
2493349cc55cSDimitry Andric static const Instruction *
2494349cc55cSDimitry Andric getInvariantGroupClobberingInstruction(Instruction &I, DominatorTree &DT) {
2495349cc55cSDimitry Andric   if (!I.hasMetadata(LLVMContext::MD_invariant_group) || I.isVolatile())
2496349cc55cSDimitry Andric     return nullptr;
2497349cc55cSDimitry Andric 
2498349cc55cSDimitry Andric   // We consider bitcasts and zero GEPs to be the same pointer value. Start by
2499349cc55cSDimitry Andric   // stripping bitcasts and zero GEPs, then we will recursively look at loads
2500349cc55cSDimitry Andric   // and stores through bitcasts and zero GEPs.
2501349cc55cSDimitry Andric   Value *PointerOperand = getLoadStorePointerOperand(&I)->stripPointerCasts();
2502349cc55cSDimitry Andric 
2503349cc55cSDimitry Andric   // It's not safe to walk the use list of a global value because function
2504349cc55cSDimitry Andric   // passes aren't allowed to look outside their functions.
2505349cc55cSDimitry Andric   // FIXME: this could be fixed by filtering instructions from outside of
2506349cc55cSDimitry Andric   // the current function.
2507349cc55cSDimitry Andric   if (isa<Constant>(PointerOperand))
2508349cc55cSDimitry Andric     return nullptr;
2509349cc55cSDimitry Andric 
2510349cc55cSDimitry Andric   // Queue to process all pointers that are equivalent to the load operand.
2511349cc55cSDimitry Andric   SmallVector<const Value *, 8> PointerUsesQueue;
2512349cc55cSDimitry Andric   PointerUsesQueue.push_back(PointerOperand);
2513349cc55cSDimitry Andric 
2514349cc55cSDimitry Andric   const Instruction *MostDominatingInstruction = &I;
2515349cc55cSDimitry Andric 
2516349cc55cSDimitry Andric   // FIXME: This loop is O(n^2) because dominates can be O(n), and in the worst
2517349cc55cSDimitry Andric   // case we will see all the instructions. It may not matter in practice. If
2518349cc55cSDimitry Andric   // it does, we will have to support MemorySSA construction and updates.
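  // Minimal illustration of the walk on hypothetical IR:
  //
  //   store i32 0, ptr %p, !invariant.group !0
  //   ...
  //   %v = load i32, ptr %p, !invariant.group !0
  //
  // The load's clobber can be answered by the dominating store, because
  // invariant.group promises the value at %p is the same at both points.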
2519349cc55cSDimitry Andric   while (!PointerUsesQueue.empty()) {
2520349cc55cSDimitry Andric     const Value *Ptr = PointerUsesQueue.pop_back_val();
2521349cc55cSDimitry Andric     assert(Ptr && !isa<GlobalValue>(Ptr) &&
2522349cc55cSDimitry Andric            "Null or GlobalValue should not be inserted");
2523349cc55cSDimitry Andric 
2524349cc55cSDimitry Andric     for (const User *Us : Ptr->users()) {
2525349cc55cSDimitry Andric       auto *U = dyn_cast<Instruction>(Us);
2526349cc55cSDimitry Andric       if (!U || U == &I || !DT.dominates(U, MostDominatingInstruction))
2527349cc55cSDimitry Andric         continue;
2528349cc55cSDimitry Andric 
2529349cc55cSDimitry Andric       // Add bitcasts and zero GEPs to queue.
2530349cc55cSDimitry Andric       if (isa<BitCastInst>(U)) {
2531349cc55cSDimitry Andric         PointerUsesQueue.push_back(U);
2532349cc55cSDimitry Andric         continue;
2533349cc55cSDimitry Andric       }
2534349cc55cSDimitry Andric       if (auto *GEP = dyn_cast<GetElementPtrInst>(U)) {
2535349cc55cSDimitry Andric         if (GEP->hasAllZeroIndices())
2536349cc55cSDimitry Andric           PointerUsesQueue.push_back(U);
2537349cc55cSDimitry Andric         continue;
2538349cc55cSDimitry Andric       }
2539349cc55cSDimitry Andric 
2540349cc55cSDimitry Andric       // If we hit a load/store with invariant.group metadata and the same
2541349cc55cSDimitry Andric       // pointer operand, we can assume that the value pointed to by the
2542349cc55cSDimitry Andric       // pointer operand didn't change.
2543349cc55cSDimitry Andric       if (U->hasMetadata(LLVMContext::MD_invariant_group) &&
2544349cc55cSDimitry Andric           getLoadStorePointerOperand(U) == Ptr && !U->isVolatile()) {
2545349cc55cSDimitry Andric         MostDominatingInstruction = U;
2546349cc55cSDimitry Andric       }
2547349cc55cSDimitry Andric     }
2548349cc55cSDimitry Andric   }
2549349cc55cSDimitry Andric   return MostDominatingInstruction == &I ? nullptr : MostDominatingInstruction;
2550349cc55cSDimitry Andric }
2551349cc55cSDimitry Andric 
2552bdd1243dSDimitry Andric MemoryAccess *MemorySSA::ClobberWalkerBase::getClobberingMemoryAccessBase(
2553bdd1243dSDimitry Andric     MemoryAccess *MA, BatchAAResults &BAA, unsigned &UpwardWalkLimit,
2554bdd1243dSDimitry Andric     bool SkipSelf, bool UseInvariantGroup) {
25550b57cec5SDimitry Andric   auto *StartingAccess = dyn_cast<MemoryUseOrDef>(MA);
25560b57cec5SDimitry Andric   // If this is a MemoryPhi, we can't do anything.
25570b57cec5SDimitry Andric   if (!StartingAccess)
25580b57cec5SDimitry Andric     return MA;
25590b57cec5SDimitry Andric 
2560349cc55cSDimitry Andric   if (UseInvariantGroup) {
2561349cc55cSDimitry Andric     if (auto *I = getInvariantGroupClobberingInstruction(
2562349cc55cSDimitry Andric             *StartingAccess->getMemoryInst(), MSSA->getDomTree())) {
2563349cc55cSDimitry Andric       assert(isa<LoadInst>(I) || isa<StoreInst>(I));
2564349cc55cSDimitry Andric 
2565349cc55cSDimitry Andric       auto *ClobberMA = MSSA->getMemoryAccess(I);
2566349cc55cSDimitry Andric       assert(ClobberMA);
2567349cc55cSDimitry Andric       if (isa<MemoryUse>(ClobberMA))
2568349cc55cSDimitry Andric         return ClobberMA->getDefiningAccess();
2569349cc55cSDimitry Andric       return ClobberMA;
2570349cc55cSDimitry Andric     }
2571349cc55cSDimitry Andric   }
2572349cc55cSDimitry Andric 
25730b57cec5SDimitry Andric   bool IsOptimized = false;
25740b57cec5SDimitry Andric 
25750b57cec5SDimitry Andric   // If this is an already optimized use or def, return the optimized result.
25760b57cec5SDimitry Andric   // Note: Currently, we store the optimized def result in a separate field,
25770b57cec5SDimitry Andric   // since we can't use the defining access.
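  // Illustratively, a MemoryDef printed as "2 = MemoryDef(1)->3" (see
  // MemoryDef::print above) has defining access 1 and cached optimized
  // clobber 3; the fast path below returns the cached 3 without re-walking.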
25780b57cec5SDimitry Andric   if (StartingAccess->isOptimized()) {
25790b57cec5SDimitry Andric     if (!SkipSelf || !isa<MemoryDef>(StartingAccess))
25800b57cec5SDimitry Andric       return StartingAccess->getOptimized();
25810b57cec5SDimitry Andric     IsOptimized = true;
25820b57cec5SDimitry Andric   }
25830b57cec5SDimitry Andric 
25840b57cec5SDimitry Andric   const Instruction *I = StartingAccess->getMemoryInst();
25850b57cec5SDimitry Andric   // We can't sanely do anything with a fence, since fences conservatively
25860b57cec5SDimitry Andric   // clobber all memory and have no locations to get pointers from to try to
25870b57cec5SDimitry Andric   // disambiguate.
25880b57cec5SDimitry Andric   if (!isa<CallBase>(I) && I->isFenceLike())
25890b57cec5SDimitry Andric     return StartingAccess;
25900b57cec5SDimitry Andric 
25910b57cec5SDimitry Andric   UpwardsMemoryQuery Q(I, StartingAccess);
25920b57cec5SDimitry Andric 
2593bdd1243dSDimitry Andric   if (isUseTriviallyOptimizableToLiveOnEntry(BAA, I)) {
25940b57cec5SDimitry Andric     MemoryAccess *LiveOnEntry = MSSA->getLiveOnEntryDef();
25950b57cec5SDimitry Andric     StartingAccess->setOptimized(LiveOnEntry);
25960b57cec5SDimitry Andric     return LiveOnEntry;
25970b57cec5SDimitry Andric   }
25980b57cec5SDimitry Andric 
25990b57cec5SDimitry Andric   MemoryAccess *OptimizedAccess;
26000b57cec5SDimitry Andric   if (!IsOptimized) {
26010b57cec5SDimitry Andric     // Start with the thing we already think clobbers this location.
26020b57cec5SDimitry Andric     MemoryAccess *DefiningAccess = StartingAccess->getDefiningAccess();
26030b57cec5SDimitry Andric 
26040b57cec5SDimitry Andric     // At this point, DefiningAccess may be the live-on-entry def.
26050b57cec5SDimitry Andric     // If it is, we will not get a better result.
26060b57cec5SDimitry Andric     if (MSSA->isLiveOnEntryDef(DefiningAccess)) {
26070b57cec5SDimitry Andric       StartingAccess->setOptimized(DefiningAccess);
26080b57cec5SDimitry Andric       return DefiningAccess;
26090b57cec5SDimitry Andric     }
26100b57cec5SDimitry Andric 
2611bdd1243dSDimitry Andric     OptimizedAccess =
2612bdd1243dSDimitry Andric         Walker.findClobber(BAA, DefiningAccess, Q, UpwardWalkLimit);
26130b57cec5SDimitry Andric     StartingAccess->setOptimized(OptimizedAccess);
26140b57cec5SDimitry Andric   } else
26150b57cec5SDimitry Andric     OptimizedAccess = StartingAccess->getOptimized();
26160b57cec5SDimitry Andric 
26170b57cec5SDimitry Andric   LLVM_DEBUG(dbgs() << "Starting Memory SSA clobber for " << *I << " is ");
26180b57cec5SDimitry Andric   LLVM_DEBUG(dbgs() << *StartingAccess << "\n");
26190b57cec5SDimitry Andric   LLVM_DEBUG(dbgs() << "Optimized Memory SSA clobber for " << *I << " is ");
26200b57cec5SDimitry Andric   LLVM_DEBUG(dbgs() << *OptimizedAccess << "\n");
26210b57cec5SDimitry Andric 
26220b57cec5SDimitry Andric   MemoryAccess *Result;
26230b57cec5SDimitry Andric   if (SkipSelf && isa<MemoryPhi>(OptimizedAccess) &&
26240b57cec5SDimitry Andric       isa<MemoryDef>(StartingAccess) && UpwardWalkLimit) {
26250b57cec5SDimitry Andric     assert(isa<MemoryDef>(Q.OriginalAccess));
26260b57cec5SDimitry Andric     Q.SkipSelfAccess = true;
2627bdd1243dSDimitry Andric     Result = Walker.findClobber(BAA, OptimizedAccess, Q, UpwardWalkLimit);
26280b57cec5SDimitry Andric   } else
26290b57cec5SDimitry Andric     Result = OptimizedAccess;
26300b57cec5SDimitry Andric 
26310b57cec5SDimitry Andric   LLVM_DEBUG(dbgs() << "Result Memory SSA clobber [SkipSelf = " << SkipSelf);
26320b57cec5SDimitry Andric   LLVM_DEBUG(dbgs() << "] for " << *I << " is " << *Result << "\n");
26330b57cec5SDimitry Andric 
26340b57cec5SDimitry Andric   return Result;
26350b57cec5SDimitry Andric }
26360b57cec5SDimitry Andric 
26370b57cec5SDimitry Andric MemoryAccess *
2638bdd1243dSDimitry Andric DoNothingMemorySSAWalker::getClobberingMemoryAccess(MemoryAccess *MA,
2639bdd1243dSDimitry Andric                                                     BatchAAResults &) {
26400b57cec5SDimitry Andric   if (auto *Use = dyn_cast<MemoryUseOrDef>(MA))
26410b57cec5SDimitry Andric     return Use->getDefiningAccess();
26420b57cec5SDimitry Andric   return MA;
26430b57cec5SDimitry Andric }
26440b57cec5SDimitry Andric 
26450b57cec5SDimitry Andric MemoryAccess *DoNothingMemorySSAWalker::getClobberingMemoryAccess(
2646bdd1243dSDimitry Andric     MemoryAccess *StartingAccess, const MemoryLocation &, BatchAAResults &) {
26470b57cec5SDimitry Andric   if (auto *Use = dyn_cast<MemoryUseOrDef>(StartingAccess))
26480b57cec5SDimitry Andric     return Use->getDefiningAccess();
26490b57cec5SDimitry Andric   return StartingAccess;
26500b57cec5SDimitry Andric }
26510b57cec5SDimitry Andric 
26520b57cec5SDimitry Andric void MemoryPhi::deleteMe(DerivedUser *Self) {
26530b57cec5SDimitry Andric   delete static_cast<MemoryPhi *>(Self);
26540b57cec5SDimitry Andric }
26550b57cec5SDimitry Andric 
26560b57cec5SDimitry Andric void MemoryDef::deleteMe(DerivedUser *Self) {
26570b57cec5SDimitry Andric   delete static_cast<MemoryDef *>(Self);
26580b57cec5SDimitry Andric }
26590b57cec5SDimitry Andric 
26600b57cec5SDimitry Andric void MemoryUse::deleteMe(DerivedUser *Self) {
26610b57cec5SDimitry Andric   delete static_cast<MemoryUse *>(Self);
26620b57cec5SDimitry Andric }
2663e8d8bef9SDimitry Andric 
2664bdd1243dSDimitry Andric bool upward_defs_iterator::IsGuaranteedLoopInvariant(const Value *Ptr) const {
2665bdd1243dSDimitry Andric   auto IsGuaranteedLoopInvariantBase = [](const Value *Ptr) {
2666e8d8bef9SDimitry Andric     Ptr = Ptr->stripPointerCasts();
2667e8d8bef9SDimitry Andric     if (!isa<Instruction>(Ptr))
2668e8d8bef9SDimitry Andric       return true;
2669e8d8bef9SDimitry Andric     return isa<AllocaInst>(Ptr);
2670e8d8bef9SDimitry Andric   };
2671e8d8bef9SDimitry Andric 
2672e8d8bef9SDimitry Andric   Ptr = Ptr->stripPointerCasts();
2673fe6060f1SDimitry Andric   if (auto *I = dyn_cast<Instruction>(Ptr)) {
2674fe6060f1SDimitry Andric     if (I->getParent()->isEntryBlock())
2675fe6060f1SDimitry Andric       return true;
2676fe6060f1SDimitry Andric   }
2677e8d8bef9SDimitry Andric   if (auto *GEP = dyn_cast<GEPOperator>(Ptr)) {
2678e8d8bef9SDimitry Andric     return IsGuaranteedLoopInvariantBase(GEP->getPointerOperand()) &&
2679e8d8bef9SDimitry Andric            GEP->hasAllConstantIndices();
2680e8d8bef9SDimitry Andric   }
2681e8d8bef9SDimitry Andric   return IsGuaranteedLoopInvariantBase(Ptr);
2682e8d8bef9SDimitry Andric }
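
// Illustrative intuition for the predicate above, on hypothetical IR:
//
//   %a = alloca i32                                ; invariant: entry block
//   %g = getelementptr inbounds i32, ptr %a, i64 0 ; invariant: constant
//                                                  ; indices on an alloca
//   %p = phi ptr [ %x, %bb1 ], [ %y, %bb2 ]        ; not provably invariant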