//===- Loads.cpp - Local load analysis ------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines simple local analyses for load instructions.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumeBundleQueries.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"

using namespace llvm;

static bool isAligned(const Value *Base, const APInt &Offset, Align Alignment,
                      const DataLayout &DL) {
  Align BA = Base->getPointerAlignment(DL);
  return BA >= Alignment && Offset.isAligned(BA);
}

/// Test if V is always a pointer to allocated and suitably aligned memory for
/// a simple load or store.
static bool isDereferenceableAndAlignedPointer(
    const Value *V, Align Alignment, const APInt &Size, const DataLayout &DL,
    const Instruction *CtxI, AssumptionCache *AC, const DominatorTree *DT,
    const TargetLibraryInfo *TLI, SmallPtrSetImpl<const Value *> &Visited,
    unsigned MaxDepth) {
  assert(V->getType()->isPointerTy() && "Base must be pointer");

  // Recursion limit.
  if (MaxDepth-- == 0)
    return false;

  // Already visited? Bail out, we've likely hit unreachable code.
  if (!Visited.insert(V).second)
    return false;

  // Note that it is not safe to speculate into a malloc'd region because
  // malloc may return null.

  // For GEPs, determine if the indexing lands within the allocated object.
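  // As an illustrative sketch (the function, attributes, and names below are
  // hypothetical, not taken from this file): given
  //   define void @f(ptr dereferenceable(16) align 8 %p) {
  //     %g = getelementptr i8, ptr %p, i64 8
  //     %v = load i64, ptr %g
  //   }
  // the load is safe because 8 (offset) + 8 (size) <= 16 known
  // dereferenceable bytes, and the offset is a multiple of the alignment.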
  if (const GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
    const Value *Base = GEP->getPointerOperand();

    APInt Offset(DL.getIndexTypeSizeInBits(GEP->getType()), 0);
    if (!GEP->accumulateConstantOffset(DL, Offset) || Offset.isNegative() ||
        !Offset.urem(APInt(Offset.getBitWidth(), Alignment.value()))
             .isMinValue())
      return false;

    // If the base pointer is dereferenceable for Offset+Size bytes, then the
    // GEP (== Base + Offset) is dereferenceable for Size bytes. If the base
    // pointer is aligned to Align bytes and the Offset is divisible by Align,
    // then the GEP (== Base + Offset == k_0 * Align + k_1 * Align) is also
    // aligned to Align bytes.

    // Offset and Size may have different bit widths if we have visited an
    // addrspacecast, so we can't do arithmetic directly on the APInt values.
    return isDereferenceableAndAlignedPointer(
        Base, Alignment, Offset + Size.sextOrTrunc(Offset.getBitWidth()), DL,
        CtxI, AC, DT, TLI, Visited, MaxDepth);
  }

  // Bitcast instructions are no-ops as far as dereferenceability is concerned.
  if (const BitCastOperator *BC = dyn_cast<BitCastOperator>(V)) {
    if (BC->getSrcTy()->isPointerTy())
      return isDereferenceableAndAlignedPointer(
          BC->getOperand(0), Alignment, Size, DL, CtxI, AC, DT, TLI, Visited,
          MaxDepth);
  }

  // Recurse into both arms of a select.
  if (const SelectInst *Sel = dyn_cast<SelectInst>(V)) {
    return isDereferenceableAndAlignedPointer(Sel->getTrueValue(), Alignment,
                                              Size, DL, CtxI, AC, DT, TLI,
                                              Visited, MaxDepth) &&
           isDereferenceableAndAlignedPointer(Sel->getFalseValue(), Alignment,
                                              Size, DL, CtxI, AC, DT, TLI,
                                              Visited, MaxDepth);
  }

  bool CheckForNonNull, CheckForFreed;
  APInt KnownDerefBytes(Size.getBitWidth(),
                        V->getPointerDereferenceableBytes(DL, CheckForNonNull,
                                                          CheckForFreed));
  if (KnownDerefBytes.getBoolValue() && KnownDerefBytes.uge(Size) &&
      !CheckForFreed)
    if (!CheckForNonNull ||
        isKnownNonZero(V, SimplifyQuery(DL, DT, AC, CtxI))) {
      // As we recursed through GEPs to get here, we've incrementally checked
      // that each step advanced by a multiple of the alignment. If our base is
      // properly aligned, then the original offset accessed must also be.
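      // E.g. with Alignment = 8 (a sketch of the invariant, not code from
      // this file): a base aligned to 8 combined with GEP offsets that are
      // each multiples of 8 keeps every derived address aligned to 8.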
      APInt Offset(DL.getTypeStoreSizeInBits(V->getType()), 0);
      return isAligned(V, Offset, Alignment, DL);
    }

  /// TODO: Refactor this function to be able to search independently for the
  /// dereferenceability and alignment requirements.

  if (const auto *Call = dyn_cast<CallBase>(V)) {
    if (auto *RP = getArgumentAliasingToReturnedPointer(Call, true))
      return isDereferenceableAndAlignedPointer(RP, Alignment, Size, DL, CtxI,
                                                AC, DT, TLI, Visited, MaxDepth);

    // If we have a call we can't recurse through, check to see if this is an
    // allocation function for which we can establish a minimum object size.
    // Such a minimum object size is analogous to a deref_or_null attribute in
    // that we still need to prove the result non-null at the point of use.
    // NOTE: We can only use the object size as a base fact as we a) need to
    // prove alignment too, and b) don't want the compile time impact of a
    // separate recursive walk.
    ObjectSizeOpts Opts;
    // TODO: It may be okay to round to align, but that would imply that
    // accessing slightly out of bounds was legal, and we're currently
    // inconsistent about that. For the moment, be conservative.
    Opts.RoundToAlign = false;
    Opts.NullIsUnknownSize = true;
    uint64_t ObjSize;
    if (getObjectSize(V, ObjSize, DL, TLI, Opts)) {
      APInt KnownDerefBytes(Size.getBitWidth(), ObjSize);
      if (KnownDerefBytes.getBoolValue() && KnownDerefBytes.uge(Size) &&
          isKnownNonZero(V, SimplifyQuery(DL, DT, AC, CtxI)) &&
          !V->canBeFreed()) {
        // As we recursed through GEPs to get here, we've incrementally
        // checked that each step advanced by a multiple of the alignment. If
        // our base is properly aligned, then the original offset accessed
        // must also be.
        APInt Offset(DL.getTypeStoreSizeInBits(V->getType()), 0);
        return isAligned(V, Offset, Alignment, DL);
      }
    }
  }

  // For gc.relocate, look through relocations.
  if (const GCRelocateInst *RelocateInst = dyn_cast<GCRelocateInst>(V))
    return isDereferenceableAndAlignedPointer(RelocateInst->getDerivedPtr(),
                                              Alignment, Size, DL, CtxI, AC, DT,
                                              TLI, Visited, MaxDepth);

  if (const AddrSpaceCastOperator *ASC = dyn_cast<AddrSpaceCastOperator>(V))
    return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Alignment,
                                              Size, DL, CtxI, AC, DT, TLI,
                                              Visited, MaxDepth);

  if (CtxI) {
    /// Look through assumes to see if both dereferenceability and alignment
    /// can be proven by an assume.
    RetainedKnowledge AlignRK;
    RetainedKnowledge DerefRK;
    if (getKnowledgeForValue(
            V, {Attribute::Dereferenceable, Attribute::Alignment}, AC,
            [&](RetainedKnowledge RK, Instruction *Assume, auto) {
              if (!isValidAssumeForContext(Assume, CtxI, DT))
                return false;
              if (RK.AttrKind == Attribute::Alignment)
                AlignRK = std::max(AlignRK, RK);
              if (RK.AttrKind == Attribute::Dereferenceable)
                DerefRK = std::max(DerefRK, RK);
              if (AlignRK && DerefRK && AlignRK.ArgValue >= Alignment.value() &&
                  DerefRK.ArgValue >= Size.getZExtValue())
                return true; // We have found what we needed so we stop looking.
              return false;  // Other assumes may have better information, so
                             // keep looking.
            }))
      return true;
  }

  // If we don't know, assume the worst.
  return false;
}

bool llvm::isDereferenceableAndAlignedPointer(
    const Value *V, Align Alignment, const APInt &Size, const DataLayout &DL,
    const Instruction *CtxI, AssumptionCache *AC, const DominatorTree *DT,
    const TargetLibraryInfo *TLI) {
  // Note: At the moment, Size can be zero. This ends up being interpreted as
  // a query of whether [Base, V] is dereferenceable and V is aligned (since
  // that's what the implementation happened to do). It's unclear if this is
  // the desired semantic, but at least SelectionDAG does exercise this case.

  SmallPtrSet<const Value *, 32> Visited;
  return ::isDereferenceableAndAlignedPointer(V, Alignment, Size, DL, CtxI, AC,
                                              DT, TLI, Visited, 16);
}

bool llvm::isDereferenceableAndAlignedPointer(
    const Value *V, Type *Ty, Align Alignment, const DataLayout &DL,
    const Instruction *CtxI, AssumptionCache *AC, const DominatorTree *DT,
    const TargetLibraryInfo *TLI) {
  // For unsized types or scalable vectors we don't know exactly how many bytes
  // are dereferenced, so bail out.
  if (!Ty->isSized() || Ty->isScalableTy())
    return false;

  // When dereferenceability information is provided by a dereferenceable
  // attribute, we know exactly how many bytes are dereferenceable. If we can
  // determine the exact offset to the attributed variable, we can use that
  // information here.

  APInt AccessSize(DL.getPointerTypeSizeInBits(V->getType()),
                   DL.getTypeStoreSize(Ty));
  return isDereferenceableAndAlignedPointer(V, Alignment, AccessSize, DL, CtxI,
                                            AC, DT, TLI);
}

bool llvm::isDereferenceablePointer(const Value *V, Type *Ty,
                                    const DataLayout &DL,
                                    const Instruction *CtxI,
                                    AssumptionCache *AC,
                                    const DominatorTree *DT,
                                    const TargetLibraryInfo *TLI) {
  return isDereferenceableAndAlignedPointer(V, Ty, Align(1), DL, CtxI, AC, DT,
                                            TLI);
}

/// Test if A and B will obviously have the same value.
///
/// This includes recognizing that %t0 and %t1 will have the same
/// value in code like this:
/// \code
///   %t0 = getelementptr \@a, 0, 3
///   store i32 0, i32* %t0
///   %t1 = getelementptr \@a, 0, 3
///   %t2 = load i32* %t1
/// \endcode
///
static bool AreEquivalentAddressValues(const Value *A, const Value *B) {
  // Test if the values are trivially equivalent.
  if (A == B)
    return true;

  // Test if the values come from identical arithmetic instructions.
  // Use isIdenticalToWhenDefined instead of isIdenticalTo because
  // this function is only used when one address use dominates the
  // other, which means that they'll always either have the same
  // value or one of them will have an undefined value.
  if (isa<BinaryOperator>(A) || isa<CastInst>(A) || isa<PHINode>(A) ||
      isa<GetElementPtrInst>(A))
    if (const Instruction *BI = dyn_cast<Instruction>(B))
      if (cast<Instruction>(A)->isIdenticalToWhenDefined(BI))
        return true;

  // Otherwise they may not be equivalent.
  return false;
}

bool llvm::isDereferenceableAndAlignedInLoop(LoadInst *LI, Loop *L,
                                             ScalarEvolution &SE,
                                             DominatorTree &DT,
                                             AssumptionCache *AC) {
  auto &DL = LI->getDataLayout();
  Value *Ptr = LI->getPointerOperand();

  APInt EltSize(DL.getIndexTypeSizeInBits(Ptr->getType()),
                DL.getTypeStoreSize(LI->getType()).getFixedValue());
  const Align Alignment = LI->getAlign();

  Instruction *HeaderFirstNonPHI = L->getHeader()->getFirstNonPHI();

  // If given a uniform (i.e. non-varying) address, see if we can prove the
  // access is safe within the loop w/o needing predication.
  if (L->isLoopInvariant(Ptr))
    return isDereferenceableAndAlignedPointer(Ptr, Alignment, EltSize, DL,
                                              HeaderFirstNonPHI, AC, &DT);

  // Otherwise, check to see if we have a repeating access pattern where we can
  // prove that all accesses are well aligned and dereferenceable.
  auto *AddRec = dyn_cast<SCEVAddRecExpr>(SE.getSCEV(Ptr));
  if (!AddRec || AddRec->getLoop() != L || !AddRec->isAffine())
    return false;
  auto *Step = dyn_cast<SCEVConstant>(AddRec->getStepRecurrence(SE));
  if (!Step)
    return false;

  auto TC = SE.getSmallConstantMaxTripCount(L);
  if (!TC)
    return false;

  // TODO: Handle overlapping accesses.
  // We should be computing AccessSize as (TC - 1) * Step + EltSize.
  if (EltSize.sgt(Step->getAPInt()))
    return false;

  // Compute the total access size for access patterns with unit stride and
  // patterns with gaps. For patterns with unit stride, Step and EltSize are
  // the same. For patterns with gaps (i.e. non-unit stride), we are accessing
  // EltSize bytes at every Step.
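  // E.g. (an illustrative sketch): an i32 load (EltSize = 4) advancing by
  // Step = 8 bytes over TC iterations touches bytes within
  // [Base, Base + TC * 8), so TC * 8 dereferenceable bytes are conservatively
  // required even though only half of them are actually read.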
  APInt AccessSize = TC * Step->getAPInt();

  assert(SE.isLoopInvariant(AddRec->getStart(), L) &&
         "implied by addrec definition");
  Value *Base = nullptr;
  if (auto *StartS = dyn_cast<SCEVUnknown>(AddRec->getStart())) {
    Base = StartS->getValue();
  } else if (auto *StartS = dyn_cast<SCEVAddExpr>(AddRec->getStart())) {
    // Handle (NewBase + offset) as start value.
    const auto *Offset = dyn_cast<SCEVConstant>(StartS->getOperand(0));
    const auto *NewBase = dyn_cast<SCEVUnknown>(StartS->getOperand(1));
    if (StartS->getNumOperands() == 2 && Offset && NewBase) {
      // The following code assumes the offset is unsigned, but GEP offsets
      // are treated as signed, so we can end up with a signed value here
      // too. For example, suppose the initial PHI value is (i8 255); the
      // offset will be treated as (i8 -1) and sign-extended to (i64 -1).
      if (Offset->getAPInt().isNegative())
        return false;

      // For the moment, restrict ourselves to the case where the offset is a
      // multiple of the requested alignment and the base is aligned.
      // TODO: generalize if a case is found which warrants it.
      if (Offset->getAPInt().urem(Alignment.value()) != 0)
        return false;
      Base = NewBase->getValue();
      bool Overflow = false;
      AccessSize = AccessSize.uadd_ov(Offset->getAPInt(), Overflow);
      if (Overflow)
        return false;
    }
  }

  if (!Base)
    return false;

  // For the moment, restrict ourselves to the case where the access size is a
  // multiple of the requested alignment and the base is aligned.
  // TODO: generalize if a case is found which warrants it.
  if (EltSize.urem(Alignment.value()) != 0)
    return false;
  return isDereferenceableAndAlignedPointer(Base, Alignment, AccessSize, DL,
                                            HeaderFirstNonPHI, AC, &DT);
}

/// Check if executing a load of this pointer value cannot trap.
///
/// If DT and ScanFrom are specified this method performs context-sensitive
/// analysis and returns true if it is safe to load immediately before ScanFrom.
///
/// If it is not obviously safe to load from the specified pointer, we do
/// a quick local scan of the basic block containing \c ScanFrom, to determine
/// if the address is already accessed.
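///
/// For example, a caller hoisting a 4-byte load might query (an illustrative
/// sketch; Ptr, InsertPt, and the analyses are assumed to exist at the call
/// site and are not defined in this file):
/// \code
///   APInt Size(DL.getIndexTypeSizeInBits(Ptr->getType()), 4);
///   if (isSafeToLoadUnconditionally(Ptr, Align(4), Size, DL, InsertPt,
///                                   AC, DT, TLI))
///     ; // the load can be speculated at InsertPt
/// \endcode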
///
/// This uses the pointee type to determine how many bytes need to be safe to
/// load from the pointer.
bool llvm::isSafeToLoadUnconditionally(Value *V, Align Alignment,
                                       const APInt &Size, const DataLayout &DL,
                                       Instruction *ScanFrom,
                                       AssumptionCache *AC,
                                       const DominatorTree *DT,
                                       const TargetLibraryInfo *TLI) {
  // If DT is not specified we can't make a context-sensitive query.
  const Instruction *CtxI = DT ? ScanFrom : nullptr;
  if (isDereferenceableAndAlignedPointer(V, Alignment, Size, DL, CtxI, AC, DT,
                                         TLI))
    return true;

  if (!ScanFrom)
    return false;

  if (Size.getBitWidth() > 64)
    return false;
  const TypeSize LoadSize = TypeSize::getFixed(Size.getZExtValue());

  // Otherwise, be a little bit aggressive by scanning the local block where we
  // want to check to see if the pointer is already being loaded or stored
  // from/to. If so, the previous load or store would have already trapped,
  // so there is no harm doing an extra load (also, CSE will later eliminate
  // the load entirely).
  BasicBlock::iterator BBI = ScanFrom->getIterator(),
                       E = ScanFrom->getParent()->begin();

  // We can at least always strip pointer casts even though we can't use the
  // base here.
  V = V->stripPointerCasts();

  while (BBI != E) {
    --BBI;

    // If we see a free or a call which may write to memory (i.e. which might
    // do a free), the pointer could be marked invalid.
    if (isa<CallInst>(BBI) && BBI->mayWriteToMemory() &&
        !isa<LifetimeIntrinsic>(BBI) && !isa<DbgInfoIntrinsic>(BBI))
      return false;

    Value *AccessedPtr;
    Type *AccessedTy;
    Align AccessedAlign;
    if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
      // Ignore volatile loads. The execution of a volatile load cannot
      // be used to prove an address is backed by regular memory; it can,
      // for example, point to an MMIO register.
      if (LI->isVolatile())
        continue;
      AccessedPtr = LI->getPointerOperand();
      AccessedTy = LI->getType();
      AccessedAlign = LI->getAlign();
    } else if (StoreInst *SI = dyn_cast<StoreInst>(BBI)) {
      // Ignore volatile stores (see comment for loads).
      if (SI->isVolatile())
        continue;
      AccessedPtr = SI->getPointerOperand();
      AccessedTy = SI->getValueOperand()->getType();
      AccessedAlign = SI->getAlign();
    } else
      continue;

    if (AccessedAlign < Alignment)
      continue;

    // Handle trivial cases.
    if (AccessedPtr == V &&
        TypeSize::isKnownLE(LoadSize, DL.getTypeStoreSize(AccessedTy)))
      return true;

    if (AreEquivalentAddressValues(AccessedPtr->stripPointerCasts(), V) &&
        TypeSize::isKnownLE(LoadSize, DL.getTypeStoreSize(AccessedTy)))
      return true;
  }
  return false;
}

bool llvm::isSafeToLoadUnconditionally(Value *V, Type *Ty, Align Alignment,
                                       const DataLayout &DL,
                                       Instruction *ScanFrom,
                                       AssumptionCache *AC,
                                       const DominatorTree *DT,
                                       const TargetLibraryInfo *TLI) {
  TypeSize TySize = DL.getTypeStoreSize(Ty);
  if (TySize.isScalable())
    return false;
  APInt Size(DL.getIndexTypeSizeInBits(V->getType()), TySize.getFixedValue());
  return isSafeToLoadUnconditionally(V, Alignment, Size, DL, ScanFrom, AC, DT,
                                     TLI);
}

/// DefMaxInstsToScan - the default number of maximum instructions
/// to scan in the block, used by FindAvailableLoadedValue().
/// FindAvailableLoadedValue() was introduced in r60148, to improve jump
/// threading in part by eliminating partially redundant loads.
/// At that point, the value of MaxInstsToScan was already set to '6'
/// without documented explanation.
cl::opt<unsigned>
llvm::DefMaxInstsToScan("available-load-scan-limit", cl::init(6), cl::Hidden,
  cl::desc("Use this to specify the default maximum number of instructions "
           "to scan backward from a given instruction, when searching for "
           "available loaded value"));

Value *llvm::FindAvailableLoadedValue(LoadInst *Load, BasicBlock *ScanBB,
                                      BasicBlock::iterator &ScanFrom,
                                      unsigned MaxInstsToScan,
                                      BatchAAResults *AA, bool *IsLoad,
                                      unsigned *NumScanedInst) {
  // Don't CSE a load that is volatile or anything stronger than unordered.
  if (!Load->isUnordered())
    return nullptr;

  MemoryLocation Loc = MemoryLocation::get(Load);
  return findAvailablePtrLoadStore(Loc, Load->getType(), Load->isAtomic(),
                                   ScanBB, ScanFrom, MaxInstsToScan, AA, IsLoad,
                                   NumScanedInst);
}

// Check if the load and the store have the same base, constant offsets and
// non-overlapping access ranges.
static bool areNonOverlapSameBaseLoadAndStore(const Value *LoadPtr,
                                              Type *LoadTy,
                                              const Value *StorePtr,
                                              Type *StoreTy,
                                              const DataLayout &DL) {
  APInt LoadOffset(DL.getIndexTypeSizeInBits(LoadPtr->getType()), 0);
  APInt StoreOffset(DL.getIndexTypeSizeInBits(StorePtr->getType()), 0);
  const Value *LoadBase = LoadPtr->stripAndAccumulateConstantOffsets(
      DL, LoadOffset, /* AllowNonInbounds */ false);
  const Value *StoreBase = StorePtr->stripAndAccumulateConstantOffsets(
      DL, StoreOffset, /* AllowNonInbounds */ false);
  if (LoadBase != StoreBase)
    return false;
  auto LoadAccessSize = LocationSize::precise(DL.getTypeStoreSize(LoadTy));
  auto StoreAccessSize = LocationSize::precise(DL.getTypeStoreSize(StoreTy));
  ConstantRange LoadRange(LoadOffset, LoadOffset + LoadAccessSize.toRaw());
  ConstantRange StoreRange(StoreOffset, StoreOffset + StoreAccessSize.toRaw());
  return LoadRange.intersectWith(StoreRange).isEmptySet();
}

static Value *getAvailableLoadStore(Instruction *Inst, const Value *Ptr,
                                    Type *AccessTy, bool AtLeastAtomic,
                                    const DataLayout &DL, bool *IsLoadCSE) {
  // If this is a load of Ptr, the loaded value is available.
  // (This is true even if the load is volatile or atomic, although
  // those cases are unlikely.)
  if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
    // We can value forward from an atomic to a non-atomic, but not the
    // other way around.
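    // E.g. (a sketch of the rule): an unordered atomic load can satisfy a
    // later plain load of the same address, but a plain load cannot satisfy
    // a later atomic one.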
    if (LI->isAtomic() < AtLeastAtomic)
      return nullptr;

    Value *LoadPtr = LI->getPointerOperand()->stripPointerCasts();
    if (!AreEquivalentAddressValues(LoadPtr, Ptr))
      return nullptr;

    if (CastInst::isBitOrNoopPointerCastable(LI->getType(), AccessTy, DL)) {
      if (IsLoadCSE)
        *IsLoadCSE = true;
      return LI;
    }
  }

  // If this is a store through Ptr, the value is available!
  // (This is true even if the store is volatile or atomic, although
  // those cases are unlikely.)
  if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
    // We can value forward from an atomic to a non-atomic, but not the
    // other way around.
    if (SI->isAtomic() < AtLeastAtomic)
      return nullptr;

    Value *StorePtr = SI->getPointerOperand()->stripPointerCasts();
    if (!AreEquivalentAddressValues(StorePtr, Ptr))
      return nullptr;

    if (IsLoadCSE)
      *IsLoadCSE = false;

    Value *Val = SI->getValueOperand();
    if (CastInst::isBitOrNoopPointerCastable(Val->getType(), AccessTy, DL))
      return Val;

    TypeSize StoreSize = DL.getTypeSizeInBits(Val->getType());
    TypeSize LoadSize = DL.getTypeSizeInBits(AccessTy);
    if (TypeSize::isKnownLE(LoadSize, StoreSize))
      if (auto *C = dyn_cast<Constant>(Val))
        return ConstantFoldLoadFromConst(C, AccessTy, DL);
  }

  if (auto *MSI = dyn_cast<MemSetInst>(Inst)) {
    // Don't forward from (non-atomic) memset to atomic load.
    if (AtLeastAtomic)
      return nullptr;

    // Only handle constant memsets.
    auto *Val = dyn_cast<ConstantInt>(MSI->getValue());
    auto *Len = dyn_cast<ConstantInt>(MSI->getLength());
    if (!Val || !Len)
      return nullptr;

    // TODO: Handle offsets.
    Value *Dst = MSI->getDest();
    if (!AreEquivalentAddressValues(Dst, Ptr))
      return nullptr;

    if (IsLoadCSE)
      *IsLoadCSE = false;

    TypeSize LoadTypeSize = DL.getTypeSizeInBits(AccessTy);
    if (LoadTypeSize.isScalable())
      return nullptr;

    // Make sure the read bytes are contained in the memset.
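    // E.g. (an illustrative sketch): forwarding memset(p, 0xAB, 16) to a
    // 4-byte load of p yields the splat constant 0xABABABAB.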
    uint64_t LoadSize = LoadTypeSize.getFixedValue();
    if ((Len->getValue() * 8).ult(LoadSize))
      return nullptr;

    APInt Splat = LoadSize >= 8 ? APInt::getSplat(LoadSize, Val->getValue())
                                : Val->getValue().trunc(LoadSize);
    ConstantInt *SplatC = ConstantInt::get(MSI->getContext(), Splat);
    if (CastInst::isBitOrNoopPointerCastable(SplatC->getType(), AccessTy, DL))
      return SplatC;

    return nullptr;
  }

  return nullptr;
}

Value *llvm::findAvailablePtrLoadStore(
    const MemoryLocation &Loc, Type *AccessTy, bool AtLeastAtomic,
    BasicBlock *ScanBB, BasicBlock::iterator &ScanFrom, unsigned MaxInstsToScan,
    BatchAAResults *AA, bool *IsLoadCSE, unsigned *NumScanedInst) {
  if (MaxInstsToScan == 0)
    MaxInstsToScan = ~0U;

  const DataLayout &DL = ScanBB->getDataLayout();
  const Value *StrippedPtr = Loc.Ptr->stripPointerCasts();

  while (ScanFrom != ScanBB->begin()) {
    // We must ignore debug info directives when counting (otherwise they
    // would affect codegen).
    Instruction *Inst = &*--ScanFrom;
    if (Inst->isDebugOrPseudoInst())
      continue;

    // Restore ScanFrom to the expected value in case the next test succeeds.
    ScanFrom++;

    if (NumScanedInst)
      ++(*NumScanedInst);

    // Don't scan huge blocks.
    if (MaxInstsToScan-- == 0)
      return nullptr;

    --ScanFrom;

    if (Value *Available = getAvailableLoadStore(Inst, StrippedPtr, AccessTy,
                                                 AtLeastAtomic, DL, IsLoadCSE))
      return Available;

    // Try to get the store size for the type.
    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      Value *StorePtr = SI->getPointerOperand()->stripPointerCasts();

      // If both StrippedPtr and StorePtr reach all the way to an alloca or
      // global and they are different, ignore the store. This is a trivial
      // form of alias analysis that is important for reg2mem'd code.
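      // E.g. (a sketch): a store into a local alloca cannot clobber a load
      // from a distinct global, so the backward scan can step over it.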
      if ((isa<AllocaInst>(StrippedPtr) || isa<GlobalVariable>(StrippedPtr)) &&
          (isa<AllocaInst>(StorePtr) || isa<GlobalVariable>(StorePtr)) &&
          StrippedPtr != StorePtr)
        continue;

      if (!AA) {
        // When AA isn't available, but the load and the store have the same
        // base, constant offsets and non-overlapping access ranges, ignore the
        // store. This is a simple form of alias analysis that is used by the
        // inliner. FIXME: use BasicAA if possible.
        if (areNonOverlapSameBaseLoadAndStore(
                Loc.Ptr, AccessTy, SI->getPointerOperand(),
                SI->getValueOperand()->getType(), DL))
          continue;
      } else {
        // If we have alias analysis and it says the store won't modify the
        // loaded value, ignore the store.
        if (!isModSet(AA->getModRefInfo(SI, Loc)))
          continue;
      }

      // Otherwise the store may or may not alias the pointer; bail out.
      ++ScanFrom;
      return nullptr;
    }

    // If this is some other instruction that may clobber Ptr, bail out.
    if (Inst->mayWriteToMemory()) {
      // If alias analysis claims that it really won't modify the load,
      // ignore it.
      if (AA && !isModSet(AA->getModRefInfo(Inst, Loc)))
        continue;

      // May modify the pointer; bail out.
      ++ScanFrom;
      return nullptr;
    }
  }

  // Got to the start of the block, we didn't find it, but are done for this
  // block.
  return nullptr;
}

Value *llvm::FindAvailableLoadedValue(LoadInst *Load, BatchAAResults &AA,
                                      bool *IsLoadCSE,
                                      unsigned MaxInstsToScan) {
  const DataLayout &DL = Load->getDataLayout();
  Value *StrippedPtr = Load->getPointerOperand()->stripPointerCasts();
  BasicBlock *ScanBB = Load->getParent();
  Type *AccessTy = Load->getType();
  bool AtLeastAtomic = Load->isAtomic();

  if (!Load->isUnordered())
    return nullptr;

  // Try to find an available value first, and delay expensive alias analysis
  // queries until later.
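  // Instructions that may write memory are collected in MustNotAliasInsts and
  // checked with BatchAA only if a candidate value is actually found, so
  // blocks with no available value pay no alias-analysis cost.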
  Value *Available = nullptr;
  SmallVector<Instruction *> MustNotAliasInsts;
  for (Instruction &Inst : make_range(++Load->getReverseIterator(),
                                      ScanBB->rend())) {
    if (Inst.isDebugOrPseudoInst())
      continue;

    if (MaxInstsToScan-- == 0)
      return nullptr;

    Available = getAvailableLoadStore(&Inst, StrippedPtr, AccessTy,
                                      AtLeastAtomic, DL, IsLoadCSE);
    if (Available)
      break;

    if (Inst.mayWriteToMemory())
      MustNotAliasInsts.push_back(&Inst);
  }

  // If we found an available value, ensure that the instructions in between
  // did not modify the memory location.
  if (Available) {
    MemoryLocation Loc = MemoryLocation::get(Load);
    for (Instruction *Inst : MustNotAliasInsts)
      if (isModSet(AA.getModRefInfo(Inst, Loc)))
        return nullptr;
  }

  return Available;
}

// Returns true if a use is either in an ICmp/PtrToInt or a Phi/Select that
// only feeds into them.
static bool isPointerUseReplacable(const Use &U) {
  unsigned Limit = 40;
  SmallVector<const User *> Worklist({U.getUser()});
  SmallPtrSet<const User *, 8> Visited;

  while (!Worklist.empty() && --Limit) {
    auto *User = Worklist.pop_back_val();
    if (!Visited.insert(User).second)
      continue;
    if (isa<ICmpInst, PtrToIntInst>(User))
      continue;
    if (isa<PHINode, SelectInst>(User))
      Worklist.append(User->user_begin(), User->user_end());
    else
      return false;
  }

  return Limit != 0;
}

// Returns true if `To` is a null pointer, a constant dereferenceable pointer,
// or both pointers have the same underlying object.
static bool isPointerAlwaysReplaceable(const Value *From, const Value *To,
                                       const DataLayout &DL) {
  // This is not strictly correct, but we do it for now to retain important
  // optimizations.
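  // E.g. (a sketch of the caveat): substituting a pointer that compared equal
  // to null is accepted here even though the two values need not share
  // provenance.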
  if (isa<ConstantPointerNull>(To))
    return true;
  if (isa<Constant>(To) &&
      isDereferenceablePointer(To, Type::getInt8Ty(To->getContext()), DL))
    return true;
  return getUnderlyingObjectAggressive(From) ==
         getUnderlyingObjectAggressive(To);
}

bool llvm::canReplacePointersInUseIfEqual(const Use &U, const Value *To,
                                          const DataLayout &DL) {
  assert(U->getType() == To->getType() && "values must have matching types");
  // Not a pointer, just return true.
  if (!To->getType()->isPointerTy())
    return true;

  if (isPointerAlwaysReplaceable(&*U, To, DL))
    return true;
  return isPointerUseReplacable(U);
}

bool llvm::canReplacePointersIfEqual(const Value *From, const Value *To,
                                     const DataLayout &DL) {
  assert(From->getType() == To->getType() && "values must have matching types");
  // Not a pointer, just return true.
  if (!From->getType()->isPointerTy())
    return true;

  return isPointerAlwaysReplaceable(From, To, DL);
}

bool llvm::isDereferenceableReadOnlyLoop(Loop *L, ScalarEvolution *SE,
                                         DominatorTree *DT,
                                         AssumptionCache *AC) {
  for (BasicBlock *BB : L->blocks()) {
    for (Instruction &I : *BB) {
      if (auto *LI = dyn_cast<LoadInst>(&I)) {
        if (!isDereferenceableAndAlignedInLoop(LI, L, *SE, *DT, AC))
          return false;
      } else if (I.mayReadFromMemory() || I.mayWriteToMemory() || I.mayThrow())
        return false;
    }
  }
  return true;
}