xref: /freebsd-src/contrib/llvm-project/llvm/lib/Analysis/BasicAliasAnalysis.cpp (revision 36b606ae6aa4b24061096ba18582e0a08ccd5dba)
10b57cec5SDimitry Andric //===- BasicAliasAnalysis.cpp - Stateless Alias Analysis Impl -------------===//
20b57cec5SDimitry Andric //
30b57cec5SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
40b57cec5SDimitry Andric // See https://llvm.org/LICENSE.txt for license information.
50b57cec5SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
60b57cec5SDimitry Andric //
70b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
80b57cec5SDimitry Andric //
90b57cec5SDimitry Andric // This file defines the primary stateless implementation of the
100b57cec5SDimitry Andric // Alias Analysis interface that implements identities (two different
110b57cec5SDimitry Andric // globals cannot alias, etc), but does no stateful analysis.
120b57cec5SDimitry Andric //
130b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
140b57cec5SDimitry Andric 
150b57cec5SDimitry Andric #include "llvm/Analysis/BasicAliasAnalysis.h"
160b57cec5SDimitry Andric #include "llvm/ADT/APInt.h"
17e8d8bef9SDimitry Andric #include "llvm/ADT/ScopeExit.h"
180b57cec5SDimitry Andric #include "llvm/ADT/SmallPtrSet.h"
190b57cec5SDimitry Andric #include "llvm/ADT/SmallVector.h"
200b57cec5SDimitry Andric #include "llvm/ADT/Statistic.h"
210b57cec5SDimitry Andric #include "llvm/Analysis/AliasAnalysis.h"
220b57cec5SDimitry Andric #include "llvm/Analysis/AssumptionCache.h"
230b57cec5SDimitry Andric #include "llvm/Analysis/CFG.h"
240b57cec5SDimitry Andric #include "llvm/Analysis/CaptureTracking.h"
250b57cec5SDimitry Andric #include "llvm/Analysis/MemoryBuiltins.h"
260b57cec5SDimitry Andric #include "llvm/Analysis/MemoryLocation.h"
270b57cec5SDimitry Andric #include "llvm/Analysis/TargetLibraryInfo.h"
280b57cec5SDimitry Andric #include "llvm/Analysis/ValueTracking.h"
290b57cec5SDimitry Andric #include "llvm/IR/Argument.h"
300b57cec5SDimitry Andric #include "llvm/IR/Attributes.h"
310b57cec5SDimitry Andric #include "llvm/IR/Constant.h"
32349cc55cSDimitry Andric #include "llvm/IR/ConstantRange.h"
330b57cec5SDimitry Andric #include "llvm/IR/Constants.h"
340b57cec5SDimitry Andric #include "llvm/IR/DataLayout.h"
350b57cec5SDimitry Andric #include "llvm/IR/DerivedTypes.h"
360b57cec5SDimitry Andric #include "llvm/IR/Dominators.h"
370b57cec5SDimitry Andric #include "llvm/IR/Function.h"
380b57cec5SDimitry Andric #include "llvm/IR/GetElementPtrTypeIterator.h"
390b57cec5SDimitry Andric #include "llvm/IR/GlobalAlias.h"
400b57cec5SDimitry Andric #include "llvm/IR/GlobalVariable.h"
410b57cec5SDimitry Andric #include "llvm/IR/InstrTypes.h"
420b57cec5SDimitry Andric #include "llvm/IR/Instruction.h"
430b57cec5SDimitry Andric #include "llvm/IR/Instructions.h"
440b57cec5SDimitry Andric #include "llvm/IR/IntrinsicInst.h"
450b57cec5SDimitry Andric #include "llvm/IR/Intrinsics.h"
460b57cec5SDimitry Andric #include "llvm/IR/Operator.h"
470fca6ea1SDimitry Andric #include "llvm/IR/PatternMatch.h"
480b57cec5SDimitry Andric #include "llvm/IR/Type.h"
490b57cec5SDimitry Andric #include "llvm/IR/User.h"
500b57cec5SDimitry Andric #include "llvm/IR/Value.h"
51480093f4SDimitry Andric #include "llvm/InitializePasses.h"
520b57cec5SDimitry Andric #include "llvm/Pass.h"
530b57cec5SDimitry Andric #include "llvm/Support/Casting.h"
540b57cec5SDimitry Andric #include "llvm/Support/CommandLine.h"
550b57cec5SDimitry Andric #include "llvm/Support/Compiler.h"
560b57cec5SDimitry Andric #include "llvm/Support/KnownBits.h"
57bdd1243dSDimitry Andric #include "llvm/Support/SaveAndRestore.h"
580b57cec5SDimitry Andric #include <cassert>
590b57cec5SDimitry Andric #include <cstdint>
600b57cec5SDimitry Andric #include <cstdlib>
61bdd1243dSDimitry Andric #include <optional>
620b57cec5SDimitry Andric #include <utility>
630b57cec5SDimitry Andric 
640b57cec5SDimitry Andric #define DEBUG_TYPE "basicaa"
650b57cec5SDimitry Andric 
660b57cec5SDimitry Andric using namespace llvm;
670b57cec5SDimitry Andric 
680b57cec5SDimitry Andric /// Enable analysis of recursive PHI nodes.
695ffd83dbSDimitry Andric static cl::opt<bool> EnableRecPhiAnalysis("basic-aa-recphi", cl::Hidden,
70e8d8bef9SDimitry Andric                                           cl::init(true));
710b57cec5SDimitry Andric 
72bdd1243dSDimitry Andric static cl::opt<bool> EnableSeparateStorageAnalysis("basic-aa-separate-storage",
731db9f3b2SDimitry Andric                                                    cl::Hidden, cl::init(true));
74bdd1243dSDimitry Andric 
750b57cec5SDimitry Andric /// SearchLimitReached / SearchTimes shows how often the limit of
760b57cec5SDimitry Andric /// to decompose GEPs is reached. It will affect the precision
770b57cec5SDimitry Andric /// of basic alias analysis.
780b57cec5SDimitry Andric STATISTIC(SearchLimitReached, "Number of times the limit to "
790b57cec5SDimitry Andric                               "decompose GEPs is reached");
800b57cec5SDimitry Andric STATISTIC(SearchTimes, "Number of times a GEP is decomposed");
810b57cec5SDimitry Andric 
820b57cec5SDimitry Andric // The max limit of the search depth in DecomposeGEPExpression() and
83349cc55cSDimitry Andric // getUnderlyingObject().
840b57cec5SDimitry Andric static const unsigned MaxLookupSearchDepth = 6;
850b57cec5SDimitry Andric 
                          /// Returns true iff this cached BasicAA result must be discarded because an
                          /// analysis it depends on — the assumption cache, or the dominator tree when a
                          /// handle to one is held — has been invalidated.
860b57cec5SDimitry Andric bool BasicAAResult::invalidate(Function &Fn, const PreservedAnalyses &PA,
870b57cec5SDimitry Andric                                FunctionAnalysisManager::Invalidator &Inv) {
880b57cec5SDimitry Andric   // We don't care if this analysis itself is preserved, it has no state. But
890b57cec5SDimitry Andric   // we need to check that the analyses it depends on have been. Note that we
900b57cec5SDimitry Andric   // may be created without handles to some analyses and in that case don't
910b57cec5SDimitry Andric   // depend on them.
920b57cec5SDimitry Andric   if (Inv.invalidate<AssumptionAnalysis>(Fn, PA) ||
                          //   DT_ may be null (no domtree handle), in which case domtree invalidation
                          //   is irrelevant to this result.
93b3edf446SDimitry Andric       (DT_ && Inv.invalidate<DominatorTreeAnalysis>(Fn, PA)))
940b57cec5SDimitry Andric     return true;
950b57cec5SDimitry Andric 
960b57cec5SDimitry Andric   // Otherwise this analysis result remains valid.
970b57cec5SDimitry Andric   return false;
980b57cec5SDimitry Andric }
990b57cec5SDimitry Andric 
1000b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
1010b57cec5SDimitry Andric // Useful predicates
1020b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
1030b57cec5SDimitry Andric 
1040b57cec5SDimitry Andric /// Returns the size of the object specified by V or UnknownSize if unknown.
1055f757f3fSDimitry Andric static std::optional<TypeSize> getObjectSize(const Value *V,
1065f757f3fSDimitry Andric                                              const DataLayout &DL,
1070b57cec5SDimitry Andric                                              const TargetLibraryInfo &TLI,
1080b57cec5SDimitry Andric                                              bool NullIsValidLoc,
1090b57cec5SDimitry Andric                                              bool RoundToAlign = false) {
1100b57cec5SDimitry Andric   uint64_t Size;
1110b57cec5SDimitry Andric   ObjectSizeOpts Opts;
1120b57cec5SDimitry Andric   Opts.RoundToAlign = RoundToAlign;
1130b57cec5SDimitry Andric   Opts.NullIsUnknownSize = NullIsValidLoc;
1140b57cec5SDimitry Andric   if (getObjectSize(V, Size, DL, &TLI, Opts))
1155f757f3fSDimitry Andric     return TypeSize::getFixed(Size);
1165f757f3fSDimitry Andric   return std::nullopt;
1170b57cec5SDimitry Andric }
1180b57cec5SDimitry Andric 
1190b57cec5SDimitry Andric /// Returns true if we can prove that the object specified by V is smaller than
1200b57cec5SDimitry Andric /// Size.
1215f757f3fSDimitry Andric static bool isObjectSmallerThan(const Value *V, TypeSize Size,
1220b57cec5SDimitry Andric                                 const DataLayout &DL,
1230b57cec5SDimitry Andric                                 const TargetLibraryInfo &TLI,
1240b57cec5SDimitry Andric                                 bool NullIsValidLoc) {
1250b57cec5SDimitry Andric   // Note that the meanings of the "object" are slightly different in the
1260b57cec5SDimitry Andric   // following contexts:
1270b57cec5SDimitry Andric   //    c1: llvm::getObjectSize()
1280b57cec5SDimitry Andric   //    c2: llvm.objectsize() intrinsic
1290b57cec5SDimitry Andric   //    c3: isObjectSmallerThan()
1300b57cec5SDimitry Andric   // c1 and c2 share the same meaning; however, the meaning of "object" in c3
1310b57cec5SDimitry Andric   // refers to the "entire object".
1320b57cec5SDimitry Andric   //
1330b57cec5SDimitry Andric   //  Consider this example:
1340b57cec5SDimitry Andric   //     char *p = (char*)malloc(100)
1350b57cec5SDimitry Andric   //     char *q = p+80;
1360b57cec5SDimitry Andric   //
1370b57cec5SDimitry Andric   //  In the context of c1 and c2, the "object" pointed by q refers to the
1380b57cec5SDimitry Andric   // stretch of memory of q[0:19]. So, getObjectSize(q) should return 20.
1390b57cec5SDimitry Andric   //
1400b57cec5SDimitry Andric   //  However, in the context of c3, the "object" refers to the chunk of memory
1410b57cec5SDimitry Andric   // being allocated. So, the "object" has 100 bytes, and q points to the middle
1420b57cec5SDimitry Andric   // the "object". In case q is passed to isObjectSmallerThan() as the 1st
1430b57cec5SDimitry Andric   // parameter, before the llvm::getObjectSize() is called to get the size of
1440b57cec5SDimitry Andric   // entire object, we should:
1450b57cec5SDimitry Andric   //    - either rewind the pointer q to the base-address of the object in
1460b57cec5SDimitry Andric   //      question (in this case rewind to p), or
1470b57cec5SDimitry Andric   //    - just give up. It is up to caller to make sure the pointer is pointing
1480b57cec5SDimitry Andric   //      to the base address the object.
1490b57cec5SDimitry Andric   //
1500b57cec5SDimitry Andric   // We go for 2nd option for simplicity.
                          //   isIdentifiedObject guarantees V is the base of its allocation (alloca,
                          //   global, noalias call, ...), which is what makes option 2 sound.
1510b57cec5SDimitry Andric   if (!isIdentifiedObject(V))
1520b57cec5SDimitry Andric     return false;
1530b57cec5SDimitry Andric 
1540b57cec5SDimitry Andric   // This function needs to use the aligned object size because we allow
1550b57cec5SDimitry Andric   // reads a bit past the end given sufficient alignment.
1565f757f3fSDimitry Andric   std::optional<TypeSize> ObjectSize = getObjectSize(V, DL, TLI, NullIsValidLoc,
1570b57cec5SDimitry Andric                                                      /*RoundToAlign*/ true);
1580b57cec5SDimitry Andric 
                          //   isKnownLT is conservative for scalable sizes: only return true when the
                          //   "smaller than" relation is known to hold.
1595f757f3fSDimitry Andric   return ObjectSize && TypeSize::isKnownLT(*ObjectSize, Size);
1600b57cec5SDimitry Andric }
1610b57cec5SDimitry Andric 
1628bcb0991SDimitry Andric /// Return the minimal extent from \p V to the end of the underlying object,
1638bcb0991SDimitry Andric /// assuming the result is used in an aliasing query. E.g., we do use the query
1648bcb0991SDimitry Andric /// location size and the fact that null pointers cannot alias here.
1655f757f3fSDimitry Andric static TypeSize getMinimalExtentFrom(const Value &V,
1668bcb0991SDimitry Andric                                      const LocationSize &LocSize,
1678bcb0991SDimitry Andric                                      const DataLayout &DL,
1688bcb0991SDimitry Andric                                      bool NullIsValidLoc) {
1698bcb0991SDimitry Andric   // If we have dereferenceability information we know a lower bound for the
1708bcb0991SDimitry Andric   // extent as accesses for a lower offset would be valid. We need to exclude
171349cc55cSDimitry Andric   // the "or null" part if null is a valid pointer. We can ignore frees, as an
172349cc55cSDimitry Andric   // access after free would be undefined behavior.
173fe6060f1SDimitry Andric   bool CanBeNull, CanBeFreed;
174fe6060f1SDimitry Andric   uint64_t DerefBytes =
175fe6060f1SDimitry Andric     V.getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
                          //   A possibly-null pointer in a context where null is addressable gives us
                          //   no usable dereferenceability bound.
1768bcb0991SDimitry Andric   DerefBytes = (CanBeNull && NullIsValidLoc) ? 0 : DerefBytes;
1778bcb0991SDimitry Andric   // If queried with a precise location size, we assume that location size to be
1788bcb0991SDimitry Andric   // accessed, thus valid.
1798bcb0991SDimitry Andric   if (LocSize.isPrecise())
1805f757f3fSDimitry Andric     DerefBytes = std::max(DerefBytes, LocSize.getValue().getKnownMinValue());
                          //   The result is always a fixed (non-scalable) byte count.
1815f757f3fSDimitry Andric   return TypeSize::getFixed(DerefBytes);
1828bcb0991SDimitry Andric }
1838bcb0991SDimitry Andric 
1840b57cec5SDimitry Andric /// Returns true if we can prove that the object specified by V has size Size.
1855f757f3fSDimitry Andric static bool isObjectSize(const Value *V, TypeSize Size, const DataLayout &DL,
1860b57cec5SDimitry Andric                          const TargetLibraryInfo &TLI, bool NullIsValidLoc) {
                          //   Exact-size check (no RoundToAlign): true only when the object's size is
                          //   known and equals Size exactly.
1875f757f3fSDimitry Andric   std::optional<TypeSize> ObjectSize =
1885f757f3fSDimitry Andric       getObjectSize(V, DL, TLI, NullIsValidLoc);
1895f757f3fSDimitry Andric   return ObjectSize && *ObjectSize == Size;
1900b57cec5SDimitry Andric }
1910b57cec5SDimitry Andric 
1920fca6ea1SDimitry Andric /// Return true if both V1 and V2 are VScale
                          /// (i.e. each matches PatternMatch::m_VScale; such values are equal at
                          /// runtime even though they are distinct IR Values).
1930fca6ea1SDimitry Andric static bool areBothVScale(const Value *V1, const Value *V2) {
1940fca6ea1SDimitry Andric   return PatternMatch::match(V1, PatternMatch::m_VScale()) &&
1950fca6ea1SDimitry Andric          PatternMatch::match(V2, PatternMatch::m_VScale());
1960fca6ea1SDimitry Andric }
1970fca6ea1SDimitry Andric 
1980b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
199349cc55cSDimitry Andric // CaptureInfo implementations
200349cc55cSDimitry Andric //===----------------------------------------------------------------------===//
201349cc55cSDimitry Andric 
202349cc55cSDimitry Andric CaptureInfo::~CaptureInfo() = default;
203349cc55cSDimitry Andric 
                          /// Context-insensitive capture query: the instruction \p I and \p OrAt are
                          /// deliberately ignored; only whether \p Object is a non-escaping local
                          /// object anywhere in the function matters. Results are memoized in
                          /// IsCapturedCache.
2045f757f3fSDimitry Andric bool SimpleCaptureInfo::isNotCapturedBefore(const Value *Object,
2055f757f3fSDimitry Andric                                             const Instruction *I, bool OrAt) {
206349cc55cSDimitry Andric   return isNonEscapingLocalObject(Object, &IsCapturedCache);
207349cc55cSDimitry Andric }
208349cc55cSDimitry Andric 
                          /// Returns true if control flow cannot reach \p I's block again after
                          /// executing it, i.e. the block has no successors or none of its successors
                          /// can reach the block.
2095f757f3fSDimitry Andric static bool isNotInCycle(const Instruction *I, const DominatorTree *DT,
2105f757f3fSDimitry Andric                          const LoopInfo *LI) {
2115f757f3fSDimitry Andric   BasicBlock *BB = const_cast<BasicBlock *>(I->getParent());
2125f757f3fSDimitry Andric   SmallVector<BasicBlock *> Succs(successors(BB));
2135f757f3fSDimitry Andric   return Succs.empty() ||
2145f757f3fSDimitry Andric          !isPotentiallyReachableFromMany(Succs, BB, nullptr, DT, LI);
2155f757f3fSDimitry Andric }
2165f757f3fSDimitry Andric 
                          /// Flow-sensitive capture query: true if \p Object is a function-local
                          /// allocation whose earliest capturing instruction cannot be reached before
                          /// \p I executes (before-or-at \p I when \p OrAt is set).
2175f757f3fSDimitry Andric bool EarliestEscapeInfo::isNotCapturedBefore(const Value *Object,
2185f757f3fSDimitry Andric                                              const Instruction *I, bool OrAt) {
219349cc55cSDimitry Andric   if (!isIdentifiedFunctionLocal(Object))
220349cc55cSDimitry Andric     return false;
221349cc55cSDimitry Andric 
                          //   Compute-on-first-use cache: insert() tells us (via .second) whether this
                          //   Object has been analyzed before; a cached nullptr means "never captured".
222349cc55cSDimitry Andric   auto Iter = EarliestEscapes.insert({Object, nullptr});
223349cc55cSDimitry Andric   if (Iter.second) {
224349cc55cSDimitry Andric     Instruction *EarliestCapture = FindEarliestCapture(
2257a6dacacSDimitry Andric         Object, *const_cast<Function *>(DT.getRoot()->getParent()),
2265f757f3fSDimitry Andric         /*ReturnCaptures=*/false, /*StoreCaptures=*/true, DT);
227349cc55cSDimitry Andric     if (EarliestCapture) {
                          //       Reverse map so removeInstruction() can invalidate cached entries
                          //       when the capturing instruction is erased.
228349cc55cSDimitry Andric       auto Ins = Inst2Obj.insert({EarliestCapture, {}});
229349cc55cSDimitry Andric       Ins.first->second.push_back(Object);
230349cc55cSDimitry Andric     }
231349cc55cSDimitry Andric     Iter.first->second = EarliestCapture;
232349cc55cSDimitry Andric   }
233349cc55cSDimitry Andric 
234349cc55cSDimitry Andric   // No capturing instruction.
235349cc55cSDimitry Andric   if (!Iter.first->second)
236349cc55cSDimitry Andric     return true;
237349cc55cSDimitry Andric 
2387a6dacacSDimitry Andric   // No context instruction means any use is capturing.
2397a6dacacSDimitry Andric   if (!I)
2407a6dacacSDimitry Andric     return false;
2417a6dacacSDimitry Andric 
                          //   I is itself the earliest capture: captured "at" I only matters when OrAt;
                          //   otherwise I precedes the capture unless a cycle lets the capture rerun.
2425f757f3fSDimitry Andric   if (I == Iter.first->second) {
2435f757f3fSDimitry Andric     if (OrAt)
2445f757f3fSDimitry Andric       return false;
2455f757f3fSDimitry Andric     return isNotInCycle(I, &DT, LI);
2465f757f3fSDimitry Andric   }
2475f757f3fSDimitry Andric 
2485f757f3fSDimitry Andric   return !isPotentiallyReachable(Iter.first->second, I, nullptr, &DT, LI);
249349cc55cSDimitry Andric }
250349cc55cSDimitry Andric 
                          /// Cache-invalidation hook: when \p I is about to be removed, forget the
                          /// cached earliest-escape results of every object whose earliest capturing
                          /// instruction was \p I (they will be recomputed on next query).
251349cc55cSDimitry Andric void EarliestEscapeInfo::removeInstruction(Instruction *I) {
252349cc55cSDimitry Andric   auto Iter = Inst2Obj.find(I);
253349cc55cSDimitry Andric   if (Iter != Inst2Obj.end()) {
254349cc55cSDimitry Andric     for (const Value *Obj : Iter->second)
255349cc55cSDimitry Andric       EarliestEscapes.erase(Obj);
256349cc55cSDimitry Andric     Inst2Obj.erase(I);
257349cc55cSDimitry Andric   }
258349cc55cSDimitry Andric }
259349cc55cSDimitry Andric 
260349cc55cSDimitry Andric //===----------------------------------------------------------------------===//
2610b57cec5SDimitry Andric // GetElementPtr Instruction Decomposition and Analysis
2620b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
2630b57cec5SDimitry Andric 
264fe6060f1SDimitry Andric namespace {
265349cc55cSDimitry Andric /// Represents zext(sext(trunc(V))).
266349cc55cSDimitry Andric struct CastedValue {
267fe6060f1SDimitry Andric   const Value *V;
                          //   Widths of the zext / sext / trunc layers wrapped around V, innermost
                          //   cast (trunc) applied first.
268349cc55cSDimitry Andric   unsigned ZExtBits = 0;
269349cc55cSDimitry Andric   unsigned SExtBits = 0;
270349cc55cSDimitry Andric   unsigned TruncBits = 0;
2710fca6ea1SDimitry Andric   /// Whether trunc(V) is non-negative.
2720fca6ea1SDimitry Andric   bool IsNonNegative = false;
273fe6060f1SDimitry Andric 
274349cc55cSDimitry Andric   explicit CastedValue(const Value *V) : V(V) {}
275349cc55cSDimitry Andric   explicit CastedValue(const Value *V, unsigned ZExtBits, unsigned SExtBits,
2760fca6ea1SDimitry Andric                        unsigned TruncBits, bool IsNonNegative)
2770fca6ea1SDimitry Andric       : V(V), ZExtBits(ZExtBits), SExtBits(SExtBits), TruncBits(TruncBits),
2780fca6ea1SDimitry Andric         IsNonNegative(IsNonNegative) {}
279fe6060f1SDimitry Andric 
                          //   Bit width of the whole expression after all three cast layers.
280fe6060f1SDimitry Andric   unsigned getBitWidth() const {
281349cc55cSDimitry Andric     return V->getType()->getPrimitiveSizeInBits() - TruncBits + ZExtBits +
282349cc55cSDimitry Andric            SExtBits;
283fe6060f1SDimitry Andric   }
284fe6060f1SDimitry Andric 
                          //   Same cast layers applied to NewV; the nneg fact only survives when the
                          //   caller asserts it still holds for NewV (PreserveNonNeg).
2850fca6ea1SDimitry Andric   CastedValue withValue(const Value *NewV, bool PreserveNonNeg) const {
2860fca6ea1SDimitry Andric     return CastedValue(NewV, ZExtBits, SExtBits, TruncBits,
2870fca6ea1SDimitry Andric                        IsNonNegative && PreserveNonNeg);
288fe6060f1SDimitry Andric   }
289fe6060f1SDimitry Andric 
290349cc55cSDimitry Andric   /// Replace V with zext(NewV)
2910fca6ea1SDimitry Andric   CastedValue withZExtOfValue(const Value *NewV, bool ZExtNonNegative) const {
292fe6060f1SDimitry Andric     unsigned ExtendBy = V->getType()->getPrimitiveSizeInBits() -
293fe6060f1SDimitry Andric                         NewV->getType()->getPrimitiveSizeInBits();
294349cc55cSDimitry Andric     if (ExtendBy <= TruncBits)
2950fca6ea1SDimitry Andric       // zext<nneg>(trunc(zext(NewV))) == zext<nneg>(trunc(NewV))
2960fca6ea1SDimitry Andric       // The nneg can be preserved on the outer zext here.
2970fca6ea1SDimitry Andric       return CastedValue(NewV, ZExtBits, SExtBits, TruncBits - ExtendBy,
2980fca6ea1SDimitry Andric                          IsNonNegative);
299349cc55cSDimitry Andric 
300fe6060f1SDimitry Andric     // zext(sext(zext(NewV))) == zext(zext(zext(NewV)))
301349cc55cSDimitry Andric     ExtendBy -= TruncBits;
3020fca6ea1SDimitry Andric     // zext<nneg>(zext(NewV)) == zext(NewV)
3030fca6ea1SDimitry Andric     // zext(zext<nneg>(NewV)) == zext<nneg>(NewV)
3040fca6ea1SDimitry Andric     // The nneg can be preserved from the inner zext here but must be dropped
3050fca6ea1SDimitry Andric     // from the outer.
3060fca6ea1SDimitry Andric     return CastedValue(NewV, ZExtBits + SExtBits + ExtendBy, 0, 0,
3070fca6ea1SDimitry Andric                        ZExtNonNegative);
308fe6060f1SDimitry Andric   }
309fe6060f1SDimitry Andric 
310349cc55cSDimitry Andric   /// Replace V with sext(NewV)
311349cc55cSDimitry Andric   CastedValue withSExtOfValue(const Value *NewV) const {
312fe6060f1SDimitry Andric     unsigned ExtendBy = V->getType()->getPrimitiveSizeInBits() -
313fe6060f1SDimitry Andric                         NewV->getType()->getPrimitiveSizeInBits();
314349cc55cSDimitry Andric     if (ExtendBy <= TruncBits)
3150fca6ea1SDimitry Andric       // zext<nneg>(trunc(sext(NewV))) == zext<nneg>(trunc(NewV))
3160fca6ea1SDimitry Andric       // The nneg can be preserved on the outer zext here
3170fca6ea1SDimitry Andric       return CastedValue(NewV, ZExtBits, SExtBits, TruncBits - ExtendBy,
3180fca6ea1SDimitry Andric                          IsNonNegative);
319349cc55cSDimitry Andric 
320fe6060f1SDimitry Andric     // zext(sext(sext(NewV)))
321349cc55cSDimitry Andric     ExtendBy -= TruncBits;
3220fca6ea1SDimitry Andric     // zext<nneg>(sext(sext(NewV))) = zext<nneg>(sext(NewV))
3230fca6ea1SDimitry Andric     // The nneg can be preserved on the outer zext here
3240fca6ea1SDimitry Andric     return CastedValue(NewV, ZExtBits, SExtBits + ExtendBy, 0, IsNonNegative);
325fe6060f1SDimitry Andric   }
326fe6060f1SDimitry Andric 
                          //   Evaluate the cast stack on a concrete constant: trunc, then sext, then
                          //   zext, mirroring zext(sext(trunc(N))).
327fe6060f1SDimitry Andric   APInt evaluateWith(APInt N) const {
328fe6060f1SDimitry Andric     assert(N.getBitWidth() == V->getType()->getPrimitiveSizeInBits() &&
329fe6060f1SDimitry Andric            "Incompatible bit width");
330349cc55cSDimitry Andric     if (TruncBits) N = N.trunc(N.getBitWidth() - TruncBits);
331fe6060f1SDimitry Andric     if (SExtBits) N = N.sext(N.getBitWidth() + SExtBits);
332fe6060f1SDimitry Andric     if (ZExtBits) N = N.zext(N.getBitWidth() + ZExtBits);
333fe6060f1SDimitry Andric     return N;
334fe6060f1SDimitry Andric   }
335fe6060f1SDimitry Andric 
                          //   Same evaluation lifted to a range of possible values.
336349cc55cSDimitry Andric   ConstantRange evaluateWith(ConstantRange N) const {
337349cc55cSDimitry Andric     assert(N.getBitWidth() == V->getType()->getPrimitiveSizeInBits() &&
338349cc55cSDimitry Andric            "Incompatible bit width");
339349cc55cSDimitry Andric     if (TruncBits) N = N.truncate(N.getBitWidth() - TruncBits);
340349cc55cSDimitry Andric     if (SExtBits) N = N.signExtend(N.getBitWidth() + SExtBits);
341349cc55cSDimitry Andric     if (ZExtBits) N = N.zeroExtend(N.getBitWidth() + ZExtBits);
342349cc55cSDimitry Andric     return N;
343349cc55cSDimitry Andric   }
344349cc55cSDimitry Andric 
345fe6060f1SDimitry Andric   bool canDistributeOver(bool NUW, bool NSW) const {
346fe6060f1SDimitry Andric     // zext(x op<nuw> y) == zext(x) op<nuw> zext(y)
347fe6060f1SDimitry Andric     // sext(x op<nsw> y) == sext(x) op<nsw> sext(y)
348349cc55cSDimitry Andric     // trunc(x op y) == trunc(x) op trunc(y)
349fe6060f1SDimitry Andric     return (!ZExtBits || NUW) && (!SExtBits || NSW);
350fe6060f1SDimitry Andric   }
351349cc55cSDimitry Andric 
352349cc55cSDimitry Andric   bool hasSameCastsAs(const CastedValue &Other) const {
3530fca6ea1SDimitry Andric     if (ZExtBits == Other.ZExtBits && SExtBits == Other.SExtBits &&
3540fca6ea1SDimitry Andric         TruncBits == Other.TruncBits)
3550fca6ea1SDimitry Andric       return true;
3560fca6ea1SDimitry Andric     // If either CastedValue has a nneg zext then the sext/zext bits are
3570fca6ea1SDimitry Andric     // interchangable for that value.
3580fca6ea1SDimitry Andric     if (IsNonNegative || Other.IsNonNegative)
3590fca6ea1SDimitry Andric       return (ZExtBits + SExtBits == Other.ZExtBits + Other.SExtBits &&
3600fca6ea1SDimitry Andric               TruncBits == Other.TruncBits);
3610fca6ea1SDimitry Andric     return false;
362349cc55cSDimitry Andric   }
363fe6060f1SDimitry Andric };
364fe6060f1SDimitry Andric 
365349cc55cSDimitry Andric /// Represents zext(sext(trunc(V))) * Scale + Offset.
366fe6060f1SDimitry Andric struct LinearExpression {
367349cc55cSDimitry Andric   CastedValue Val;
368fe6060f1SDimitry Andric   APInt Scale;
369fe6060f1SDimitry Andric   APInt Offset;
370fe6060f1SDimitry Andric 
371fe6060f1SDimitry Andric   /// True if all operations in this expression are NSW.
372fe6060f1SDimitry Andric   bool IsNSW;
373fe6060f1SDimitry Andric 
374349cc55cSDimitry Andric   LinearExpression(const CastedValue &Val, const APInt &Scale,
375fe6060f1SDimitry Andric                    const APInt &Offset, bool IsNSW)
376fe6060f1SDimitry Andric       : Val(Val), Scale(Scale), Offset(Offset), IsNSW(IsNSW) {}
377fe6060f1SDimitry Andric 
                          //   Identity expression: Val * 1 + 0. Used as the "cannot decompose further"
                          //   result throughout GetLinearExpression.
378349cc55cSDimitry Andric   LinearExpression(const CastedValue &Val) : Val(Val), IsNSW(true) {
379fe6060f1SDimitry Andric     unsigned BitWidth = Val.getBitWidth();
380fe6060f1SDimitry Andric     Scale = APInt(BitWidth, 1);
381fe6060f1SDimitry Andric     Offset = APInt(BitWidth, 0);
382fe6060f1SDimitry Andric   }
383349cc55cSDimitry Andric 
384349cc55cSDimitry Andric   LinearExpression mul(const APInt &Other, bool MulIsNSW) const {
385349cc55cSDimitry Andric     // The check for zero offset is necessary, because generally
386349cc55cSDimitry Andric     // (X +nsw Y) *nsw Z does not imply (X *nsw Z) +nsw (Y *nsw Z).
387349cc55cSDimitry Andric     bool NSW = IsNSW && (Other.isOne() || (MulIsNSW && Offset.isZero()));
388349cc55cSDimitry Andric     return LinearExpression(Val, Scale * Other, Offset * Other, NSW);
389349cc55cSDimitry Andric   }
390fe6060f1SDimitry Andric };
391fe6060f1SDimitry Andric }
392fe6060f1SDimitry Andric 
3930b57cec5SDimitry Andric /// Analyzes the specified value as a linear expression: "A*V + B", where A and
3940b57cec5SDimitry Andric /// B are constant integers.
395fe6060f1SDimitry Andric static LinearExpression GetLinearExpression(
396349cc55cSDimitry Andric     const CastedValue &Val,  const DataLayout &DL, unsigned Depth,
397fe6060f1SDimitry Andric     AssumptionCache *AC, DominatorTree *DT) {
3980b57cec5SDimitry Andric   // Limit our recursion depth.
                          //   Returning Val bare uses LinearExpression's implicit converting ctor:
                          //   the identity expression Val * 1 + 0. Same for all bail-outs below.
399fe6060f1SDimitry Andric   if (Depth == 6)
400fe6060f1SDimitry Andric     return Val;
4010b57cec5SDimitry Andric 
                          //   Pure constant: 0 * (anything) + casted-constant.
402fe6060f1SDimitry Andric   if (const ConstantInt *Const = dyn_cast<ConstantInt>(Val.V))
403fe6060f1SDimitry Andric     return LinearExpression(Val, APInt(Val.getBitWidth(), 0),
404fe6060f1SDimitry Andric                             Val.evaluateWith(Const->getValue()), true);
4050b57cec5SDimitry Andric 
406fe6060f1SDimitry Andric   if (const BinaryOperator *BOp = dyn_cast<BinaryOperator>(Val.V)) {
4070b57cec5SDimitry Andric     if (ConstantInt *RHSC = dyn_cast<ConstantInt>(BOp->getOperand(1))) {
408fe6060f1SDimitry Andric       APInt RHS = Val.evaluateWith(RHSC->getValue());
409fe6060f1SDimitry Andric       // The only non-OBO case we deal with is or, and only limited to the
410fe6060f1SDimitry Andric       // case where it is both nuw and nsw.
411fe6060f1SDimitry Andric       bool NUW = true, NSW = true;
412fe6060f1SDimitry Andric       if (isa<OverflowingBinaryOperator>(BOp)) {
413fe6060f1SDimitry Andric         NUW &= BOp->hasNoUnsignedWrap();
414fe6060f1SDimitry Andric         NSW &= BOp->hasNoSignedWrap();
415fe6060f1SDimitry Andric       }
                          //       The surrounding zext/sext layers only distribute over the op when
                          //       the matching no-wrap flags hold.
416fe6060f1SDimitry Andric       if (!Val.canDistributeOver(NUW, NSW))
417fe6060f1SDimitry Andric         return Val;
4180b57cec5SDimitry Andric 
419349cc55cSDimitry Andric       // While we can distribute over trunc, we cannot preserve nowrap flags
420349cc55cSDimitry Andric       // in that case.
421349cc55cSDimitry Andric       if (Val.TruncBits)
422349cc55cSDimitry Andric         NUW = NSW = false;
423349cc55cSDimitry Andric 
424fe6060f1SDimitry Andric       LinearExpression E(Val);
4250b57cec5SDimitry Andric       switch (BOp->getOpcode()) {
4260b57cec5SDimitry Andric       default:
4270b57cec5SDimitry Andric         // We don't understand this instruction, so we can't decompose it any
4280b57cec5SDimitry Andric         // further.
429fe6060f1SDimitry Andric         return Val;
4300b57cec5SDimitry Andric       case Instruction::Or:
4317a6dacacSDimitry Andric         // X|C == X+C if it is disjoint.  Otherwise we can't analyze it.
4327a6dacacSDimitry Andric         if (!cast<PossiblyDisjointInst>(BOp)->isDisjoint())
433fe6060f1SDimitry Andric           return Val;
4340b57cec5SDimitry Andric 
435bdd1243dSDimitry Andric         [[fallthrough]];
436fe6060f1SDimitry Andric       case Instruction::Add: {
                          //         withValue(..., false): recursing into an add operand cannot keep
                          //         the nneg fact for the casts (same for Sub/Mul below).
4370fca6ea1SDimitry Andric         E = GetLinearExpression(Val.withValue(BOp->getOperand(0), false), DL,
438fe6060f1SDimitry Andric                                 Depth + 1, AC, DT);
439fe6060f1SDimitry Andric         E.Offset += RHS;
440fe6060f1SDimitry Andric         E.IsNSW &= NSW;
441fe6060f1SDimitry Andric         break;
442fe6060f1SDimitry Andric       }
443fe6060f1SDimitry Andric       case Instruction::Sub: {
4440fca6ea1SDimitry Andric         E = GetLinearExpression(Val.withValue(BOp->getOperand(0), false), DL,
445fe6060f1SDimitry Andric                                 Depth + 1, AC, DT);
446fe6060f1SDimitry Andric         E.Offset -= RHS;
447fe6060f1SDimitry Andric         E.IsNSW &= NSW;
448fe6060f1SDimitry Andric         break;
449fe6060f1SDimitry Andric       }
450349cc55cSDimitry Andric       case Instruction::Mul:
4510fca6ea1SDimitry Andric         E = GetLinearExpression(Val.withValue(BOp->getOperand(0), false), DL,
452349cc55cSDimitry Andric                                 Depth + 1, AC, DT)
453349cc55cSDimitry Andric                 .mul(RHS, NSW);
454fe6060f1SDimitry Andric         break;
455fe6060f1SDimitry Andric       case Instruction::Shl:
4560b57cec5SDimitry Andric         // We're trying to linearize an expression of the kind:
4570b57cec5SDimitry Andric         //   shl i8 -128, 36
4580b57cec5SDimitry Andric         // where the shift count exceeds the bitwidth of the type.
4590b57cec5SDimitry Andric         // We can't decompose this further (the expression would return
4600b57cec5SDimitry Andric         // a poison value).
461fe6060f1SDimitry Andric         if (RHS.getLimitedValue() > Val.getBitWidth())
462fe6060f1SDimitry Andric           return Val;
4630b57cec5SDimitry Andric 
4640fca6ea1SDimitry Andric         E = GetLinearExpression(Val.withValue(BOp->getOperand(0), NSW), DL,
465fe6060f1SDimitry Andric                                 Depth + 1, AC, DT);
466fe6060f1SDimitry Andric         E.Offset <<= RHS.getLimitedValue();
467fe6060f1SDimitry Andric         E.Scale <<= RHS.getLimitedValue();
468fe6060f1SDimitry Andric         E.IsNSW &= NSW;
469fe6060f1SDimitry Andric         break;
4700b57cec5SDimitry Andric       }
471fe6060f1SDimitry Andric       return E;
4720b57cec5SDimitry Andric     }
4730b57cec5SDimitry Andric   }
4740b57cec5SDimitry Andric 
                          //   Fold explicit cast instructions into the CastedValue's cast layers and
                          //   keep decomposing the casted operand.
4750fca6ea1SDimitry Andric   if (const auto *ZExt = dyn_cast<ZExtInst>(Val.V))
476fe6060f1SDimitry Andric     return GetLinearExpression(
4770fca6ea1SDimitry Andric         Val.withZExtOfValue(ZExt->getOperand(0), ZExt->hasNonNeg()), DL,
4780fca6ea1SDimitry Andric         Depth + 1, AC, DT);
4790b57cec5SDimitry Andric 
480fe6060f1SDimitry Andric   if (isa<SExtInst>(Val.V))
481fe6060f1SDimitry Andric     return GetLinearExpression(
482fe6060f1SDimitry Andric         Val.withSExtOfValue(cast<CastInst>(Val.V)->getOperand(0)),
483fe6060f1SDimitry Andric         DL, Depth + 1, AC, DT);
4840b57cec5SDimitry Andric 
485fe6060f1SDimitry Andric   return Val;
4860b57cec5SDimitry Andric }
4870b57cec5SDimitry Andric 
488349cc55cSDimitry Andric /// To ensure a pointer offset fits in an integer of size IndexSize
489349cc55cSDimitry Andric /// (in bits) when that size is smaller than the maximum index size. This is
4900b57cec5SDimitry Andric /// an issue, for example, in particular for 32b pointers with negative indices
4910b57cec5SDimitry Andric /// that rely on two's complement wrap-arounds for precise alias information
492349cc55cSDimitry Andric /// where the maximum index size is 64b.
4935f757f3fSDimitry Andric static void adjustToIndexSize(APInt &Offset, unsigned IndexSize) {
494349cc55cSDimitry Andric   assert(IndexSize <= Offset.getBitWidth() && "Invalid IndexSize!");
495349cc55cSDimitry Andric   unsigned ShiftBits = Offset.getBitWidth() - IndexSize;
4965f757f3fSDimitry Andric   if (ShiftBits != 0) {
4975f757f3fSDimitry Andric     Offset <<= ShiftBits;
4985f757f3fSDimitry Andric     Offset.ashrInPlace(ShiftBits);
4995f757f3fSDimitry Andric   }
5000b57cec5SDimitry Andric }
5010b57cec5SDimitry Andric 
502349cc55cSDimitry Andric namespace {
503349cc55cSDimitry Andric // A linear transformation of a Value; this class represents
504349cc55cSDimitry Andric // ZExt(SExt(Trunc(V, TruncBits), SExtBits), ZExtBits) * Scale.
505349cc55cSDimitry Andric struct VariableGEPIndex {
506349cc55cSDimitry Andric   CastedValue Val;
507349cc55cSDimitry Andric   APInt Scale;
5080b57cec5SDimitry Andric 
509349cc55cSDimitry Andric   // Context instruction to use when querying information about this index.
510349cc55cSDimitry Andric   const Instruction *CxtI;
511349cc55cSDimitry Andric 
512349cc55cSDimitry Andric   /// True if all operations in this expression are NSW.
513349cc55cSDimitry Andric   bool IsNSW;
514349cc55cSDimitry Andric 
51506c3fb27SDimitry Andric   /// True if the index should be subtracted rather than added. We don't simply
51606c3fb27SDimitry Andric   /// negate the Scale, to avoid losing the NSW flag: X - INT_MIN*1 may be
51706c3fb27SDimitry Andric   /// non-wrapping, while X + INT_MIN*(-1) wraps.
51806c3fb27SDimitry Andric   bool IsNegated;
51906c3fb27SDimitry Andric 
52006c3fb27SDimitry Andric   bool hasNegatedScaleOf(const VariableGEPIndex &Other) const {
52106c3fb27SDimitry Andric     if (IsNegated == Other.IsNegated)
52206c3fb27SDimitry Andric       return Scale == -Other.Scale;
52306c3fb27SDimitry Andric     return Scale == Other.Scale;
52406c3fb27SDimitry Andric   }
52506c3fb27SDimitry Andric 
526349cc55cSDimitry Andric   void dump() const {
527349cc55cSDimitry Andric     print(dbgs());
528349cc55cSDimitry Andric     dbgs() << "\n";
5290b57cec5SDimitry Andric   }
530349cc55cSDimitry Andric   void print(raw_ostream &OS) const {
531349cc55cSDimitry Andric     OS << "(V=" << Val.V->getName()
532349cc55cSDimitry Andric        << ", zextbits=" << Val.ZExtBits
533349cc55cSDimitry Andric        << ", sextbits=" << Val.SExtBits
534349cc55cSDimitry Andric        << ", truncbits=" << Val.TruncBits
53506c3fb27SDimitry Andric        << ", scale=" << Scale
53606c3fb27SDimitry Andric        << ", nsw=" << IsNSW
53706c3fb27SDimitry Andric        << ", negated=" << IsNegated << ")";
538349cc55cSDimitry Andric   }
539349cc55cSDimitry Andric };
540349cc55cSDimitry Andric }
541349cc55cSDimitry Andric 
542349cc55cSDimitry Andric // Represents the internal structure of a GEP, decomposed into a base pointer,
543349cc55cSDimitry Andric // constant offsets, and variable scaled indices.
544349cc55cSDimitry Andric struct BasicAAResult::DecomposedGEP {
545349cc55cSDimitry Andric   // Base pointer of the GEP
546349cc55cSDimitry Andric   const Value *Base;
547349cc55cSDimitry Andric   // Total constant offset from base.
548349cc55cSDimitry Andric   APInt Offset;
549349cc55cSDimitry Andric   // Scaled variable (non-constant) indices.
550349cc55cSDimitry Andric   SmallVector<VariableGEPIndex, 4> VarIndices;
551349cc55cSDimitry Andric   // Are all operations inbounds GEPs or non-indexing operations?
552bdd1243dSDimitry Andric   // (std::nullopt iff expression doesn't involve any geps)
553bdd1243dSDimitry Andric   std::optional<bool> InBounds;
554349cc55cSDimitry Andric 
555349cc55cSDimitry Andric   void dump() const {
556349cc55cSDimitry Andric     print(dbgs());
557349cc55cSDimitry Andric     dbgs() << "\n";
558349cc55cSDimitry Andric   }
559349cc55cSDimitry Andric   void print(raw_ostream &OS) const {
560349cc55cSDimitry Andric     OS << "(DecomposedGEP Base=" << Base->getName()
561349cc55cSDimitry Andric        << ", Offset=" << Offset
562349cc55cSDimitry Andric        << ", VarIndices=[";
563349cc55cSDimitry Andric     for (size_t i = 0; i < VarIndices.size(); i++) {
564349cc55cSDimitry Andric       if (i != 0)
565349cc55cSDimitry Andric         OS << ", ";
566349cc55cSDimitry Andric       VarIndices[i].print(OS);
567349cc55cSDimitry Andric     }
568349cc55cSDimitry Andric     OS << "])";
569349cc55cSDimitry Andric   }
570349cc55cSDimitry Andric };
571349cc55cSDimitry Andric 
5720b57cec5SDimitry Andric 
/// If V is a symbolic pointer expression, decompose it into a base pointer
/// with a constant offset and a number of scaled symbolic offsets.
///
/// The scaled symbolic offsets (represented by pairs of a Value* and a scale
/// in the VarIndices vector) are Value*'s that are known to be scaled by the
/// specified amount, but which may have other unrepresented high bits. As
/// such, the gep cannot necessarily be reconstructed from its decomposed form.
BasicAAResult::DecomposedGEP
BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
                                      AssumptionCache *AC, DominatorTree *DT) {
  // Limit recursion depth to limit compile time in crazy cases.
  unsigned MaxLookup = MaxLookupSearchDepth;
  SearchTimes++;
  // Remember the starting instruction (if any); it is recorded as the context
  // instruction of every VariableGEPIndex collected below.
  const Instruction *CxtI = dyn_cast<Instruction>(V);

  // The constant offset is accumulated at the maximum index width so offsets
  // from GEPs in differently-sized address spaces can be combined.
  unsigned MaxIndexSize = DL.getMaxIndexSizeInBits();
  DecomposedGEP Decomposed;
  Decomposed.Offset = APInt(MaxIndexSize, 0);
  do {
    // See if this is a bitcast or GEP.
    const Operator *Op = dyn_cast<Operator>(V);
    if (!Op) {
      // The only non-operator case we can handle are GlobalAliases.
      if (const GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
        if (!GA->isInterposable()) {
          V = GA->getAliasee();
          continue;
        }
      }
      Decomposed.Base = V;
      return Decomposed;
    }

    // Bitcasts and addrspacecasts don't change the offset; look through them.
    if (Op->getOpcode() == Instruction::BitCast ||
        Op->getOpcode() == Instruction::AddrSpaceCast) {
      V = Op->getOperand(0);
      continue;
    }

    const GEPOperator *GEPOp = dyn_cast<GEPOperator>(Op);
    if (!GEPOp) {
      if (const auto *PHI = dyn_cast<PHINode>(V)) {
        // Look through single-arg phi nodes created by LCSSA.
        if (PHI->getNumIncomingValues() == 1) {
          V = PHI->getIncomingValue(0);
          continue;
        }
      } else if (const auto *Call = dyn_cast<CallBase>(V)) {
        // CaptureTracking can know about special capturing properties of some
        // intrinsics like launder.invariant.group, that can't be expressed with
        // the attributes, but have properties like returning aliasing pointer.
        // Because some analysis may assume that nocaptured pointer is not
        // returned from some special intrinsic (because function would have to
        // be marked with returns attribute), it is crucial to use this function
        // because it should be in sync with CaptureTracking. Not using it may
        // cause weird miscompilations where 2 aliasing pointers are assumed to
        // noalias.
        if (auto *RP = getArgumentAliasingToReturnedPointer(Call, false)) {
          V = RP;
          continue;
        }
      }

      // Not something we can look through; treat it as the base.
      Decomposed.Base = V;
      return Decomposed;
    }

    // Track whether we've seen at least one in bounds gep, and if so, whether
    // all geps parsed were in bounds.
    if (Decomposed.InBounds == std::nullopt)
      Decomposed.InBounds = GEPOp->isInBounds();
    else if (!GEPOp->isInBounds())
      Decomposed.InBounds = false;

    assert(GEPOp->getSourceElementType()->isSized() && "GEP must be sized");

    unsigned AS = GEPOp->getPointerAddressSpace();
    // Walk the indices of the GEP, accumulating them into BaseOff/VarIndices.
    gep_type_iterator GTI = gep_type_begin(GEPOp);
    unsigned IndexSize = DL.getIndexSizeInBits(AS);
    // Assume all GEP operands are constants until proven otherwise.
    bool GepHasConstantOffset = true;
    for (User::const_op_iterator I = GEPOp->op_begin() + 1, E = GEPOp->op_end();
         I != E; ++I, ++GTI) {
      const Value *Index = *I;
      // Compute the (potentially symbolic) offset in bytes for this index.
      if (StructType *STy = GTI.getStructTypeOrNull()) {
        // For a struct, add the member offset.
        unsigned FieldNo = cast<ConstantInt>(Index)->getZExtValue();
        if (FieldNo == 0)
          continue;

        Decomposed.Offset += DL.getStructLayout(STy)->getElementOffset(FieldNo);
        continue;
      }

      // For an array/pointer, add the element offset, explicitly scaled.
      if (const ConstantInt *CIdx = dyn_cast<ConstantInt>(Index)) {
        if (CIdx->isZero())
          continue;

        // Don't attempt to analyze GEPs if the scalable index is not zero.
        TypeSize AllocTypeSize = GTI.getSequentialElementStride(DL);
        if (AllocTypeSize.isScalable()) {
          Decomposed.Base = V;
          return Decomposed;
        }

        Decomposed.Offset += AllocTypeSize.getFixedValue() *
                             CIdx->getValue().sextOrTrunc(MaxIndexSize);
        continue;
      }

      // A variable index with a scalable element stride can't be decomposed
      // either; give up and use the GEP itself as the base.
      TypeSize AllocTypeSize = GTI.getSequentialElementStride(DL);
      if (AllocTypeSize.isScalable()) {
        Decomposed.Base = V;
        return Decomposed;
      }

      GepHasConstantOffset = false;

      // If the integer type is smaller than the index size, it is implicitly
      // sign extended or truncated to index size.
      unsigned Width = Index->getType()->getIntegerBitWidth();
      unsigned SExtBits = IndexSize > Width ? IndexSize - Width : 0;
      unsigned TruncBits = IndexSize < Width ? Width - IndexSize : 0;
      LinearExpression LE = GetLinearExpression(
          CastedValue(Index, 0, SExtBits, TruncBits, false), DL, 0, AC, DT);

      // Scale by the type size.
      unsigned TypeSize = AllocTypeSize.getFixedValue();
      LE = LE.mul(APInt(IndexSize, TypeSize), GEPOp->isInBounds());
      Decomposed.Offset += LE.Offset.sext(MaxIndexSize);
      APInt Scale = LE.Scale.sext(MaxIndexSize);

      // If we already had an occurrence of this index variable, merge this
      // scale into it.  For example, we want to handle:
      //   A[x][x] -> x*16 + x*4 -> x*20
      // This also ensures that 'x' only appears in the index list once.
      for (unsigned i = 0, e = Decomposed.VarIndices.size(); i != e; ++i) {
        if ((Decomposed.VarIndices[i].Val.V == LE.Val.V ||
             areBothVScale(Decomposed.VarIndices[i].Val.V, LE.Val.V)) &&
            Decomposed.VarIndices[i].Val.hasSameCastsAs(LE.Val)) {
          Scale += Decomposed.VarIndices[i].Scale;
          LE.IsNSW = false; // We cannot guarantee nsw for the merge.
          Decomposed.VarIndices.erase(Decomposed.VarIndices.begin() + i);
          break;
        }
      }

      // Make sure that we have a scale that makes sense for this target's
      // index size.
      adjustToIndexSize(Scale, IndexSize);

      // A scale of zero contributes nothing; only record nonzero scales.
      if (!!Scale) {
        VariableGEPIndex Entry = {LE.Val, Scale, CxtI, LE.IsNSW,
                                  /* IsNegated */ false};
        Decomposed.VarIndices.push_back(Entry);
      }
    }

    // Take care of wrap-arounds
    if (GepHasConstantOffset)
      adjustToIndexSize(Decomposed.Offset, IndexSize);

    // Analyze the base pointer next.
    V = GEPOp->getOperand(0);
  } while (--MaxLookup);

  // If the chain of expressions is too deep, just return early.
  Decomposed.Base = V;
  SearchLimitReached++;
  return Decomposed;
}
7470b57cec5SDimitry Andric 
748bdd1243dSDimitry Andric ModRefInfo BasicAAResult::getModRefInfoMask(const MemoryLocation &Loc,
749bdd1243dSDimitry Andric                                             AAQueryInfo &AAQI,
750bdd1243dSDimitry Andric                                             bool IgnoreLocals) {
7510b57cec5SDimitry Andric   assert(Visited.empty() && "Visited must be cleared after use!");
752bdd1243dSDimitry Andric   auto _ = make_scope_exit([&] { Visited.clear(); });
7530b57cec5SDimitry Andric 
7540b57cec5SDimitry Andric   unsigned MaxLookup = 8;
7550b57cec5SDimitry Andric   SmallVector<const Value *, 16> Worklist;
7560b57cec5SDimitry Andric   Worklist.push_back(Loc.Ptr);
757bdd1243dSDimitry Andric   ModRefInfo Result = ModRefInfo::NoModRef;
758bdd1243dSDimitry Andric 
7590b57cec5SDimitry Andric   do {
760e8d8bef9SDimitry Andric     const Value *V = getUnderlyingObject(Worklist.pop_back_val());
761bdd1243dSDimitry Andric     if (!Visited.insert(V).second)
7620b57cec5SDimitry Andric       continue;
7630b57cec5SDimitry Andric 
764bdd1243dSDimitry Andric     // Ignore allocas if we were instructed to do so.
765bdd1243dSDimitry Andric     if (IgnoreLocals && isa<AllocaInst>(V))
766bdd1243dSDimitry Andric       continue;
767bdd1243dSDimitry Andric 
768bdd1243dSDimitry Andric     // If the location points to memory that is known to be invariant for
769bdd1243dSDimitry Andric     // the life of the underlying SSA value, then we can exclude Mod from
770bdd1243dSDimitry Andric     // the set of valid memory effects.
771bdd1243dSDimitry Andric     //
772bdd1243dSDimitry Andric     // An argument that is marked readonly and noalias is known to be
773bdd1243dSDimitry Andric     // invariant while that function is executing.
774bdd1243dSDimitry Andric     if (const Argument *Arg = dyn_cast<Argument>(V)) {
775bdd1243dSDimitry Andric       if (Arg->hasNoAliasAttr() && Arg->onlyReadsMemory()) {
776bdd1243dSDimitry Andric         Result |= ModRefInfo::Ref;
777bdd1243dSDimitry Andric         continue;
778bdd1243dSDimitry Andric       }
779bdd1243dSDimitry Andric     }
780bdd1243dSDimitry Andric 
781bdd1243dSDimitry Andric     // A global constant can't be mutated.
7820b57cec5SDimitry Andric     if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(V)) {
7830b57cec5SDimitry Andric       // Note: this doesn't require GV to be "ODR" because it isn't legal for a
7840b57cec5SDimitry Andric       // global to be marked constant in some modules and non-constant in
7850b57cec5SDimitry Andric       // others.  GV may even be a declaration, not a definition.
786bdd1243dSDimitry Andric       if (!GV->isConstant())
7875f757f3fSDimitry Andric         return ModRefInfo::ModRef;
7880b57cec5SDimitry Andric       continue;
7890b57cec5SDimitry Andric     }
7900b57cec5SDimitry Andric 
7910b57cec5SDimitry Andric     // If both select values point to local memory, then so does the select.
7920b57cec5SDimitry Andric     if (const SelectInst *SI = dyn_cast<SelectInst>(V)) {
7930b57cec5SDimitry Andric       Worklist.push_back(SI->getTrueValue());
7940b57cec5SDimitry Andric       Worklist.push_back(SI->getFalseValue());
7950b57cec5SDimitry Andric       continue;
7960b57cec5SDimitry Andric     }
7970b57cec5SDimitry Andric 
7980b57cec5SDimitry Andric     // If all values incoming to a phi node point to local memory, then so does
7990b57cec5SDimitry Andric     // the phi.
8000b57cec5SDimitry Andric     if (const PHINode *PN = dyn_cast<PHINode>(V)) {
8010b57cec5SDimitry Andric       // Don't bother inspecting phi nodes with many operands.
802bdd1243dSDimitry Andric       if (PN->getNumIncomingValues() > MaxLookup)
8035f757f3fSDimitry Andric         return ModRefInfo::ModRef;
804e8d8bef9SDimitry Andric       append_range(Worklist, PN->incoming_values());
8050b57cec5SDimitry Andric       continue;
8060b57cec5SDimitry Andric     }
8070b57cec5SDimitry Andric 
8080b57cec5SDimitry Andric     // Otherwise be conservative.
8095f757f3fSDimitry Andric     return ModRefInfo::ModRef;
8100b57cec5SDimitry Andric   } while (!Worklist.empty() && --MaxLookup);
8110b57cec5SDimitry Andric 
812bdd1243dSDimitry Andric   // If we hit the maximum number of instructions to examine, be conservative.
813bdd1243dSDimitry Andric   if (!Worklist.empty())
8145f757f3fSDimitry Andric     return ModRefInfo::ModRef;
815bdd1243dSDimitry Andric 
816bdd1243dSDimitry Andric   return Result;
8170b57cec5SDimitry Andric }
8180b57cec5SDimitry Andric 
819fe6060f1SDimitry Andric static bool isIntrinsicCall(const CallBase *Call, Intrinsic::ID IID) {
820fe6060f1SDimitry Andric   const IntrinsicInst *II = dyn_cast<IntrinsicInst>(Call);
821fe6060f1SDimitry Andric   return II && II->getIntrinsicID() == IID;
822fe6060f1SDimitry Andric }
823fe6060f1SDimitry Andric 
8240b57cec5SDimitry Andric /// Returns the behavior when calling the given call site.
825bdd1243dSDimitry Andric MemoryEffects BasicAAResult::getMemoryEffects(const CallBase *Call,
826bdd1243dSDimitry Andric                                               AAQueryInfo &AAQI) {
827bdd1243dSDimitry Andric   MemoryEffects Min = Call->getAttributes().getMemoryEffects();
8280b57cec5SDimitry Andric 
829bdd1243dSDimitry Andric   if (const Function *F = dyn_cast<Function>(Call->getCalledOperand())) {
830bdd1243dSDimitry Andric     MemoryEffects FuncME = AAQI.AAR.getMemoryEffects(F);
831bdd1243dSDimitry Andric     // Operand bundles on the call may also read or write memory, in addition
832bdd1243dSDimitry Andric     // to the behavior of the called function.
833bdd1243dSDimitry Andric     if (Call->hasReadingOperandBundles())
834bdd1243dSDimitry Andric       FuncME |= MemoryEffects::readOnly();
835bdd1243dSDimitry Andric     if (Call->hasClobberingOperandBundles())
836bdd1243dSDimitry Andric       FuncME |= MemoryEffects::writeOnly();
837bdd1243dSDimitry Andric     Min &= FuncME;
838bdd1243dSDimitry Andric   }
8390b57cec5SDimitry Andric 
8400b57cec5SDimitry Andric   return Min;
8410b57cec5SDimitry Andric }
8420b57cec5SDimitry Andric 
8430b57cec5SDimitry Andric /// Returns the behavior when calling the given function. For use when the call
8440b57cec5SDimitry Andric /// site is not known.
845bdd1243dSDimitry Andric MemoryEffects BasicAAResult::getMemoryEffects(const Function *F) {
846bdd1243dSDimitry Andric   switch (F->getIntrinsicID()) {
847bdd1243dSDimitry Andric   case Intrinsic::experimental_guard:
848bdd1243dSDimitry Andric   case Intrinsic::experimental_deoptimize:
849bdd1243dSDimitry Andric     // These intrinsics can read arbitrary memory, and additionally modref
850bdd1243dSDimitry Andric     // inaccessible memory to model control dependence.
851bdd1243dSDimitry Andric     return MemoryEffects::readOnly() |
852bdd1243dSDimitry Andric            MemoryEffects::inaccessibleMemOnly(ModRefInfo::ModRef);
8530b57cec5SDimitry Andric   }
8540b57cec5SDimitry Andric 
855bdd1243dSDimitry Andric   return F->getMemoryEffects();
8560b57cec5SDimitry Andric }
8570b57cec5SDimitry Andric 
8580b57cec5SDimitry Andric ModRefInfo BasicAAResult::getArgModRefInfo(const CallBase *Call,
8590b57cec5SDimitry Andric                                            unsigned ArgIdx) {
860bdd1243dSDimitry Andric   if (Call->paramHasAttr(ArgIdx, Attribute::WriteOnly))
8610b57cec5SDimitry Andric     return ModRefInfo::Mod;
8620b57cec5SDimitry Andric 
8630b57cec5SDimitry Andric   if (Call->paramHasAttr(ArgIdx, Attribute::ReadOnly))
8640b57cec5SDimitry Andric     return ModRefInfo::Ref;
8650b57cec5SDimitry Andric 
8660b57cec5SDimitry Andric   if (Call->paramHasAttr(ArgIdx, Attribute::ReadNone))
8670b57cec5SDimitry Andric     return ModRefInfo::NoModRef;
8680b57cec5SDimitry Andric 
8695f757f3fSDimitry Andric   return ModRefInfo::ModRef;
8700b57cec5SDimitry Andric }
8710b57cec5SDimitry Andric 
8720b57cec5SDimitry Andric #ifndef NDEBUG
8730b57cec5SDimitry Andric static const Function *getParent(const Value *V) {
8740b57cec5SDimitry Andric   if (const Instruction *inst = dyn_cast<Instruction>(V)) {
8750b57cec5SDimitry Andric     if (!inst->getParent())
8760b57cec5SDimitry Andric       return nullptr;
8770b57cec5SDimitry Andric     return inst->getParent()->getParent();
8780b57cec5SDimitry Andric   }
8790b57cec5SDimitry Andric 
8800b57cec5SDimitry Andric   if (const Argument *arg = dyn_cast<Argument>(V))
8810b57cec5SDimitry Andric     return arg->getParent();
8820b57cec5SDimitry Andric 
8830b57cec5SDimitry Andric   return nullptr;
8840b57cec5SDimitry Andric }
8850b57cec5SDimitry Andric 
8860b57cec5SDimitry Andric static bool notDifferentParent(const Value *O1, const Value *O2) {
8870b57cec5SDimitry Andric 
8880b57cec5SDimitry Andric   const Function *F1 = getParent(O1);
8890b57cec5SDimitry Andric   const Function *F2 = getParent(O2);
8900b57cec5SDimitry Andric 
8910b57cec5SDimitry Andric   return !F1 || !F2 || F1 == F2;
8920b57cec5SDimitry Andric }
8930b57cec5SDimitry Andric #endif
8940b57cec5SDimitry Andric 
/// Entry point for alias queries: BasicAA is a function-local analysis, so
/// both locations must belong to the same function (or to no function).
AliasResult BasicAAResult::alias(const MemoryLocation &LocA,
                                 const MemoryLocation &LocB, AAQueryInfo &AAQI,
                                 const Instruction *CtxI) {
  assert(notDifferentParent(LocA.Ptr, LocB.Ptr) &&
         "BasicAliasAnalysis doesn't support interprocedural queries.");
  // Delegate to the recursive worker, forwarding both pointers and sizes
  // along with the optional context instruction.
  return aliasCheck(LocA.Ptr, LocA.Size, LocB.Ptr, LocB.Size, AAQI, CtxI);
}
9020b57cec5SDimitry Andric 
9030b57cec5SDimitry Andric /// Checks to see if the specified callsite can clobber the specified memory
9040b57cec5SDimitry Andric /// object.
9050b57cec5SDimitry Andric ///
9060b57cec5SDimitry Andric /// Since we only look at local properties of this function, we really can't
9070b57cec5SDimitry Andric /// say much about this query.  We do, however, use simple "address taken"
9080b57cec5SDimitry Andric /// analysis on local objects.
9090b57cec5SDimitry Andric ModRefInfo BasicAAResult::getModRefInfo(const CallBase *Call,
9100b57cec5SDimitry Andric                                         const MemoryLocation &Loc,
9110b57cec5SDimitry Andric                                         AAQueryInfo &AAQI) {
9120b57cec5SDimitry Andric   assert(notDifferentParent(Call, Loc.Ptr) &&
9130b57cec5SDimitry Andric          "AliasAnalysis query involving multiple functions!");
9140b57cec5SDimitry Andric 
915e8d8bef9SDimitry Andric   const Value *Object = getUnderlyingObject(Loc.Ptr);
9160b57cec5SDimitry Andric 
9170b57cec5SDimitry Andric   // Calls marked 'tail' cannot read or write allocas from the current frame
9180b57cec5SDimitry Andric   // because the current frame might be destroyed by the time they run. However,
9190b57cec5SDimitry Andric   // a tail call may use an alloca with byval. Calling with byval copies the
9200b57cec5SDimitry Andric   // contents of the alloca into argument registers or stack slots, so there is
9210b57cec5SDimitry Andric   // no lifetime issue.
9220b57cec5SDimitry Andric   if (isa<AllocaInst>(Object))
9230b57cec5SDimitry Andric     if (const CallInst *CI = dyn_cast<CallInst>(Call))
9240b57cec5SDimitry Andric       if (CI->isTailCall() &&
9250b57cec5SDimitry Andric           !CI->getAttributes().hasAttrSomewhere(Attribute::ByVal))
9260b57cec5SDimitry Andric         return ModRefInfo::NoModRef;
9270b57cec5SDimitry Andric 
9280b57cec5SDimitry Andric   // Stack restore is able to modify unescaped dynamic allocas. Assume it may
9290b57cec5SDimitry Andric   // modify them even though the alloca is not escaped.
9300b57cec5SDimitry Andric   if (auto *AI = dyn_cast<AllocaInst>(Object))
9310b57cec5SDimitry Andric     if (!AI->isStaticAlloca() && isIntrinsicCall(Call, Intrinsic::stackrestore))
9320b57cec5SDimitry Andric       return ModRefInfo::Mod;
9330b57cec5SDimitry Andric 
93406c3fb27SDimitry Andric   // A call can access a locally allocated object either because it is passed as
93506c3fb27SDimitry Andric   // an argument to the call, or because it has escaped prior to the call.
93606c3fb27SDimitry Andric   //
93706c3fb27SDimitry Andric   // Make sure the object has not escaped here, and then check that none of the
93806c3fb27SDimitry Andric   // call arguments alias the object below.
9390b57cec5SDimitry Andric   if (!isa<Constant>(Object) && Call != Object &&
9405f757f3fSDimitry Andric       AAQI.CI->isNotCapturedBefore(Object, Call, /*OrAt*/ false)) {
9410b57cec5SDimitry Andric 
9420b57cec5SDimitry Andric     // Optimistically assume that call doesn't touch Object and check this
9430b57cec5SDimitry Andric     // assumption in the following loop.
9440b57cec5SDimitry Andric     ModRefInfo Result = ModRefInfo::NoModRef;
9450b57cec5SDimitry Andric 
9460b57cec5SDimitry Andric     unsigned OperandNo = 0;
9470b57cec5SDimitry Andric     for (auto CI = Call->data_operands_begin(), CE = Call->data_operands_end();
9480b57cec5SDimitry Andric          CI != CE; ++CI, ++OperandNo) {
94906c3fb27SDimitry Andric       if (!(*CI)->getType()->isPointerTy())
9500b57cec5SDimitry Andric         continue;
9510b57cec5SDimitry Andric 
9520b57cec5SDimitry Andric       // Call doesn't access memory through this operand, so we don't care
9530b57cec5SDimitry Andric       // if it aliases with Object.
9540b57cec5SDimitry Andric       if (Call->doesNotAccessMemory(OperandNo))
9550b57cec5SDimitry Andric         continue;
9560b57cec5SDimitry Andric 
9570b57cec5SDimitry Andric       // If this is a no-capture pointer argument, see if we can tell that it
9580b57cec5SDimitry Andric       // is impossible to alias the pointer we're checking.
959bdd1243dSDimitry Andric       AliasResult AR =
960bdd1243dSDimitry Andric           AAQI.AAR.alias(MemoryLocation::getBeforeOrAfter(*CI),
961e8d8bef9SDimitry Andric                          MemoryLocation::getBeforeOrAfter(Object), AAQI);
9620b57cec5SDimitry Andric       // Operand doesn't alias 'Object', continue looking for other aliases
963fe6060f1SDimitry Andric       if (AR == AliasResult::NoAlias)
9640b57cec5SDimitry Andric         continue;
9650b57cec5SDimitry Andric       // Operand aliases 'Object', but call doesn't modify it. Strengthen
9660b57cec5SDimitry Andric       // initial assumption and keep looking in case if there are more aliases.
9670b57cec5SDimitry Andric       if (Call->onlyReadsMemory(OperandNo)) {
968bdd1243dSDimitry Andric         Result |= ModRefInfo::Ref;
9690b57cec5SDimitry Andric         continue;
9700b57cec5SDimitry Andric       }
9710b57cec5SDimitry Andric       // Operand aliases 'Object' but call only writes into it.
97204eeddc0SDimitry Andric       if (Call->onlyWritesMemory(OperandNo)) {
973bdd1243dSDimitry Andric         Result |= ModRefInfo::Mod;
9740b57cec5SDimitry Andric         continue;
9750b57cec5SDimitry Andric       }
9760b57cec5SDimitry Andric       // This operand aliases 'Object' and call reads and writes into it.
9770b57cec5SDimitry Andric       // Setting ModRef will not yield an early return below, MustAlias is not
9780b57cec5SDimitry Andric       // used further.
9790b57cec5SDimitry Andric       Result = ModRefInfo::ModRef;
9800b57cec5SDimitry Andric       break;
9810b57cec5SDimitry Andric     }
9820b57cec5SDimitry Andric 
9830b57cec5SDimitry Andric     // Early return if we improved mod ref information
984bdd1243dSDimitry Andric     if (!isModAndRefSet(Result))
985bdd1243dSDimitry Andric       return Result;
9860b57cec5SDimitry Andric   }
9870b57cec5SDimitry Andric 
9885ffd83dbSDimitry Andric   // If the call is malloc/calloc like, we can assume that it doesn't
9890b57cec5SDimitry Andric   // modify any IR visible value.  This is only valid because we assume these
9900b57cec5SDimitry Andric   // routines do not read values visible in the IR.  TODO: Consider special
9910b57cec5SDimitry Andric   // casing realloc and strdup routines which access only their arguments as
9920b57cec5SDimitry Andric   // well.  Or alternatively, replace all of this with inaccessiblememonly once
9930b57cec5SDimitry Andric   // that's implemented fully.
9940b57cec5SDimitry Andric   if (isMallocOrCallocLikeFn(Call, &TLI)) {
9950b57cec5SDimitry Andric     // Be conservative if the accessed pointer may alias the allocation -
9960b57cec5SDimitry Andric     // fallback to the generic handling below.
997bdd1243dSDimitry Andric     if (AAQI.AAR.alias(MemoryLocation::getBeforeOrAfter(Call), Loc, AAQI) ==
998bdd1243dSDimitry Andric         AliasResult::NoAlias)
9990b57cec5SDimitry Andric       return ModRefInfo::NoModRef;
10000b57cec5SDimitry Andric   }
10010b57cec5SDimitry Andric 
10020b57cec5SDimitry Andric   // Like assumes, invariant.start intrinsics were also marked as arbitrarily
10030b57cec5SDimitry Andric   // writing so that proper control dependencies are maintained but they never
10040b57cec5SDimitry Andric   // mod any particular memory location visible to the IR.
10050b57cec5SDimitry Andric   // *Unlike* assumes (which are now modeled as NoModRef), invariant.start
10060b57cec5SDimitry Andric   // intrinsic is now modeled as reading memory. This prevents hoisting the
10070b57cec5SDimitry Andric   // invariant.start intrinsic over stores. Consider:
10080b57cec5SDimitry Andric   // *ptr = 40;
10090b57cec5SDimitry Andric   // *ptr = 50;
10100b57cec5SDimitry Andric   // invariant_start(ptr)
10110b57cec5SDimitry Andric   // int val = *ptr;
10120b57cec5SDimitry Andric   // print(val);
10130b57cec5SDimitry Andric   //
10140b57cec5SDimitry Andric   // This cannot be transformed to:
10150b57cec5SDimitry Andric   //
10160b57cec5SDimitry Andric   // *ptr = 40;
10170b57cec5SDimitry Andric   // invariant_start(ptr)
10180b57cec5SDimitry Andric   // *ptr = 50;
10190b57cec5SDimitry Andric   // int val = *ptr;
10200b57cec5SDimitry Andric   // print(val);
10210b57cec5SDimitry Andric   //
10220b57cec5SDimitry Andric   // The transformation will cause the second store to be ignored (based on
10230b57cec5SDimitry Andric   // rules of invariant.start)  and print 40, while the first program always
10240b57cec5SDimitry Andric   // prints 50.
10250b57cec5SDimitry Andric   if (isIntrinsicCall(Call, Intrinsic::invariant_start))
10260b57cec5SDimitry Andric     return ModRefInfo::Ref;
10270b57cec5SDimitry Andric 
10285f757f3fSDimitry Andric   // Be conservative.
10295f757f3fSDimitry Andric   return ModRefInfo::ModRef;
10300b57cec5SDimitry Andric }
10310b57cec5SDimitry Andric 
10320b57cec5SDimitry Andric ModRefInfo BasicAAResult::getModRefInfo(const CallBase *Call1,
10330b57cec5SDimitry Andric                                         const CallBase *Call2,
10340b57cec5SDimitry Andric                                         AAQueryInfo &AAQI) {
1035fe6060f1SDimitry Andric   // Guard intrinsics are marked as arbitrarily writing so that proper control
1036fe6060f1SDimitry Andric   // dependencies are maintained but they never mods any particular memory
1037fe6060f1SDimitry Andric   // location.
10380b57cec5SDimitry Andric   //
10390b57cec5SDimitry Andric   // *Unlike* assumes, guard intrinsics are modeled as reading memory since the
10400b57cec5SDimitry Andric   // heap state at the point the guard is issued needs to be consistent in case
10410b57cec5SDimitry Andric   // the guard invokes the "deopt" continuation.
10420b57cec5SDimitry Andric 
10430b57cec5SDimitry Andric   // NB! This function is *not* commutative, so we special case two
10440b57cec5SDimitry Andric   // possibilities for guard intrinsics.
10450b57cec5SDimitry Andric 
10460b57cec5SDimitry Andric   if (isIntrinsicCall(Call1, Intrinsic::experimental_guard))
1047bdd1243dSDimitry Andric     return isModSet(getMemoryEffects(Call2, AAQI).getModRef())
10480b57cec5SDimitry Andric                ? ModRefInfo::Ref
10490b57cec5SDimitry Andric                : ModRefInfo::NoModRef;
10500b57cec5SDimitry Andric 
10510b57cec5SDimitry Andric   if (isIntrinsicCall(Call2, Intrinsic::experimental_guard))
1052bdd1243dSDimitry Andric     return isModSet(getMemoryEffects(Call1, AAQI).getModRef())
10530b57cec5SDimitry Andric                ? ModRefInfo::Mod
10540b57cec5SDimitry Andric                : ModRefInfo::NoModRef;
10550b57cec5SDimitry Andric 
10565f757f3fSDimitry Andric   // Be conservative.
10575f757f3fSDimitry Andric   return ModRefInfo::ModRef;
10580b57cec5SDimitry Andric }
10590b57cec5SDimitry Andric 
1060fe6060f1SDimitry Andric /// Return true if we know V to the base address of the corresponding memory
1061fe6060f1SDimitry Andric /// object.  This implies that any address less than V must be out of bounds
1062fe6060f1SDimitry Andric /// for the underlying object.  Note that just being isIdentifiedObject() is
1063fe6060f1SDimitry Andric /// not enough - For example, a negative offset from a noalias argument or call
1064fe6060f1SDimitry Andric /// can be inbounds w.r.t the actual underlying object.
1065fe6060f1SDimitry Andric static bool isBaseOfObject(const Value *V) {
1066fe6060f1SDimitry Andric   // TODO: We can handle other cases here
1067fe6060f1SDimitry Andric   // 1) For GC languages, arguments to functions are often required to be
1068fe6060f1SDimitry Andric   //    base pointers.
1069fe6060f1SDimitry Andric   // 2) Result of allocation routines are often base pointers.  Leverage TLI.
1070fe6060f1SDimitry Andric   return (isa<AllocaInst>(V) || isa<GlobalVariable>(V));
10710b57cec5SDimitry Andric }
10720b57cec5SDimitry Andric 
/// Provides a bunch of ad-hoc rules to disambiguate a GEP instruction against
/// another pointer.
///
/// We know that V1 is a GEP, but we don't know anything about V2.
/// UnderlyingV1 is getUnderlyingObject(GEP1), UnderlyingV2 is the same for
/// V2.  Both underlying objects are passed in so they do not have to be
/// recomputed here; sizes may be unknown (!hasValue()) or scalable.
AliasResult BasicAAResult::aliasGEP(
    const GEPOperator *GEP1, LocationSize V1Size,
    const Value *V2, LocationSize V2Size,
    const Value *UnderlyingV1, const Value *UnderlyingV2, AAQueryInfo &AAQI) {
  if (!V1Size.hasValue() && !V2Size.hasValue()) {
    // TODO: This limitation exists for compile-time reasons. Relax it if we
    // can avoid exponential pathological cases.
    if (!isa<GEPOperator>(V2))
      return AliasResult::MayAlias;

    // If both accesses have unknown size, we can only check whether the base
    // objects don't alias.
    AliasResult BaseAlias =
        AAQI.AAR.alias(MemoryLocation::getBeforeOrAfter(UnderlyingV1),
                       MemoryLocation::getBeforeOrAfter(UnderlyingV2), AAQI);
    return BaseAlias == AliasResult::NoAlias ? AliasResult::NoAlias
                                             : AliasResult::MayAlias;
  }

  DominatorTree *DT = getDT(AAQI);
  DecomposedGEP DecompGEP1 = DecomposeGEPExpression(GEP1, DL, &AC, DT);
  DecomposedGEP DecompGEP2 = DecomposeGEPExpression(V2, DL, &AC, DT);

  // Bail if we were not able to decompose anything.
  if (DecompGEP1.Base == GEP1 && DecompGEP2.Base == V2)
    return AliasResult::MayAlias;

  // Subtract the GEP2 pointer from the GEP1 pointer to find out their
  // symbolic difference.  After this, DecompGEP1 holds the combined
  // offset/indices of GEP1 - GEP2.
  subtractDecomposedGEPs(DecompGEP1, DecompGEP2, AAQI);

  // If an inbounds GEP would have to start from an out of bounds address
  // for the two to alias, then we can assume noalias.
  // TODO: Remove !isScalable() once BasicAA fully support scalable location
  // size
  if (*DecompGEP1.InBounds && DecompGEP1.VarIndices.empty() &&
      V2Size.hasValue() && !V2Size.isScalable() &&
      DecompGEP1.Offset.sge(V2Size.getValue()) &&
      isBaseOfObject(DecompGEP2.Base))
    return AliasResult::NoAlias;

  if (isa<GEPOperator>(V2)) {
    // Symmetric case to above.
    if (*DecompGEP2.InBounds && DecompGEP1.VarIndices.empty() &&
        V1Size.hasValue() && !V1Size.isScalable() &&
        DecompGEP1.Offset.sle(-V1Size.getValue()) &&
        isBaseOfObject(DecompGEP1.Base))
      return AliasResult::NoAlias;
  }

  // For GEPs with identical offsets, we can preserve the size and AAInfo
  // when performing the alias check on the underlying objects.
  if (DecompGEP1.Offset == 0 && DecompGEP1.VarIndices.empty())
    return AAQI.AAR.alias(MemoryLocation(DecompGEP1.Base, V1Size),
                          MemoryLocation(DecompGEP2.Base, V2Size), AAQI);

  // Do the base pointers alias?
  AliasResult BaseAlias =
      AAQI.AAR.alias(MemoryLocation::getBeforeOrAfter(DecompGEP1.Base),
                     MemoryLocation::getBeforeOrAfter(DecompGEP2.Base), AAQI);

  // If we get a No or May, then return it immediately, no amount of analysis
  // will improve this situation.  Everything below assumes MustAlias bases.
  if (BaseAlias != AliasResult::MustAlias) {
    assert(BaseAlias == AliasResult::NoAlias ||
           BaseAlias == AliasResult::MayAlias);
    return BaseAlias;
  }

  // If there is a constant difference between the pointers, but the difference
  // is less than the size of the associated memory object, then we know
  // that the objects are partially overlapping.  If the difference is
  // greater, we know they do not overlap.
  if (DecompGEP1.VarIndices.empty()) {
    APInt &Off = DecompGEP1.Offset;

    // Initialize for Off >= 0 (V2 <= GEP1) case.
    // NOTE: LeftPtr/RightPtr are not referenced after the swap below; only
    // the sizes and the Swapped flag feed the logic that follows.
    const Value *LeftPtr = V2;
    const Value *RightPtr = GEP1;
    LocationSize VLeftSize = V2Size;
    LocationSize VRightSize = V1Size;
    const bool Swapped = Off.isNegative();

    if (Swapped) {
      // Swap if we have the situation where:
      // +                +
      // | BaseOffset     |
      // ---------------->|
      // |-->V1Size       |-------> V2Size
      // GEP1             V2
      std::swap(LeftPtr, RightPtr);
      std::swap(VLeftSize, VRightSize);
      Off = -Off;
    }

    if (!VLeftSize.hasValue())
      return AliasResult::MayAlias;

    const TypeSize LSize = VLeftSize.getValue();
    if (!LSize.isScalable()) {
      if (Off.ult(LSize)) {
        // Conservatively drop processing if a phi was visited and/or offset is
        // too big.
        AliasResult AR = AliasResult::PartialAlias;
        if (VRightSize.hasValue() && !VRightSize.isScalable() &&
            Off.ule(INT32_MAX) && (Off + VRightSize.getValue()).ule(LSize)) {
          // Memory referenced by right pointer is nested. Save the offset in
          // cache. Note that originally offset estimated as GEP1-V2, but
          // AliasResult contains the shift that represents GEP1+Offset=V2.
          AR.setOffset(-Off.getSExtValue());
          AR.swap(Swapped);
        }
        return AR;
      }
      return AliasResult::NoAlias;
    } else {
      // We can use the getVScaleRange to prove that Off >= (CR.upper * LSize).
      ConstantRange CR = getVScaleRange(&F, Off.getBitWidth());
      bool Overflow;
      APInt UpperRange = CR.getUnsignedMax().umul_ov(
          APInt(Off.getBitWidth(), LSize.getKnownMinValue()), Overflow);
      if (!Overflow && Off.uge(UpperRange))
        return AliasResult::NoAlias;
    }
  }

  // VScale Alias Analysis - Given one scalable offset between accesses and a
  // scalable typesize, we can divide each side by vscale, treating both values
  // as a constant. We prove that Offset/vscale >= TypeSize/vscale.
  if (DecompGEP1.VarIndices.size() == 1 &&
      DecompGEP1.VarIndices[0].Val.TruncBits == 0 &&
      DecompGEP1.Offset.isZero() &&
      PatternMatch::match(DecompGEP1.VarIndices[0].Val.V,
                          PatternMatch::m_VScale())) {
    const VariableGEPIndex &ScalableVar = DecompGEP1.VarIndices[0];
    APInt Scale =
        ScalableVar.IsNegated ? -ScalableVar.Scale : ScalableVar.Scale;
    LocationSize VLeftSize = Scale.isNegative() ? V1Size : V2Size;

    // Check if the offset is known to not overflow, if it does then attempt to
    // prove it with the known values of vscale_range.
    bool Overflows = !DecompGEP1.VarIndices[0].IsNSW;
    if (Overflows) {
      ConstantRange CR = getVScaleRange(&F, Scale.getBitWidth());
      (void)CR.getSignedMax().smul_ov(Scale, Overflows);
    }

    if (!Overflows) {
      // Note that we do not check that the typesize is scalable, as vscale >= 1
      // so noalias still holds so long as the dependency distance is at least
      // as big as the typesize.
      if (VLeftSize.hasValue() &&
          Scale.abs().uge(VLeftSize.getValue().getKnownMinValue()))
        return AliasResult::NoAlias;
    }
  }

  // Bail on analysing scalable LocationSize
  if (V1Size.isScalable() || V2Size.isScalable())
    return AliasResult::MayAlias;

  // We need to know both access sizes for all the following heuristics.
  if (!V1Size.hasValue() || !V2Size.hasValue())
    return AliasResult::MayAlias;

  // Accumulate the GCD of all index scales (GCD) and a conservative signed
  // range of the total variable contribution plus constant offset
  // (OffsetRange), for the modular and range-based checks below.
  APInt GCD;
  ConstantRange OffsetRange = ConstantRange(DecompGEP1.Offset);
  for (unsigned i = 0, e = DecompGEP1.VarIndices.size(); i != e; ++i) {
    const VariableGEPIndex &Index = DecompGEP1.VarIndices[i];
    const APInt &Scale = Index.Scale;
    APInt ScaleForGCD = Scale;
    // Without NSW, only the low bits up to the lowest set bit of Scale are
    // trustworthy for the GCD argument.
    if (!Index.IsNSW)
      ScaleForGCD =
          APInt::getOneBitSet(Scale.getBitWidth(), Scale.countr_zero());

    if (i == 0)
      GCD = ScaleForGCD.abs();
    else
      GCD = APIntOps::GreatestCommonDivisor(GCD, ScaleForGCD.abs());

    // Range of the index value, refined by known bits, then scaled.
    ConstantRange CR = computeConstantRange(Index.Val.V, /* ForSigned */ false,
                                            true, &AC, Index.CxtI);
    KnownBits Known =
        computeKnownBits(Index.Val.V, DL, 0, &AC, Index.CxtI, DT);
    CR = CR.intersectWith(
        ConstantRange::fromKnownBits(Known, /* Signed */ true),
        ConstantRange::Signed);
    CR = Index.Val.evaluateWith(CR).sextOrTrunc(OffsetRange.getBitWidth());

    assert(OffsetRange.getBitWidth() == Scale.getBitWidth() &&
           "Bit widths are normalized to MaxIndexSize");
    if (Index.IsNSW)
      CR = CR.smul_sat(ConstantRange(Scale));
    else
      CR = CR.smul_fast(ConstantRange(Scale));

    if (Index.IsNegated)
      OffsetRange = OffsetRange.sub(CR);
    else
      OffsetRange = OffsetRange.add(CR);
  }

  // We now have accesses at two offsets from the same base:
  //  1. (...)*GCD + DecompGEP1.Offset with size V1Size
  //  2. 0 with size V2Size
  // Using arithmetic modulo GCD, the accesses are at
  // [ModOffset..ModOffset+V1Size) and [0..V2Size). If the first access fits
  // into the range [V2Size..GCD), then we know they cannot overlap.
  APInt ModOffset = DecompGEP1.Offset.srem(GCD);
  if (ModOffset.isNegative())
    ModOffset += GCD; // We want mod, not rem.
  if (ModOffset.uge(V2Size.getValue()) &&
      (GCD - ModOffset).uge(V1Size.getValue()))
    return AliasResult::NoAlias;

  // Compute ranges of potentially accessed bytes for both accesses. If the
  // intersection is empty, there can be no overlap.
  unsigned BW = OffsetRange.getBitWidth();
  ConstantRange Range1 = OffsetRange.add(
      ConstantRange(APInt(BW, 0), APInt(BW, V1Size.getValue())));
  ConstantRange Range2 =
      ConstantRange(APInt(BW, 0), APInt(BW, V2Size.getValue()));
  if (Range1.intersectWith(Range2).isEmptySet())
    return AliasResult::NoAlias;

  // Try to determine the range of values for VarIndex such that
  // VarIndex <= -MinAbsVarIndex || MinAbsVarIndex <= VarIndex.
  std::optional<APInt> MinAbsVarIndex;
  if (DecompGEP1.VarIndices.size() == 1) {
    // VarIndex = Scale*V.
    const VariableGEPIndex &Var = DecompGEP1.VarIndices[0];
    if (Var.Val.TruncBits == 0 &&
        isKnownNonZero(Var.Val.V, SimplifyQuery(DL, DT, &AC, Var.CxtI))) {
      // Check if abs(V*Scale) >= abs(Scale) holds in the presence of
      // potentially wrapping math.
      auto MultiplyByScaleNoWrap = [](const VariableGEPIndex &Var) {
        if (Var.IsNSW)
          return true;

        int ValOrigBW = Var.Val.V->getType()->getPrimitiveSizeInBits();
        // If Scale is small enough so that abs(V*Scale) >= abs(Scale) holds.
        // The max value of abs(V) is 2^ValOrigBW - 1. Multiplying with a
        // constant smaller than 2^(bitwidth(Val) - ValOrigBW) won't wrap.
        int MaxScaleValueBW = Var.Val.getBitWidth() - ValOrigBW;
        if (MaxScaleValueBW <= 0)
          return false;
        return Var.Scale.ule(
            APInt::getMaxValue(MaxScaleValueBW).zext(Var.Scale.getBitWidth()));
      };
      // Refine MinAbsVarIndex, if abs(Scale*V) >= abs(Scale) holds in the
      // presence of potentially wrapping math.
      if (MultiplyByScaleNoWrap(Var)) {
        // If V != 0 then abs(VarIndex) >= abs(Scale).
        MinAbsVarIndex = Var.Scale.abs();
      }
    }
  } else if (DecompGEP1.VarIndices.size() == 2) {
    // VarIndex = Scale*V0 + (-Scale)*V1.
    // If V0 != V1 then abs(VarIndex) >= abs(Scale).
    // Check that MayBeCrossIteration is false, to avoid reasoning about
    // inequality of values across loop iterations.
    const VariableGEPIndex &Var0 = DecompGEP1.VarIndices[0];
    const VariableGEPIndex &Var1 = DecompGEP1.VarIndices[1];
    if (Var0.hasNegatedScaleOf(Var1) && Var0.Val.TruncBits == 0 &&
        Var0.Val.hasSameCastsAs(Var1.Val) && !AAQI.MayBeCrossIteration &&
        isKnownNonEqual(Var0.Val.V, Var1.Val.V, DL, &AC, /* CxtI */ nullptr,
                        DT))
      MinAbsVarIndex = Var0.Scale.abs();
  }

  if (MinAbsVarIndex) {
    // The constant offset will have added at least +/-MinAbsVarIndex to it.
    APInt OffsetLo = DecompGEP1.Offset - *MinAbsVarIndex;
    APInt OffsetHi = DecompGEP1.Offset + *MinAbsVarIndex;
    // We know that Offset <= OffsetLo || Offset >= OffsetHi
    if (OffsetLo.isNegative() && (-OffsetLo).uge(V1Size.getValue()) &&
        OffsetHi.isNonNegative() && OffsetHi.uge(V2Size.getValue()))
      return AliasResult::NoAlias;
  }

  if (constantOffsetHeuristic(DecompGEP1, V1Size, V2Size, &AC, DT, AAQI))
    return AliasResult::NoAlias;

  // Statically, we can see that the base objects are the same, but the
  // pointers have dynamic offsets which we can't resolve. And none of our
  // little tricks above worked.
  return AliasResult::MayAlias;
}
13670b57cec5SDimitry Andric 
13680b57cec5SDimitry Andric static AliasResult MergeAliasResults(AliasResult A, AliasResult B) {
13690b57cec5SDimitry Andric   // If the results agree, take it.
13700b57cec5SDimitry Andric   if (A == B)
13710b57cec5SDimitry Andric     return A;
13720b57cec5SDimitry Andric   // A mix of PartialAlias and MustAlias is PartialAlias.
1373fe6060f1SDimitry Andric   if ((A == AliasResult::PartialAlias && B == AliasResult::MustAlias) ||
1374fe6060f1SDimitry Andric       (B == AliasResult::PartialAlias && A == AliasResult::MustAlias))
1375fe6060f1SDimitry Andric     return AliasResult::PartialAlias;
13760b57cec5SDimitry Andric   // Otherwise, we don't know anything.
1377fe6060f1SDimitry Andric   return AliasResult::MayAlias;
13780b57cec5SDimitry Andric }
13790b57cec5SDimitry Andric 
13800b57cec5SDimitry Andric /// Provides a bunch of ad-hoc rules to disambiguate a Select instruction
13810b57cec5SDimitry Andric /// against another.
13820b57cec5SDimitry Andric AliasResult
13830b57cec5SDimitry Andric BasicAAResult::aliasSelect(const SelectInst *SI, LocationSize SISize,
1384fe6060f1SDimitry Andric                            const Value *V2, LocationSize V2Size,
1385e8d8bef9SDimitry Andric                            AAQueryInfo &AAQI) {
13860b57cec5SDimitry Andric   // If the values are Selects with the same condition, we can do a more precise
13870b57cec5SDimitry Andric   // check: just check for aliases between the values on corresponding arms.
13880b57cec5SDimitry Andric   if (const SelectInst *SI2 = dyn_cast<SelectInst>(V2))
1389bdd1243dSDimitry Andric     if (isValueEqualInPotentialCycles(SI->getCondition(), SI2->getCondition(),
1390bdd1243dSDimitry Andric                                       AAQI)) {
1391bdd1243dSDimitry Andric       AliasResult Alias =
1392bdd1243dSDimitry Andric           AAQI.AAR.alias(MemoryLocation(SI->getTrueValue(), SISize),
1393fe6060f1SDimitry Andric                          MemoryLocation(SI2->getTrueValue(), V2Size), AAQI);
1394fe6060f1SDimitry Andric       if (Alias == AliasResult::MayAlias)
1395fe6060f1SDimitry Andric         return AliasResult::MayAlias;
1396bdd1243dSDimitry Andric       AliasResult ThisAlias =
1397bdd1243dSDimitry Andric           AAQI.AAR.alias(MemoryLocation(SI->getFalseValue(), SISize),
1398fe6060f1SDimitry Andric                          MemoryLocation(SI2->getFalseValue(), V2Size), AAQI);
13990b57cec5SDimitry Andric       return MergeAliasResults(ThisAlias, Alias);
14000b57cec5SDimitry Andric     }
14010b57cec5SDimitry Andric 
14020b57cec5SDimitry Andric   // If both arms of the Select node NoAlias or MustAlias V2, then returns
14030b57cec5SDimitry Andric   // NoAlias / MustAlias. Otherwise, returns MayAlias.
1404bdd1243dSDimitry Andric   AliasResult Alias = AAQI.AAR.alias(MemoryLocation(SI->getTrueValue(), SISize),
140581ad6265SDimitry Andric                                      MemoryLocation(V2, V2Size), AAQI);
1406fe6060f1SDimitry Andric   if (Alias == AliasResult::MayAlias)
1407fe6060f1SDimitry Andric     return AliasResult::MayAlias;
14080b57cec5SDimitry Andric 
140981ad6265SDimitry Andric   AliasResult ThisAlias =
1410bdd1243dSDimitry Andric       AAQI.AAR.alias(MemoryLocation(SI->getFalseValue(), SISize),
141181ad6265SDimitry Andric                      MemoryLocation(V2, V2Size), AAQI);
14120b57cec5SDimitry Andric   return MergeAliasResults(ThisAlias, Alias);
14130b57cec5SDimitry Andric }
14140b57cec5SDimitry Andric 
14150b57cec5SDimitry Andric /// Provide a bunch of ad-hoc rules to disambiguate a PHI instruction against
14160b57cec5SDimitry Andric /// another.
14170b57cec5SDimitry Andric AliasResult BasicAAResult::aliasPHI(const PHINode *PN, LocationSize PNSize,
1418fe6060f1SDimitry Andric                                     const Value *V2, LocationSize V2Size,
1419e8d8bef9SDimitry Andric                                     AAQueryInfo &AAQI) {
1420fe6060f1SDimitry Andric   if (!PN->getNumIncomingValues())
1421fe6060f1SDimitry Andric     return AliasResult::NoAlias;
14220b57cec5SDimitry Andric   // If the values are PHIs in the same block, we can do a more precise
14230b57cec5SDimitry Andric   // as well as efficient check: just check for aliases between the values
14240b57cec5SDimitry Andric   // on corresponding edges.
14250b57cec5SDimitry Andric   if (const PHINode *PN2 = dyn_cast<PHINode>(V2))
14260b57cec5SDimitry Andric     if (PN2->getParent() == PN->getParent()) {
1427bdd1243dSDimitry Andric       std::optional<AliasResult> Alias;
14280b57cec5SDimitry Andric       for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
1429bdd1243dSDimitry Andric         AliasResult ThisAlias = AAQI.AAR.alias(
1430fe6060f1SDimitry Andric             MemoryLocation(PN->getIncomingValue(i), PNSize),
1431e8d8bef9SDimitry Andric             MemoryLocation(
1432fe6060f1SDimitry Andric                 PN2->getIncomingValueForBlock(PN->getIncomingBlock(i)), V2Size),
1433e8d8bef9SDimitry Andric             AAQI);
1434e8d8bef9SDimitry Andric         if (Alias)
1435e8d8bef9SDimitry Andric           *Alias = MergeAliasResults(*Alias, ThisAlias);
1436e8d8bef9SDimitry Andric         else
1437e8d8bef9SDimitry Andric           Alias = ThisAlias;
1438fe6060f1SDimitry Andric         if (*Alias == AliasResult::MayAlias)
14390b57cec5SDimitry Andric           break;
14400b57cec5SDimitry Andric       }
1441e8d8bef9SDimitry Andric       return *Alias;
14420b57cec5SDimitry Andric     }
14430b57cec5SDimitry Andric 
14440b57cec5SDimitry Andric   SmallVector<Value *, 4> V1Srcs;
1445e8d8bef9SDimitry Andric   // If a phi operand recurses back to the phi, we can still determine NoAlias
1446e8d8bef9SDimitry Andric   // if we don't alias the underlying objects of the other phi operands, as we
1447e8d8bef9SDimitry Andric   // know that the recursive phi needs to be based on them in some way.
14480b57cec5SDimitry Andric   bool isRecursive = false;
1449979e22ffSDimitry Andric   auto CheckForRecPhi = [&](Value *PV) {
1450979e22ffSDimitry Andric     if (!EnableRecPhiAnalysis)
1451979e22ffSDimitry Andric       return false;
1452e8d8bef9SDimitry Andric     if (getUnderlyingObject(PV) == PN) {
1453979e22ffSDimitry Andric       isRecursive = true;
1454979e22ffSDimitry Andric       return true;
1455979e22ffSDimitry Andric     }
1456979e22ffSDimitry Andric     return false;
1457979e22ffSDimitry Andric   };
1458979e22ffSDimitry Andric 
14590b57cec5SDimitry Andric   SmallPtrSet<Value *, 4> UniqueSrc;
1460fe6060f1SDimitry Andric   Value *OnePhi = nullptr;
14610b57cec5SDimitry Andric   for (Value *PV1 : PN->incoming_values()) {
1462bdd1243dSDimitry Andric     // Skip the phi itself being the incoming value.
1463bdd1243dSDimitry Andric     if (PV1 == PN)
1464bdd1243dSDimitry Andric       continue;
1465bdd1243dSDimitry Andric 
1466fe6060f1SDimitry Andric     if (isa<PHINode>(PV1)) {
1467fe6060f1SDimitry Andric       if (OnePhi && OnePhi != PV1) {
1468fe6060f1SDimitry Andric         // To control potential compile time explosion, we choose to be
1469fe6060f1SDimitry Andric         // conserviate when we have more than one Phi input.  It is important
1470fe6060f1SDimitry Andric         // that we handle the single phi case as that lets us handle LCSSA
1471fe6060f1SDimitry Andric         // phi nodes and (combined with the recursive phi handling) simple
1472fe6060f1SDimitry Andric         // pointer induction variable patterns.
1473fe6060f1SDimitry Andric         return AliasResult::MayAlias;
1474fe6060f1SDimitry Andric       }
1475fe6060f1SDimitry Andric       OnePhi = PV1;
1476fe6060f1SDimitry Andric     }
14770b57cec5SDimitry Andric 
1478979e22ffSDimitry Andric     if (CheckForRecPhi(PV1))
14790b57cec5SDimitry Andric       continue;
14800b57cec5SDimitry Andric 
14810b57cec5SDimitry Andric     if (UniqueSrc.insert(PV1).second)
14820b57cec5SDimitry Andric       V1Srcs.push_back(PV1);
14830b57cec5SDimitry Andric   }
1484fe6060f1SDimitry Andric 
1485fe6060f1SDimitry Andric   if (OnePhi && UniqueSrc.size() > 1)
1486fe6060f1SDimitry Andric     // Out of an abundance of caution, allow only the trivial lcssa and
1487fe6060f1SDimitry Andric     // recursive phi cases.
1488fe6060f1SDimitry Andric     return AliasResult::MayAlias;
14890b57cec5SDimitry Andric 
14900b57cec5SDimitry Andric   // If V1Srcs is empty then that means that the phi has no underlying non-phi
14910b57cec5SDimitry Andric   // value. This should only be possible in blocks unreachable from the entry
14920b57cec5SDimitry Andric   // block, but return MayAlias just in case.
14930b57cec5SDimitry Andric   if (V1Srcs.empty())
1494fe6060f1SDimitry Andric     return AliasResult::MayAlias;
14950b57cec5SDimitry Andric 
1496e8d8bef9SDimitry Andric   // If this PHI node is recursive, indicate that the pointer may be moved
1497e8d8bef9SDimitry Andric   // across iterations. We can only prove NoAlias if different underlying
1498e8d8bef9SDimitry Andric   // objects are involved.
14990b57cec5SDimitry Andric   if (isRecursive)
1500e8d8bef9SDimitry Andric     PNSize = LocationSize::beforeOrAfterPointer();
15010b57cec5SDimitry Andric 
1502e8d8bef9SDimitry Andric   // In the recursive alias queries below, we may compare values from two
1503bdd1243dSDimitry Andric   // different loop iterations.
1504bdd1243dSDimitry Andric   SaveAndRestore SavedMayBeCrossIteration(AAQI.MayBeCrossIteration, true);
1505e8d8bef9SDimitry Andric 
1506bdd1243dSDimitry Andric   AliasResult Alias = AAQI.AAR.alias(MemoryLocation(V1Srcs[0], PNSize),
1507bdd1243dSDimitry Andric                                      MemoryLocation(V2, V2Size), AAQI);
15080b57cec5SDimitry Andric 
15090b57cec5SDimitry Andric   // Early exit if the check of the first PHI source against V2 is MayAlias.
15100b57cec5SDimitry Andric   // Other results are not possible.
1511fe6060f1SDimitry Andric   if (Alias == AliasResult::MayAlias)
1512fe6060f1SDimitry Andric     return AliasResult::MayAlias;
15135ffd83dbSDimitry Andric   // With recursive phis we cannot guarantee that MustAlias/PartialAlias will
15145ffd83dbSDimitry Andric   // remain valid to all elements and needs to conservatively return MayAlias.
1515fe6060f1SDimitry Andric   if (isRecursive && Alias != AliasResult::NoAlias)
1516fe6060f1SDimitry Andric     return AliasResult::MayAlias;
15170b57cec5SDimitry Andric 
15180b57cec5SDimitry Andric   // If all sources of the PHI node NoAlias or MustAlias V2, then returns
15190b57cec5SDimitry Andric   // NoAlias / MustAlias. Otherwise, returns MayAlias.
15200b57cec5SDimitry Andric   for (unsigned i = 1, e = V1Srcs.size(); i != e; ++i) {
15210b57cec5SDimitry Andric     Value *V = V1Srcs[i];
15220b57cec5SDimitry Andric 
1523bdd1243dSDimitry Andric     AliasResult ThisAlias = AAQI.AAR.alias(
1524bdd1243dSDimitry Andric         MemoryLocation(V, PNSize), MemoryLocation(V2, V2Size), AAQI);
15250b57cec5SDimitry Andric     Alias = MergeAliasResults(ThisAlias, Alias);
1526fe6060f1SDimitry Andric     if (Alias == AliasResult::MayAlias)
15270b57cec5SDimitry Andric       break;
15280b57cec5SDimitry Andric   }
15290b57cec5SDimitry Andric 
15300b57cec5SDimitry Andric   return Alias;
15310b57cec5SDimitry Andric }
15320b57cec5SDimitry Andric 
15330b57cec5SDimitry Andric /// Provides a bunch of ad-hoc rules to disambiguate in common cases, such as
15340b57cec5SDimitry Andric /// array references.
15350b57cec5SDimitry Andric AliasResult BasicAAResult::aliasCheck(const Value *V1, LocationSize V1Size,
1536e8d8bef9SDimitry Andric                                       const Value *V2, LocationSize V2Size,
1537bdd1243dSDimitry Andric                                       AAQueryInfo &AAQI,
1538bdd1243dSDimitry Andric                                       const Instruction *CtxI) {
15390b57cec5SDimitry Andric   // If either of the memory references is empty, it doesn't matter what the
15400b57cec5SDimitry Andric   // pointer values are.
15410b57cec5SDimitry Andric   if (V1Size.isZero() || V2Size.isZero())
1542fe6060f1SDimitry Andric     return AliasResult::NoAlias;
15430b57cec5SDimitry Andric 
15440b57cec5SDimitry Andric   // Strip off any casts if they exist.
1545fe6060f1SDimitry Andric   V1 = V1->stripPointerCastsForAliasAnalysis();
1546fe6060f1SDimitry Andric   V2 = V2->stripPointerCastsForAliasAnalysis();
15470b57cec5SDimitry Andric 
15480b57cec5SDimitry Andric   // If V1 or V2 is undef, the result is NoAlias because we can always pick a
15490b57cec5SDimitry Andric   // value for undef that aliases nothing in the program.
15500b57cec5SDimitry Andric   if (isa<UndefValue>(V1) || isa<UndefValue>(V2))
1551fe6060f1SDimitry Andric     return AliasResult::NoAlias;
15520b57cec5SDimitry Andric 
15530b57cec5SDimitry Andric   // Are we checking for alias of the same value?
15540b57cec5SDimitry Andric   // Because we look 'through' phi nodes, we could look at "Value" pointers from
15550b57cec5SDimitry Andric   // different iterations. We must therefore make sure that this is not the
15560b57cec5SDimitry Andric   // case. The function isValueEqualInPotentialCycles ensures that this cannot
15570b57cec5SDimitry Andric   // happen by looking at the visited phi nodes and making sure they cannot
15580b57cec5SDimitry Andric   // reach the value.
1559bdd1243dSDimitry Andric   if (isValueEqualInPotentialCycles(V1, V2, AAQI))
1560fe6060f1SDimitry Andric     return AliasResult::MustAlias;
15610b57cec5SDimitry Andric 
15620b57cec5SDimitry Andric   if (!V1->getType()->isPointerTy() || !V2->getType()->isPointerTy())
1563fe6060f1SDimitry Andric     return AliasResult::NoAlias; // Scalars cannot alias each other
15640b57cec5SDimitry Andric 
15650b57cec5SDimitry Andric   // Figure out what objects these things are pointing to if we can.
1566e8d8bef9SDimitry Andric   const Value *O1 = getUnderlyingObject(V1, MaxLookupSearchDepth);
1567e8d8bef9SDimitry Andric   const Value *O2 = getUnderlyingObject(V2, MaxLookupSearchDepth);
15680b57cec5SDimitry Andric 
15690b57cec5SDimitry Andric   // Null values in the default address space don't point to any object, so they
15700b57cec5SDimitry Andric   // don't alias any other pointer.
15710b57cec5SDimitry Andric   if (const ConstantPointerNull *CPN = dyn_cast<ConstantPointerNull>(O1))
15720b57cec5SDimitry Andric     if (!NullPointerIsDefined(&F, CPN->getType()->getAddressSpace()))
1573fe6060f1SDimitry Andric       return AliasResult::NoAlias;
15740b57cec5SDimitry Andric   if (const ConstantPointerNull *CPN = dyn_cast<ConstantPointerNull>(O2))
15750b57cec5SDimitry Andric     if (!NullPointerIsDefined(&F, CPN->getType()->getAddressSpace()))
1576fe6060f1SDimitry Andric       return AliasResult::NoAlias;
15770b57cec5SDimitry Andric 
15780b57cec5SDimitry Andric   if (O1 != O2) {
15790b57cec5SDimitry Andric     // If V1/V2 point to two different objects, we know that we have no alias.
15800b57cec5SDimitry Andric     if (isIdentifiedObject(O1) && isIdentifiedObject(O2))
1581fe6060f1SDimitry Andric       return AliasResult::NoAlias;
15820b57cec5SDimitry Andric 
15830b57cec5SDimitry Andric     // Function arguments can't alias with things that are known to be
15840b57cec5SDimitry Andric     // unambigously identified at the function level.
15850b57cec5SDimitry Andric     if ((isa<Argument>(O1) && isIdentifiedFunctionLocal(O2)) ||
15860b57cec5SDimitry Andric         (isa<Argument>(O2) && isIdentifiedFunctionLocal(O1)))
1587fe6060f1SDimitry Andric       return AliasResult::NoAlias;
15880b57cec5SDimitry Andric 
15890b57cec5SDimitry Andric     // If one pointer is the result of a call/invoke or load and the other is a
15900b57cec5SDimitry Andric     // non-escaping local object within the same function, then we know the
15910b57cec5SDimitry Andric     // object couldn't escape to a point where the call could return it.
15920b57cec5SDimitry Andric     //
15930b57cec5SDimitry Andric     // Note that if the pointers are in different functions, there are a
15940b57cec5SDimitry Andric     // variety of complications. A call with a nocapture argument may still
15950b57cec5SDimitry Andric     // temporary store the nocapture argument's value in a temporary memory
15960b57cec5SDimitry Andric     // location if that memory location doesn't escape. Or it may pass a
15970b57cec5SDimitry Andric     // nocapture value to other functions as long as they don't capture it.
15987a6dacacSDimitry Andric     if (isEscapeSource(O1) && AAQI.CI->isNotCapturedBefore(
15997a6dacacSDimitry Andric                                   O2, dyn_cast<Instruction>(O1), /*OrAt*/ true))
1600fe6060f1SDimitry Andric       return AliasResult::NoAlias;
16017a6dacacSDimitry Andric     if (isEscapeSource(O2) && AAQI.CI->isNotCapturedBefore(
16027a6dacacSDimitry Andric                                   O1, dyn_cast<Instruction>(O2), /*OrAt*/ true))
1603fe6060f1SDimitry Andric       return AliasResult::NoAlias;
16040b57cec5SDimitry Andric   }
16050b57cec5SDimitry Andric 
16060b57cec5SDimitry Andric   // If the size of one access is larger than the entire object on the other
16070b57cec5SDimitry Andric   // side, then we know such behavior is undefined and can assume no alias.
16080b57cec5SDimitry Andric   bool NullIsValidLocation = NullPointerIsDefined(&F);
16098bcb0991SDimitry Andric   if ((isObjectSmallerThan(
16108bcb0991SDimitry Andric           O2, getMinimalExtentFrom(*V1, V1Size, DL, NullIsValidLocation), DL,
16118bcb0991SDimitry Andric           TLI, NullIsValidLocation)) ||
16128bcb0991SDimitry Andric       (isObjectSmallerThan(
16138bcb0991SDimitry Andric           O1, getMinimalExtentFrom(*V2, V2Size, DL, NullIsValidLocation), DL,
16148bcb0991SDimitry Andric           TLI, NullIsValidLocation)))
1615fe6060f1SDimitry Andric     return AliasResult::NoAlias;
16160b57cec5SDimitry Andric 
16171db9f3b2SDimitry Andric   if (EnableSeparateStorageAnalysis) {
16181db9f3b2SDimitry Andric     for (AssumptionCache::ResultElem &Elem : AC.assumptionsFor(O1)) {
16191db9f3b2SDimitry Andric       if (!Elem || Elem.Index == AssumptionCache::ExprResultIdx)
1620bdd1243dSDimitry Andric         continue;
1621bdd1243dSDimitry Andric 
16221db9f3b2SDimitry Andric       AssumeInst *Assume = cast<AssumeInst>(Elem);
16231db9f3b2SDimitry Andric       OperandBundleUse OBU = Assume->getOperandBundleAt(Elem.Index);
1624bdd1243dSDimitry Andric       if (OBU.getTagName() == "separate_storage") {
1625bdd1243dSDimitry Andric         assert(OBU.Inputs.size() == 2);
1626bdd1243dSDimitry Andric         const Value *Hint1 = OBU.Inputs[0].get();
1627bdd1243dSDimitry Andric         const Value *Hint2 = OBU.Inputs[1].get();
162806c3fb27SDimitry Andric         // This is often a no-op; instcombine rewrites this for us. No-op
162906c3fb27SDimitry Andric         // getUnderlyingObject calls are fast, though.
1630bdd1243dSDimitry Andric         const Value *HintO1 = getUnderlyingObject(Hint1);
1631bdd1243dSDimitry Andric         const Value *HintO2 = getUnderlyingObject(Hint2);
1632bdd1243dSDimitry Andric 
1633b3edf446SDimitry Andric         DominatorTree *DT = getDT(AAQI);
16341db9f3b2SDimitry Andric         auto ValidAssumeForPtrContext = [&](const Value *Ptr) {
16351db9f3b2SDimitry Andric           if (const Instruction *PtrI = dyn_cast<Instruction>(Ptr)) {
16361db9f3b2SDimitry Andric             return isValidAssumeForContext(Assume, PtrI, DT,
16371db9f3b2SDimitry Andric                                            /* AllowEphemerals */ true);
16381db9f3b2SDimitry Andric           }
16391db9f3b2SDimitry Andric           if (const Argument *PtrA = dyn_cast<Argument>(Ptr)) {
16401db9f3b2SDimitry Andric             const Instruction *FirstI =
16411db9f3b2SDimitry Andric                 &*PtrA->getParent()->getEntryBlock().begin();
16421db9f3b2SDimitry Andric             return isValidAssumeForContext(Assume, FirstI, DT,
16431db9f3b2SDimitry Andric                                            /* AllowEphemerals */ true);
16441db9f3b2SDimitry Andric           }
16451db9f3b2SDimitry Andric           return false;
16461db9f3b2SDimitry Andric         };
16471db9f3b2SDimitry Andric 
16481db9f3b2SDimitry Andric         if ((O1 == HintO1 && O2 == HintO2) || (O1 == HintO2 && O2 == HintO1)) {
16491db9f3b2SDimitry Andric           // Note that we go back to V1 and V2 for the
16501db9f3b2SDimitry Andric           // ValidAssumeForPtrContext checks; they're dominated by O1 and O2,
16511db9f3b2SDimitry Andric           // so strictly more assumptions are valid for them.
16521db9f3b2SDimitry Andric           if ((CtxI && isValidAssumeForContext(Assume, CtxI, DT,
16531db9f3b2SDimitry Andric                                                /* AllowEphemerals */ true)) ||
16541db9f3b2SDimitry Andric               ValidAssumeForPtrContext(V1) || ValidAssumeForPtrContext(V2)) {
1655bdd1243dSDimitry Andric             return AliasResult::NoAlias;
1656bdd1243dSDimitry Andric           }
1657bdd1243dSDimitry Andric         }
1658bdd1243dSDimitry Andric       }
1659bdd1243dSDimitry Andric     }
16601db9f3b2SDimitry Andric   }
1661bdd1243dSDimitry Andric 
1662e8d8bef9SDimitry Andric   // If one the accesses may be before the accessed pointer, canonicalize this
1663e8d8bef9SDimitry Andric   // by using unknown after-pointer sizes for both accesses. This is
1664e8d8bef9SDimitry Andric   // equivalent, because regardless of which pointer is lower, one of them
1665e8d8bef9SDimitry Andric   // will always came after the other, as long as the underlying objects aren't
1666e8d8bef9SDimitry Andric   // disjoint. We do this so that the rest of BasicAA does not have to deal
1667e8d8bef9SDimitry Andric   // with accesses before the base pointer, and to improve cache utilization by
1668e8d8bef9SDimitry Andric   // merging equivalent states.
1669e8d8bef9SDimitry Andric   if (V1Size.mayBeBeforePointer() || V2Size.mayBeBeforePointer()) {
1670e8d8bef9SDimitry Andric     V1Size = LocationSize::afterPointer();
1671e8d8bef9SDimitry Andric     V2Size = LocationSize::afterPointer();
1672e8d8bef9SDimitry Andric   }
1673e8d8bef9SDimitry Andric 
1674fe6060f1SDimitry Andric   // FIXME: If this depth limit is hit, then we may cache sub-optimal results
1675fe6060f1SDimitry Andric   // for recursive queries. For this reason, this limit is chosen to be large
1676fe6060f1SDimitry Andric   // enough to be very rarely hit, while still being small enough to avoid
1677fe6060f1SDimitry Andric   // stack overflows.
1678fe6060f1SDimitry Andric   if (AAQI.Depth >= 512)
1679fe6060f1SDimitry Andric     return AliasResult::MayAlias;
1680fe6060f1SDimitry Andric 
16810b57cec5SDimitry Andric   // Check the cache before climbing up use-def chains. This also terminates
1682bdd1243dSDimitry Andric   // otherwise infinitely recursive queries. Include MayBeCrossIteration in the
1683bdd1243dSDimitry Andric   // cache key, because some cases where MayBeCrossIteration==false returns
1684bdd1243dSDimitry Andric   // MustAlias or NoAlias may become MayAlias under MayBeCrossIteration==true.
1685bdd1243dSDimitry Andric   AAQueryInfo::LocPair Locs({V1, V1Size, AAQI.MayBeCrossIteration},
1686bdd1243dSDimitry Andric                             {V2, V2Size, AAQI.MayBeCrossIteration});
1687fe6060f1SDimitry Andric   const bool Swapped = V1 > V2;
1688fe6060f1SDimitry Andric   if (Swapped)
16890b57cec5SDimitry Andric     std::swap(Locs.first, Locs.second);
1690e8d8bef9SDimitry Andric   const auto &Pair = AAQI.AliasCache.try_emplace(
1691fe6060f1SDimitry Andric       Locs, AAQueryInfo::CacheEntry{AliasResult::NoAlias, 0});
1692e8d8bef9SDimitry Andric   if (!Pair.second) {
1693e8d8bef9SDimitry Andric     auto &Entry = Pair.first->second;
1694e8d8bef9SDimitry Andric     if (!Entry.isDefinitive()) {
1695*36b606aeSDimitry Andric       // Remember that we used an assumption. This may either be a direct use
1696*36b606aeSDimitry Andric       // of an assumption, or a use of an entry that may itself be based on an
1697*36b606aeSDimitry Andric       // assumption.
1698e8d8bef9SDimitry Andric       ++AAQI.NumAssumptionUses;
1699*36b606aeSDimitry Andric       if (Entry.isAssumption())
1700*36b606aeSDimitry Andric         ++Entry.NumAssumptionUses;
17010b57cec5SDimitry Andric     }
1702fe6060f1SDimitry Andric     // Cache contains sorted {V1,V2} pairs but we should return original order.
1703fe6060f1SDimitry Andric     auto Result = Entry.Result;
1704fe6060f1SDimitry Andric     Result.swap(Swapped);
1705fe6060f1SDimitry Andric     return Result;
1706e8d8bef9SDimitry Andric   }
1707e8d8bef9SDimitry Andric 
1708e8d8bef9SDimitry Andric   int OrigNumAssumptionUses = AAQI.NumAssumptionUses;
1709e8d8bef9SDimitry Andric   unsigned OrigNumAssumptionBasedResults = AAQI.AssumptionBasedResults.size();
1710fe6060f1SDimitry Andric   AliasResult Result =
1711fe6060f1SDimitry Andric       aliasCheckRecursive(V1, V1Size, V2, V2Size, AAQI, O1, O2);
1712e8d8bef9SDimitry Andric 
1713e8d8bef9SDimitry Andric   auto It = AAQI.AliasCache.find(Locs);
1714e8d8bef9SDimitry Andric   assert(It != AAQI.AliasCache.end() && "Must be in cache");
1715e8d8bef9SDimitry Andric   auto &Entry = It->second;
1716e8d8bef9SDimitry Andric 
1717e8d8bef9SDimitry Andric   // Check whether a NoAlias assumption has been used, but disproven.
1718fe6060f1SDimitry Andric   bool AssumptionDisproven =
1719fe6060f1SDimitry Andric       Entry.NumAssumptionUses > 0 && Result != AliasResult::NoAlias;
1720e8d8bef9SDimitry Andric   if (AssumptionDisproven)
1721fe6060f1SDimitry Andric     Result = AliasResult::MayAlias;
1722e8d8bef9SDimitry Andric 
1723e8d8bef9SDimitry Andric   // This is a definitive result now, when considered as a root query.
1724e8d8bef9SDimitry Andric   AAQI.NumAssumptionUses -= Entry.NumAssumptionUses;
1725e8d8bef9SDimitry Andric   Entry.Result = Result;
1726fe6060f1SDimitry Andric   // Cache contains sorted {V1,V2} pairs.
1727fe6060f1SDimitry Andric   Entry.Result.swap(Swapped);
1728e8d8bef9SDimitry Andric 
1729e8d8bef9SDimitry Andric   // If the assumption has been disproven, remove any results that may have
1730e8d8bef9SDimitry Andric   // been based on this assumption. Do this after the Entry updates above to
1731e8d8bef9SDimitry Andric   // avoid iterator invalidation.
1732e8d8bef9SDimitry Andric   if (AssumptionDisproven)
1733e8d8bef9SDimitry Andric     while (AAQI.AssumptionBasedResults.size() > OrigNumAssumptionBasedResults)
1734e8d8bef9SDimitry Andric       AAQI.AliasCache.erase(AAQI.AssumptionBasedResults.pop_back_val());
1735e8d8bef9SDimitry Andric 
1736e8d8bef9SDimitry Andric   // The result may still be based on assumptions higher up in the chain.
1737e8d8bef9SDimitry Andric   // Remember it, so it can be purged from the cache later.
1738fe6060f1SDimitry Andric   if (OrigNumAssumptionUses != AAQI.NumAssumptionUses &&
1739*36b606aeSDimitry Andric       Result != AliasResult::MayAlias) {
1740e8d8bef9SDimitry Andric     AAQI.AssumptionBasedResults.push_back(Locs);
1741*36b606aeSDimitry Andric     Entry.NumAssumptionUses = AAQueryInfo::CacheEntry::AssumptionBased;
1742*36b606aeSDimitry Andric   } else {
1743*36b606aeSDimitry Andric     Entry.NumAssumptionUses = AAQueryInfo::CacheEntry::Definitive;
1744*36b606aeSDimitry Andric   }
1745*36b606aeSDimitry Andric 
1746*36b606aeSDimitry Andric   // Depth is incremented before this function is called, so Depth==1 indicates
1747*36b606aeSDimitry Andric   // a root query.
1748*36b606aeSDimitry Andric   if (AAQI.Depth == 1) {
1749*36b606aeSDimitry Andric     // Any remaining assumption based results must be based on proven
1750*36b606aeSDimitry Andric     // assumptions, so convert them to definitive results.
1751*36b606aeSDimitry Andric     for (const auto &Loc : AAQI.AssumptionBasedResults) {
1752*36b606aeSDimitry Andric       auto It = AAQI.AliasCache.find(Loc);
1753*36b606aeSDimitry Andric       if (It != AAQI.AliasCache.end())
1754*36b606aeSDimitry Andric         It->second.NumAssumptionUses = AAQueryInfo::CacheEntry::Definitive;
1755*36b606aeSDimitry Andric     }
1756*36b606aeSDimitry Andric     AAQI.AssumptionBasedResults.clear();
1757*36b606aeSDimitry Andric     AAQI.NumAssumptionUses = 0;
1758*36b606aeSDimitry Andric   }
1759e8d8bef9SDimitry Andric   return Result;
1760e8d8bef9SDimitry Andric }
1761e8d8bef9SDimitry Andric 
1762e8d8bef9SDimitry Andric AliasResult BasicAAResult::aliasCheckRecursive(
1763fe6060f1SDimitry Andric     const Value *V1, LocationSize V1Size,
1764fe6060f1SDimitry Andric     const Value *V2, LocationSize V2Size,
1765e8d8bef9SDimitry Andric     AAQueryInfo &AAQI, const Value *O1, const Value *O2) {
17660b57cec5SDimitry Andric   if (const GEPOperator *GV1 = dyn_cast<GEPOperator>(V1)) {
1767fe6060f1SDimitry Andric     AliasResult Result = aliasGEP(GV1, V1Size, V2, V2Size, O1, O2, AAQI);
1768fe6060f1SDimitry Andric     if (Result != AliasResult::MayAlias)
1769e8d8bef9SDimitry Andric       return Result;
1770e8d8bef9SDimitry Andric   } else if (const GEPOperator *GV2 = dyn_cast<GEPOperator>(V2)) {
1771fe6060f1SDimitry Andric     AliasResult Result = aliasGEP(GV2, V2Size, V1, V1Size, O2, O1, AAQI);
17720eae32dcSDimitry Andric     Result.swap();
1773fe6060f1SDimitry Andric     if (Result != AliasResult::MayAlias)
17740b57cec5SDimitry Andric       return Result;
17750b57cec5SDimitry Andric   }
17760b57cec5SDimitry Andric 
17770b57cec5SDimitry Andric   if (const PHINode *PN = dyn_cast<PHINode>(V1)) {
1778fe6060f1SDimitry Andric     AliasResult Result = aliasPHI(PN, V1Size, V2, V2Size, AAQI);
1779fe6060f1SDimitry Andric     if (Result != AliasResult::MayAlias)
1780e8d8bef9SDimitry Andric       return Result;
1781e8d8bef9SDimitry Andric   } else if (const PHINode *PN = dyn_cast<PHINode>(V2)) {
1782fe6060f1SDimitry Andric     AliasResult Result = aliasPHI(PN, V2Size, V1, V1Size, AAQI);
17830eae32dcSDimitry Andric     Result.swap();
1784fe6060f1SDimitry Andric     if (Result != AliasResult::MayAlias)
1785e8d8bef9SDimitry Andric       return Result;
17860b57cec5SDimitry Andric   }
17870b57cec5SDimitry Andric 
17880b57cec5SDimitry Andric   if (const SelectInst *S1 = dyn_cast<SelectInst>(V1)) {
1789fe6060f1SDimitry Andric     AliasResult Result = aliasSelect(S1, V1Size, V2, V2Size, AAQI);
1790fe6060f1SDimitry Andric     if (Result != AliasResult::MayAlias)
1791e8d8bef9SDimitry Andric       return Result;
1792e8d8bef9SDimitry Andric   } else if (const SelectInst *S2 = dyn_cast<SelectInst>(V2)) {
1793fe6060f1SDimitry Andric     AliasResult Result = aliasSelect(S2, V2Size, V1, V1Size, AAQI);
17940eae32dcSDimitry Andric     Result.swap();
1795fe6060f1SDimitry Andric     if (Result != AliasResult::MayAlias)
1796e8d8bef9SDimitry Andric       return Result;
17970b57cec5SDimitry Andric   }
17980b57cec5SDimitry Andric 
17990b57cec5SDimitry Andric   // If both pointers are pointing into the same object and one of them
18000b57cec5SDimitry Andric   // accesses the entire object, then the accesses must overlap in some way.
1801e8d8bef9SDimitry Andric   if (O1 == O2) {
1802e8d8bef9SDimitry Andric     bool NullIsValidLocation = NullPointerIsDefined(&F);
18030b57cec5SDimitry Andric     if (V1Size.isPrecise() && V2Size.isPrecise() &&
18040b57cec5SDimitry Andric         (isObjectSize(O1, V1Size.getValue(), DL, TLI, NullIsValidLocation) ||
1805e8d8bef9SDimitry Andric          isObjectSize(O2, V2Size.getValue(), DL, TLI, NullIsValidLocation)))
1806fe6060f1SDimitry Andric       return AliasResult::PartialAlias;
18070b57cec5SDimitry Andric   }
18080b57cec5SDimitry Andric 
1809fe6060f1SDimitry Andric   return AliasResult::MayAlias;
18100b57cec5SDimitry Andric }
18110b57cec5SDimitry Andric 
18120b57cec5SDimitry Andric /// Check whether two Values can be considered equivalent.
18130b57cec5SDimitry Andric ///
1814bdd1243dSDimitry Andric /// If the values may come from different cycle iterations, this will also
1815bdd1243dSDimitry Andric /// check that the values are not part of cycle. We have to do this because we
1816bdd1243dSDimitry Andric /// are looking through phi nodes, that is we say
18170b57cec5SDimitry Andric /// noalias(V, phi(VA, VB)) if noalias(V, VA) and noalias(V, VB).
18180b57cec5SDimitry Andric bool BasicAAResult::isValueEqualInPotentialCycles(const Value *V,
1819bdd1243dSDimitry Andric                                                   const Value *V2,
1820bdd1243dSDimitry Andric                                                   const AAQueryInfo &AAQI) {
18210b57cec5SDimitry Andric   if (V != V2)
18220b57cec5SDimitry Andric     return false;
18230b57cec5SDimitry Andric 
1824bdd1243dSDimitry Andric   if (!AAQI.MayBeCrossIteration)
1825bdd1243dSDimitry Andric     return true;
1826bdd1243dSDimitry Andric 
1827bdd1243dSDimitry Andric   // Non-instructions and instructions in the entry block cannot be part of
1828bdd1243dSDimitry Andric   // a loop.
18290b57cec5SDimitry Andric   const Instruction *Inst = dyn_cast<Instruction>(V);
1830bdd1243dSDimitry Andric   if (!Inst || Inst->getParent()->isEntryBlock())
18310b57cec5SDimitry Andric     return true;
18320b57cec5SDimitry Andric 
1833b3edf446SDimitry Andric   return isNotInCycle(Inst, getDT(AAQI), /*LI*/ nullptr);
18340b57cec5SDimitry Andric }
18350b57cec5SDimitry Andric 
18360b57cec5SDimitry Andric /// Computes the symbolic difference between two de-composed GEPs.
1837349cc55cSDimitry Andric void BasicAAResult::subtractDecomposedGEPs(DecomposedGEP &DestGEP,
1838bdd1243dSDimitry Andric                                            const DecomposedGEP &SrcGEP,
1839bdd1243dSDimitry Andric                                            const AAQueryInfo &AAQI) {
1840349cc55cSDimitry Andric   DestGEP.Offset -= SrcGEP.Offset;
1841349cc55cSDimitry Andric   for (const VariableGEPIndex &Src : SrcGEP.VarIndices) {
18420b57cec5SDimitry Andric     // Find V in Dest.  This is N^2, but pointer indices almost never have more
18430b57cec5SDimitry Andric     // than a few variable indexes.
1844349cc55cSDimitry Andric     bool Found = false;
1845349cc55cSDimitry Andric     for (auto I : enumerate(DestGEP.VarIndices)) {
1846349cc55cSDimitry Andric       VariableGEPIndex &Dest = I.value();
18470fca6ea1SDimitry Andric       if ((!isValueEqualInPotentialCycles(Dest.Val.V, Src.Val.V, AAQI) &&
18480fca6ea1SDimitry Andric            !areBothVScale(Dest.Val.V, Src.Val.V)) ||
1849349cc55cSDimitry Andric           !Dest.Val.hasSameCastsAs(Src.Val))
18500b57cec5SDimitry Andric         continue;
18510b57cec5SDimitry Andric 
185206c3fb27SDimitry Andric       // Normalize IsNegated if we're going to lose the NSW flag anyway.
185306c3fb27SDimitry Andric       if (Dest.IsNegated) {
185406c3fb27SDimitry Andric         Dest.Scale = -Dest.Scale;
185506c3fb27SDimitry Andric         Dest.IsNegated = false;
185606c3fb27SDimitry Andric         Dest.IsNSW = false;
185706c3fb27SDimitry Andric       }
185806c3fb27SDimitry Andric 
18590b57cec5SDimitry Andric       // If we found it, subtract off Scale V's from the entry in Dest.  If it
18600b57cec5SDimitry Andric       // goes to zero, remove the entry.
1861349cc55cSDimitry Andric       if (Dest.Scale != Src.Scale) {
1862349cc55cSDimitry Andric         Dest.Scale -= Src.Scale;
1863349cc55cSDimitry Andric         Dest.IsNSW = false;
1864349cc55cSDimitry Andric       } else {
1865349cc55cSDimitry Andric         DestGEP.VarIndices.erase(DestGEP.VarIndices.begin() + I.index());
1866349cc55cSDimitry Andric       }
1867349cc55cSDimitry Andric       Found = true;
18680b57cec5SDimitry Andric       break;
18690b57cec5SDimitry Andric     }
18700b57cec5SDimitry Andric 
18710b57cec5SDimitry Andric     // If we didn't consume this entry, add it to the end of the Dest list.
1872349cc55cSDimitry Andric     if (!Found) {
187306c3fb27SDimitry Andric       VariableGEPIndex Entry = {Src.Val, Src.Scale, Src.CxtI, Src.IsNSW,
187406c3fb27SDimitry Andric                                 /* IsNegated */ true};
1875349cc55cSDimitry Andric       DestGEP.VarIndices.push_back(Entry);
18760b57cec5SDimitry Andric     }
18770b57cec5SDimitry Andric   }
18780b57cec5SDimitry Andric }
18790b57cec5SDimitry Andric 
1880bdd1243dSDimitry Andric bool BasicAAResult::constantOffsetHeuristic(const DecomposedGEP &GEP,
1881bdd1243dSDimitry Andric                                             LocationSize MaybeV1Size,
1882bdd1243dSDimitry Andric                                             LocationSize MaybeV2Size,
1883bdd1243dSDimitry Andric                                             AssumptionCache *AC,
1884bdd1243dSDimitry Andric                                             DominatorTree *DT,
1885bdd1243dSDimitry Andric                                             const AAQueryInfo &AAQI) {
1886349cc55cSDimitry Andric   if (GEP.VarIndices.size() != 2 || !MaybeV1Size.hasValue() ||
1887e8d8bef9SDimitry Andric       !MaybeV2Size.hasValue())
18880b57cec5SDimitry Andric     return false;
18890b57cec5SDimitry Andric 
18900b57cec5SDimitry Andric   const uint64_t V1Size = MaybeV1Size.getValue();
18910b57cec5SDimitry Andric   const uint64_t V2Size = MaybeV2Size.getValue();
18920b57cec5SDimitry Andric 
1893349cc55cSDimitry Andric   const VariableGEPIndex &Var0 = GEP.VarIndices[0], &Var1 = GEP.VarIndices[1];
18940b57cec5SDimitry Andric 
1895349cc55cSDimitry Andric   if (Var0.Val.TruncBits != 0 || !Var0.Val.hasSameCastsAs(Var1.Val) ||
189606c3fb27SDimitry Andric       !Var0.hasNegatedScaleOf(Var1) ||
1897349cc55cSDimitry Andric       Var0.Val.V->getType() != Var1.Val.V->getType())
18980b57cec5SDimitry Andric     return false;
18990b57cec5SDimitry Andric 
19000b57cec5SDimitry Andric   // We'll strip off the Extensions of Var0 and Var1 and do another round
19010b57cec5SDimitry Andric   // of GetLinearExpression decomposition. In the example above, if Var0
19020b57cec5SDimitry Andric   // is zext(%x + 1) we should get V1 == %x and V1Offset == 1.
19030b57cec5SDimitry Andric 
1904fe6060f1SDimitry Andric   LinearExpression E0 =
1905349cc55cSDimitry Andric       GetLinearExpression(CastedValue(Var0.Val.V), DL, 0, AC, DT);
1906fe6060f1SDimitry Andric   LinearExpression E1 =
1907349cc55cSDimitry Andric       GetLinearExpression(CastedValue(Var1.Val.V), DL, 0, AC, DT);
1908349cc55cSDimitry Andric   if (E0.Scale != E1.Scale || !E0.Val.hasSameCastsAs(E1.Val) ||
1909bdd1243dSDimitry Andric       !isValueEqualInPotentialCycles(E0.Val.V, E1.Val.V, AAQI))
19100b57cec5SDimitry Andric     return false;
19110b57cec5SDimitry Andric 
19120b57cec5SDimitry Andric   // We have a hit - Var0 and Var1 only differ by a constant offset!
19130b57cec5SDimitry Andric 
19140b57cec5SDimitry Andric   // If we've been sext'ed then zext'd the maximum difference between Var0 and
19150b57cec5SDimitry Andric   // Var1 is possible to calculate, but we're just interested in the absolute
19160b57cec5SDimitry Andric   // minimum difference between the two. The minimum distance may occur due to
19170b57cec5SDimitry Andric   // wrapping; consider "add i3 %i, 5": if %i == 7 then 7 + 5 mod 8 == 4, and so
19180b57cec5SDimitry Andric   // the minimum distance between %i and %i + 5 is 3.
1919fe6060f1SDimitry Andric   APInt MinDiff = E0.Offset - E1.Offset, Wrapped = -MinDiff;
19200b57cec5SDimitry Andric   MinDiff = APIntOps::umin(MinDiff, Wrapped);
19210b57cec5SDimitry Andric   APInt MinDiffBytes =
19220b57cec5SDimitry Andric     MinDiff.zextOrTrunc(Var0.Scale.getBitWidth()) * Var0.Scale.abs();
19230b57cec5SDimitry Andric 
19240b57cec5SDimitry Andric   // We can't definitely say whether GEP1 is before or after V2 due to wrapping
19250b57cec5SDimitry Andric   // arithmetic (i.e. for some values of GEP1 and V2 GEP1 < V2, and for other
19260b57cec5SDimitry Andric   // values GEP1 > V2). We'll therefore only declare NoAlias if both V1Size and
19270b57cec5SDimitry Andric   // V2Size can fit in the MinDiffBytes gap.
1928349cc55cSDimitry Andric   return MinDiffBytes.uge(V1Size + GEP.Offset.abs()) &&
1929349cc55cSDimitry Andric          MinDiffBytes.uge(V2Size + GEP.Offset.abs());
19300b57cec5SDimitry Andric }
19310b57cec5SDimitry Andric 
19320b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
19330b57cec5SDimitry Andric // BasicAliasAnalysis Pass
19340b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
19350b57cec5SDimitry Andric 
19360b57cec5SDimitry Andric AnalysisKey BasicAA::Key;
19370b57cec5SDimitry Andric 
19380b57cec5SDimitry Andric BasicAAResult BasicAA::run(Function &F, FunctionAnalysisManager &AM) {
1939e8d8bef9SDimitry Andric   auto &TLI = AM.getResult<TargetLibraryAnalysis>(F);
1940e8d8bef9SDimitry Andric   auto &AC = AM.getResult<AssumptionAnalysis>(F);
1941e8d8bef9SDimitry Andric   auto *DT = &AM.getResult<DominatorTreeAnalysis>(F);
19420fca6ea1SDimitry Andric   return BasicAAResult(F.getDataLayout(), F, TLI, AC, DT);
19430b57cec5SDimitry Andric }
19440b57cec5SDimitry Andric 
19450b57cec5SDimitry Andric BasicAAWrapperPass::BasicAAWrapperPass() : FunctionPass(ID) {
19460b57cec5SDimitry Andric   initializeBasicAAWrapperPassPass(*PassRegistry::getPassRegistry());
19470b57cec5SDimitry Andric }
19480b57cec5SDimitry Andric 
19490b57cec5SDimitry Andric char BasicAAWrapperPass::ID = 0;
19500b57cec5SDimitry Andric 
19510b57cec5SDimitry Andric void BasicAAWrapperPass::anchor() {}
19520b57cec5SDimitry Andric 
19535ffd83dbSDimitry Andric INITIALIZE_PASS_BEGIN(BasicAAWrapperPass, "basic-aa",
1954b32fb2a4SDimitry Andric                       "Basic Alias Analysis (stateless AA impl)", true, true)
19550b57cec5SDimitry Andric INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
19560b57cec5SDimitry Andric INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
19570b57cec5SDimitry Andric INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
19585ffd83dbSDimitry Andric INITIALIZE_PASS_END(BasicAAWrapperPass, "basic-aa",
1959b32fb2a4SDimitry Andric                     "Basic Alias Analysis (stateless AA impl)", true, true)
19600b57cec5SDimitry Andric 
19610b57cec5SDimitry Andric FunctionPass *llvm::createBasicAAWrapperPass() {
19620b57cec5SDimitry Andric   return new BasicAAWrapperPass();
19630b57cec5SDimitry Andric }
19640b57cec5SDimitry Andric 
19650b57cec5SDimitry Andric bool BasicAAWrapperPass::runOnFunction(Function &F) {
19660b57cec5SDimitry Andric   auto &ACT = getAnalysis<AssumptionCacheTracker>();
19670b57cec5SDimitry Andric   auto &TLIWP = getAnalysis<TargetLibraryInfoWrapperPass>();
19680b57cec5SDimitry Andric   auto &DTWP = getAnalysis<DominatorTreeWrapperPass>();
19690b57cec5SDimitry Andric 
19700fca6ea1SDimitry Andric   Result.reset(new BasicAAResult(F.getDataLayout(), F,
19718bcb0991SDimitry Andric                                  TLIWP.getTLI(F), ACT.getAssumptionCache(F),
1972bdd1243dSDimitry Andric                                  &DTWP.getDomTree()));
19730b57cec5SDimitry Andric 
19740b57cec5SDimitry Andric   return false;
19750b57cec5SDimitry Andric }
19760b57cec5SDimitry Andric 
19770b57cec5SDimitry Andric void BasicAAWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
19780b57cec5SDimitry Andric   AU.setPreservesAll();
1979e8d8bef9SDimitry Andric   AU.addRequiredTransitive<AssumptionCacheTracker>();
1980e8d8bef9SDimitry Andric   AU.addRequiredTransitive<DominatorTreeWrapperPass>();
1981e8d8bef9SDimitry Andric   AU.addRequiredTransitive<TargetLibraryInfoWrapperPass>();
19820b57cec5SDimitry Andric }
1983