//===- Loads.cpp - Local load analysis ------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines simple local analyses for load instructions.
//
//===----------------------------------------------------------------------===//
static bool isAligned(const Value *Base, const APInt &Offset, Align Alignment,
                      const DataLayout &DL) {
  Align BA = Base->getPointerAlignment(DL);
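  // Editor's sketch of the elided remainder, assuming the usual upstream
  // definition: the access is aligned if the base's known alignment covers
  // the requested alignment and the accumulated offset preserves it.
  return BA >= Alignment && Offset.isAligned(BA);
}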
  assert(V->getType()->isPointerTy() && "Base must be pointer");

  if (MaxDepth-- == 0)
    return false;

  if (const GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
    const Value *Base = GEP->getPointerOperand();

    APInt Offset(DL.getIndexTypeSizeInBits(GEP->getType()), 0);
    if (!GEP->accumulateConstantOffset(DL, Offset) || Offset.isNegative() ||
    // If the base pointer is dereferenceable for Offset+Size bytes, then the
    // GEP (== Base + Offset) is dereferenceable for Size bytes.  If the base
    // pointer is aligned to Align bytes, and the Offset is divisible by
    // Align, then the GEP (== Base + Offset == k_0 * Align + k_1 * Align) is
    // also aligned to Align bytes.
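    // Worked example (illustrative, not from the original file): with
    //   %gep = getelementptr inbounds i8, ptr %base, i64 16
    // where %base is dereferenceable(32) and align 16, a 16-byte access at
    // %gep is safe: Offset (16) + Size (16) <= 32, and the offset is a
    // multiple of the base alignment.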
    return isDereferenceableAndAlignedPointer(
        Base, Alignment, Offset + Size.sextOrTrunc(Offset.getBitWidth()), DL,

  // bitcast instructions are no-ops as far as dereferenceability is concerned.
  if (const BitCastOperator *BC = dyn_cast<BitCastOperator>(V)) {
    if (BC->getSrcTy()->isPointerTy())
      return isDereferenceableAndAlignedPointer(
          BC->getOperand(0), Alignment, Size, DL, CtxI, AC, DT, TLI,

  if (const SelectInst *Sel = dyn_cast<SelectInst>(V)) {
    return isDereferenceableAndAlignedPointer(Sel->getTrueValue(), Alignment,
           isDereferenceableAndAlignedPointer(Sel->getFalseValue(), Alignment,

                        V->getPointerDereferenceableBytes(DL, CheckForNonNull,

      // As we recursed through GEPs to get here, we've incrementally checked
      // that each step advanced by a multiple of the alignment. If our base is
      // properly aligned, then the original offset accessed must also be.
      APInt Offset(DL.getTypeStoreSizeInBits(V->getType()), 0);

  // that we still need to prove the result non-null at point of use.
  // NOTE: We can only use the object size as a base fact as we a) need to
  // prove alignment too, and b) don't want the compile time impact of a
  // separate recursive walk.
        !V->canBeFreed()) {
      // our base is properly aligned, then the original offset accessed
      // must also be.
      APInt Offset(DL.getTypeStoreSizeInBits(V->getType()), 0);

    return isDereferenceableAndAlignedPointer(RelocateInst->getDerivedPtr(),

    return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Alignment,
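// Editor's sketch (not part of Loads.cpp): the recursive worker above backs
// the public entry point declared in llvm/Analysis/Loads.h, which a client
// pass might call as below. The helper name canSpeculateLoad is hypothetical.
#include "llvm/Analysis/Loads.h"
#include "llvm/IR/Instructions.h"
using namespace llvm;

static bool canSpeculateLoad(LoadInst *LI, AssumptionCache *AC,
                             const DominatorTree *DT) {
  // Proves both dereferenceability and alignment of the loaded address at
  // the load's program point, so hoisting LI cannot introduce a fault.
  return isDereferenceableAndAlignedPointer(LI->getPointerOperand(),
                                            LI->getType(), LI->getAlign(),
                                            LI->getDataLayout(), LI, AC, DT);
}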
  // a query of whether [Base, V] is dereferenceable and V is aligned (since
  // that's what the implementation happened to do).
  if (!Ty->isSized() || Ty->isScalableTy())
    return false;
  APInt AccessSize(DL.getPointerTypeSizeInBits(V->getType()),
  if (isa<BinaryOperator>(A) || isa<CastInst>(A) || isa<PHINode>(A) ||
      isa<GetElementPtrInst>(A))
    if (const Instruction *BI = dyn_cast<Instruction>(B))
      if (cast<Instruction>(A)->isIdenticalToWhenDefined(BI))
        return true;
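  // For example (illustrative): two distinct `getelementptr` instructions
  // computing &A[i] from identical operands count as equivalent addresses
  // here; the function is only used when one address use dominates the
  // other, so both either produce the same value or an undefined one.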
  auto &DL = LI->getDataLayout();
  Value *Ptr = LI->getPointerOperand();

  APInt EltSize(DL.getIndexTypeSizeInBits(Ptr->getType()),
                DL.getTypeStoreSize(LI->getType()).getFixedValue());
  const Align Alignment = LI->getAlign();

  Instruction *HeaderFirstNonPHI = L->getHeader()->getFirstNonPHI();

  // If given a uniform (i.e. non-varying) address, see if we can prove the
  // access is safe within the loop w/o needing predication.
  if (L->isLoopInvariant(Ptr))
    return isDereferenceableAndAlignedPointer(Ptr, Alignment, EltSize, DL,
                                              HeaderFirstNonPHI, AC, &DT);

  auto *AddRec = dyn_cast<SCEVAddRecExpr>(SE.getSCEV(Ptr));
  if (!AddRec || AddRec->getLoop() != L || !AddRec->isAffine())
    return false;

  auto *Step = dyn_cast<SCEVConstant>(AddRec->getStepRecurrence(SE));
  // We should be computing AccessSize as (TC - 1) * Step + EltSize.
  if (EltSize.sgt(Step->getAPInt()))
    return false;

  APInt AccessSize = TC * Step->getAPInt();
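  // Worked example (illustrative): for a loop of i32 loads with TC = 100 and
  // Step = 4 bytes, AccessSize = 100 * 4 = 400 bytes; the exact bound
  // (TC - 1) * Step + EltSize = 99 * 4 + 4 is also 400. When Step exceeds
  // EltSize (a strided access), TC * Step over-approximates safely, which is
  // why the EltSize <= Step guard above is required.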
  assert(SE.isLoopInvariant(AddRec->getStart(), L) &&
         "implied by addrec definition");

  Value *Base = nullptr;
  if (auto *StartS = dyn_cast<SCEVUnknown>(AddRec->getStart())) {
    Base = StartS->getValue();
  } else if (auto *StartS = dyn_cast<SCEVAddExpr>(AddRec->getStart())) {
    // Handle (NewBase + offset) as start value.
    const auto *Offset = dyn_cast<SCEVConstant>(StartS->getOperand(0));
    const auto *NewBase = dyn_cast<SCEVUnknown>(StartS->getOperand(1));
    if (StartS->getNumOperands() == 2 && Offset && NewBase) {
      // GEP offsets are treated as signed, so we can end up here with a
      // negative offset, e.g. if the access being checked is:
      //   %ptr = getelementptr i8, ptr %base, i8 -1
      // the offset will be treated as (i8 -1) and sign-extended to (i64 -1).
      if (Offset->getAPInt().isNegative())
        return false;

      // For the moment, restrict ourselves to the case where the offset is a
      // multiple of the requested alignment and the base is aligned.
      if (Offset->getAPInt().urem(Alignment.value()) != 0)
        return false;

      Base = NewBase->getValue();
      bool Overflow = false;
      AccessSize = AccessSize.uadd_ov(Offset->getAPInt(), Overflow);

  if (!Base)
    return false;

  // For the moment, restrict ourselves to the case where the access size is a
  // multiple of the requested alignment and the base is aligned.
  return isDereferenceableAndAlignedPointer(Base, Alignment, AccessSize, DL,
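// Editor's sketch (not part of Loads.cpp; canHoistLoopLoad is hypothetical):
// loop passes query the entry point above, declared in llvm/Analysis/Loads.h,
// to decide whether a load may execute unconditionally on every iteration.
#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
using namespace llvm;

static bool canHoistLoopLoad(LoadInst *LI, Loop *L, ScalarEvolution &SE,
                             DominatorTree &DT, AssumptionCache *AC) {
  // True if the address is dereferenceable and aligned on every iteration.
  return isDereferenceableAndAlignedInLoop(LI, L, SE, DT, AC);
}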
/// If DT and ScanFrom are specified, this method performs context-sensitive
/// analysis and returns true if it is safe to load immediately before ScanFrom.

  // If DT is not specified we can't make a context-sensitive query.
  BasicBlock::iterator BBI = ScanFrom->getIterator(),
                       E = ScanFrom->getParent()->begin();

  // We can at least always strip pointer casts even though we can't use the
  // base here.
  V = V->stripPointerCasts();

  while (BBI != E) {
    --BBI;

    if (isa<CallInst>(BBI) && BBI->mayWriteToMemory() &&
        !isa<LifetimeIntrinsic>(BBI) && !isa<DbgInfoIntrinsic>(BBI))
      return false;

    if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
      // Ignore volatile loads; they can point to e.g. an MMIO register.
      if (LI->isVolatile())
        continue;
      AccessedPtr = LI->getPointerOperand();
      AccessedTy = LI->getType();
      AccessedAlign = LI->getAlign();
    } else if (StoreInst *SI = dyn_cast<StoreInst>(BBI)) {
      if (SI->isVolatile())
        continue;
      AccessedPtr = SI->getPointerOperand();
      AccessedTy = SI->getValueOperand()->getType();
      AccessedAlign = SI->getAlign();
    }

    if (AreEquivalentAddressValues(AccessedPtr->stripPointerCasts(), V) &&

  APInt Size(DL.getIndexTypeSizeInBits(V->getType()), TySize.getFixedValue());
/// DefMaxInstsToScan - the default maximum number of instructions to scan in
/// the block, used by FindAvailableLoadedValue().
cl::opt<unsigned>
llvm::DefMaxInstsToScan("available-load-scan-limit", cl::init(6), cl::Hidden,
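// (Editor's note: since DefMaxInstsToScan is a cl::opt, the scan budget can
// be raised from the command line, e.g. `opt -available-load-scan-limit=10`;
// the default of 6 instructions keeps the backwards scan cheap.)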
  if (!Load->isUnordered())
    return nullptr;

  MemoryLocation Loc = MemoryLocation::get(Load);
  return findAvailablePtrLoadStore(Loc, Load->getType(), Load->isAtomic(),
// Check if the load and the store have the same base, constant offsets and
// non-overlapping access ranges.

  APInt LoadOffset(DL.getIndexTypeSizeInBits(LoadPtr->getType()), 0);
  APInt StoreOffset(DL.getIndexTypeSizeInBits(StorePtr->getType()), 0);
  const Value *LoadBase = LoadPtr->stripAndAccumulateConstantOffsets(
      DL, LoadOffset, /* AllowNonInbounds */ false);
  const Value *StoreBase = StorePtr->stripAndAccumulateConstantOffsets(
      DL, StoreOffset, /* AllowNonInbounds */ false);
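  // Concrete case (illustrative): an i32 load at (%base + 0) and an i32
  // store at (%base + 8) resolve to the same LoadBase/StoreBase, and the
  // byte ranges [0, 4) and [8, 12) do not intersect, so the function
  // reports them as non-overlapping and the store can be ignored.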
  if (auto *LI = dyn_cast<LoadInst>(Inst)) {
    // We can value forward from an atomic to a non-atomic, but not the
    // other way around.
    if (LI->isAtomic() < AtLeastAtomic)
      return nullptr;

    Value *LoadPtr = LI->getPointerOperand()->stripPointerCasts();
    if (CastInst::isBitOrNoopPointerCastable(LI->getType(), AccessTy, DL)) {

  if (auto *SI = dyn_cast<StoreInst>(Inst)) {
    // We can value forward from an atomic to a non-atomic, but not the
    // other way around.
    if (SI->isAtomic() < AtLeastAtomic)
      return nullptr;

    Value *StorePtr = SI->getPointerOperand()->stripPointerCasts();

    Value *Val = SI->getValueOperand();
    if (CastInst::isBitOrNoopPointerCastable(Val->getType(), AccessTy, DL))
      return Val;

    TypeSize StoreSize = DL.getTypeSizeInBits(Val->getType());

  if (auto *MSI = dyn_cast<MemSetInst>(Inst)) {
    // Don't forward from (non-atomic) memset to atomic load.
    if (AtLeastAtomic)
      return nullptr;

    auto *Val = dyn_cast<ConstantInt>(MSI->getValue());
    auto *Len = dyn_cast<ConstantInt>(MSI->getLength());
    Value *Dst = MSI->getDest();

    if ((Len->getValue() * 8).ult(LoadSize))
      return nullptr;

    APInt Splat = LoadSize >= 8 ? APInt::getSplat(LoadSize, Val->getValue())
                                : Val->getValue().trunc(LoadSize);
    ConstantInt *SplatC = ConstantInt::get(MSI->getContext(), Splat);
    if (CastInst::isBitOrNoopPointerCastable(SplatC->getType(), AccessTy, DL))
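    // Worked example (illustrative): forwarding from memset(%p, 0xAB, 16) to
    // an i32 load of %p yields the splat constant 0xABABABAB; only loads
    // narrower than 8 bits (e.g. i1) take the trunc branch above.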
  const DataLayout &DL = ScanBB->getDataLayout();
  const Value *StrippedPtr = Loc.Ptr->stripPointerCasts();

  while (ScanFrom != ScanBB->begin()) {
    Instruction *Inst = &*--ScanFrom;
    if (Inst->isDebugOrPseudoInst())
      continue;

    if (MaxInstsToScan-- == 0)
      return nullptr;

    --ScanFrom;

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      Value *StorePtr = SI->getPointerOperand()->stripPointerCasts();

      if ((isa<AllocaInst>(StrippedPtr) || isa<GlobalVariable>(StrippedPtr)) &&
          (isa<AllocaInst>(StorePtr) || isa<GlobalVariable>(StorePtr)) &&
          StrippedPtr != StorePtr)
        continue;

        // When AA isn't available, but if the load and the store have the same
        // base, constant offsets and non-overlapping access ranges, ignore the
        // store.
        if (areNonOverlapSameBaseLoadAndStore(
                Loc.Ptr, AccessTy, SI->getPointerOperand(),
                SI->getValueOperand()->getType(), DL))
          continue;

      if (!isModSet(AA->getModRefInfo(SI, Loc)))

    if (Inst->mayWriteToMemory()) {
      if (AA && !isModSet(AA->getModRefInfo(Inst, Loc)))
  const DataLayout &DL = Load->getDataLayout();
  Value *StrippedPtr = Load->getPointerOperand()->stripPointerCasts();
  BasicBlock *ScanBB = Load->getParent();
  Type *AccessTy = Load->getType();
  bool AtLeastAtomic = Load->isAtomic();

  if (!Load->isUnordered())
    return nullptr;

  for (Instruction &Inst : make_range(++Load->getReverseIterator(),
                                      ScanBB->rend())) {
    if (Inst.isDebugOrPseudoInst())
      continue;

    if (MaxInstsToScan-- == 0)
      return nullptr;
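// Editor's sketch (not part of Loads.cpp; simplifyRedundantLoad is
// hypothetical): clients such as GVN-style cleanups use the declaration in
// llvm/Analysis/Loads.h to replace a load with an earlier loaded or stored
// value from the same block.
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Loads.h"
#include "llvm/IR/Instructions.h"
using namespace llvm;

static Value *simplifyRedundantLoad(LoadInst *Load, AAResults &AA) {
  bool IsLoadCSE = false;
  // Scans backwards from Load, up to DefMaxInstsToScan instructions, for a
  // load or store of the same address whose value can be forwarded.
  Value *V = FindAvailableLoadedValue(Load, AA, &IsLoadCSE);
  return V; // IsLoadCSE reports load-CSE vs. store-to-load forwarding.
}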
  while (!Worklist.empty() && --Limit) {
    auto *User = Worklist.pop_back_val();
    if (isa<ICmpInst, PtrToIntInst>(User))
      continue;
    if (isa<PHINode, SelectInst>(User))
      Worklist.append(User->user_begin(), User->user_end());
    else
      return false;
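    // (Editor's note: icmp and ptrtoint users only observe the pointer's
    // numeric value and never dereference it, so they stay correct under
    // replacement; phi and select users merely forward the pointer, so the
    // walk continues through their users instead.)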
  if (isa<ConstantPointerNull>(To))
    return true;
  if (isa<Constant>(To) &&
      isDereferenceablePointer(To, Type::getInt8Ty(To->getContext()), DL))
    return true;

  assert(U->getType() == To->getType() && "values must have matching types");
  // Not a pointer, just return true.
  if (!To->getType()->isPointerTy())
    return true;

  assert(From->getType() == To->getType() && "values must have matching types");
  // Not a pointer, just return true.
  if (!From->getType()->isPointerTy())
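// Editor's sketch (not part of Loads.cpp; replacePointerIfSafe is
// hypothetical): a transform that has learned `From == To` from a dominating
// compare must consult the predicate above before substituting one pointer
// for the other, since equal addresses may still differ in provenance.
#include "llvm/Analysis/Loads.h"
#include "llvm/IR/Instruction.h"
using namespace llvm;

static void replacePointerIfSafe(Instruction *I, Value *From, Value *To,
                                 const DataLayout &DL) {
  // Only rewrite the operand when pointer replacement is provably safe.
  if (canReplacePointersIfEqual(From, To, DL))
    I->replaceUsesOfWith(From, To);
}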
  for (BasicBlock *BB : L->blocks()) {