Lines Matching defs:Load
121 cl::desc("Max number of dependences to attempt Load PRE (default = 100)"));
235 static AvailableValue getLoad(LoadInst *Load, unsigned Offset = 0) {
237 Res.Val = Load;
289 Value *MaterializeAdjustedValue(LoadInst *Load, Instruction *InsertPt,
325 Value *MaterializeAdjustedValue(LoadInst *Load, GVNPass &gvn) const {
326 return AV.MaterializeAdjustedValue(Load, BB->getTerminator(), gvn);
1044 /// construct SSA form, allowing us to eliminate Load. This returns the value
1045 /// that should be used at Load's definition site.
1047 ConstructSSAForLoadSet(LoadInst *Load,
1054 Load->getParent())) {
1057 return ValuesPerBlock[0].MaterializeAdjustedValue(Load, gvn);
1063 SSAUpdate.Initialize(Load->getType(), Load->getName());
1078 if (BB == Load->getParent() &&
1079 ((AV.AV.isSimpleValue() && AV.AV.getSimpleValue() == Load) ||
1080 (AV.AV.isCoercedLoadValue() && AV.AV.getCoercedLoadValue() == Load)))
1083 SSAUpdate.AddAvailableValue(BB, AV.MaterializeAdjustedValue(Load, gvn));
1087 return SSAUpdate.GetValueInMiddleOfBlock(Load->getParent());
1090 Value *AvailableValue::MaterializeAdjustedValue(LoadInst *Load,
1094 Type *LoadTy = Load->getType();
1095 const DataLayout &DL = Load->getDataLayout();
1110 combineMetadataForCSE(CoercedLoad, Load, false);
1147 cast<SelectInst>(Res)->setDebugLoc(Load->getDebugLoc());
1174 static void reportMayClobberedLoad(LoadInst *Load, MemDepResult DepInfo,
1181 OptimizationRemarkMissed R(DEBUG_TYPE, "LoadClobbered", Load);
1182 R << "load of type " << NV("Type", Load->getType()) << " not eliminated"
1185 for (auto *U : Load->getPointerOperand()->users()) {
1186 if (U != Load && (isa<LoadInst>(U) || isa<StoreInst>(U))) {
1188 if (I->getFunction() == Load->getFunction() && DT->dominates(I, Load)) {
1203 // use that lies between any other potentially available use and Load.
1204 for (auto *U : Load->getPointerOperand()->users()) {
1205 if (U != Load && (isa<LoadInst>(U) || isa<StoreInst>(U))) {
1207 if (I->getFunction() == Load->getFunction() &&
1208 isPotentiallyReachable(I, Load, nullptr, DT)) {
1210 if (liesBetween(OtherAccess, I, Load, DT)) {
1212 } else if (!liesBetween(I, OtherAccess, Load, DT)) {
1213 // These uses are both partially available at Load were it not for
1217 } // else: keep current OtherAccess since it lies between U and Load
1257 GVNPass::AnalyzeLoadAvailability(LoadInst *Load, MemDepResult DepInfo,
1259 assert(Load->isUnordered() && "rules below are incorrect for ordered access");
1264 const DataLayout &DL = Load->getDataLayout();
1271 if (Address && Load->isAtomic() <= DepSI->isAtomic()) {
1273 analyzeLoadFromClobberingStore(Load->getType(), Address, DepSI, DL);
1287 if (DepLoad != Load && Address &&
1288 Load->isAtomic() <= DepLoad->isAtomic()) {
1289 Type *LoadType = Load->getType();
1312 if (Address && !Load->isAtomic()) {
1313 int Offset = analyzeLoadFromClobberingMemInst(Load->getType(), Address,
1323 dbgs() << "GVN: load "; Load->printAsOperand(dbgs());
1326 reportMayClobberedLoad(Load, DepInfo, DT, ORE);
1335 return AvailableValue::get(UndefValue::get(Load->getType()));
1338 getInitialValueOfAllocation(DepInst, TLI, Load->getType()))
1345 if (!canCoerceMustAliasedValueToLoad(S->getValueOperand(), Load->getType(),
1350 if (S->isAtomic() < Load->isAtomic())
1360 if (!canCoerceMustAliasedValueToLoad(LD, Load->getType(), DL))
1364 if (LD->isAtomic() < Load->isAtomic())
1374 assert(Sel->getType() == Load->getPointerOperandType());
1375 auto Loc = MemoryLocation::get(Load);
1378 Load->getType(), DepInst, getAliasAnalysis());
1383 Load->getType(), DepInst, getAliasAnalysis());
1392 dbgs() << "GVN: load "; Load->printAsOperand(dbgs());
1397 void GVNPass::AnalyzeLoadAvailability(LoadInst *Load, LoadDepVect &Deps,
1423 if (auto AV = AnalyzeLoadAvailability(Load, DepInfo, Dep.getAddress())) {
1458 LoadInst *Load) {
1476 if (!Inst.isIdenticalTo(Load))
1496 LoadInst *Load, AvailValInBlkVect &ValuesPerBlock,
1504 Load->getType(), LoadPtr, Load->getName() + ".pre", Load->isVolatile(),
1505 Load->getAlign(), Load->getOrdering(), Load->getSyncScopeID(),
1507 NewLoad->setDebugLoc(Load->getDebugLoc());
1518 AAMDNodes Tags = Load->getAAMetadata();
1522 if (auto *MD = Load->getMetadata(LLVMContext::MD_invariant_load))
1524 if (auto *InvGroupMD = Load->getMetadata(LLVMContext::MD_invariant_group))
1526 if (auto *RangeMD = Load->getMetadata(LLVMContext::MD_range))
1528 if (auto *AccessMD = Load->getMetadata(LLVMContext::MD_access_group))
1529 if (LI->getLoopFor(Load->getParent()) == LI->getLoopFor(UnavailableBlock))
1564 Value *V = ConstructSSAForLoadSet(Load, ValuesPerBlock, *this);
1566 ICF->removeUsersOf(Load);
1567 Load->replaceAllUsesWith(V);
1569 V->takeName(Load);
1571 I->setDebugLoc(Load->getDebugLoc());
1574 markInstructionForDeletion(Load);
1576 return OptimizationRemark(DEBUG_TYPE, "LoadPRE", Load)
1581 bool GVNPass::PerformLoadPRE(LoadInst *Load, AvailValInBlkVect &ValuesPerBlock,
1596 BasicBlock *LoadBB = Load->getParent();
1615 ICF->isDominatedByICFIFromSameBlock(Load);
1661 << Pred->getName() << "': " << *Load << '\n');
1673 << Pred->getName() << "': " << *Load << '\n');
1680 << Pred->getName() << "': " << *Load << '\n');
1690 << Pred->getName() << "': " << *Load << '\n');
1694 if (LoadInst *LI = findLoadToHoistIntoPred(Pred, LoadBB, Load))
1714 // all the preds that don't have an available Load and insert a new load into
1723 if (!isSafeToSpeculativelyExecute(Load, &*LoadBB->getFirstNonPHIIt(), AC,
1727 if (!isSafeToSpeculativelyExecute(Load, PL.first->getTerminator(), AC,
1731 if (!isSafeToSpeculativelyExecute(Load, CEP.first->getTerminator(), AC,
1750 const DataLayout &DL = Load->getDataLayout();
1757 // We do the translation for each edge we skipped by going from Load's block
1763 Value *LoadPtr = Load->getPointerOperand();
1764 BasicBlock *Cur = Load->getParent();
1785 << *Load->getPointerOperand() << "\n");
1810 LLVM_DEBUG(dbgs() << "GVN REMOVING PRE LOAD: " << *Load << '\n');
1829 eliminatePartiallyRedundantLoad(Load, ValuesPerBlock, PredLoads,
1835 bool GVNPass::performLoopLoadPRE(LoadInst *Load,
1838 const Loop *L = LI->getLoopFor(Load->getParent());
1840 if (!L || L->getHeader() != Load->getParent())
1848 Value *LoadPtr = Load->getPointerOperand();
1856 if (ICF->isDominatedByICFIFromSameBlock(Load))
1905 LLVM_DEBUG(dbgs() << "GVN REMOVING PRE LOOP LOAD: " << *Load << '\n');
1906 eliminatePartiallyRedundantLoad(Load, ValuesPerBlock, AvailableLoads,
1912 static void reportLoadElim(LoadInst *Load, Value *AvailableValue,
1917 return OptimizationRemark(DEBUG_TYPE, "LoadElim", Load)
1918 << "load of type " << NV("Type", Load->getType()) << " eliminated"
1926 bool GVNPass::processNonLocalLoad(LoadInst *Load) {
1928 if (Load->getParent()->getParent()->hasFnAttribute(
1930 Load->getParent()->getParent()->hasFnAttribute(
1936 MD->getNonLocalPointerDependency(Load, Deps);
1949 LLVM_DEBUG(dbgs() << "GVN: non-local load "; Load->printAsOperand(dbgs());
1957 dyn_cast<GetElementPtrInst>(Load->getOperand(0))) {
1966 AnalyzeLoadAvailability(Load, Deps, ValuesPerBlock, UnavailableBlocks);
1979 LLVM_DEBUG(dbgs() << "GVN REMOVING NONLOCAL LOAD: " << *Load << '\n');
1982 Value *V = ConstructSSAForLoadSet(Load, ValuesPerBlock, *this);
1984 ICF->removeUsersOf(Load);
1985 Load->replaceAllUsesWith(V);
1988 V->takeName(Load);
1992 // to propagate Load's DebugLoc because Load may not post-dominate I.
1993 if (Load->getDebugLoc() && Load->getParent() == I->getParent())
1994 I->setDebugLoc(Load->getDebugLoc());
1997 markInstructionForDeletion(Load);
1999 reportLoadElim(Load, V, ORE);
2006 if (!isLoadInLoopPREEnabled() && LI->getLoopFor(Load->getParent()))
2009 if (performLoopLoadPRE(Load, ValuesPerBlock, UnavailableBlocks) ||
2010 PerformLoadPRE(Load, ValuesPerBlock, UnavailableBlocks))
2619 if (LoadInst *Load = dyn_cast<LoadInst>(I)) {
2620 if (processLoad(Load))
2623 unsigned Num = VN.lookupOrAdd(Load);
2624 LeaderTable.insert(Num, Load, Load->getParent());
2927 // This doesn't prevent Load PRE. PHI translation will make the GEP available
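The matched lines 1047-1087 (ConstructSSAForLoadSet) all revolve around one pattern: register the value that is available in each block with an SSAUpdater, then ask it for the value usable at the load's position. Below is a minimal sketch of that pattern under simplified assumptions; it is not GVN's implementation. BlockValue and buildReplacementForLoad are hypothetical stand-ins for AvailableValueInBlock and the materialized values it produces, while the SSAUpdater calls (Initialize, HasValueForBlock, AddAvailableValue, GetValueInMiddleOfBlock) are the real LLVM API used on those lines.

#include "llvm/ADT/ArrayRef.h"
#include "llvm/IR/Instructions.h"
#include "llvm/Transforms/Utils/SSAUpdater.h"
using namespace llvm;

// Hypothetical pair: a block plus the value already available for the load there.
struct BlockValue {
  BasicBlock *BB;
  Value *V;
};

// Sketch: build the single replacement value for Load from per-block
// available values, letting SSAUpdater insert any PHIs that are needed.
static Value *buildReplacementForLoad(LoadInst *Load,
                                      ArrayRef<BlockValue> Avail) {
  SSAUpdater SSAUpdate;
  SSAUpdate.Initialize(Load->getType(), Load->getName());

  for (const BlockValue &BV : Avail) {
    // Don't register the load itself in its own block; its value is exactly
    // what is being reconstructed (mirrors the check on lines 1078-1080).
    if (BV.BB == Load->getParent() && BV.V == Load)
      continue;
    if (!SSAUpdate.HasValueForBlock(BV.BB))
      SSAUpdate.AddAvailableValue(BV.BB, BV.V);
  }

  // The value (possibly a new PHI) that can stand in for Load at its
  // original position, as on line 1087.
  return SSAUpdate.GetValueInMiddleOfBlock(Load->getParent());
}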