
Searched refs: LoadInst (Results 1 – 25 of 291), sorted by relevance


/openbsd-src/gnu/llvm/llvm/lib/Target/X86/
X86AvoidStoreForwardingBlocks.cpp
102 void breakBlockedCopies(MachineInstr *LoadInst, MachineInstr *StoreInst,
105 void buildCopies(int Size, MachineInstr *LoadInst, int64_t LdDispImm,
109 void buildCopy(MachineInstr *LoadInst, unsigned NLoadOpcode, int64_t LoadDisp,
339 findPotentialBlockers(MachineInstr *LoadInst) { in findPotentialBlockers() argument
343 for (auto PBInst = std::next(MachineBasicBlock::reverse_iterator(LoadInst)), in findPotentialBlockers()
344 E = LoadInst->getParent()->rend(); in findPotentialBlockers()
361 MachineBasicBlock *MBB = LoadInst->getParent(); in findPotentialBlockers()
380 void X86AvoidSFBPass::buildCopy(MachineInstr *LoadInst, unsigned NLoadOpcode, in buildCopy() argument
385 MachineOperand &LoadBase = getBaseOperand(LoadInst); in buildCopy()
387 MachineBasicBlock *MBB = LoadInst->getParent(); in buildCopy()
[all …]
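
Note that in this X86 pass, LoadInst is just a local MachineInstr* variable name, not the IR class llvm::LoadInst that most other hits refer to. A rough sketch of the backward scan the findPotentialBlockers() lines come from; mayBlock() is a hypothetical stand-in for the pass's real aliasing and ordering checks:

    #include "llvm/ADT/SmallVector.h"
    #include "llvm/CodeGen/MachineBasicBlock.h"
    #include "llvm/CodeGen/MachineInstr.h"
    #include <iterator>
    using namespace llvm;

    // Sketch only: walk backwards from the load to the top of its block,
    // collecting earlier instructions that may block store-to-load forwarding.
    static SmallVector<MachineInstr *, 2>
    findBlockers(MachineInstr *LoadInst, bool (*mayBlock)(MachineInstr &)) {
      SmallVector<MachineInstr *, 2> Blockers;
      for (auto PBInst = std::next(MachineBasicBlock::reverse_iterator(LoadInst)),
                E = LoadInst->getParent()->rend();
           PBInst != E; ++PBInst)
        if (mayBlock(*PBInst))
          Blockers.push_back(&*PBInst);
      return Blockers;
    }
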
/openbsd-src/gnu/llvm/llvm/lib/Target/ARM/
ARMParallelDSP.cpp
57 using MemInstList = SmallVectorImpl<LoadInst*>;
69 SmallVector<LoadInst*, 2> VecLd; // Container for loads to widen.
75 return isa<LoadInst>(LHS) && isa<LoadInst>(RHS); in HasTwoLoadInputs()
78 LoadInst *getBaseLoad() const { in getBaseLoad()
199 LoadInst *NewLd = nullptr;
200 SmallVector<LoadInst*, 4> Loads;
203 WidenedLoad(SmallVectorImpl<LoadInst*> &Lds, LoadInst *Wide) in WidenedLoad()
207 LoadInst *getLoad() { in getLoad()
219 std::map<LoadInst*, LoadInst*> LoadPairs;
220 SmallPtrSet<LoadInst*, 4> OffsetLoads;
[all …]
/openbsd-src/gnu/llvm/llvm/lib/Transforms/ObjCARC/
ProvenanceAnalysis.cpp
145 if ((AIsIdentified && isa<LoadInst>(B) && !IsStoredObjCPointer(A)) || in relatedCheck()
146 (BIsIdentified && isa<LoadInst>(A) && !IsStoredObjCPointer(B))) in relatedCheck()
149 if ((AIsIdentified && isa<LoadInst>(B)) || in relatedCheck()
150 (BIsIdentified && isa<LoadInst>(A))) in relatedCheck()
154 if (AIsIdentified && BIsIdentified && !isa<LoadInst>(A) && !isa<LoadInst>(B)) in relatedCheck()
/openbsd-src/gnu/llvm/llvm/tools/llvm-reduce/deltas/
ReduceMemoryOperations.cpp
20 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) { in removeVolatileInFunction()
50 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) { in reduceAtomicSyncScopesInFunction()
83 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) { in reduceAtomicOrderingInFunction()
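
All three hits above use the same idiom: visit every instruction, dyn_cast to LoadInst, and strip one memory attribute. A minimal sketch of the volatile case, with our own function name standing in for removeVolatileInFunction():

    #include "llvm/IR/Function.h"
    #include "llvm/IR/InstIterator.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Simplified sketch: match every load in F and clear its volatile bit.
    static void stripVolatileLoads(Function &F) {
      for (Instruction &I : instructions(F))
        if (auto *LI = dyn_cast<LoadInst>(&I))
          if (LI->isVolatile())
            LI->setVolatile(false);
    }
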
/openbsd-src/gnu/llvm/llvm/include/llvm/Transforms/Utils/
Local.h
43 class LoadInst; variable
250 LoadInst *LI, DIBuilder &Builder);
391 void copyMetadataForLoad(LoadInst &Dest, const LoadInst &Source);
425 void copyNonnullMetadata(const LoadInst &OldLI, MDNode *N, LoadInst &NewLI);
431 void copyRangeMetadata(const DataLayout &DL, const LoadInst &OldLI, MDNode *N,
432 LoadInst &NewLI);
VNCoercion.h
27 class LoadInst; variable
62 int analyzeLoadFromClobberingLoad(Type *LoadTy, Value *LoadPtr, LoadInst *DepLI,
87 Value *getLoadValueForLoad(LoadInst *SrcVal, unsigned Offset, Type *LoadTy,
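
The Local.h helpers above (copyMetadataForLoad, copyNonnullMetadata, copyRangeMetadata) are the standard way to keep load metadata intact when a pass replaces one load with another. A hedged usage sketch; cloneLoadBefore() and its arguments are ours:

    #include "llvm/IR/Instructions.h"
    #include "llvm/Transforms/Utils/Local.h"
    using namespace llvm;

    // Sketch: build a replacement load in front of OldLI, then copy over
    // !nonnull, !range, TBAA, etc. via the helper declared above.
    static LoadInst *cloneLoadBefore(LoadInst &OldLI, Value *NewPtr) {
      auto *NewLI = new LoadInst(OldLI.getType(), NewPtr,
                                 OldLI.getName() + ".copy", &OldLI);
      copyMetadataForLoad(*NewLI, OldLI);
      return NewLI;
    }
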
/openbsd-src/gnu/llvm/llvm/lib/Target/NVPTX/
NVPTXLowerArgs.cpp
176 if (auto *LI = dyn_cast<LoadInst>(I.OldInstruction)) { in convertToParamAS()
258 LoadInst *Inst; in adjustByValArgAlignment()
276 if (auto *I = dyn_cast<LoadInst>(CurUser)) { in adjustByValArgAlignment()
322 if (isa<GetElementPtrInst>(V) || isa<BitCastInst>(V) || isa<LoadInst>(V)) in handleByValParam()
340 if (!isa<LoadInst>(V)) in handleByValParam()
386 LoadInst *LI = in handleByValParam()
387 new LoadInst(StructType, ArgInParam, Arg->getName(), in handleByValParam()
428 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) { in runOnKernelFunction()
NVPTXLowerAggrCopies.cpp
60 SmallVector<LoadInst *, 4> AggrLoads; in runOnFunction()
71 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) { in runOnFunction()
104 for (LoadInst *LI : AggrLoads) { in runOnFunction()
/openbsd-src/gnu/llvm/llvm/lib/Target/AMDGPU/
AMDGPUPromoteKernelArguments.cpp
45 bool promoteLoad(LoadInst *LI);
77 LoadInst *LD = cast<LoadInst>(U); in enqueueUsers()
97 LoadInst *LI = dyn_cast<LoadInst>(Ptr); in promotePointer()
130 bool AMDGPUPromoteKernelArguments::promoteLoad(LoadInst *LI) { in promoteLoad()
AMDGPULateCodeGenPrepare.cpp
77 bool canWidenScalarExtLoad(LoadInst &LI) const;
78 bool visitLoadInst(LoadInst &LI);
104 bool AMDGPULateCodeGenPrepare::canWidenScalarExtLoad(LoadInst &LI) const { in canWidenScalarExtLoad()
128 bool AMDGPULateCodeGenPrepare::visitLoadInst(LoadInst &LI) { in visitLoadInst()
171 LoadInst *NewLd = IRB.CreateAlignedLoad(IRB.getInt32Ty(), NewPtr, Align(4)); in visitLoadInst()
/openbsd-src/gnu/llvm/llvm/lib/Analysis/
MemDerefPrinter.cpp
55 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) { in runOnFunction()
90 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) { in run()
ObjCARCAnalysisUtils.cpp
39 if (const LoadInst *LI = dyn_cast<LoadInst>(Op)) in IsPotentialRetainableObjPtr()
/openbsd-src/gnu/llvm/llvm/include/llvm/Analysis/
Loads.h
26 class LoadInst; variable
85 bool isDereferenceableAndAlignedInLoop(LoadInst *LI, Loop *L,
132 Value *FindAvailableLoadedValue(LoadInst *Load,
144 Value *FindAvailableLoadedValue(LoadInst *Load, AAResults &AA, bool *IsLoadCSE,
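
FindAvailableLoadedValue is how GVN-style passes ask whether the value a load would produce is already available. A hedged sketch of a typical call using the overload on line 144; the helper name is ours, and real callers also coerce bitcast-compatible results instead of requiring an exact type match:

    #include "llvm/Analysis/AliasAnalysis.h"
    #include "llvm/Analysis/Loads.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Sketch: try to replace Load with an earlier available value.
    // A null result means no suitable prior load/store was found
    // within the default scan limit.
    static bool tryReuseLoadedValue(LoadInst *Load, AAResults &AA) {
      bool IsLoadCSE = false;
      if (Value *V = FindAvailableLoadedValue(Load, AA, &IsLoadCSE)) {
        // Keep only the exact-type case in this sketch.
        if (V->getType() == Load->getType()) {
          Load->replaceAllUsesWith(V);
          Load->eraseFromParent();
          return true;
        }
      }
      return false;
    }
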
/openbsd-src/gnu/llvm/llvm/lib/Transforms/Utils/
SSAUpdater.cpp
326 if (const LoadInst *LI = dyn_cast<LoadInst>(Insts[0])) in LoadAndStorePromoter()
348 SmallVector<LoadInst *, 32> LiveInLoads; in run()
367 LiveInLoads.push_back(cast<LoadInst>(User)); in run()
386 LiveInLoads.push_back(cast<LoadInst>(I)); in run()
398 if (LoadInst *L = dyn_cast<LoadInst>(&I)) { in run()
432 for (LoadInst *ALoad : LiveInLoads) { in run()
468 replaceLoadWithValue(cast<LoadInst>(User), NewVal); in run()
PromoteMemoryToRegister.cpp
66 if (const LoadInst *LI = dyn_cast<LoadInst>(U)) { in isAllocaPromotable()
205 LoadInst *LI = cast<LoadInst>(User); in AnalyzeAlloca()
259 return (isa<LoadInst>(I) && isa<AllocaInst>(I->getOperand(0))) || in isInterestingInstruction()
375 static void addAssumeNonNull(AssumptionCache *AC, LoadInst *LI) { in addAssumeNonNull()
386 static void convertMetadataToAssumes(LoadInst *LI, Value *Val, in convertMetadataToAssumes()
405 if (isa<LoadInst>(I) || isa<StoreInst>(I)) in removeIntrinsicUsers()
456 LoadInst *LI = cast<LoadInst>(UserInst); in rewriteSingleStoreAlloca()
568 LoadInst *LI = dyn_cast<LoadInst>(U); in promoteSingleBlockAlloca()
913 if (LoadInst *LI = dyn_cast<LoadInst>(I)) in ComputeLiveInBlocks()
1043 if (LoadInst *LI = dyn_cast<LoadInst>(I)) { in RenamePass()
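
The mem2reg hits revolve around one question: is every user of an alloca a simple load or store? A reduced sketch of that user scan; the real isAllocaPromotable() also tolerates certain intrinsics and casts:

    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Reduced sketch: promotion is only safe when the alloca's direct
    // users are non-volatile loads and stores.
    static bool onlySimpleLoadsAndStores(const AllocaInst *AI) {
      for (const User *U : AI->users()) {
        if (const auto *LI = dyn_cast<LoadInst>(U)) {
          if (LI->isVolatile())
            return false;
        } else if (const auto *SI = dyn_cast<StoreInst>(U)) {
          // Storing the alloca's own address would let it escape.
          if (SI->isVolatile() || SI->getValueOperand() == AI)
            return false;
        } else {
          return false;
        }
      }
      return true;
    }
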
DemoteRegToStack.cpp
74 V = new LoadInst(I.getType(), Slot, I.getName() + ".reload", in DemoteRegToStack()
83 Value *V = new LoadInst(I.getType(), Slot, I.getName() + ".reload", in DemoteRegToStack()
161 new LoadInst(P->getType(), Slot, P->getName() + ".reload", User); in DemotePHIToStack()
166 new LoadInst(P->getType(), Slot, P->getName() + ".reload", &*InsertPt); in DemotePHIToStack()
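
These hits show the raw LoadInst constructor rather than IRBuilder: demotion creates a stack slot and reloads it in front of each use. A small sketch of that reload idiom, with our own helper wrapped around the constructor form visible above:

    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Sketch: reload a demoted value from its stack slot immediately
    // before the instruction that needs it.
    static Value *reloadBefore(Instruction &I, AllocaInst *Slot,
                               Instruction *InsertBefore) {
      return new LoadInst(I.getType(), Slot, I.getName() + ".reload",
                          InsertBefore);
    }
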
GlobalStatus.cpp
92 if (const LoadInst *LI = dyn_cast<LoadInst>(I)) { in analyzeGlobalAux()
129 } else if (isa<LoadInst>(StoredVal) && in analyzeGlobalAux()
130 cast<LoadInst>(StoredVal)->getOperand(0) == GV) { in analyzeGlobalAux()
/openbsd-src/gnu/llvm/llvm/include/llvm/Transforms/Scalar/
GVN.h
46 class LoadInst; variable
316 bool processLoad(LoadInst *L);
317 bool processNonLocalLoad(LoadInst *L);
323 AnalyzeLoadAvailability(LoadInst *Load, MemDepResult DepInfo, Value *Address);
328 void AnalyzeLoadAvailability(LoadInst *Load, LoadDepVect &Deps,
332 bool PerformLoadPRE(LoadInst *Load, AvailValInBlkVect &ValuesPerBlock,
338 bool performLoopLoadPRE(LoadInst *Load, AvailValInBlkVect &ValuesPerBlock,
344 LoadInst *Load, AvailValInBlkVect &ValuesPerBlock,
MemCpyOptimizer.h
30 class LoadInst; variable
60 bool processStoreOfLoad(StoreInst *SI, LoadInst *LI, const DataLayout &DL,
78 bool moveUp(StoreInst *SI, Instruction *P, const LoadInst *LI);
/openbsd-src/gnu/llvm/llvm/lib/Transforms/Scalar/
LowerAtomicPass.cpp
30 static bool LowerLoadInst(LoadInst *LI) { in LowerLoadInst()
49 else if (LoadInst *LI = dyn_cast<LoadInst>(&Inst)) { in runOnBasicBlock()
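
LowerLoadInst() here demotes an atomic load to a plain one. A minimal sketch of the load half of that pass; the real pass also lowers stores, atomicrmw, and cmpxchg:

    #include "llvm/IR/Instructions.h"
    #include "llvm/Support/AtomicOrdering.h"
    using namespace llvm;

    // Sketch: strip the atomic ordering so later passes treat LI as an
    // ordinary load (only sound in contexts the pass is built for).
    static bool lowerAtomicLoad(LoadInst *LI) {
      if (!LI->isAtomic())
        return false;
      LI->setAtomic(AtomicOrdering::NotAtomic);
      return true;
    }
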
LoopLoadElimination.cpp
88 LoadInst *Load;
91 StoreToLoadForwardingCandidate(LoadInst *Load, StoreInst *Store) in StoreToLoadForwardingCandidate()
155 static bool isLoadConditional(LoadInst *Load, Loop *L) { in isLoadConditional()
193 if (isa<LoadInst>(Source)) in findStoreToLoadDependences()
195 if (isa<LoadInst>(Destination)) in findStoreToLoadDependences()
211 auto *Load = dyn_cast<LoadInst>(Destination); in findStoreToLoadDependences()
263 DenseMap<LoadInst *, const StoreToLoadForwardingCandidate *>; in removeDependencesFromMultipleStores()
344 LoadInst *LastLoad = in findPointersWrittenOnForwardingPath()
436 Value *Initial = new LoadInst( in propagateStoredValueToLoadUsers()
/openbsd-src/gnu/llvm/llvm/lib/CodeGen/
InterleavedLoadCombinePass.cpp
93 LoadInst *findFirstLoad(const std::set<LoadInst *> &LIs);
650 LoadInst *LI;
652 ElementInfo(Polynomial Offset = Polynomial(), LoadInst *LI = nullptr) in ElementInfo()
663 std::set<LoadInst *> LIs;
714 LoadInst *LI = dyn_cast<LoadInst>(V); in compute()
866 static bool computeFromLI(LoadInst *LI, VectorInfo &Result, in computeFromLI()
1099 LoadInst *
1100 InterleavedLoadCombineImpl::findFirstLoad(const std::set<LoadInst *> &LIs) { in findFirstLoad()
1109 return cast<LoadInst>(FLI); in findFirstLoad()
1119 LoadInst *InsertionPoint = InterleavedLoad.front().EI[0].LI; in combine()
[all …]
/openbsd-src/gnu/llvm/llvm/lib/Transforms/IPO/
GlobalOpt.cpp
168 if (isa<LoadInst>(V) || isa<InvokeInst>(V) || isa<Argument>(V) || in IsSafeComputationToRemove()
303 else if (auto *LI = dyn_cast<LoadInst>(U)) { in CleanupConstantGlobalUsers()
573 if (auto *LI = dyn_cast<LoadInst>(V)) { in SRAGlobal()
610 if (isa<LoadInst>(U)) { in AllUsesOfValueWillTrapIfNull()
635 isa<LoadInst>(U->getOperand(0)) && in AllUsesOfValueWillTrapIfNull()
637 assert(isa<GlobalValue>(cast<LoadInst>(U->getOperand(0)) in AllUsesOfValueWillTrapIfNull()
660 if (auto *LI = dyn_cast<LoadInst>(U)) { in allUsesOfLoadedValueWillTrapIfNull()
696 assert((isa<LoadInst>(U) || isa<StoreInst>(U)) && in allUsesOfLoadAndStores()
711 if (LoadInst *LI = dyn_cast<LoadInst>(I)) { in OptimizeAwayTrappingUsesOfValue()
785 if (LoadInst *LI = dyn_cast<LoadInst>(GlobalUser)) { in OptimizeAwayTrappingUsesOfLoads()
[all …]
/openbsd-src/gnu/llvm/llvm/lib/Transforms/InstCombine/
InstCombineLoadStoreAlloca.cpp
69 if (auto *LI = dyn_cast<LoadInst>(I)) { in isOnlyCopiedFromConstantMemory()
300 if (auto *Load = dyn_cast<LoadInst>(Inst)) { in collectUsersRecursive()
361 if (auto *LT = dyn_cast<LoadInst>(I)) { in replace()
364 auto *NewI = new LoadInst(LT->getType(), V, "", LT->isVolatile(), in replace()
552 LoadInst *InstCombinerImpl::combineLoadToNewType(LoadInst &LI, Type *NewTy, in combineLoadToNewType()
565 LoadInst *NewLoad = Builder.CreateAlignedLoad( in combineLoadToNewType()
671 LoadInst &Load) { in combineLoadToOperationType()
702 LoadInst *NewLoad = IC.combineLoadToNewType(Load, DestTy); in combineLoadToOperationType()
715 static Instruction *unpackLoadToAggregate(InstCombinerImpl &IC, LoadInst &LI) { in unpackLoadToAggregate()
731 LoadInst *NewLoad = IC.combineLoadToNewType(LI, ST->getTypeAtIndex(0U), in unpackLoadToAggregate()
[all …]
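
combineLoadToNewType() shows the IRBuilder side of the same job: re-issue a load at a new type while keeping address, alignment, and volatility. A hedged sketch built around the CreateAlignedLoad call on line 565; the helper name is ours and metadata copying is omitted:

    #include "llvm/IR/IRBuilder.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Sketch: emit a load of NewTy at the same address and alignment as LI,
    // inserted right before it.
    static LoadInst *loadAtNewType(LoadInst &LI, Type *NewTy) {
      IRBuilder<> B(&LI);
      return B.CreateAlignedLoad(NewTy, LI.getPointerOperand(), LI.getAlign(),
                                 LI.isVolatile(), LI.getName() + ".cast");
    }
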
/openbsd-src/gnu/llvm/llvm/lib/Target/AMDGPU/Utils/
AMDGPUMemoryUtils.h
21 class LoadInst; variable
41 bool isClobberedInFunction(const LoadInst *Load, MemorySSA *MSSA,
