/freebsd-src/contrib/llvm-project/llvm/lib/Analysis/
  MemoryLocation.cpp
     95: MemoryLocation MemoryLocation::getForSource(const MemTransferInst *MTI) {    [argument in getForSource()]
     99: MemoryLocation MemoryLocation::getForSource(const AtomicMemTransferInst *MTI) {    [argument in getForSource()]
    103: MemoryLocation MemoryLocation::getForSource(const AnyMemTransferInst *MTI) {    [argument in getForSource()]
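The three getForSource() overloads above differ only in how precisely the transfer is typed (plain, atomic, or either). A minimal sketch of how a caller might use the result, assuming a pass that already holds a MemTransferInst and an AAResults handle (transferReadsFrom is an invented name, not code from this file):

    #include "llvm/Analysis/AliasAnalysis.h"
    #include "llvm/Analysis/MemoryLocation.h"
    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Sketch only: does this memcpy/memmove read from Loc?
    static bool transferReadsFrom(const MemTransferInst *MTI,
                                  const MemoryLocation &Loc, AAResults &AA) {
      // getForSource() describes the bytes read through the source operand.
      MemoryLocation Src = MemoryLocation::getForSource(MTI);
      return !AA.isNoAlias(Src, Loc);
    }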
  AliasSetTracker.cpp
    341: void AliasSetTracker::add(AnyMemTransferInst *MTI) {    [argument in add()]
    385: if (AnyMemTransferInst *MTI = dyn_cast<AnyMemTransferInst>(I))    [local in add()]
  StackSafetyAnalysis.cpp
    319: if (const auto *MTI = dyn_cast<MemTransferInst>(MI)) {    [local in getMemIntrinsicAccessRange()]
    473: if (const auto *MTI = dyn_cast<MemTransferInst>(MI)) {    [local in analyzeAllUses()]
  LazyValueInfo.cpp
    686: if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI))    [local in AddNonNullPointersByInstruction()]
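The StackSafetyAnalysis and LazyValueInfo hits above share the usual narrowing idiom: dyn_cast from MemIntrinsic down to MemTransferInst, because only transfers carry a source operand in addition to the destination. A minimal sketch of that idiom (addNonNullFacts is an invented name; the real LazyValueInfo code applies extra guards that are omitted here):

    #include "llvm/ADT/SmallPtrSet.h"
    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Invented helper: record pointers that must be non-null once MI ran.
    static void addNonNullFacts(MemIntrinsic *MI,
                                SmallPtrSetImpl<Value *> &NonNull) {
      NonNull.insert(MI->getRawDest()); // every mem intrinsic writes Dest
      if (auto *MTI = dyn_cast<MemTransferInst>(MI))
        NonNull.insert(MTI->getRawSource()); // only transfers also read Src
    }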
/freebsd-src/contrib/llvm-project/llvm/lib/Transforms/Utils/
  VNCoercion.cpp
    269: MemTransferInst *MTI = cast<MemTransferInst>(MI);    [local in analyzeLoadFromClobberingMemInst()]
    400: MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);    [local in getMemInstValueForLoad()]
    424: MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);    [local in getConstantMemInstValueForLoad()]
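VNCoercion uses cast<> rather than dyn_cast<> because its callers only reach these helpers with an instruction already known to be a transfer; cast<> asserts the kind instead of testing it. A sketch of that idiom together with the constant-length check such value-numbering helpers typically need (hasKnownConstantLength is an invented name):

    #include "llvm/IR/Constants.h"
    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Invented helper: cast<> asserts the kind; callers guarantee it.
    static bool hasKnownConstantLength(Instruction *SrcInst, uint64_t &Len) {
      MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);
      if (auto *CI = dyn_cast<ConstantInt>(MTI->getLength())) {
        Len = CI->getZExtValue();
        return true;
      }
      return false;
    }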
  GlobalStatus.cpp
    162: } else if (const MemTransferInst *MTI = dyn_cast<MemTransferInst>(I)) {    [local in analyzeGlobalAux()]
/freebsd-src/contrib/llvm-project/llvm/lib/Transforms/Scalar/
  AlignmentFromAssumptions.cpp
    249: if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI)) {    [local in processAssumption()]
  InferAddressSpaces.cpp
    512: if (auto *MTI = dyn_cast<MemTransferInst>(MI))    [local in collectFlatAddressExpressions()]
   1050: } else if (auto *MTI = dyn_cast<MemTransferInst>(MI)) {    [local in handleMemIntrinsicPtrUse()]
    [all...]
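At the handleMemIntrinsicPtrUse site, a pointer operand cannot simply be swapped in place once its address space (and hence its type) changes, so the transfer has to be rebuilt. A rough sketch of that shape, assuming the transfer is a plain memcpy (a real rewrite must also handle memmove and the atomic variants), not the exact code at line 1050:

    #include "llvm/IR/IRBuilder.h"
    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Rough sketch: rebuild a plain memcpy with one pointer operand swapped.
    static void rewriteTransfer(MemTransferInst *MTI, Value *OldPtr,
                                Value *NewPtr) {
      Value *Dest = MTI->getRawDest() == OldPtr ? NewPtr : MTI->getRawDest();
      Value *Src = MTI->getRawSource() == OldPtr ? NewPtr : MTI->getRawSource();
      IRBuilder<> B(MTI);
      B.CreateMemCpy(Dest, MTI->getDestAlign(), Src, MTI->getSourceAlign(),
                     MTI->getLength(), MTI->isVolatile());
      MTI->eraseFromParent();
    }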
/freebsd-src/contrib/llvm-project/llvm/lib/CodeGen/
  SafeStack.cpp
    258: if (auto MTI = dyn_cast<MemTransferInst>(MI)) {    [local in IsMemIntrinsicSafe()]
  CodeGenPrepare.cpp
   2401: if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI)) {    [local in optimizeCallInst()]
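The CodeGenPrepare site adjusts a transfer's recorded alignments once better ones have been proven. A sketch of that adjustment, assuming the proven alignment was established elsewhere (raiseTransferAlignment is an invented name):

    #include "llvm/IR/IntrinsicInst.h"
    #include "llvm/Support/Alignment.h"

    using namespace llvm;

    // Invented helper: raise recorded alignments to a proven lower bound.
    static void raiseTransferAlignment(MemTransferInst *MTI, Align Proven) {
      if (MTI->getDestAlign().valueOrOne() < Proven)
        MTI->setDestAlignment(Proven);
      if (MTI->getSourceAlign().valueOrOne() < Proven)
        MTI->setSourceAlignment(Proven);
    }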
/freebsd-src/contrib/llvm-project/llvm/lib/Target/AMDGPU/
  AMDGPUPromoteAlloca.cpp
    500: if (auto *MTI = dyn_cast<MemTransferInst>(Inst)) {    [local in promoteAllocaUserToVector()]
/freebsd-src/contrib/llvm-project/llvm/lib/Transforms/InstCombine/
  InstCombineCalls.cpp
   1509: if (AnyMemTransferInst *MTI = dyn_cast<AnyMemTransferInst>(MI)) {    [local in visitCallInst()]
   1517: if (auto *MTI = dyn_cast<AnyMemTransferInst>(MI)) {    [local in visitCallInst()]
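AnyMemTransferInst, used at both InstCombine hits, also matches the element-wise atomic memcpy/memmove intrinsics. One simplification applied near these lines is recognizing a transfer whose source and destination coincide, which changes no memory; a sketch of that test (InstCombine additionally guards against volatile transfers before erasing anything):

    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Invented helper: a transfer onto itself changes no memory
    // (memmove semantics make overlapping copies well defined).
    static bool isSelfCopy(const AnyMemTransferInst *MTI) {
      return MTI->getSource() == MTI->getDest();
    }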
/freebsd-src/contrib/llvm-project/llvm/lib/Transforms/Instrumentation/
  HWAddressSanitizer.cpp
   1003: if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI)) {    [local in ignoreMemIntrinsic()]
  DataFlowSanitizer.cpp
   2934: auto *MTI = cast<MemTransferInst>(    [local in visitMemTransferInst()]
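The two sanitizer sites treat a transfer as two accesses: a read of the source and a write of the destination. DataFlowSanitizer goes further and mirrors the copy on its shadow memory; a rough sketch of that idea, where getShadowAddress is hypothetical and stands in for DFSan's real shadow mapping:

    #include "llvm/IR/IRBuilder.h"
    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Hypothetical: maps an application address to its shadow address.
    Value *getShadowAddress(IRBuilder<> &B, Value *Addr);

    // Rough sketch: mirror the program's copy on the shadow memory.
    static void copyShadow(MemTransferInst *MTI) {
      IRBuilder<> B(MTI);
      Value *ShadowDst = getShadowAddress(B, MTI->getRawDest());
      Value *ShadowSrc = getShadowAddress(B, MTI->getRawSource());
      B.CreateMemCpy(ShadowDst, MTI->getDestAlign(), ShadowSrc,
                     MTI->getSourceAlign(), MTI->getLength());
    }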
/freebsd-src/contrib/llvm-project/llvm/lib/Target/Mips/
  MipsFastISel.cpp
   1650: const auto *MTI = cast<MemTransferInst>(II);    [local in fastLowerIntrinsicCall()]
/freebsd-src/contrib/llvm-project/llvm/lib/Transforms/IPO/
  GlobalOpt.cpp
    227: } else if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(U)) {    [local in CleanupPointerRootUsers()]
  Attributor.cpp
    910: if (const MemTransferInst *MTI = dyn_cast<MemTransferInst>(&I))    [local in isPotentiallyAffectedByBarrier()]
/freebsd-src/contrib/llvm-project/llvm/lib/Target/ARM/
  ARMFastISel.cpp
   2522: const MemTransferInst &MTI = cast<MemTransferInst>(I);    [local in SelectIntrinsicCall()]
    [all...]
/freebsd-src/contrib/llvm-project/llvm/lib/IR/
  AutoUpgrade.cpp
   4693: if (auto *MTI = dyn_cast<MemTransferInst>(MemCI))    [local in UpgradeIntrinsicCall()]
/freebsd-src/contrib/llvm-project/llvm/lib/Target/AArch64/
  AArch64FastISel.cpp
   3477: const auto *MTI = cast<MemTransferInst>(II);    [local in fastLowerIntrinsicCall()]
    [all...]
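The Mips, ARM, and AArch64 FastISel sites all gate inline lowering of a transfer on a small constant length, otherwise falling back to a libcall. A sketch of that gate (canLowerInline is an invented name; the 32-byte threshold is illustrative, not any target's actual limit):

    #include "llvm/IR/Constants.h"
    #include "llvm/IR/IntrinsicInst.h"

    using namespace llvm;

    // Invented helper: only small, constant-length, non-volatile copies
    // are lowered inline; everything else becomes a libcall.
    static bool canLowerInline(const MemTransferInst *MTI) {
      if (MTI->isVolatile())
        return false; // leave volatile transfers to the libcall path
      const auto *Len = dyn_cast<ConstantInt>(MTI->getLength());
      return Len && Len->getZExtValue() <= 32; // threshold is illustrative
    }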