//===-------- LoopDataPrefetch.cpp - Loop Data Prefetching Pass ----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements a Loop Data Prefetching Pass.
//
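// The pass inserts calls to the llvm.prefetch intrinsic ahead of strided
// memory accesses in innermost loops, prefetching the address a number of
// iterations ahead that is derived from the target's prefetch distance and
// the loop's instruction count. The prefetch distance, minimum stride,
// maximum iterations ahead, and cache line size all come from
// TargetTransformInfo, so the pass is a no-op on targets that do not set a
// prefetch distance.
//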
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "loop-data-prefetch"
#include "llvm/Transforms/Scalar.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionAliasAnalysis.h"
#include "llvm/Analysis/ScalarEvolutionExpander.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ValueMapper.h"
using namespace llvm;

// By default only the addresses of loads are prefetched; store addresses are
// prefetched as well when -loop-prefetch-writes is passed.
static cl::opt<bool>
PrefetchWrites("loop-prefetch-writes", cl::Hidden, cl::init(false),
               cl::desc("Prefetch write addresses"));

STATISTIC(NumPrefetches, "Number of prefetches inserted");

namespace llvm {
  void initializeLoopDataPrefetchPass(PassRegistry&);
}

namespace {

  class LoopDataPrefetch : public FunctionPass {
  public:
    static char ID; // Pass ID, replacement for typeid
    LoopDataPrefetch() : FunctionPass(ID) {
      initializeLoopDataPrefetchPass(*PassRegistry::getPassRegistry());
    }

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.addRequired<AssumptionCacheTracker>();
      AU.addPreserved<DominatorTreeWrapperPass>();
      AU.addRequired<LoopInfoWrapperPass>();
      AU.addPreserved<LoopInfoWrapperPass>();
      AU.addRequired<ScalarEvolutionWrapperPass>();
      // FIXME: For some reason, preserving SE here breaks LSR (even if
      // this pass changes nothing).
      // AU.addPreserved<ScalarEvolutionWrapperPass>();
      AU.addRequired<TargetTransformInfoWrapperPass>();
    }

    bool runOnFunction(Function &F) override;

  private:
    bool runOnLoop(Loop *L);

    /// \brief Check if the stride of the accesses is large enough to
    /// warrant a prefetch.
    bool isStrideLargeEnough(const SCEVAddRecExpr *AR);

    AssumptionCache *AC;
    LoopInfo *LI;
    ScalarEvolution *SE;
    const TargetTransformInfo *TTI;
    const DataLayout *DL;
  };
}

char LoopDataPrefetch::ID = 0;
INITIALIZE_PASS_BEGIN(LoopDataPrefetch, "loop-data-prefetch",
                      "Loop Data Prefetch", false, false)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(ScalarEvolutionWrapperPass)
INITIALIZE_PASS_END(LoopDataPrefetch, "loop-data-prefetch",
                    "Loop Data Prefetch", false, false)

FunctionPass *llvm::createLoopDataPrefetchPass() { return new LoopDataPrefetch(); }

bool LoopDataPrefetch::isStrideLargeEnough(const SCEVAddRecExpr *AR) {
  unsigned TargetMinStride = TTI->getMinPrefetchStride();
  // If any stride is acceptable, there is nothing to check.
  if (TargetMinStride <= 1)
    return true;

  const auto *ConstStride = dyn_cast<SCEVConstant>(AR->getStepRecurrence(*SE));
  // If MinStride is set, don't prefetch unless we can ensure that stride is
  // larger.
  if (!ConstStride)
    return false;

  unsigned AbsStride = std::abs(ConstStride->getAPInt().getSExtValue());
  return TargetMinStride <= AbsStride;
}

bool LoopDataPrefetch::runOnFunction(Function &F) {
  LI = &getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
  SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE();
  DL = &F.getParent()->getDataLayout();
  AC = &getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);
  TTI = &getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);

  // If PrefetchDistance is not set, don't run the pass. This gives an
  // opportunity for targets to run this pass for selected subtargets only
  // (whose TTI sets PrefetchDistance).
  if (TTI->getPrefetchDistance() == 0)
    return false;
  assert(TTI->getCacheLineSize() && "Cache line size is not set for target");

  bool MadeChange = false;

  for (auto I = LI->begin(), IE = LI->end(); I != IE; ++I)
    for (auto L = df_begin(*I), LE = df_end(*I); L != LE; ++L)
      MadeChange |= runOnLoop(*L);

  return MadeChange;
}

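// Prefetch within a single innermost loop. The loop is sized with
// CodeMetrics, and the number of iterations to prefetch ahead is computed as
// the target's prefetch distance divided by the loop size (at least 1); if
// that exceeds the target's maximum iterations ahead, nothing is done. Loops
// that already contain prefetch intrinsics are left untouched. Otherwise, a
// prefetch is emitted for each strided access in address space 0 (loads by
// default, stores as well under -loop-prefetch-writes) unless its address is
// already within one cache line of a previously prefetched access.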
bool LoopDataPrefetch::runOnLoop(Loop *L) {
  bool MadeChange = false;

  // Only prefetch in the inner-most loop.
  if (!L->empty())
    return MadeChange;

  SmallPtrSet<const Value *, 32> EphValues;
  CodeMetrics::collectEphemeralValues(L, AC, EphValues);

  // Calculate the number of iterations ahead to prefetch.
  CodeMetrics Metrics;
  for (Loop::block_iterator I = L->block_begin(), IE = L->block_end();
       I != IE; ++I) {

    // If the loop already has prefetches, then assume that the user knows
    // what he or she is doing and don't add any more.
    for (BasicBlock::iterator J = (*I)->begin(), JE = (*I)->end();
         J != JE; ++J)
      if (CallInst *CI = dyn_cast<CallInst>(J))
        if (Function *F = CI->getCalledFunction())
          if (F->getIntrinsicID() == Intrinsic::prefetch)
            return MadeChange;

    Metrics.analyzeBasicBlock(*I, *TTI, EphValues);
  }
  unsigned LoopSize = Metrics.NumInsts;
  if (!LoopSize)
    LoopSize = 1;

  unsigned ItersAhead = TTI->getPrefetchDistance() / LoopSize;
  if (!ItersAhead)
    ItersAhead = 1;

  if (ItersAhead > TTI->getMaxPrefetchIterationsAhead())
    return MadeChange;

  DEBUG(dbgs() << "Prefetching " << ItersAhead
               << " iterations ahead (loop size: " << LoopSize << ") in "
               << L->getHeader()->getParent()->getName() << ": " << *L);

  SmallVector<std::pair<Instruction *, const SCEVAddRecExpr *>, 16> PrefLoads;
  for (Loop::block_iterator I = L->block_begin(), IE = L->block_end();
       I != IE; ++I) {
    for (BasicBlock::iterator J = (*I)->begin(), JE = (*I)->end();
         J != JE; ++J) {
      Value *PtrValue;
      Instruction *MemI;

      if (LoadInst *LMemI = dyn_cast<LoadInst>(J)) {
        MemI = LMemI;
        PtrValue = LMemI->getPointerOperand();
      } else if (StoreInst *SMemI = dyn_cast<StoreInst>(J)) {
        if (!PrefetchWrites) continue;
        MemI = SMemI;
        PtrValue = SMemI->getPointerOperand();
      } else continue;

      unsigned PtrAddrSpace = PtrValue->getType()->getPointerAddressSpace();
      if (PtrAddrSpace)
        continue;

      if (L->isLoopInvariant(PtrValue))
        continue;

      const SCEV *LSCEV = SE->getSCEV(PtrValue);
      const SCEVAddRecExpr *LSCEVAddRec = dyn_cast<SCEVAddRecExpr>(LSCEV);
      if (!LSCEVAddRec)
        continue;

      // Check if the stride of the accesses is large enough to warrant a
      // prefetch.
      if (!isStrideLargeEnough(LSCEVAddRec))
        continue;

      // We don't want to double prefetch individual cache lines. If this load
      // is known to be within one cache line of some other load that has
      // already been prefetched, then don't prefetch this one as well.
      bool DupPref = false;
      for (SmallVector<std::pair<Instruction *, const SCEVAddRecExpr *>,
             16>::iterator K = PrefLoads.begin(), KE = PrefLoads.end();
           K != KE; ++K) {
        const SCEV *PtrDiff = SE->getMinusSCEV(LSCEVAddRec, K->second);
        if (const SCEVConstant *ConstPtrDiff =
              dyn_cast<SCEVConstant>(PtrDiff)) {
          int64_t PD = std::abs(ConstPtrDiff->getValue()->getSExtValue());
          if (PD < (int64_t) TTI->getCacheLineSize()) {
            DupPref = true;
            break;
          }
        }
      }
      if (DupPref)
        continue;

      const SCEV *NextLSCEV = SE->getAddExpr(LSCEVAddRec, SE->getMulExpr(
        SE->getConstant(LSCEVAddRec->getType(), ItersAhead),
        LSCEVAddRec->getStepRecurrence(*SE)));
      if (!isSafeToExpand(NextLSCEV, *SE))
        continue;

      PrefLoads.push_back(std::make_pair(MemI, LSCEVAddRec));

      Type *I8Ptr = Type::getInt8PtrTy((*I)->getContext(), PtrAddrSpace);
      SCEVExpander SCEVE(*SE, J->getModule()->getDataLayout(), "prefaddr");
      Value *PrefPtrValue = SCEVE.expandCodeFor(NextLSCEV, I8Ptr, MemI);

      IRBuilder<> Builder(MemI);
      Module *M = (*I)->getParent()->getParent();
      Type *I32 = Type::getInt32Ty((*I)->getContext());
      Value *PrefetchFunc = Intrinsic::getDeclaration(M, Intrinsic::prefetch);
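      // The call emitted below has the form
      //   call void @llvm.prefetch(i8* <addr>, i32 <rw>, i32 3, i32 1)
      // where <addr> is the expanded address ItersAhead iterations ahead,
      // <rw> is 0 when prefetching for a load and 1 for a store, 3 requests
      // maximal temporal locality, and 1 selects the data cache.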
      Builder.CreateCall(
          PrefetchFunc,
          {PrefPtrValue,
           ConstantInt::get(I32, MemI->mayReadFromMemory() ? 0 : 1),
           ConstantInt::get(I32, 3), ConstantInt::get(I32, 1)});
      ++NumPrefetches;
      DEBUG(dbgs() << "  Access: " << *PtrValue << ", SCEV: " << *LSCEV
                   << "\n");

      MadeChange = true;
    }
  }

  return MadeChange;
}