//===- AssumptionCache.cpp - Cache finding @llvm.assume calls -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains a pass that keeps track of @llvm.assume intrinsics in
// the functions of a module.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/AssumeBundleQueries.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <utility>

using namespace llvm;
using namespace llvm::PatternMatch;

static cl::opt<bool>
    VerifyAssumptionCache("verify-assumption-cache", cl::Hidden,
                          cl::desc("Enable verification of assumption cache"),
                          cl::init(false));

SmallVector<AssumptionCache::ResultElem, 1> &
AssumptionCache::getOrInsertAffectedValues(Value *V) {
  // Try using find_as first to avoid creating extra value handles just for the
  // purpose of doing the lookup.
  auto AVI = AffectedValues.find_as(V);
  if (AVI != AffectedValues.end())
    return AVI->second;

  auto AVIP = AffectedValues.insert(
      {AffectedValueCallbackVH(V, this), SmallVector<ResultElem, 1>()});
  return AVIP.first->second;
}

static void
findAffectedValues(CallInst *CI,
                   SmallVectorImpl<AssumptionCache::ResultElem> &Affected) {
  // Note: This code must be kept in-sync with the code in
  // computeKnownBitsFromAssume in ValueTracking.

  auto AddAffected = [&Affected](Value *V, unsigned Idx =
                                               AssumptionCache::ExprResultIdx) {
    if (isa<Argument>(V)) {
      Affected.push_back({V, Idx});
    } else if (auto *I = dyn_cast<Instruction>(V)) {
      Affected.push_back({I, Idx});

      // Peek through unary operators to find the source of the condition.
      Value *Op;
      if (match(I, m_BitCast(m_Value(Op))) ||
          match(I, m_PtrToInt(m_Value(Op))) || match(I, m_Not(m_Value(Op)))) {
        if (isa<Instruction>(Op) || isa<Argument>(Op))
          Affected.push_back({Op, Idx});
      }
    }
  };

  for (unsigned Idx = 0; Idx != CI->getNumOperandBundles(); Idx++) {
    if (CI->getOperandBundleAt(Idx).Inputs.size() > ABA_WasOn &&
        CI->getOperandBundleAt(Idx).getTagName() != IgnoreBundleTag)
      AddAffected(CI->getOperandBundleAt(Idx).Inputs[ABA_WasOn], Idx);
  }

  Value *Cond = CI->getArgOperand(0), *A, *B;
  AddAffected(Cond);

  CmpInst::Predicate Pred;
  if (match(Cond, m_ICmp(Pred, m_Value(A), m_Value(B)))) {
    AddAffected(A);
    AddAffected(B);

    if (Pred == ICmpInst::ICMP_EQ) {
      // For equality comparisons, we handle the case of bit inversion.
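      // Illustrative IR fragments (not exhaustive): from
      //   assume(icmp eq (xor %x, -1), %c)
      // the m_Not case below records %x, and from
      //   assume(icmp eq (and %a, %b), %c)
      // the m_BitwiseLogic case records both %a and %b.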
      auto AddAffectedFromEq = [&AddAffected](Value *V) {
        Value *A;
        if (match(V, m_Not(m_Value(A)))) {
          AddAffected(A);
          V = A;
        }

        Value *B;
        // (A & B) or (A | B) or (A ^ B).
        if (match(V, m_BitwiseLogic(m_Value(A), m_Value(B)))) {
          AddAffected(A);
          AddAffected(B);
          // (A << C) or (A >>_s C) or (A >>_u C) where C is some constant.
        } else if (match(V, m_Shift(m_Value(A), m_ConstantInt()))) {
          AddAffected(A);
        }
      };

      AddAffectedFromEq(A);
      AddAffectedFromEq(B);
    }

    Value *X;
    // Handle (A + C1) u< C2, which is the canonical form of A > C3 && A < C4,
    // and recognized by LVI at least.
    if (Pred == ICmpInst::ICMP_ULT &&
        match(A, m_Add(m_Value(X), m_ConstantInt())) &&
        match(B, m_ConstantInt()))
      AddAffected(X);
  }
}

void AssumptionCache::updateAffectedValues(CallInst *CI) {
  SmallVector<AssumptionCache::ResultElem, 16> Affected;
  findAffectedValues(CI, Affected);

  for (auto &AV : Affected) {
    auto &AVV = getOrInsertAffectedValues(AV.Assume);
    if (std::find_if(AVV.begin(), AVV.end(), [&](ResultElem &Elem) {
          return Elem.Assume == CI && Elem.Index == AV.Index;
        }) == AVV.end())
      AVV.push_back({CI, AV.Index});
  }
}

void AssumptionCache::unregisterAssumption(CallInst *CI) {
  SmallVector<AssumptionCache::ResultElem, 16> Affected;
  findAffectedValues(CI, Affected);

  for (auto &AV : Affected) {
    auto AVI = AffectedValues.find_as(AV.Assume);
    if (AVI == AffectedValues.end())
      continue;
    bool Found = false;
    bool HasNonnull = false;
    for (ResultElem &Elem : AVI->second) {
      if (Elem.Assume == CI) {
        Found = true;
        Elem.Assume = nullptr;
      }
      HasNonnull |= !!Elem.Assume;
      if (HasNonnull && Found)
        break;
    }
    assert(Found && "already unregistered or incorrect cache state");
    if (!HasNonnull)
      AffectedValues.erase(AVI);
  }

  AssumeHandles.erase(
      remove_if(AssumeHandles, [CI](ResultElem &RE) { return CI == RE; }),
      AssumeHandles.end());
}

void AssumptionCache::AffectedValueCallbackVH::deleted() {
  auto AVI = AC->AffectedValues.find(getValPtr());
  if (AVI != AC->AffectedValues.end())
    AC->AffectedValues.erase(AVI);
  // 'this' now dangles!
}

void AssumptionCache::transferAffectedValuesInCache(Value *OV, Value *NV) {
  auto &NAVV = getOrInsertAffectedValues(NV);
  auto AVI = AffectedValues.find(OV);
  if (AVI == AffectedValues.end())
    return;

  for (auto &A : AVI->second)
    if (!llvm::is_contained(NAVV, A))
      NAVV.push_back(A);
  AffectedValues.erase(OV);
}

void AssumptionCache::AffectedValueCallbackVH::allUsesReplacedWith(Value *NV) {
  if (!isa<Instruction>(NV) && !isa<Argument>(NV))
    return;

  // Any assumptions that affected this value now affect the new value.

  AC->transferAffectedValuesInCache(getValPtr(), NV);
  // 'this' now might dangle! If the AffectedValues map was resized to add an
  // entry for NV then this object might have been destroyed in favor of some
  // copy in the grown map.
}

void AssumptionCache::scanFunction() {
  assert(!Scanned && "Tried to scan the function twice!");
  assert(AssumeHandles.empty() && "Already have assumes when scanning!");

  // Go through all instructions in all blocks, add all calls to @llvm.assume
  // to this cache.
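  // For example, given an (illustrative) fragment such as:
  //   %cmp = icmp sgt i32 %x, 0
  //   call void @llvm.assume(i1 %cmp)
  // the call is cached here, and updateAffectedValues() below records %cmp
  // (and %x, if it is an argument or instruction) as affected values.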
  for (BasicBlock &B : F)
    for (Instruction &II : B)
      if (match(&II, m_Intrinsic<Intrinsic::assume>()))
        AssumeHandles.push_back({&II, ExprResultIdx});

  // Mark the scan as complete.
  Scanned = true;

  // Update affected values.
  for (auto &A : AssumeHandles)
    updateAffectedValues(cast<CallInst>(A));
}

void AssumptionCache::registerAssumption(CallInst *CI) {
  assert(match(CI, m_Intrinsic<Intrinsic::assume>()) &&
         "Registered call does not call @llvm.assume");

  // If we haven't scanned the function yet, just drop this assumption. It will
  // be found when we scan later.
  if (!Scanned)
    return;

  AssumeHandles.push_back({CI, ExprResultIdx});

#ifndef NDEBUG
  assert(CI->getParent() &&
         "Cannot register @llvm.assume call not in a basic block");
  assert(&F == CI->getParent()->getParent() &&
         "Cannot register @llvm.assume call not in this function");

  // We expect the number of assumptions to be small, so in an asserts build
  // check that we don't accumulate duplicates and that all assumptions point
  // to the same function.
  SmallPtrSet<Value *, 16> AssumptionSet;
  for (auto &VH : AssumeHandles) {
    if (!VH)
      continue;

    assert(&F == cast<Instruction>(VH)->getParent()->getParent() &&
           "Cached assumption not inside this function!");
    assert(match(cast<CallInst>(VH), m_Intrinsic<Intrinsic::assume>()) &&
           "Cached something other than a call to @llvm.assume!");
    assert(AssumptionSet.insert(VH).second &&
           "Cache contains multiple copies of a call!");
  }
#endif

  updateAffectedValues(CI);
}

AnalysisKey AssumptionAnalysis::Key;

PreservedAnalyses AssumptionPrinterPass::run(Function &F,
                                             FunctionAnalysisManager &AM) {
  AssumptionCache &AC = AM.getResult<AssumptionAnalysis>(F);

  OS << "Cached assumptions for function: " << F.getName() << "\n";
  for (auto &VH : AC.assumptions())
    if (VH)
      OS << "  " << *cast<CallInst>(VH)->getArgOperand(0) << "\n";

  return PreservedAnalyses::all();
}

void AssumptionCacheTracker::FunctionCallbackVH::deleted() {
  auto I = ACT->AssumptionCaches.find_as(cast<Function>(getValPtr()));
  if (I != ACT->AssumptionCaches.end())
    ACT->AssumptionCaches.erase(I);
  // 'this' now dangles!
}

AssumptionCache &AssumptionCacheTracker::getAssumptionCache(Function &F) {
  // We probe the function map twice to try and avoid creating a value handle
  // around the function in common cases. This makes insertion a bit slower,
  // but if we have to insert we're going to scan the whole function so that
  // shouldn't matter.
  auto I = AssumptionCaches.find_as(&F);
  if (I != AssumptionCaches.end())
    return *I->second;

  // Ok, build a new cache by scanning the function, insert it and the value
  // handle into our map, and return the newly populated cache.
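  // (The scan itself is deferred: the new AssumptionCache populates its
  // assumption list lazily, on first access, via scanFunction() above.)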
  auto IP = AssumptionCaches.insert(std::make_pair(
      FunctionCallbackVH(&F, this), std::make_unique<AssumptionCache>(F)));
  assert(IP.second && "Scanning function already in the map?");
  return *IP.first->second;
}

AssumptionCache *AssumptionCacheTracker::lookupAssumptionCache(Function &F) {
  auto I = AssumptionCaches.find_as(&F);
  if (I != AssumptionCaches.end())
    return I->second.get();
  return nullptr;
}

void AssumptionCacheTracker::verifyAnalysis() const {
  // FIXME: In the long term the verifier should not be controllable with a
  // flag. We should either fix all passes to correctly update the assumption
  // cache and enable the verifier unconditionally or somehow arrange for the
  // assumption list to be updated automatically by passes.
  if (!VerifyAssumptionCache)
    return;

  SmallPtrSet<const CallInst *, 4> AssumptionSet;
  for (const auto &I : AssumptionCaches) {
    for (auto &VH : I.second->assumptions())
      if (VH)
        AssumptionSet.insert(cast<CallInst>(VH));

    for (const BasicBlock &B : cast<Function>(*I.first))
      for (const Instruction &II : B)
        if (match(&II, m_Intrinsic<Intrinsic::assume>()) &&
            !AssumptionSet.count(cast<CallInst>(&II)))
          report_fatal_error("Assumption in scanned function not in cache");
  }
}

AssumptionCacheTracker::AssumptionCacheTracker() : ImmutablePass(ID) {
  initializeAssumptionCacheTrackerPass(*PassRegistry::getPassRegistry());
}

AssumptionCacheTracker::~AssumptionCacheTracker() = default;

char AssumptionCacheTracker::ID = 0;

INITIALIZE_PASS(AssumptionCacheTracker, "assumption-cache-tracker",
                "Assumption Cache Tracker", false, true)
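
// Usage sketch (illustrative only, not part of this file's interface): client
// passes normally obtain the cache from an analysis manager rather than
// constructing it directly, e.g.
//
//   // New pass manager:
//   AssumptionCache &AC = AM.getResult<AssumptionAnalysis>(F);
//
//   // Legacy pass manager:
//   AssumptionCache &AC =
//       getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);
//
// and then query it, e.g. via AC.assumptions() as the printer and verifier
// above do, or via AC.assumptionsFor(V) for a particular affected value.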