//===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains routines that help determine which pointers are captured.
// A pointer value is captured if the function makes a copy of any part of the
// pointer that outlives the call. Not being captured means, more or less, that
// the pointer is only dereferenced and not stored in a global. Returning part
// of the pointer as the function return value may or may not count as capturing
// the pointer, depending on the context.
//
//===----------------------------------------------------------------------===//
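
// As an informal illustration of the distinction above, in C++ terms (G, f
// and P are hypothetical names, not part of this file):
//
//   int *G;
//   void f(int *P) {
//     int X = *P; // Does not capture P: only the pointed-to value is read.
//     G = P;      // Captures P: a copy of the pointer outlives the call.
//   }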

#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/CommandLine.h"

using namespace llvm;

#define DEBUG_TYPE "capture-tracking"

STATISTIC(NumCaptured, "Number of pointers maybe captured");
STATISTIC(NumNotCaptured, "Number of pointers not captured");
STATISTIC(NumCapturedBefore, "Number of pointers maybe captured before");
STATISTIC(NumNotCapturedBefore, "Number of pointers not captured before");

/// The default value for MaxUsesToExplore argument. It's relatively small to
/// keep the cost of analysis reasonable for clients like BasicAliasAnalysis,
/// where the results can't be cached.
/// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version can use a much higher limit or
/// have no cap at all.
static cl::opt<unsigned>
    DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden,
                            cl::desc("Maximal number of uses to explore."),
                            cl::init(20));

unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() {
  return DefaultMaxUsesToExplore;
}

CaptureTracker::~CaptureTracker() = default;

bool CaptureTracker::shouldExplore(const Use *U) { return true; }

bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) {
  // An inbounds GEP can either be a valid pointer (pointing into
  // or to the end of an allocation), or be null in the default
  // address space. So for an inbounds GEP there is no way to let
  // the pointer escape using clever GEP hacking because doing so
  // would make the pointer point outside of the allocated object
  // and thus make the GEP result a poison value. Similarly, other
  // dereferenceable pointers cannot be manipulated without producing
  // poison.
  if (auto *GEP = dyn_cast<GetElementPtrInst>(O))
    if (GEP->isInBounds())
      return true;
  bool CanBeNull, CanBeFreed;
  return O->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
}

namespace {
struct SimpleCaptureTracker : public CaptureTracker {
  explicit SimpleCaptureTracker(
      const SmallPtrSetImpl<const Value *> &EphValues, bool ReturnCaptures)
      : EphValues(EphValues), ReturnCaptures(ReturnCaptures) {}

  void tooManyUses() override { Captured = true; }

  bool captured(const Use *U) override {
    if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
      return false;

    if (EphValues.contains(U->getUser()))
      return false;

    Captured = true;
    return true;
  }

  const SmallPtrSetImpl<const Value *> &EphValues;

  bool ReturnCaptures;

  bool Captured = false;
};

/// Only find pointer captures which happen before the given instruction. Uses
/// the dominator tree to determine whether one instruction is before another.
/// Only supports the case where the Value is defined in the same basic block
/// as the given instruction and the use.
struct CapturesBefore : public CaptureTracker {

  CapturesBefore(bool ReturnCaptures, const Instruction *I,
                 const DominatorTree *DT, bool IncludeI, const LoopInfo *LI)
      : BeforeHere(I), DT(DT), ReturnCaptures(ReturnCaptures),
        IncludeI(IncludeI), LI(LI) {}

  void tooManyUses() override { Captured = true; }

  bool isSafeToPrune(Instruction *I) {
    if (BeforeHere == I)
      return !IncludeI;

    // We explore this usage only if the usage can reach "BeforeHere".
    // If use is not reachable from entry, there is no need to explore.
    if (!DT->isReachableFromEntry(I->getParent()))
      return true;

    // Check whether there is a path from I to BeforeHere.
    return !isPotentiallyReachable(I, BeforeHere, nullptr, DT, LI);
  }

  bool captured(const Use *U) override {
    Instruction *I = cast<Instruction>(U->getUser());
    if (isa<ReturnInst>(I) && !ReturnCaptures)
      return false;

    // Check isSafeToPrune() here rather than in shouldExplore() to avoid
    // an expensive reachability query for every instruction we look at.
    // Instead we only do one for actual capturing candidates.
    if (isSafeToPrune(I))
      return false;

    Captured = true;
    return true;
  }

  const Instruction *BeforeHere;
  const DominatorTree *DT;

  bool ReturnCaptures;
  bool IncludeI;

  bool Captured = false;

  const LoopInfo *LI;
};

/// Find the 'earliest' instruction before which the pointer is known not to
/// be captured. Here an instruction A is considered earlier than instruction
/// B, if A dominates B. If two escapes do not dominate each other, the
/// terminator of the common dominator is chosen. If not all uses can be
/// analyzed, the earliest escape is set to the first instruction in the
/// function entry block.
// NOTE: Users have to make sure instructions compared against the earliest
// escape are not in a cycle.
struct EarliestCaptures : public CaptureTracker {

  EarliestCaptures(bool ReturnCaptures, Function &F, const DominatorTree &DT)
      : DT(DT), ReturnCaptures(ReturnCaptures), F(F) {}

  void tooManyUses() override {
    Captured = true;
    EarliestCapture = &*F.getEntryBlock().begin();
  }

  bool captured(const Use *U) override {
    Instruction *I = cast<Instruction>(U->getUser());
    if (isa<ReturnInst>(I) && !ReturnCaptures)
      return false;

    if (!EarliestCapture) {
      EarliestCapture = I;
    } else if (EarliestCapture->getParent() == I->getParent()) {
      if (I->comesBefore(EarliestCapture))
        EarliestCapture = I;
    } else {
      BasicBlock *CurrentBB = I->getParent();
      BasicBlock *EarliestBB = EarliestCapture->getParent();
      if (DT.dominates(EarliestBB, CurrentBB)) {
        // EarliestCapture already comes before the current use.
      } else if (DT.dominates(CurrentBB, EarliestBB)) {
        EarliestCapture = I;
      } else {
        // Otherwise find the nearest common dominator and use its terminator.
        auto *NearestCommonDom =
            DT.findNearestCommonDominator(CurrentBB, EarliestBB);
        EarliestCapture = NearestCommonDom->getTerminator();
      }
    }
    Captured = true;

    // Return false to continue analysis; we need to see all potential
    // captures.
    return false;
  }

  Instruction *EarliestCapture = nullptr;

  const DominatorTree &DT;

  bool ReturnCaptures;

  bool Captured = false;

  Function &F;
};
} // namespace
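
// The trackers above are all driven through PointerMayBeCaptured(V, Tracker,
// MaxUsesToExplore); clients outside this file can plug in their own
// CaptureTracker the same way. A minimal sketch (CollectingTracker and Ptr
// are hypothetical, not part of this file) that records every potentially
// capturing use instead of stopping at the first one:
//
//   struct CollectingTracker : public CaptureTracker {
//     SmallVector<const Use *, 8> CapturingUses;
//     bool TooManyUses = false;
//
//     void tooManyUses() override { TooManyUses = true; }
//     bool captured(const Use *U) override {
//       CapturingUses.push_back(U);
//       return false; // Keep walking to collect all capturing uses.
//     }
//   };
//
//   CollectingTracker CT;
//   PointerMayBeCaptured(Ptr, &CT);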

/// PointerMayBeCaptured - Return true if this pointer value may be captured
/// by the enclosing function (which is required to exist). This routine can
/// be expensive, so consider caching the results. The boolean ReturnCaptures
/// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not. The boolean StoreCaptures specifies whether
/// storing the value (or part of it) into memory anywhere automatically
/// counts as capturing it or not.
bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
                                bool StoreCaptures, unsigned MaxUsesToExplore) {
  SmallPtrSet<const Value *, 1> Empty;
  return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures, Empty,
                              MaxUsesToExplore);
}

/// Variant of the above function which accepts a set of Values that are
/// ephemeral and cannot cause pointers to escape.
bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
                                bool StoreCaptures,
                                const SmallPtrSetImpl<const Value *> &EphValues,
                                unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // TODO: If StoreCaptures is not true, we could use fancier analysis
  // to determine whether this store is not actually an escape point.
  // In that case, BasicAliasAnalysis should be updated as well to
  // take advantage of this.
  (void)StoreCaptures;

  SimpleCaptureTracker SCT(EphValues, ReturnCaptures);
  PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
  if (SCT.Captured)
    ++NumCaptured;
  else
    ++NumNotCaptured;
  return SCT.Captured;
}
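
// A minimal usage sketch of the wrappers above (illustrative; Alloca stands
// for any non-global, pointer-typed Value in the analyzed function):
//
//   AllocaInst *Alloca = ...;
//   bool MayEscape = PointerMayBeCaptured(Alloca, /*ReturnCaptures=*/true,
//                                         /*StoreCaptures=*/true);
//   // MayEscape == false guarantees that no copy of the pointer outlives
//   // the function, including via its return value.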

/// PointerMayBeCapturedBefore - Return true if this pointer value may be
/// captured by the enclosing function (which is required to exist). If a
/// DominatorTree is provided, only captures which happen before the given
/// instruction are considered. This routine can be expensive, so consider
/// caching the results. The boolean ReturnCaptures specifies whether
/// returning the value (or part of it) from the function counts as capturing
/// it or not. The boolean StoreCaptures specifies whether storing the value
/// (or part of it) into memory anywhere automatically counts as capturing it
/// or not.
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      const DominatorTree *DT, bool IncludeI,
                                      unsigned MaxUsesToExplore,
                                      const LoopInfo *LI) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
                                MaxUsesToExplore);

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, LI);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.Captured;
}

Instruction *llvm::FindEarliestCapture(const Value *V, Function &F,
                                       bool ReturnCaptures, bool StoreCaptures,
                                       const DominatorTree &DT,
                                       unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  EarliestCaptures CB(ReturnCaptures, F, DT);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.EarliestCapture;
}
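
// A minimal sketch of the flow-sensitive queries above (illustrative; Alloca,
// LoadI, DT and F are assumed to come from the calling pass, and trailing
// parameters are left at their declared defaults):
//
//   // Is the pointer possibly captured before (not including) this load?
//   bool CapturedBefore = PointerMayBeCapturedBefore(
//       Alloca, /*ReturnCaptures=*/true, /*StoreCaptures=*/true, LoadI, &DT,
//       /*IncludeI=*/false);
//
//   // Or ask for the earliest point before which no capture can happen.
//   Instruction *FirstCapture = FindEarliestCapture(
//       Alloca, F, /*ReturnCaptures=*/true, /*StoreCaptures=*/true, DT);
//   // A nullptr result means no capturing use was found at all.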

UseCaptureKind llvm::DetermineUseCaptureKind(
    const Use &U,
    function_ref<bool(Value *, const DataLayout &)> IsDereferenceableOrNull) {
  Instruction *I = cast<Instruction>(U.getUser());

  switch (I->getOpcode()) {
  case Instruction::Call:
  case Instruction::Invoke: {
    auto *Call = cast<CallBase>(I);
    // Not captured if the callee is readonly, doesn't return a copy through
    // its return value and doesn't unwind (a readonly function can leak bits
    // by throwing an exception or not depending on the input value).
    if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
        Call->getType()->isVoidTy())
      return UseCaptureKind::NO_CAPTURE;

    // The pointer is not captured if the returned pointer is not captured.
    // NOTE: CaptureTracking users should not assume that only functions
    // marked with nocapture do not capture. This means that places like
    // getUnderlyingObject in ValueTracking or DecomposeGEPExpression
    // in BasicAA also need to know about this property.
    if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call, true))
      return UseCaptureKind::PASSTHROUGH;

    // Volatile operations effectively capture the memory location that they
    // load and store to.
    if (auto *MI = dyn_cast<MemIntrinsic>(Call))
      if (MI->isVolatile())
        return UseCaptureKind::MAY_CAPTURE;

    // Calling a function pointer does not in itself cause the pointer to
    // be captured. This is a subtle point considering that (for example)
    // the callee might return its own address. It is analogous to saying
    // that loading a value from a pointer does not cause the pointer to be
    // captured, even though the loaded value might be the pointer itself
    // (think of self-referential objects).
    if (Call->isCallee(&U))
      return UseCaptureKind::NO_CAPTURE;

    // Not captured if only passed via 'nocapture' arguments.
    if (Call->isDataOperand(&U) &&
        !Call->doesNotCapture(Call->getDataOperandNo(&U))) {
      // The parameter is not marked 'nocapture' - captured.
      return UseCaptureKind::MAY_CAPTURE;
    }
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::Load:
    // Volatile loads make the address observable.
    if (cast<LoadInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::VAArg:
    // "va-arg" from a pointer does not cause it to be captured.
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::Store:
    // Stored the pointer - conservatively assume it may be captured.
    // Volatile stores make the address observable.
    if (U.getOperandNo() == 0 || cast<StoreInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::AtomicRMW: {
    // atomicrmw conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ARMWI = cast<AtomicRMWInst>(I);
    if (U.getOperandNo() == 1 || ARMWI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::AtomicCmpXchg: {
    // cmpxchg conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ACXI = cast<AtomicCmpXchgInst>(I);
    if (U.getOperandNo() == 1 || U.getOperandNo() == 2 || ACXI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::BitCast:
  case Instruction::GetElementPtr:
  case Instruction::PHI:
  case Instruction::Select:
  case Instruction::AddrSpaceCast:
    // The original value is not captured via this if the new value isn't.
    return UseCaptureKind::PASSTHROUGH;
  case Instruction::ICmp: {
    unsigned Idx = U.getOperandNo();
    unsigned OtherIdx = 1 - Idx;
    if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
      // Don't count comparisons of a no-alias return value against null as
      // captures. This allows us to ignore comparisons of malloc results
      // with null, for example.
      if (CPN->getType()->getAddressSpace() == 0)
        if (isNoAliasCall(U.get()->stripPointerCasts()))
          return UseCaptureKind::NO_CAPTURE;
      if (!I->getFunction()->nullPointerIsDefined()) {
        auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
        // Comparing a dereferenceable_or_null pointer against null cannot
        // lead to pointer escapes, because if it is not null it must be a
        // valid (in-bounds) pointer.
        const DataLayout &DL = I->getModule()->getDataLayout();
        if (IsDereferenceableOrNull && IsDereferenceableOrNull(O, DL))
          return UseCaptureKind::NO_CAPTURE;
      }
    }
    // Comparison against a value loaded from a global variable. Given that
    // the pointer does not otherwise escape, its value cannot be guessed and
    // stored separately in a global variable.
    auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIdx));
    if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
      return UseCaptureKind::NO_CAPTURE;
    // Otherwise, be conservative. There are crazy ways to capture pointers
    // using comparisons.
    return UseCaptureKind::MAY_CAPTURE;
  }
  default:
    // Something else - be conservative and say it is captured.
    return UseCaptureKind::MAY_CAPTURE;
  }
}
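
// A minimal sketch of classifying one use directly (illustrative; U is an
// assumed Use of the tracked pointer, and the callback here conservatively
// treats nothing as dereferenceable-or-null):
//
//   UseCaptureKind K = DetermineUseCaptureKind(
//       U, [](Value *, const DataLayout &) { return false; });
//   // NO_CAPTURE: the use can be ignored; MAY_CAPTURE: it must be reported
//   // to the tracker; PASSTHROUGH: the user's own uses must be followed as
//   // well, as the worklist loop below does.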

void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
                                unsigned MaxUsesToExplore) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  if (MaxUsesToExplore == 0)
    MaxUsesToExplore = DefaultMaxUsesToExplore;

  SmallVector<const Use *, 20> Worklist;
  Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
  SmallSet<const Use *, 20> Visited;

  auto AddUses = [&](const Value *V) {
    unsigned Count = 0;
    for (const Use &U : V->uses()) {
      // If there are lots of uses, conservatively say that the value
      // is captured to avoid taking too much compile time.
      if (Count++ >= MaxUsesToExplore) {
        Tracker->tooManyUses();
        return false;
      }
      if (!Visited.insert(&U).second)
        continue;
      if (!Tracker->shouldExplore(&U))
        continue;
      Worklist.push_back(&U);
    }
    return true;
  };
  if (!AddUses(V))
    return;

  auto IsDereferenceableOrNull = [Tracker](Value *V, const DataLayout &DL) {
    return Tracker->isDereferenceableOrNull(V, DL);
  };
  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    switch (DetermineUseCaptureKind(*U, IsDereferenceableOrNull)) {
    case UseCaptureKind::NO_CAPTURE:
      continue;
    case UseCaptureKind::MAY_CAPTURE:
      if (Tracker->captured(U))
        return;
      continue;
    case UseCaptureKind::PASSTHROUGH:
      if (!AddUses(U->getUser()))
        return;
      continue;
    }
  }

  // All uses examined.
}

bool llvm::isNonEscapingLocalObject(
    const Value *V, SmallDenseMap<const Value *, bool, 8> *IsCapturedCache) {
  SmallDenseMap<const Value *, bool, 8>::iterator CacheIt;
  if (IsCapturedCache) {
    bool Inserted;
    std::tie(CacheIt, Inserted) = IsCapturedCache->insert({V, false});
    if (!Inserted)
      // Found cached result, return it!
      return CacheIt->second;
  }

  // If this is an identified function-local object, check to see if it
  // escapes.
  if (isIdentifiedFunctionLocal(V)) {
    // Set StoreCaptures to True so that we can assume in our callers that the
    // pointer is not the result of a load instruction. Currently
    // PointerMayBeCaptured doesn't have any special analysis for the
    // StoreCaptures=false case; if it did, our callers could be refined to be
    // more precise.
    auto Ret = !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);
    if (IsCapturedCache)
      CacheIt->second = Ret;
    return Ret;
  }

  return false;
}
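
// A minimal usage sketch (illustrative; Obj is an assumed underlying object,
// e.g. obtained via getUnderlyingObject, and the cache is optional):
//
//   SmallDenseMap<const Value *, bool, 8> CapturedCache;
//   if (isNonEscapingLocalObject(Obj, &CapturedCache)) {
//     // Obj is an identified function-local object (e.g. an alloca or a
//     // noalias call) whose address never escapes this function.
//   }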