xref: /llvm-project/llvm/lib/Analysis/CaptureTracking.cpp (revision 564ed0bd2245d4477dfd1c2f610294eff697edab)
1 //===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file contains routines that help determine which pointers are captured.
10 // A pointer value is captured if the function makes a copy of any part of the
11 // pointer that outlives the call.  Not being captured means, more or less, that
12 // the pointer is only dereferenced and not stored in a global.  Returning part
13 // of the pointer as the function return value may or may not count as capturing
14 // the pointer, depending on the context.
15 //
16 //===----------------------------------------------------------------------===//
17 
18 #include "llvm/Analysis/CaptureTracking.h"
19 #include "llvm/ADT/SmallPtrSet.h"
20 #include "llvm/ADT/SmallSet.h"
21 #include "llvm/ADT/SmallVector.h"
22 #include "llvm/ADT/Statistic.h"
23 #include "llvm/Analysis/AliasAnalysis.h"
24 #include "llvm/Analysis/CFG.h"
25 #include "llvm/Analysis/ValueTracking.h"
26 #include "llvm/IR/Constants.h"
27 #include "llvm/IR/Dominators.h"
28 #include "llvm/IR/Instructions.h"
29 #include "llvm/IR/IntrinsicInst.h"
30 #include "llvm/Support/CommandLine.h"
31 
32 using namespace llvm;
33 
34 #define DEBUG_TYPE "capture-tracking"
35 
36 STATISTIC(NumCaptured,          "Number of pointers maybe captured");
37 STATISTIC(NumNotCaptured,       "Number of pointers not captured");
38 STATISTIC(NumCapturedBefore,    "Number of pointers maybe captured before");
39 STATISTIC(NumNotCapturedBefore, "Number of pointers not captured before");
40 
41 /// The default value for MaxUsesToExplore argument. It's relatively small to
42 /// keep the cost of analysis reasonable for clients like BasicAliasAnalysis,
43 /// where the results can't be cached.
/// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version could use a much higher limit,
/// or have no cap at all.
47 static cl::opt<unsigned>
48     DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden,
49                             cl::desc("Maximal number of uses to explore."),
50                             cl::init(100));
51 
/// Expose the command-line-configurable default use-exploration limit so
/// that clients can size their own bookkeeping consistently with the
/// capture-tracking walk.
unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() {
  return DefaultMaxUsesToExplore;
}
55 
// Out-of-line destructor definition anchors CaptureTracker's vtable here.
CaptureTracker::~CaptureTracker() = default;
57 
// By default every use is explored; subclasses override this to prune the
// traversal early.
bool CaptureTracker::shouldExplore(const Use *U) { return true; }
59 
60 bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) {
61   // An inbounds GEP can either be a valid pointer (pointing into
62   // or to the end of an allocation), or be null in the default
63   // address space. So for an inbounds GEP there is no way to let
64   // the pointer escape using clever GEP hacking because doing so
65   // would make the pointer point outside of the allocated object
66   // and thus make the GEP result a poison value. Similarly, other
67   // dereferenceable pointers cannot be manipulated without producing
68   // poison.
69   if (auto *GEP = dyn_cast<GetElementPtrInst>(O))
70     if (GEP->isInBounds())
71       return true;
72   bool CanBeNull, CanBeFreed;
73   return O->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
74 }
75 
76 namespace {
77   struct SimpleCaptureTracker : public CaptureTracker {
78     explicit SimpleCaptureTracker(
79 
80         const SmallPtrSetImpl<const Value *> &EphValues, bool ReturnCaptures)
81         : EphValues(EphValues), ReturnCaptures(ReturnCaptures) {}
82 
83     void tooManyUses() override {
84       LLVM_DEBUG(dbgs() << "Captured due to too many uses\n");
85       Captured = true;
86     }
87 
88     bool captured(const Use *U) override {
89       if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
90         return false;
91 
92       if (EphValues.contains(U->getUser()))
93         return false;
94 
95       LLVM_DEBUG(dbgs() << "Captured by: " << *U->getUser() << "\n");
96 
97       Captured = true;
98       return true;
99     }
100 
101     const SmallPtrSetImpl<const Value *> &EphValues;
102 
103     bool ReturnCaptures;
104 
105     bool Captured = false;
106   };
107 
108   /// Only find pointer captures which happen before the given instruction. Uses
109   /// the dominator tree to determine whether one instruction is before another.
110   /// Only support the case where the Value is defined in the same basic block
111   /// as the given instruction and the use.
112   struct CapturesBefore : public CaptureTracker {
113 
114     CapturesBefore(bool ReturnCaptures, const Instruction *I,
115                    const DominatorTree *DT, bool IncludeI, const LoopInfo *LI)
116         : BeforeHere(I), DT(DT), ReturnCaptures(ReturnCaptures),
117           IncludeI(IncludeI), LI(LI) {}
118 
119     void tooManyUses() override { Captured = true; }
120 
121     bool isSafeToPrune(Instruction *I) {
122       if (BeforeHere == I)
123         return !IncludeI;
124 
125       // We explore this usage only if the usage can reach "BeforeHere".
126       // If use is not reachable from entry, there is no need to explore.
127       if (!DT->isReachableFromEntry(I->getParent()))
128         return true;
129 
130       // Check whether there is a path from I to BeforeHere.
131       return !isPotentiallyReachable(I, BeforeHere, nullptr, DT, LI);
132     }
133 
134     bool captured(const Use *U) override {
135       Instruction *I = cast<Instruction>(U->getUser());
136       if (isa<ReturnInst>(I) && !ReturnCaptures)
137         return false;
138 
139       // Check isSafeToPrune() here rather than in shouldExplore() to avoid
140       // an expensive reachability query for every instruction we look at.
141       // Instead we only do one for actual capturing candidates.
142       if (isSafeToPrune(I))
143         return false;
144 
145       Captured = true;
146       return true;
147     }
148 
149     const Instruction *BeforeHere;
150     const DominatorTree *DT;
151 
152     bool ReturnCaptures;
153     bool IncludeI;
154 
155     bool Captured = false;
156 
157     const LoopInfo *LI;
158   };
159 
160   /// Find the 'earliest' instruction before which the pointer is known not to
161   /// be captured. Here an instruction A is considered earlier than instruction
162   /// B, if A dominates B. If 2 escapes do not dominate each other, the
163   /// terminator of the common dominator is chosen. If not all uses cannot be
164   /// analyzed, the earliest escape is set to the first instruction in the
165   /// function entry block.
166   // NOTE: Users have to make sure instructions compared against the earliest
167   // escape are not in a cycle.
168   struct EarliestCaptures : public CaptureTracker {
169 
170     EarliestCaptures(bool ReturnCaptures, Function &F, const DominatorTree &DT,
171                      const SmallPtrSetImpl<const Value *> &EphValues)
172         : EphValues(EphValues), DT(DT), ReturnCaptures(ReturnCaptures), F(F) {}
173 
174     void tooManyUses() override {
175       Captured = true;
176       EarliestCapture = &*F.getEntryBlock().begin();
177     }
178 
179     bool captured(const Use *U) override {
180       Instruction *I = cast<Instruction>(U->getUser());
181       if (isa<ReturnInst>(I) && !ReturnCaptures)
182         return false;
183 
184       if (EphValues.contains(I))
185         return false;
186 
187       if (!EarliestCapture)
188         EarliestCapture = I;
189       else
190         EarliestCapture = DT.findNearestCommonDominator(EarliestCapture, I);
191       Captured = true;
192 
193       // Return false to continue analysis; we need to see all potential
194       // captures.
195       return false;
196     }
197 
198     const SmallPtrSetImpl<const Value *> &EphValues;
199 
200     Instruction *EarliestCapture = nullptr;
201 
202     const DominatorTree &DT;
203 
204     bool ReturnCaptures;
205 
206     bool Captured = false;
207 
208     Function &F;
209   };
210 }
211 
212 /// PointerMayBeCaptured - Return true if this pointer value may be captured
213 /// by the enclosing function (which is required to exist).  This routine can
214 /// be expensive, so consider caching the results.  The boolean ReturnCaptures
215 /// specifies whether returning the value (or part of it) from the function
216 /// counts as capturing it or not.  The boolean StoreCaptures specified whether
217 /// storing the value (or part of it) into memory anywhere automatically
218 /// counts as capturing it or not.
219 bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
220                                 bool StoreCaptures, unsigned MaxUsesToExplore) {
221   SmallPtrSet<const Value *, 1> Empty;
222   return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures, Empty,
223                               MaxUsesToExplore);
224 }
225 
226 /// Variant of the above function which accepts a set of Values that are
227 /// ephemeral and cannot cause pointers to escape.
228 bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
229                                 bool StoreCaptures,
230                                 const SmallPtrSetImpl<const Value *> &EphValues,
231                                 unsigned MaxUsesToExplore) {
232   assert(!isa<GlobalValue>(V) &&
233          "It doesn't make sense to ask whether a global is captured.");
234 
235   // TODO: If StoreCaptures is not true, we could do Fancy analysis
236   // to determine whether this store is not actually an escape point.
237   // In that case, BasicAliasAnalysis should be updated as well to
238   // take advantage of this.
239   (void)StoreCaptures;
240 
241   LLVM_DEBUG(dbgs() << "Captured?: " << *V << " = ");
242 
243   SimpleCaptureTracker SCT(EphValues, ReturnCaptures);
244   PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
245   if (SCT.Captured)
246     ++NumCaptured;
247   else {
248     ++NumNotCaptured;
249     LLVM_DEBUG(dbgs() << "not captured\n");
250   }
251   return SCT.Captured;
252 }
253 
254 /// PointerMayBeCapturedBefore - Return true if this pointer value may be
255 /// captured by the enclosing function (which is required to exist). If a
256 /// DominatorTree is provided, only captures which happen before the given
257 /// instruction are considered. This routine can be expensive, so consider
258 /// caching the results.  The boolean ReturnCaptures specifies whether
259 /// returning the value (or part of it) from the function counts as capturing
260 /// it or not.  The boolean StoreCaptures specified whether storing the value
261 /// (or part of it) into memory anywhere automatically counts as capturing it
262 /// or not.
263 bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
264                                       bool StoreCaptures, const Instruction *I,
265                                       const DominatorTree *DT, bool IncludeI,
266                                       unsigned MaxUsesToExplore,
267                                       const LoopInfo *LI) {
268   assert(!isa<GlobalValue>(V) &&
269          "It doesn't make sense to ask whether a global is captured.");
270 
271   if (!DT)
272     return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
273                                 MaxUsesToExplore);
274 
275   // TODO: See comment in PointerMayBeCaptured regarding what could be done
276   // with StoreCaptures.
277 
278   CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, LI);
279   PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
280   if (CB.Captured)
281     ++NumCapturedBefore;
282   else
283     ++NumNotCapturedBefore;
284   return CB.Captured;
285 }
286 
287 Instruction *
288 llvm::FindEarliestCapture(const Value *V, Function &F, bool ReturnCaptures,
289                           bool StoreCaptures, const DominatorTree &DT,
290 
291                           const SmallPtrSetImpl<const Value *> &EphValues,
292                           unsigned MaxUsesToExplore) {
293   assert(!isa<GlobalValue>(V) &&
294          "It doesn't make sense to ask whether a global is captured.");
295 
296   EarliestCaptures CB(ReturnCaptures, F, DT, EphValues);
297   PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
298   if (CB.Captured)
299     ++NumCapturedBefore;
300   else
301     ++NumNotCapturedBefore;
302   return CB.EarliestCapture;
303 }
304 
/// Classify how the use \p U may capture the pointer it refers to:
/// NO_CAPTURE (this use cannot capture), MAY_CAPTURE (conservatively assume
/// a capture), or PASSTHROUGH (the user's result aliases the pointer, so the
/// user's own uses must be explored instead). \p IsDereferenceableOrNull is
/// consulted for the icmp-against-null special case.
/// NOTE: the order of the checks in the Call/Invoke case is significant;
/// e.g. the volatile mem-intrinsic check must precede the callee/nocapture
/// checks.
UseCaptureKind llvm::DetermineUseCaptureKind(
    const Use &U,
    function_ref<bool(Value *, const DataLayout &)> IsDereferenceableOrNull) {
  Instruction *I = cast<Instruction>(U.getUser());

  switch (I->getOpcode()) {
  case Instruction::Call:
  case Instruction::Invoke: {
    auto *Call = cast<CallBase>(I);
    // Not captured if the callee is readonly, doesn't return a copy through
    // its return value and doesn't unwind (a readonly function can leak bits
    // by throwing an exception or not depending on the input value).
    if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
        Call->getType()->isVoidTy())
      return UseCaptureKind::NO_CAPTURE;

    // The pointer is not captured if returned pointer is not captured.
    // NOTE: CaptureTracking users should not assume that only functions
    // marked with nocapture do not capture. This means that places like
    // getUnderlyingObject in ValueTracking or DecomposeGEPExpression
    // in BasicAA also need to know about this property.
    if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call, true))
      return UseCaptureKind::PASSTHROUGH;

    // Volatile operations effectively capture the memory location that they
    // load and store to.
    if (auto *MI = dyn_cast<MemIntrinsic>(Call))
      if (MI->isVolatile())
        return UseCaptureKind::MAY_CAPTURE;

    // Calling a function pointer does not in itself cause the pointer to
    // be captured.  This is a subtle point considering that (for example)
    // the callee might return its own address.  It is analogous to saying
    // that loading a value from a pointer does not cause the pointer to be
    // captured, even though the loaded value might be the pointer itself
    // (think of self-referential objects).
    if (Call->isCallee(&U))
      return UseCaptureKind::NO_CAPTURE;

    // Not captured if only passed via 'nocapture' arguments.
    if (Call->isDataOperand(&U) &&
        !Call->doesNotCapture(Call->getDataOperandNo(&U))) {
      // The parameter is not marked 'nocapture' - captured.
      return UseCaptureKind::MAY_CAPTURE;
    }
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::Load:
    // Volatile loads make the address observable.
    if (cast<LoadInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::VAArg:
    // "va-arg" from a pointer does not cause it to be captured.
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::Store:
    // Stored the pointer - conservatively assume it may be captured.
    // Volatile stores make the address observable.
    if (U.getOperandNo() == 0 || cast<StoreInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::AtomicRMW: {
    // atomicrmw conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ARMWI = cast<AtomicRMWInst>(I);
    if (U.getOperandNo() == 1 || ARMWI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::AtomicCmpXchg: {
    // cmpxchg conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ACXI = cast<AtomicCmpXchgInst>(I);
    if (U.getOperandNo() == 1 || U.getOperandNo() == 2 || ACXI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::BitCast:
  case Instruction::GetElementPtr:
  case Instruction::PHI:
  case Instruction::Select:
  case Instruction::AddrSpaceCast:
    // The original value is not captured via this if the new value isn't.
    return UseCaptureKind::PASSTHROUGH;
  case Instruction::ICmp: {
    unsigned Idx = U.getOperandNo();
    unsigned OtherIdx = 1 - Idx;
    if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
      // Don't count comparisons of a no-alias return value against null as
      // captures. This allows us to ignore comparisons of malloc results
      // with null, for example.
      if (CPN->getType()->getAddressSpace() == 0)
        if (isNoAliasCall(U.get()->stripPointerCasts()))
          return UseCaptureKind::NO_CAPTURE;
      if (!I->getFunction()->nullPointerIsDefined()) {
        auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
        // Comparing a dereferenceable_or_null pointer against null cannot
        // lead to pointer escapes, because if it is not null it must be a
        // valid (in-bounds) pointer.
        const DataLayout &DL = I->getModule()->getDataLayout();
        if (IsDereferenceableOrNull && IsDereferenceableOrNull(O, DL))
          return UseCaptureKind::NO_CAPTURE;
      }
    }

    // Otherwise, be conservative. There are crazy ways to capture pointers
    // using comparisons.
    return UseCaptureKind::MAY_CAPTURE;
  }
  default:
    // Something else - be conservative and say it is captured.
    return UseCaptureKind::MAY_CAPTURE;
  }
}
425 
426 void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
427                                 unsigned MaxUsesToExplore) {
428   assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
429   if (MaxUsesToExplore == 0)
430     MaxUsesToExplore = DefaultMaxUsesToExplore;
431 
432   SmallVector<const Use *, 20> Worklist;
433   Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
434   SmallSet<const Use *, 20> Visited;
435 
436   auto AddUses = [&](const Value *V) {
437     for (const Use &U : V->uses()) {
438       // If there are lots of uses, conservatively say that the value
439       // is captured to avoid taking too much compile time.
440       if (Visited.size()  >= MaxUsesToExplore) {
441         Tracker->tooManyUses();
442         return false;
443       }
444       if (!Visited.insert(&U).second)
445         continue;
446       if (!Tracker->shouldExplore(&U))
447         continue;
448       Worklist.push_back(&U);
449     }
450     return true;
451   };
452   if (!AddUses(V))
453     return;
454 
455   auto IsDereferenceableOrNull = [Tracker](Value *V, const DataLayout &DL) {
456     return Tracker->isDereferenceableOrNull(V, DL);
457   };
458   while (!Worklist.empty()) {
459     const Use *U = Worklist.pop_back_val();
460     switch (DetermineUseCaptureKind(*U, IsDereferenceableOrNull)) {
461     case UseCaptureKind::NO_CAPTURE:
462       continue;
463     case UseCaptureKind::MAY_CAPTURE:
464       if (Tracker->captured(U))
465         return;
466       continue;
467     case UseCaptureKind::PASSTHROUGH:
468       if (!AddUses(U->getUser()))
469         return;
470       continue;
471     }
472   }
473 
474   // All uses examined.
475 }
476 
477 bool llvm::isNonEscapingLocalObject(
478     const Value *V, SmallDenseMap<const Value *, bool, 8> *IsCapturedCache) {
479   SmallDenseMap<const Value *, bool, 8>::iterator CacheIt;
480   if (IsCapturedCache) {
481     bool Inserted;
482     std::tie(CacheIt, Inserted) = IsCapturedCache->insert({V, false});
483     if (!Inserted)
484       // Found cached result, return it!
485       return CacheIt->second;
486   }
487 
488   // If this is an identified function-local object, check to see if it escapes.
489   if (isIdentifiedFunctionLocal(V)) {
490     // Set StoreCaptures to True so that we can assume in our callers that the
491     // pointer is not the result of a load instruction. Currently
492     // PointerMayBeCaptured doesn't have any special analysis for the
493     // StoreCaptures=false case; if it did, our callers could be refined to be
494     // more precise.
495     auto Ret = !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);
496     if (IsCapturedCache)
497       CacheIt->second = Ret;
498     return Ret;
499   }
500 
501   return false;
502 }
503