//===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains routines that help determine which pointers are captured.
// A pointer value is captured if the function makes a copy of any part of the
// pointer that outlives the call.  Not being captured means, more or less, that
// the pointer is only dereferenced and not stored in a global.  Returning part
// of the pointer as the function return value may or may not count as capturing
// the pointer, depending on the context.
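//
// For example, a function that stores its pointer argument into a global
// variable captures it (a copy of the pointer survives the call), while a
// function that merely loads a value through that argument does not.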
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/CommandLine.h"

using namespace llvm;

#define DEBUG_TYPE "capture-tracking"

STATISTIC(NumCaptured,          "Number of pointers maybe captured");
STATISTIC(NumNotCaptured,       "Number of pointers not captured");
STATISTIC(NumCapturedBefore,    "Number of pointers maybe captured before");
STATISTIC(NumNotCapturedBefore, "Number of pointers not captured before");

/// The default value for the MaxUsesToExplore argument. It's relatively small
/// to keep the cost of analysis reasonable for clients like
/// BasicAliasAnalysis, where the results can't be cached.
/// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version could use a much higher limit,
/// or have no cap at all.
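/// For experiments, the cap can be raised from the command line via the flag
/// defined below, e.g. (illustrative invocation):
///   opt -capture-tracking-max-uses-to-explore=1000 ...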
static cl::opt<unsigned>
    DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden,
                            cl::desc("Maximal number of uses to explore."),
                            cl::init(100));

unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() {
  return DefaultMaxUsesToExplore;
}

CaptureTracker::~CaptureTracker() = default;

bool CaptureTracker::shouldExplore(const Use *U) { return true; }

bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) {
  // We want comparisons to null pointers to not be considered capturing,
  // but need to guard against cases like gep(p, -ptrtoint(p2)) == null,
  // which are equivalent to p == p2 and would capture the pointer.
  //
  // A dereferenceable pointer is a case where this is known to be safe,
  // because the pointer resulting from such a construction would not be
  // dereferenceable.
  //
  // It is not sufficient to check for inbounds GEP here, because GEP with
  // zero offset is always inbounds.
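  //
  // Concretely (an illustrative sketch): the GEP "gep(p, -ptrtoint(p2))"
  // points at address p - p2, so comparing it against null is really
  // comparing p against p2. Such a GEP cannot be proven dereferenceable,
  // which is why requiring dereferenceability here is enough to keep the
  // null-comparison exemption safe.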
  bool CanBeNull, CanBeFreed;
  return O->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
}

namespace {
  struct SimpleCaptureTracker : public CaptureTracker {
    explicit SimpleCaptureTracker(
        const SmallPtrSetImpl<const Value *> &EphValues, bool ReturnCaptures)
        : EphValues(EphValues), ReturnCaptures(ReturnCaptures) {}

    void tooManyUses() override {
      LLVM_DEBUG(dbgs() << "Captured due to too many uses\n");
      Captured = true;
    }

    bool captured(const Use *U) override {
      if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
        return false;

      if (EphValues.contains(U->getUser()))
        return false;

      LLVM_DEBUG(dbgs() << "Captured by: " << *U->getUser() << "\n");

      Captured = true;
      return true;
    }

    const SmallPtrSetImpl<const Value *> &EphValues;

    bool ReturnCaptures;

    bool Captured = false;
  };

  /// Only find pointer captures which happen before the given instruction. Uses
  /// the dominator tree to determine whether one instruction is before another.
  /// Only supports the case where the Value is defined in the same basic block
  /// as the given instruction and the use.
  struct CapturesBefore : public CaptureTracker {

    CapturesBefore(bool ReturnCaptures, const Instruction *I,
                   const DominatorTree *DT, bool IncludeI, const LoopInfo *LI)
        : BeforeHere(I), DT(DT), ReturnCaptures(ReturnCaptures),
          IncludeI(IncludeI), LI(LI) {}

    void tooManyUses() override { Captured = true; }

    bool isSafeToPrune(Instruction *I) {
      if (BeforeHere == I)
        return !IncludeI;

      // We explore this use only if it can reach "BeforeHere".
      // If the use is not reachable from entry, there is no need to explore.
      if (!DT->isReachableFromEntry(I->getParent()))
        return true;

      // Check whether there is a path from I to BeforeHere.
      return !isPotentiallyReachable(I, BeforeHere, nullptr, DT, LI);
    }

    bool captured(const Use *U) override {
      Instruction *I = cast<Instruction>(U->getUser());
      if (isa<ReturnInst>(I) && !ReturnCaptures)
        return false;

      // Check isSafeToPrune() here rather than in shouldExplore() to avoid
      // an expensive reachability query for every instruction we look at.
      // Instead we only do one for actual capturing candidates.
      if (isSafeToPrune(I))
        return false;

      Captured = true;
      return true;
    }

    const Instruction *BeforeHere;
    const DominatorTree *DT;

    bool ReturnCaptures;
    bool IncludeI;

    bool Captured = false;

    const LoopInfo *LI;
  };

  /// Find the 'earliest' instruction before which the pointer is known not to
  /// be captured. Here an instruction A is considered earlier than instruction
  /// B if A dominates B. If two escapes do not dominate each other, the
  /// terminator of their common dominator is chosen. If not all uses can be
  /// analyzed, the earliest escape is set to the first instruction in the
  /// function entry block.
  // NOTE: Users have to make sure instructions compared against the earliest
  // escape are not in a cycle.
  struct EarliestCaptures : public CaptureTracker {

    EarliestCaptures(bool ReturnCaptures, Function &F, const DominatorTree &DT,
                     const SmallPtrSetImpl<const Value *> &EphValues)
        : EphValues(EphValues), DT(DT), ReturnCaptures(ReturnCaptures), F(F) {}

    void tooManyUses() override {
      Captured = true;
      EarliestCapture = &*F.getEntryBlock().begin();
    }

    bool captured(const Use *U) override {
      Instruction *I = cast<Instruction>(U->getUser());
      if (isa<ReturnInst>(I) && !ReturnCaptures)
        return false;

      if (EphValues.contains(I))
        return false;

      if (!EarliestCapture)
        EarliestCapture = I;
      else
        EarliestCapture = DT.findNearestCommonDominator(EarliestCapture, I);
      Captured = true;

      // Return false to continue analysis; we need to see all potential
      // captures.
      return false;
    }

    const SmallPtrSetImpl<const Value *> &EphValues;

    Instruction *EarliestCapture = nullptr;

    const DominatorTree &DT;

    bool ReturnCaptures;

    bool Captured = false;

    Function &F;
  };
}

/// PointerMayBeCaptured - Return true if this pointer value may be captured
/// by the enclosing function (which is required to exist).  This routine can
/// be expensive, so consider caching the results.  The boolean ReturnCaptures
/// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not.  The boolean StoreCaptures specifies whether
/// storing the value (or part of it) into memory anywhere automatically
/// counts as capturing it or not.
bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
                                bool StoreCaptures, unsigned MaxUsesToExplore) {
  SmallPtrSet<const Value *, 1> Empty;
  return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures, Empty,
                              MaxUsesToExplore);
}
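
// A typical use, sketched (illustrative only; `Alloca` stands for whatever
// local allocation is being analyzed):
//
//   bool MayEscape = PointerMayBeCaptured(Alloca, /*ReturnCaptures=*/true,
//                                         /*StoreCaptures=*/true,
//                                         /*MaxUsesToExplore=*/0);
//
// Passing 0 for MaxUsesToExplore selects the default cap defined above.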

/// Variant of the above function which accepts a set of Values that are
/// ephemeral and cannot cause pointers to escape.
bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
                                bool StoreCaptures,
                                const SmallPtrSetImpl<const Value *> &EphValues,
                                unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // TODO: If StoreCaptures is not true, we could do fancier analysis to
  // determine whether this store is not actually an escape point.
  // In that case, BasicAliasAnalysis should be updated as well to
  // take advantage of this.
  (void)StoreCaptures;

  LLVM_DEBUG(dbgs() << "Captured?: " << *V << " = ");

  SimpleCaptureTracker SCT(EphValues, ReturnCaptures);
  PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
  if (SCT.Captured)
    ++NumCaptured;
  else {
    ++NumNotCaptured;
    LLVM_DEBUG(dbgs() << "not captured\n");
  }
  return SCT.Captured;
}

/// PointerMayBeCapturedBefore - Return true if this pointer value may be
/// captured by the enclosing function (which is required to exist). If a
/// DominatorTree is provided, only captures which happen before the given
/// instruction are considered. This routine can be expensive, so consider
/// caching the results.  The boolean ReturnCaptures specifies whether
/// returning the value (or part of it) from the function counts as capturing
/// it or not.  The boolean StoreCaptures specifies whether storing the value
/// (or part of it) into memory anywhere automatically counts as capturing it
/// or not.
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      const DominatorTree *DT, bool IncludeI,
                                      unsigned MaxUsesToExplore,
                                      const LoopInfo *LI) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
                                MaxUsesToExplore);

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, LI);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.Captured;
}

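// Note: a null result from FindEarliestCapture means no capturing use was
// found (subject to the ReturnCaptures setting and the use-exploration limit);
// if too many uses are encountered, the first instruction of the entry block
// is returned conservatively.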
Instruction *
llvm::FindEarliestCapture(const Value *V, Function &F, bool ReturnCaptures,
                          bool StoreCaptures, const DominatorTree &DT,
                          const SmallPtrSetImpl<const Value *> &EphValues,
                          unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  EarliestCaptures CB(ReturnCaptures, F, DT, EphValues);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.EarliestCapture;
}

UseCaptureKind llvm::DetermineUseCaptureKind(
    const Use &U,
    function_ref<bool(Value *, const DataLayout &)> IsDereferenceableOrNull) {
  Instruction *I = cast<Instruction>(U.getUser());

  switch (I->getOpcode()) {
  case Instruction::Call:
  case Instruction::Invoke: {
    auto *Call = cast<CallBase>(I);
    // Not captured if the callee is readonly, doesn't return a copy through
    // its return value and doesn't unwind (a readonly function can leak bits
    // by throwing an exception or not depending on the input value).
    if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
        Call->getType()->isVoidTy())
      return UseCaptureKind::NO_CAPTURE;

    // The pointer is not captured if the returned pointer is not captured.
    // NOTE: CaptureTracking users should not assume that only functions
    // marked with nocapture do not capture. This means that places like
    // getUnderlyingObject in ValueTracking or DecomposeGEPExpression
    // in BasicAA also need to know about this property.
    if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call, true))
      return UseCaptureKind::PASSTHROUGH;

    // Volatile operations effectively capture the memory location that they
    // load and store to.
    if (auto *MI = dyn_cast<MemIntrinsic>(Call))
      if (MI->isVolatile())
        return UseCaptureKind::MAY_CAPTURE;

    // Calling a function pointer does not in itself cause the pointer to
    // be captured.  This is a subtle point considering that (for example)
    // the callee might return its own address.  It is analogous to saying
    // that loading a value from a pointer does not cause the pointer to be
    // captured, even though the loaded value might be the pointer itself
    // (think of self-referential objects).
    if (Call->isCallee(&U))
      return UseCaptureKind::NO_CAPTURE;

    // Not captured if only passed via 'nocapture' arguments.
    if (Call->isDataOperand(&U) &&
        !Call->doesNotCapture(Call->getDataOperandNo(&U))) {
      // The parameter is not marked 'nocapture' - captured.
      return UseCaptureKind::MAY_CAPTURE;
    }
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::Load:
    // Volatile loads make the address observable.
    if (cast<LoadInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::VAArg:
    // "va-arg" from a pointer does not cause it to be captured.
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::Store:
    // Storing the pointer itself (as the value operand) is conservatively
    // assumed to capture it. Volatile stores make the address observable.
    if (U.getOperandNo() == 0 || cast<StoreInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::AtomicRMW: {
    // atomicrmw conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ARMWI = cast<AtomicRMWInst>(I);
    if (U.getOperandNo() == 1 || ARMWI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::AtomicCmpXchg: {
    // cmpxchg conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ACXI = cast<AtomicCmpXchgInst>(I);
    if (U.getOperandNo() == 1 || U.getOperandNo() == 2 || ACXI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::BitCast:
  case Instruction::GetElementPtr:
  case Instruction::PHI:
  case Instruction::Select:
  case Instruction::AddrSpaceCast:
    // The original value is not captured via this if the new value isn't.
    return UseCaptureKind::PASSTHROUGH;
  case Instruction::ICmp: {
    unsigned Idx = U.getOperandNo();
    unsigned OtherIdx = 1 - Idx;
    if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
      // Don't count comparisons of a no-alias return value against null as
      // captures. This allows us to ignore comparisons of malloc results
      // with null, for example.
      if (CPN->getType()->getAddressSpace() == 0)
        if (isNoAliasCall(U.get()->stripPointerCasts()))
          return UseCaptureKind::NO_CAPTURE;
      if (!I->getFunction()->nullPointerIsDefined()) {
        auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
        // Comparing a dereferenceable_or_null pointer against null cannot
        // lead to pointer escapes, because if it is not null it must be a
        // valid (in-bounds) pointer.
        const DataLayout &DL = I->getModule()->getDataLayout();
        if (IsDereferenceableOrNull && IsDereferenceableOrNull(O, DL))
          return UseCaptureKind::NO_CAPTURE;
      }
    }

    // Otherwise, be conservative. There are crazy ways to capture pointers
    // using comparisons.
    return UseCaptureKind::MAY_CAPTURE;
  }
  default:
    // Something else - be conservative and say it is captured.
    return UseCaptureKind::MAY_CAPTURE;
  }
}

void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
                                unsigned MaxUsesToExplore) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  if (MaxUsesToExplore == 0)
    MaxUsesToExplore = DefaultMaxUsesToExplore;

  SmallVector<const Use *, 20> Worklist;
  Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
  SmallSet<const Use *, 20> Visited;

  auto AddUses = [&](const Value *V) {
    for (const Use &U : V->uses()) {
      // If there are lots of uses, conservatively say that the value
      // is captured to avoid taking too much compile time.
      if (Visited.size() >= MaxUsesToExplore) {
        Tracker->tooManyUses();
        return false;
      }
      if (!Visited.insert(&U).second)
        continue;
      if (!Tracker->shouldExplore(&U))
        continue;
      Worklist.push_back(&U);
    }
    return true;
  };
  if (!AddUses(V))
    return;

  auto IsDereferenceableOrNull = [Tracker](Value *V, const DataLayout &DL) {
    return Tracker->isDereferenceableOrNull(V, DL);
  };
  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    switch (DetermineUseCaptureKind(*U, IsDereferenceableOrNull)) {
    case UseCaptureKind::NO_CAPTURE:
      continue;
    case UseCaptureKind::MAY_CAPTURE:
      if (Tracker->captured(U))
        return;
      continue;
    case UseCaptureKind::PASSTHROUGH:
      if (!AddUses(U->getUser()))
        return;
      continue;
    }
  }

  // All uses examined.
}
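
// A minimal sketch of a custom client (illustrative only; `MyTracker` and
// `Ptr` are hypothetical names, not part of this file):
//
//   struct MyTracker : CaptureTracker {
//     bool Captured = false;
//     void tooManyUses() override { Captured = true; }
//     bool captured(const Use *U) override {
//       Captured = true;
//       return true; // Stop the traversal on the first capturing use.
//     }
//   };
//   MyTracker T;
//   PointerMayBeCaptured(Ptr, &T, /*MaxUsesToExplore=*/0);
//   // T.Captured now conservatively indicates whether Ptr may be captured.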

bool llvm::isNonEscapingLocalObject(
    const Value *V, SmallDenseMap<const Value *, bool, 8> *IsCapturedCache) {
  SmallDenseMap<const Value *, bool, 8>::iterator CacheIt;
  if (IsCapturedCache) {
    bool Inserted;
    std::tie(CacheIt, Inserted) = IsCapturedCache->insert({V, false});
    if (!Inserted)
      // Found cached result, return it!
      return CacheIt->second;
  }

  // If this is an identified function-local object, check to see if it escapes.
  if (isIdentifiedFunctionLocal(V)) {
    // Set StoreCaptures to true so that we can assume in our callers that the
    // pointer is not the result of a load instruction. Currently
    // PointerMayBeCaptured doesn't have any special analysis for the
    // StoreCaptures=false case; if it did, our callers could be refined to be
    // more precise.
    auto Ret = !PointerMayBeCaptured(V, /*ReturnCaptures=*/false,
                                     /*StoreCaptures=*/true);
    if (IsCapturedCache)
      CacheIt->second = Ret;
    return Ret;
  }

  return false;
}