//===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains routines that help determine which pointers are captured.
// A pointer value is captured if the function makes a copy of any part of the
// pointer that outlives the call.  Not being captured means, more or less, that
// the pointer is only dereferenced and not stored in a global.  Returning part
// of the pointer as the function return value may or may not count as capturing
// the pointer, depending on the context.
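//
// For example (an illustrative sketch, not code from this file): a function
// that stores its pointer argument into a global captures it, while one that
// only reads through the argument does not:
//
//   int *G;
//   void captures(int *P)  { G = P; }      // A copy of P outlives the call.
//   int no_capture(int *P) { return *P; }  // P is only dereferenced.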
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/CommandLine.h"

using namespace llvm;

#define DEBUG_TYPE "capture-tracking"

STATISTIC(NumCaptured,          "Number of pointers maybe captured");
STATISTIC(NumNotCaptured,       "Number of pointers not captured");
STATISTIC(NumCapturedBefore,    "Number of pointers maybe captured before");
STATISTIC(NumNotCapturedBefore, "Number of pointers not captured before");

/// The default value for the MaxUsesToExplore argument. It's relatively small
/// to keep the cost of analysis reasonable for clients like
/// BasicAliasAnalysis, where the results can't be cached.
/// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version could use a much higher limit,
/// or have no cap at all.
static cl::opt<unsigned>
    DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden,
                            cl::desc("Maximal number of uses to explore."),
                            cl::init(100));

unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() {
  return DefaultMaxUsesToExplore;
}

CaptureTracker::~CaptureTracker() = default;

bool CaptureTracker::shouldExplore(const Use *U) { return true; }

bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) {
  // An inbounds GEP can either be a valid pointer (pointing into
  // or to the end of an allocation), or be null in the default
  // address space. So for an inbounds GEP there is no way to let
  // the pointer escape using clever GEP hacking because doing so
  // would make the pointer point outside of the allocated object
  // and thus make the GEP result a poison value. Similarly, other
  // dereferenceable pointers cannot be manipulated without producing
  // poison.
  if (auto *GEP = dyn_cast<GetElementPtrInst>(O))
    if (GEP->isInBounds())
      return true;
  bool CanBeNull, CanBeFreed;
  return O->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
}

namespace {
  struct SimpleCaptureTracker : public CaptureTracker {
    explicit SimpleCaptureTracker(
        const SmallPtrSetImpl<const Value *> &EphValues, bool ReturnCaptures)
        : EphValues(EphValues), ReturnCaptures(ReturnCaptures) {}

    void tooManyUses() override { Captured = true; }

    bool captured(const Use *U) override {
      if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
        return false;

      if (EphValues.contains(U->getUser()))
        return false;

      Captured = true;
      return true;
    }

    const SmallPtrSetImpl<const Value *> &EphValues;

    bool ReturnCaptures;

    bool Captured = false;
  };

  /// Only find pointer captures which happen before the given instruction. Uses
  /// the dominator tree to determine whether one instruction is before another.
  /// Only supports the case where the Value is defined in the same basic block
  /// as the given instruction and the use.
  struct CapturesBefore : public CaptureTracker {

    CapturesBefore(bool ReturnCaptures, const Instruction *I,
                   const DominatorTree *DT, bool IncludeI, const LoopInfo *LI)
        : BeforeHere(I), DT(DT), ReturnCaptures(ReturnCaptures),
          IncludeI(IncludeI), LI(LI) {}

    void tooManyUses() override { Captured = true; }

    bool isSafeToPrune(Instruction *I) {
      if (BeforeHere == I)
        return !IncludeI;

      // We explore this usage only if the usage can reach "BeforeHere".
      // If use is not reachable from entry, there is no need to explore.
      if (!DT->isReachableFromEntry(I->getParent()))
        return true;

      // Check whether there is a path from I to BeforeHere.
      return !isPotentiallyReachable(I, BeforeHere, nullptr, DT, LI);
    }

    bool captured(const Use *U) override {
      Instruction *I = cast<Instruction>(U->getUser());
      if (isa<ReturnInst>(I) && !ReturnCaptures)
        return false;

      // Check isSafeToPrune() here rather than in shouldExplore() to avoid
      // an expensive reachability query for every instruction we look at.
      // Instead we only do one for actual capturing candidates.
      if (isSafeToPrune(I))
        return false;

      Captured = true;
      return true;
    }

    const Instruction *BeforeHere;
    const DominatorTree *DT;

    bool ReturnCaptures;
    bool IncludeI;

    bool Captured = false;

    const LoopInfo *LI;
  };

  /// Find the 'earliest' instruction before which the pointer is known not to
  /// be captured. Here an instruction A is considered earlier than instruction
  /// B if A dominates B. If two escapes do not dominate each other, the
  /// terminator of the common dominator is chosen. If not all uses can be
  /// analyzed, the earliest escape is set to the first instruction in the
  /// function entry block.
  // NOTE: Users have to make sure instructions compared against the earliest
  // escape are not in a cycle.
  struct EarliestCaptures : public CaptureTracker {

    EarliestCaptures(bool ReturnCaptures, Function &F, const DominatorTree &DT,
                     const SmallPtrSetImpl<const Value *> &EphValues)
        : EphValues(EphValues), DT(DT), ReturnCaptures(ReturnCaptures), F(F) {}

    void tooManyUses() override {
      Captured = true;
      EarliestCapture = &*F.getEntryBlock().begin();
    }

    bool captured(const Use *U) override {
      Instruction *I = cast<Instruction>(U->getUser());
      if (isa<ReturnInst>(I) && !ReturnCaptures)
        return false;

      if (EphValues.contains(I))
        return false;

      if (!EarliestCapture)
        EarliestCapture = I;
      else
        EarliestCapture = DT.findNearestCommonDominator(EarliestCapture, I);
      Captured = true;

      // Return false to continue analysis; we need to see all potential
      // captures.
      return false;
    }

    const SmallPtrSetImpl<const Value *> &EphValues;

    Instruction *EarliestCapture = nullptr;

    const DominatorTree &DT;

    bool ReturnCaptures;

    bool Captured = false;

    Function &F;
  };
}

/// PointerMayBeCaptured - Return true if this pointer value may be captured
/// by the enclosing function (which is required to exist).  This routine can
/// be expensive, so consider caching the results.  The boolean ReturnCaptures
/// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not.  The boolean StoreCaptures specifies whether
/// storing the value (or part of it) into memory anywhere automatically
/// counts as capturing it or not.
bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
                                bool StoreCaptures, unsigned MaxUsesToExplore) {
  SmallPtrSet<const Value *, 1> Empty;
  return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures, Empty,
                              MaxUsesToExplore);
}

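// A minimal usage sketch (illustrative only; "Ptr" stands for any
// pointer-typed Value that is not a GlobalValue):
//
//   bool MayEscape = PointerMayBeCaptured(Ptr, /*ReturnCaptures=*/true,
//                                         /*StoreCaptures=*/true);
//   if (!MayEscape) {
//     // No copy of Ptr's address outlives any call in this function.
//   }
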
/// Variant of the above function which accepts a set of Values that are
/// ephemeral and cannot cause pointers to escape.
bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
                                bool StoreCaptures,
                                const SmallPtrSetImpl<const Value *> &EphValues,
                                unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // TODO: If StoreCaptures is not true, we could do fancy analysis
  // to determine whether this store is not actually an escape point.
  // In that case, BasicAliasAnalysis should be updated as well to
  // take advantage of this.
  (void)StoreCaptures;

  SimpleCaptureTracker SCT(EphValues, ReturnCaptures);
  PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
  if (SCT.Captured)
    ++NumCaptured;
  else
    ++NumNotCaptured;
  return SCT.Captured;
}

/// PointerMayBeCapturedBefore - Return true if this pointer value may be
/// captured by the enclosing function (which is required to exist). If a
/// DominatorTree is provided, only captures which happen before the given
/// instruction are considered. This routine can be expensive, so consider
/// caching the results.  The boolean ReturnCaptures specifies whether
/// returning the value (or part of it) from the function counts as capturing
/// it or not.  The boolean StoreCaptures specifies whether storing the value
/// (or part of it) into memory anywhere automatically counts as capturing it
/// or not.
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      const DominatorTree *DT, bool IncludeI,
                                      unsigned MaxUsesToExplore,
                                      const LoopInfo *LI) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
                                MaxUsesToExplore);

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, LI);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.Captured;
}

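// Illustrative call (names assumed): ask whether Ptr may already be captured
// by an instruction that executes before I, not counting I itself:
//
//   bool CapturedBeforeI = PointerMayBeCapturedBefore(
//       Ptr, /*ReturnCaptures=*/true, /*StoreCaptures=*/true, I, &DT,
//       /*IncludeI=*/false);
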
Instruction *
llvm::FindEarliestCapture(const Value *V, Function &F, bool ReturnCaptures,
                          bool StoreCaptures, const DominatorTree &DT,
                          const SmallPtrSetImpl<const Value *> &EphValues,
                          unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  EarliestCaptures CB(ReturnCaptures, F, DT, EphValues);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.EarliestCapture;
}

UseCaptureKind llvm::DetermineUseCaptureKind(
    const Use &U,
    function_ref<bool(Value *, const DataLayout &)> IsDereferenceableOrNull) {
  Instruction *I = cast<Instruction>(U.getUser());

  switch (I->getOpcode()) {
  case Instruction::Call:
  case Instruction::Invoke: {
    auto *Call = cast<CallBase>(I);
    // Not captured if the callee is readonly, doesn't return a copy through
    // its return value and doesn't unwind (a readonly function can leak bits
    // by throwing an exception or not depending on the input value).
    if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
        Call->getType()->isVoidTy())
      return UseCaptureKind::NO_CAPTURE;

    // The pointer is not captured if the returned pointer is not captured.
    // NOTE: CaptureTracking users should not assume that only functions
    // marked with nocapture do not capture. This means that places like
    // getUnderlyingObject in ValueTracking or DecomposeGEPExpression
    // in BasicAA also need to know about this property.
    if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call, true))
      return UseCaptureKind::PASSTHROUGH;

    // Volatile operations effectively capture the memory location that they
    // load and store to.
    if (auto *MI = dyn_cast<MemIntrinsic>(Call))
      if (MI->isVolatile())
        return UseCaptureKind::MAY_CAPTURE;

    // Calling a function pointer does not in itself cause the pointer to
    // be captured.  This is a subtle point considering that (for example)
    // the callee might return its own address.  It is analogous to saying
    // that loading a value from a pointer does not cause the pointer to be
    // captured, even though the loaded value might be the pointer itself
    // (think of self-referential objects).
    if (Call->isCallee(&U))
      return UseCaptureKind::NO_CAPTURE;

    // Not captured if only passed via 'nocapture' arguments.
    if (Call->isDataOperand(&U) &&
        !Call->doesNotCapture(Call->getDataOperandNo(&U))) {
      // The parameter is not marked 'nocapture' - captured.
      return UseCaptureKind::MAY_CAPTURE;
    }
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::Load:
    // Volatile loads make the address observable.
    if (cast<LoadInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::VAArg:
    // "va-arg" from a pointer does not cause it to be captured.
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::Store:
    // Stored the pointer - conservatively assume it may be captured.
    // Volatile stores make the address observable.
    if (U.getOperandNo() == 0 || cast<StoreInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::AtomicRMW: {
    // atomicrmw conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ARMWI = cast<AtomicRMWInst>(I);
    if (U.getOperandNo() == 1 || ARMWI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::AtomicCmpXchg: {
    // cmpxchg conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ACXI = cast<AtomicCmpXchgInst>(I);
    if (U.getOperandNo() == 1 || U.getOperandNo() == 2 || ACXI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::BitCast:
  case Instruction::GetElementPtr:
  case Instruction::PHI:
  case Instruction::Select:
  case Instruction::AddrSpaceCast:
    // The original value is not captured via this if the new value isn't.
    return UseCaptureKind::PASSTHROUGH;
  case Instruction::ICmp: {
    unsigned Idx = U.getOperandNo();
    unsigned OtherIdx = 1 - Idx;
    if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
      // Don't count comparisons of a no-alias return value against null as
      // captures. This allows us to ignore comparisons of malloc results
      // with null, for example.
      if (CPN->getType()->getAddressSpace() == 0)
        if (isNoAliasCall(U.get()->stripPointerCasts()))
          return UseCaptureKind::NO_CAPTURE;
      if (!I->getFunction()->nullPointerIsDefined()) {
        auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
        // Comparing a dereferenceable_or_null pointer against null cannot
        // lead to pointer escapes, because if it is not null it must be a
        // valid (in-bounds) pointer.
        const DataLayout &DL = I->getModule()->getDataLayout();
        if (IsDereferenceableOrNull && IsDereferenceableOrNull(O, DL))
          return UseCaptureKind::NO_CAPTURE;
      }
    }
    // Comparison against a value loaded from a global variable. Given that
    // the pointer does not otherwise escape, its value cannot have been
    // guessed and stored separately in a global variable.
    auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIdx));
    if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
      return UseCaptureKind::NO_CAPTURE;
    // Otherwise, be conservative. There are crazy ways to capture pointers
    // using comparisons.
    return UseCaptureKind::MAY_CAPTURE;
  }
  default:
    // Something else - be conservative and say it is captured.
    return UseCaptureKind::MAY_CAPTURE;
  }
}

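// For instance (illustrative): in a store that writes pointer %p to the
// location pointed to by %q, the use of %p is MAY_CAPTURE (the pointer value
// itself is written to memory), while the use of %q is NO_CAPTURE (only the
// pointed-to location is written, unless the store is volatile). A bitcast or
// GEP of %p is PASSTHROUGH: the instruction itself captures nothing, but its
// users must be examined in turn.
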
void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
                                unsigned MaxUsesToExplore) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  if (MaxUsesToExplore == 0)
    MaxUsesToExplore = DefaultMaxUsesToExplore;

  SmallVector<const Use *, 20> Worklist;
  Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
  SmallSet<const Use *, 20> Visited;

  auto AddUses = [&](const Value *V) {
    for (const Use &U : V->uses()) {
      // If there are lots of uses, conservatively say that the value
      // is captured to avoid taking too much compile time.
      if (Visited.size() >= MaxUsesToExplore) {
        Tracker->tooManyUses();
        return false;
      }
      if (!Visited.insert(&U).second)
        continue;
      if (!Tracker->shouldExplore(&U))
        continue;
      Worklist.push_back(&U);
    }
    return true;
  };
  if (!AddUses(V))
    return;

  auto IsDereferenceableOrNull = [Tracker](Value *V, const DataLayout &DL) {
    return Tracker->isDereferenceableOrNull(V, DL);
  };
  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    switch (DetermineUseCaptureKind(*U, IsDereferenceableOrNull)) {
    case UseCaptureKind::NO_CAPTURE:
      continue;
    case UseCaptureKind::MAY_CAPTURE:
      if (Tracker->captured(U))
        return;
      continue;
    case UseCaptureKind::PASSTHROUGH:
      if (!AddUses(U->getUser()))
        return;
      continue;
    }
  }

  // All uses examined.
}

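// A minimal sketch of a custom tracker (illustrative; the in-tree trackers
// defined above follow the same pattern). Returning false from captured()
// keeps the walk going so every capturing use is visited:
//
//   struct CountingTracker : CaptureTracker {
//     unsigned NumCapturingUses = 0;
//     bool GaveUp = false;
//     void tooManyUses() override { GaveUp = true; }
//     bool captured(const Use *U) override {
//       ++NumCapturingUses;
//       return false; // Keep scanning; do not stop at the first capture.
//     }
//   };
//
//   CountingTracker CT;
//   PointerMayBeCaptured(Ptr, &CT);
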
bool llvm::isNonEscapingLocalObject(
    const Value *V, SmallDenseMap<const Value *, bool, 8> *IsCapturedCache) {
  SmallDenseMap<const Value *, bool, 8>::iterator CacheIt;
  if (IsCapturedCache) {
    bool Inserted;
    std::tie(CacheIt, Inserted) = IsCapturedCache->insert({V, false});
    if (!Inserted)
      // Found cached result, return it!
      return CacheIt->second;
  }

  // If this is an identified function-local object, check to see if it escapes.
  if (isIdentifiedFunctionLocal(V)) {
    // Set StoreCaptures to True so that we can assume in our callers that the
    // pointer is not the result of a load instruction. Currently
    // PointerMayBeCaptured doesn't have any special analysis for the
    // StoreCaptures=false case; if it did, our callers could be refined to be
    // more precise.
    auto Ret = !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);
    if (IsCapturedCache)
      CacheIt->second = Ret;
    return Ret;
  }

  return false;
}
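
// Example use with a shared cache (illustrative): repeated queries for the
// same values reuse the cached capture results.
//
//   SmallDenseMap<const Value *, bool, 8> Cache;
//   bool ALocal = isNonEscapingLocalObject(A, &Cache);
//   bool BLocal = isNonEscapingLocalObject(B, &Cache);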
499