1 //===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // This file contains routines that help determine which pointers are captured. 10 // A pointer value is captured if the function makes a copy of any part of the 11 // pointer that outlives the call. Not being captured means, more or less, that 12 // the pointer is only dereferenced and not stored in a global. Returning part 13 // of the pointer as the function return value may or may not count as capturing 14 // the pointer, depending on the context. 15 // 16 //===----------------------------------------------------------------------===// 17 18 #include "llvm/Analysis/CaptureTracking.h" 19 #include "llvm/ADT/SmallSet.h" 20 #include "llvm/ADT/SmallVector.h" 21 #include "llvm/ADT/Statistic.h" 22 #include "llvm/Analysis/AliasAnalysis.h" 23 #include "llvm/Analysis/CFG.h" 24 #include "llvm/Analysis/ValueTracking.h" 25 #include "llvm/IR/Constants.h" 26 #include "llvm/IR/Dominators.h" 27 #include "llvm/IR/Instructions.h" 28 #include "llvm/IR/IntrinsicInst.h" 29 #include "llvm/Support/CommandLine.h" 30 31 using namespace llvm; 32 33 #define DEBUG_TYPE "capture-tracking" 34 35 STATISTIC(NumCaptured, "Number of pointers maybe captured"); 36 STATISTIC(NumNotCaptured, "Number of pointers not captured"); 37 STATISTIC(NumCapturedBefore, "Number of pointers maybe captured before"); 38 STATISTIC(NumNotCapturedBefore, "Number of pointers not captured before"); 39 40 /// The default value for MaxUsesToExplore argument. It's relatively small to 41 /// keep the cost of analysis reasonable for clients like BasicAliasAnalysis, 42 /// where the results can't be cached. 
/// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version can use much higher limit or
/// don't have this cap at all.
static cl::opt<unsigned>
DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden,
                        cl::desc("Maximal number of uses to explore."),
                        cl::init(20));

unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() {
  return DefaultMaxUsesToExplore;
}

CaptureTracker::~CaptureTracker() {}

// By default every use is worth exploring; subclasses override this to prune
// the walk early (e.g. to skip uses that provably cannot capture).
bool CaptureTracker::shouldExplore(const Use *U) { return true; }

bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) {
  // An inbounds GEP can either be a valid pointer (pointing into
  // or to the end of an allocation), or be null in the default
  // address space. So for an inbounds GEP there is no way to let
  // the pointer escape using clever GEP hacking because doing so
  // would make the pointer point outside of the allocated object
  // and thus make the GEP result a poison value. Similarly, other
  // dereferenceable pointers cannot be manipulated without producing
  // poison.
  if (auto *GEP = dyn_cast<GetElementPtrInst>(O))
    if (GEP->isInBounds())
      return true;
  bool CanBeNull, CanBeFreed;
  // Non-zero dereferenceable bytes implies the pointer is known valid (or
  // null/freed, reported through the out-params, which we ignore here).
  return O->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
}

namespace {
/// Tracker for the plain "may this pointer be captured anywhere in the
/// function?" query. Records a single boolean and stops at the first capture.
struct SimpleCaptureTracker : public CaptureTracker {
  explicit SimpleCaptureTracker(bool ReturnCaptures)
      : ReturnCaptures(ReturnCaptures) {}

  // Hit the use-exploration limit: conservatively treat as captured.
  void tooManyUses() override { Captured = true; }

  bool captured(const Use *U) override {
    // Returning the pointer only counts as a capture when the client asked
    // for that interpretation (ReturnCaptures == true).
    if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
      return false;

    Captured = true;
    // Returning true aborts the traversal; one capture is enough.
    return true;
  }

  // Whether a ReturnInst use counts as capturing the pointer.
  bool ReturnCaptures;

  // Result: true if any capturing use was seen (or too many uses).
  bool Captured = false;
};

/// Only find pointer captures which happen before the given instruction. Uses
/// the dominator tree to determine whether one instruction is before another.
/// Only support the case where the Value is defined in the same basic block
/// as the given instruction and the use.
struct CapturesBefore : public CaptureTracker {

  CapturesBefore(bool ReturnCaptures, const Instruction *I,
                 const DominatorTree *DT, bool IncludeI, const LoopInfo *LI)
      : BeforeHere(I), DT(DT), ReturnCaptures(ReturnCaptures),
        IncludeI(IncludeI), LI(LI) {}

  void tooManyUses() override { Captured = true; }

  // Returns true if the capture at I provably cannot happen before
  // BeforeHere, i.e. it is safe to ignore this use.
  bool isSafeToPrune(Instruction *I) {
    // A capture at BeforeHere itself only matters if the caller asked to
    // include BeforeHere in the window (IncludeI).
    if (BeforeHere == I)
      return !IncludeI;

    // We explore this usage only if the usage can reach "BeforeHere".
    // If use is not reachable from entry, there is no need to explore.
    if (!DT->isReachableFromEntry(I->getParent()))
      return true;

    // Check whether there is a path from I to BeforeHere.
    return !isPotentiallyReachable(I, BeforeHere, nullptr, DT, LI);
  }

  bool captured(const Use *U) override {
    Instruction *I = cast<Instruction>(U->getUser());
    if (isa<ReturnInst>(I) && !ReturnCaptures)
      return false;

    // Check isSafeToPrune() here rather than in shouldExplore() to avoid
    // an expensive reachability query for every instruction we look at.
    // Instead we only do one for actual capturing candidates.
    if (isSafeToPrune(I))
      return false;

    Captured = true;
    return true;
  }

  // The instruction before which captures are counted.
  const Instruction *BeforeHere;
  const DominatorTree *DT;

  bool ReturnCaptures;
  // Whether a capture at BeforeHere itself counts.
  bool IncludeI;

  bool Captured = false;

  // Optional loop info; improves isPotentiallyReachable precision.
  const LoopInfo *LI;
};

/// Find the 'earliest' instruction before which the pointer is known not to
/// be captured. Here an instruction A is considered earlier than instruction
/// B, if A dominates B. If 2 escapes do not dominate each other, the
/// terminator of the common dominator is chosen. If not all uses can be
/// analyzed, the earliest escape is set to the first instruction in the
/// function entry block.
153 // NOTE: Users have to make sure instructions compared against the earliest 154 // escape are not in a cycle. 155 struct EarliestCaptures : public CaptureTracker { 156 157 EarliestCaptures(bool ReturnCaptures, Function &F, const DominatorTree &DT) 158 : DT(DT), ReturnCaptures(ReturnCaptures), F(F) {} 159 160 void tooManyUses() override { 161 Captured = true; 162 EarliestCapture = &*F.getEntryBlock().begin(); 163 } 164 165 bool captured(const Use *U) override { 166 Instruction *I = cast<Instruction>(U->getUser()); 167 if (isa<ReturnInst>(I) && !ReturnCaptures) 168 return false; 169 170 if (!EarliestCapture) { 171 EarliestCapture = I; 172 } else if (EarliestCapture->getParent() == I->getParent()) { 173 if (I->comesBefore(EarliestCapture)) 174 EarliestCapture = I; 175 } else { 176 BasicBlock *CurrentBB = I->getParent(); 177 BasicBlock *EarliestBB = EarliestCapture->getParent(); 178 if (DT.dominates(EarliestBB, CurrentBB)) { 179 // EarliestCapture already comes before the current use. 180 } else if (DT.dominates(CurrentBB, EarliestBB)) { 181 EarliestCapture = I; 182 } else { 183 // Otherwise find the nearest common dominator and use its terminator. 184 auto *NearestCommonDom = 185 DT.findNearestCommonDominator(CurrentBB, EarliestBB); 186 EarliestCapture = NearestCommonDom->getTerminator(); 187 } 188 } 189 Captured = true; 190 191 // Return false to continue analysis; we need to see all potential 192 // captures. 193 return false; 194 } 195 196 Instruction *EarliestCapture = nullptr; 197 198 const DominatorTree &DT; 199 200 bool ReturnCaptures; 201 202 bool Captured = false; 203 204 Function &F; 205 }; 206 } 207 208 /// PointerMayBeCaptured - Return true if this pointer value may be captured 209 /// by the enclosing function (which is required to exist). This routine can 210 /// be expensive, so consider caching the results. 
The boolean ReturnCaptures 211 /// specifies whether returning the value (or part of it) from the function 212 /// counts as capturing it or not. The boolean StoreCaptures specified whether 213 /// storing the value (or part of it) into memory anywhere automatically 214 /// counts as capturing it or not. 215 bool llvm::PointerMayBeCaptured(const Value *V, 216 bool ReturnCaptures, bool StoreCaptures, 217 unsigned MaxUsesToExplore) { 218 assert(!isa<GlobalValue>(V) && 219 "It doesn't make sense to ask whether a global is captured."); 220 221 // TODO: If StoreCaptures is not true, we could do Fancy analysis 222 // to determine whether this store is not actually an escape point. 223 // In that case, BasicAliasAnalysis should be updated as well to 224 // take advantage of this. 225 (void)StoreCaptures; 226 227 SimpleCaptureTracker SCT(ReturnCaptures); 228 PointerMayBeCaptured(V, &SCT, MaxUsesToExplore); 229 if (SCT.Captured) 230 ++NumCaptured; 231 else 232 ++NumNotCaptured; 233 return SCT.Captured; 234 } 235 236 /// PointerMayBeCapturedBefore - Return true if this pointer value may be 237 /// captured by the enclosing function (which is required to exist). If a 238 /// DominatorTree is provided, only captures which happen before the given 239 /// instruction are considered. This routine can be expensive, so consider 240 /// caching the results. The boolean ReturnCaptures specifies whether 241 /// returning the value (or part of it) from the function counts as capturing 242 /// it or not. The boolean StoreCaptures specified whether storing the value 243 /// (or part of it) into memory anywhere automatically counts as capturing it 244 /// or not. 
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      const DominatorTree *DT, bool IncludeI,
                                      unsigned MaxUsesToExplore,
                                      const LoopInfo *LI) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // Without a dominator tree we cannot order captures relative to I, so fall
  // back to the whole-function query.
  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
                                MaxUsesToExplore);

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, LI);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.Captured;
}

// Returns the earliest instruction before which V is known not to be
// captured, or nullptr if no capture was found (see EarliestCaptures above).
// NOTE: StoreCaptures is currently unused here, matching the TODO in
// PointerMayBeCaptured.
Instruction *llvm::FindEarliestCapture(const Value *V, Function &F,
                                       bool ReturnCaptures, bool StoreCaptures,
                                       const DominatorTree &DT,
                                       unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  EarliestCaptures CB(ReturnCaptures, F, DT);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.EarliestCapture;
}

// Core engine: walk the transitive uses of V (through casts, GEPs, phis,
// selects, ...) and report each potentially-capturing use to the Tracker.
// The Tracker's callbacks decide whether to record the capture and whether
// to abort the walk.
void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
                                unsigned MaxUsesToExplore) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  // MaxUsesToExplore == 0 means "use the command-line default".
  if (MaxUsesToExplore == 0)
    MaxUsesToExplore = DefaultMaxUsesToExplore;

  SmallVector<const Use *, 20> Worklist;
  Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
  SmallSet<const Use *, 20> Visited;

  // Push all (unvisited, explorable) uses of V onto the worklist. Returns
  // false if the per-value use limit was exceeded, after telling the Tracker.
  auto AddUses = [&](const Value *V) {
    unsigned Count = 0;
    for (const Use &U : V->uses()) {
      // If there are lots of uses, conservatively say that the value
      // is captured to avoid taking too much compile time.
      if (Count++ >= MaxUsesToExplore) {
        Tracker->tooManyUses();
        return false;
      }
      if (!Visited.insert(&U).second)
        continue;
      if (!Tracker->shouldExplore(&U))
        continue;
      Worklist.push_back(&U);
    }
    return true;
  };
  if (!AddUses(V))
    return;

  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    Instruction *I = cast<Instruction>(U->getUser());

    switch (I->getOpcode()) {
    case Instruction::Call:
    case Instruction::Invoke: {
      auto *Call = cast<CallBase>(I);
      // Not captured if the callee is readonly, doesn't return a copy through
      // its return value and doesn't unwind (a readonly function can leak bits
      // by throwing an exception or not depending on the input value).
      if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
          Call->getType()->isVoidTy())
        break;

      // The pointer is not captured if returned pointer is not captured.
      // NOTE: CaptureTracking users should not assume that only functions
      // marked with nocapture do not capture. This means that places like
      // getUnderlyingObject in ValueTracking or DecomposeGEPExpression
      // in BasicAA also need to know about this property.
      if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call,
                                                                      true)) {
        if (!AddUses(Call))
          return;
        break;
      }

      // Volatile operations effectively capture the memory location that they
      // load and store to.
      if (auto *MI = dyn_cast<MemIntrinsic>(Call))
        if (MI->isVolatile())
          if (Tracker->captured(U))
            return;

      // Calling a function pointer does not in itself cause the pointer to
      // be captured. This is a subtle point considering that (for example)
      // the callee might return its own address. It is analogous to saying
      // that loading a value from a pointer does not cause the pointer to be
      // captured, even though the loaded value might be the pointer itself
      // (think of self-referential objects).
      if (Call->isCallee(U))
        break;

      // Not captured if only passed via 'nocapture' arguments.
      if (Call->isDataOperand(U) &&
          !Call->doesNotCapture(Call->getDataOperandNo(U))) {
        // The parameter is not marked 'nocapture' - captured.
        if (Tracker->captured(U))
          return;
      }
      break;
    }
    case Instruction::Load:
      // Volatile loads make the address observable.
      if (cast<LoadInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::VAArg:
      // "va-arg" from a pointer does not cause it to be captured.
      break;
    case Instruction::Store:
      // Stored the pointer - conservatively assume it may be captured.
      // Volatile stores make the address observable.
      // (Operand 0 of a store is the value operand; operand 1 is the address.)
      if (U->getOperandNo() == 0 || cast<StoreInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::AtomicRMW: {
      // atomicrmw conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      // (Operand 1 of an atomicrmw is the value operand.)
      auto *ARMWI = cast<AtomicRMWInst>(I);
      if (U->getOperandNo() == 1 || ARMWI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::AtomicCmpXchg: {
      // cmpxchg conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      // (Operands 1 and 2 of a cmpxchg are the compare and new values.)
      auto *ACXI = cast<AtomicCmpXchgInst>(I);
      if (U->getOperandNo() == 1 || U->getOperandNo() == 2 ||
          ACXI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::PHI:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      // The original value is not captured via this if the new value isn't.
      if (!AddUses(I))
        return;
      break;
    case Instruction::ICmp: {
      unsigned Idx = U->getOperandNo();
      unsigned OtherIdx = 1 - Idx;
      if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
        // Don't count comparisons of a no-alias return value against null as
        // captures. This allows us to ignore comparisons of malloc results
        // with null, for example.
        if (CPN->getType()->getAddressSpace() == 0)
          if (isNoAliasCall(U->get()->stripPointerCasts()))
            break;
        if (!I->getFunction()->nullPointerIsDefined()) {
          auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
          // Comparing a dereferenceable_or_null pointer against null cannot
          // lead to pointer escapes, because if it is not null it must be a
          // valid (in-bounds) pointer.
          if (Tracker->isDereferenceableOrNull(O, I->getModule()->getDataLayout()))
            break;
        }
      }
      // Comparison against value stored in global variable. Given the pointer
      // does not escape, its value cannot be guessed and stored separately in a
      // global variable.
      auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIdx));
      if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
        break;
      // Otherwise, be conservative. There are crazy ways to capture pointers
      // using comparisons.
      if (Tracker->captured(U))
        return;
      break;
    }
    default:
      // Something else - be conservative and say it is captured.
      if (Tracker->captured(U))
        return;
      break;
    }
  }

  // All uses examined.
}

// Returns true if V is an identified function-local object (e.g. alloca or
// noalias call) that is never captured, optionally consulting/filling a
// per-query cache.
bool llvm::isNonEscapingLocalObject(
    const Value *V, SmallDenseMap<const Value *, bool, 8> *IsCapturedCache) {
  SmallDenseMap<const Value *, bool, 8>::iterator CacheIt;
  if (IsCapturedCache) {
    bool Inserted;
    // Insert a placeholder (false) so CacheIt stays valid for the update
    // below; if the key was already present, return the cached answer.
    std::tie(CacheIt, Inserted) = IsCapturedCache->insert({V, false});
    if (!Inserted)
      // Found cached result, return it!
      return CacheIt->second;
  }

  // If this is an identified function-local object, check to see if it escapes.
  if (isIdentifiedFunctionLocal(V)) {
    // Set StoreCaptures to True so that we can assume in our callers that the
    // pointer is not the result of a load instruction. Currently
    // PointerMayBeCaptured doesn't have any special analysis for the
    // StoreCaptures=false case; if it did, our callers could be refined to be
    // more precise.
    auto Ret = !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);
    if (IsCapturedCache)
      CacheIt->second = Ret;
    return Ret;
  }

  return false;
}