//===- StackSafetyAnalysis.cpp - Stack memory safety analysis -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/StackSafetyAnalysis.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/ModuleSummaryAnalysis.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/StackLifetime.h"
#include "llvm/IR/ConstantRange.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/ModuleSummaryIndex.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/FormatVariadic.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <memory>
#include <tuple>

using namespace llvm;

#define DEBUG_TYPE "stack-safety"

STATISTIC(NumAllocaStackSafe, "Number of safe allocas");
STATISTIC(NumAllocaTotal, "Number of total allocas");

STATISTIC(NumCombinedCalleeLookupTotal,
          "Number of total callee lookups on combined index.");
STATISTIC(NumCombinedCalleeLookupFailed,
          "Number of failed callee lookups on combined index.");
STATISTIC(NumModuleCalleeLookupTotal,
          "Number of total callee lookups on module index.");
STATISTIC(NumModuleCalleeLookupFailed,
          "Number of failed callee lookups on module index.");
STATISTIC(NumCombinedParamAccessesBefore,
          "Number of total param accesses before generateParamAccessSummary.");
STATISTIC(NumCombinedParamAccessesAfter,
          "Number of total param accesses after generateParamAccessSummary.");
STATISTIC(NumCombinedDataFlowNodes,
          "Number of total nodes in combined index for dataflow processing.");
STATISTIC(NumIndexCalleeUnhandled, "Number of index callee which are unhandled.");
STATISTIC(NumIndexCalleeMultipleWeak, "Number of index callee non-unique weak.");
STATISTIC(NumIndexCalleeMultipleExternal,
          "Number of index callee non-unique external.");

static cl::opt<int> StackSafetyMaxIterations("stack-safety-max-iterations",
                                             cl::init(20), cl::Hidden);

static cl::opt<bool> StackSafetyPrint("stack-safety-print", cl::init(false),
                                      cl::Hidden);

static cl::opt<bool> StackSafetyRun("stack-safety-run", cl::init(false),
                                    cl::Hidden);

namespace {

// Check if we should bail out for such ranges.
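// Empty and full sets carry no usable offset information, and a sign-wrapped
// upper bound defeats the signed-range arithmetic used below, so callers
// conservatively treat all three cases as unknown.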
bool isUnsafe(const ConstantRange &R) {
  return R.isEmptySet() || R.isFullSet() || R.isUpperSignWrapped();
}

ConstantRange addOverflowNever(const ConstantRange &L, const ConstantRange &R) {
  assert(!L.isSignWrappedSet());
  assert(!R.isSignWrappedSet());
  if (L.signedAddMayOverflow(R) !=
      ConstantRange::OverflowResult::NeverOverflows)
    return ConstantRange::getFull(L.getBitWidth());
  ConstantRange Result = L.add(R);
  assert(!Result.isSignWrappedSet());
  return Result;
}

ConstantRange unionNoWrap(const ConstantRange &L, const ConstantRange &R) {
  assert(!L.isSignWrappedSet());
  assert(!R.isSignWrappedSet());
  auto Result = L.unionWith(R);
  // Two non-wrapped sets can produce wrapped.
  if (Result.isSignWrappedSet())
    Result = ConstantRange::getFull(Result.getBitWidth());
  return Result;
}

/// Describes a use of the address as a function call argument.
template <typename CalleeTy> struct CallInfo {
  /// Function being called.
  const CalleeTy *Callee = nullptr;
  /// Index of the argument which passes the address.
  size_t ParamNo = 0;

  CallInfo(const CalleeTy *Callee, size_t ParamNo)
      : Callee(Callee), ParamNo(ParamNo) {}

  struct Less {
    bool operator()(const CallInfo &L, const CallInfo &R) const {
      return std::tie(L.ParamNo, L.Callee) < std::tie(R.ParamNo, R.Callee);
    }
  };
};

/// Describes uses of an address (alloca or parameter) inside of the function.
template <typename CalleeTy> struct UseInfo {
  // Access range of the address (alloca or parameter).
  // It is allowed to be empty-set when there are no known accesses.
  ConstantRange Range;
  std::set<const Instruction *> UnsafeAccesses;

  // List of calls which pass the address as an argument.
  // The value is the offset range of the address from the base address (alloca
  // or calling function argument). The range should never be set to empty-set;
  // that is an invalid access range that can cause empty-set to be propagated
  // with ConstantRange::add.
  using CallsTy = std::map<CallInfo<CalleeTy>, ConstantRange,
                           typename CallInfo<CalleeTy>::Less>;
  CallsTy Calls;

  UseInfo(unsigned PointerSize) : Range{PointerSize, false} {}

  void updateRange(const ConstantRange &R) { Range = unionNoWrap(Range, R); }
  void addRange(const Instruction *I, const ConstantRange &R, bool IsSafe) {
    if (!IsSafe)
      UnsafeAccesses.insert(I);
    updateRange(R);
  }
};

template <typename CalleeTy>
raw_ostream &operator<<(raw_ostream &OS, const UseInfo<CalleeTy> &U) {
  OS << U.Range;
  for (auto &Call : U.Calls)
    OS << ", "
       << "@" << Call.first.Callee->getName() << "(arg" << Call.first.ParamNo
       << ", " << Call.second << ")";
  return OS;
}

/// Calculate the allocation size of a given alloca. Returns an empty range
/// in case of confusion.
ConstantRange getStaticAllocaSizeRange(const AllocaInst &AI) {
  const DataLayout &DL = AI.getModule()->getDataLayout();
  TypeSize TS = DL.getTypeAllocSize(AI.getAllocatedType());
  unsigned PointerSize = DL.getPointerTypeSizeInBits(AI.getType());
  // Fallback to empty range for alloca size.
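  // Scalable vectors, non-positive sizes, and non-constant or overflowing
  // array counts all take this fallback.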
  ConstantRange R = ConstantRange::getEmpty(PointerSize);
  if (TS.isScalable())
    return R;
  APInt APSize(PointerSize, TS.getFixedValue(), true);
  if (APSize.isNonPositive())
    return R;
  if (AI.isArrayAllocation()) {
    const auto *C = dyn_cast<ConstantInt>(AI.getArraySize());
    if (!C)
      return R;
    bool Overflow = false;
    APInt Mul = C->getValue();
    if (Mul.isNonPositive())
      return R;
    Mul = Mul.sextOrTrunc(PointerSize);
    APSize = APSize.smul_ov(Mul, Overflow);
    if (Overflow)
      return R;
  }
  R = ConstantRange(APInt::getZero(PointerSize), APSize);
  assert(!isUnsafe(R));
  return R;
}

template <typename CalleeTy> struct FunctionInfo {
  std::map<const AllocaInst *, UseInfo<CalleeTy>> Allocas;
  std::map<uint32_t, UseInfo<CalleeTy>> Params;
  // TODO: describe return value as depending on one or more of its arguments.

  // StackSafetyDataFlowAnalysis counter stored here for faster access.
  int UpdateCount = 0;

  void print(raw_ostream &O, StringRef Name, const Function *F) const {
    // TODO: Consider different printout format after
    // StackSafetyDataFlowAnalysis. Calls and parameters are irrelevant then.
    O << "  @" << Name << ((F && F->isDSOLocal()) ? "" : " dso_preemptable")
      << ((F && F->isInterposable()) ? " interposable" : "") << "\n";

    O << "    args uses:\n";
    for (auto &KV : Params) {
      O << "      ";
      if (F)
        O << F->getArg(KV.first)->getName();
      else
        O << formatv("arg{0}", KV.first);
      O << "[]: " << KV.second << "\n";
    }

    O << "    allocas uses:\n";
    if (F) {
      for (const auto &I : instructions(F)) {
        if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
          auto &AS = Allocas.find(AI)->second;
          O << "      " << AI->getName() << "["
            << getStaticAllocaSizeRange(*AI).getUpper() << "]: " << AS << "\n";
        }
      }
    } else {
      assert(Allocas.empty());
    }
  }
};

using GVToSSI = std::map<const GlobalValue *, FunctionInfo<GlobalValue>>;

} // namespace

struct StackSafetyInfo::InfoTy {
  FunctionInfo<GlobalValue> Info;
};

struct StackSafetyGlobalInfo::InfoTy {
  GVToSSI Info;
  SmallPtrSet<const AllocaInst *, 8> SafeAllocas;
  std::set<const Instruction *> UnsafeAccesses;
};

namespace {

class StackSafetyLocalAnalysis {
  Function &F;
  const DataLayout &DL;
  ScalarEvolution &SE;
  unsigned PointerSize = 0;

  const ConstantRange UnknownRange;

  ConstantRange offsetFrom(Value *Addr, Value *Base);
  ConstantRange getAccessRange(Value *Addr, Value *Base,
                               const ConstantRange &SizeRange);
  ConstantRange getAccessRange(Value *Addr, Value *Base, TypeSize Size);
  ConstantRange getMemIntrinsicAccessRange(const MemIntrinsic *MI, const Use &U,
                                           Value *Base);

  void analyzeAllUses(Value *Ptr, UseInfo<GlobalValue> &AS,
                      const StackLifetime &SL);

  bool isSafeAccess(const Use &U, AllocaInst *AI, const SCEV *AccessSize);
  bool isSafeAccess(const Use &U, AllocaInst *AI, Value *V);
  bool isSafeAccess(const Use &U, AllocaInst *AI, TypeSize AccessSize);

public:
  StackSafetyLocalAnalysis(Function &F, ScalarEvolution &SE)
      : F(F), DL(F.getParent()->getDataLayout()), SE(SE),
        PointerSize(DL.getPointerSizeInBits()),
        UnknownRange(PointerSize, true) {}

  // Run the analysis on the associated function.
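  // The result holds per-alloca and per-argument access ranges together with
  // the calls each address escapes into.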
  FunctionInfo<GlobalValue> run();
};

ConstantRange StackSafetyLocalAnalysis::offsetFrom(Value *Addr, Value *Base) {
  if (!SE.isSCEVable(Addr->getType()) || !SE.isSCEVable(Base->getType()))
    return UnknownRange;

  auto *PtrTy = PointerType::getUnqual(SE.getContext());
  const SCEV *AddrExp = SE.getTruncateOrZeroExtend(SE.getSCEV(Addr), PtrTy);
  const SCEV *BaseExp = SE.getTruncateOrZeroExtend(SE.getSCEV(Base), PtrTy);
  const SCEV *Diff = SE.getMinusSCEV(AddrExp, BaseExp);
  if (isa<SCEVCouldNotCompute>(Diff))
    return UnknownRange;

  ConstantRange Offset = SE.getSignedRange(Diff);
  if (isUnsafe(Offset))
    return UnknownRange;
  return Offset.sextOrTrunc(PointerSize);
}

ConstantRange
StackSafetyLocalAnalysis::getAccessRange(Value *Addr, Value *Base,
                                         const ConstantRange &SizeRange) {
  // Zero-size loads and stores do not access memory.
  if (SizeRange.isEmptySet())
    return ConstantRange::getEmpty(PointerSize);
  assert(!isUnsafe(SizeRange));

  ConstantRange Offsets = offsetFrom(Addr, Base);
  if (isUnsafe(Offsets))
    return UnknownRange;

  Offsets = addOverflowNever(Offsets, SizeRange);
  if (isUnsafe(Offsets))
    return UnknownRange;
  return Offsets;
}

ConstantRange StackSafetyLocalAnalysis::getAccessRange(Value *Addr, Value *Base,
                                                       TypeSize Size) {
  if (Size.isScalable())
    return UnknownRange;
  APInt APSize(PointerSize, Size.getFixedValue(), true);
  if (APSize.isNegative())
    return UnknownRange;
  return getAccessRange(Addr, Base,
                        ConstantRange(APInt::getZero(PointerSize), APSize));
}

ConstantRange StackSafetyLocalAnalysis::getMemIntrinsicAccessRange(
    const MemIntrinsic *MI, const Use &U, Value *Base) {
  if (const auto *MTI = dyn_cast<MemTransferInst>(MI)) {
    if (MTI->getRawSource() != U && MTI->getRawDest() != U)
      return ConstantRange::getEmpty(PointerSize);
  } else {
    if (MI->getRawDest() != U)
      return ConstantRange::getEmpty(PointerSize);
  }

  auto *CalculationTy = IntegerType::getIntNTy(SE.getContext(), PointerSize);
  if (!SE.isSCEVable(MI->getLength()->getType()))
    return UnknownRange;

  const SCEV *Expr =
      SE.getTruncateOrZeroExtend(SE.getSCEV(MI->getLength()), CalculationTy);
  ConstantRange Sizes = SE.getSignedRange(Expr);
  if (!Sizes.getUpper().isStrictlyPositive() || isUnsafe(Sizes))
    return UnknownRange;
  Sizes = Sizes.sextOrTrunc(PointerSize);
  ConstantRange SizeRange(APInt::getZero(PointerSize), Sizes.getUpper() - 1);
  return getAccessRange(U, Base, SizeRange);
}

bool StackSafetyLocalAnalysis::isSafeAccess(const Use &U, AllocaInst *AI,
                                            Value *V) {
  return isSafeAccess(U, AI, SE.getSCEV(V));
}

bool StackSafetyLocalAnalysis::isSafeAccess(const Use &U, AllocaInst *AI,
                                            TypeSize TS) {
  if (TS.isScalable())
    return false;
  auto *CalculationTy = IntegerType::getIntNTy(SE.getContext(), PointerSize);
  const SCEV *SV = SE.getConstant(CalculationTy, TS.getFixedValue());
  return isSafeAccess(U, AI, SV);
}

bool StackSafetyLocalAnalysis::isSafeAccess(const Use &U, AllocaInst *AI,
                                            const SCEV *AccessSize) {

  if (!AI)
    return true; // This only judges whether it is a safe *stack* access.
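  // Without a computable access size, the symbolic bounds check below cannot
  // be performed.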
  if (isa<SCEVCouldNotCompute>(AccessSize))
    return false;

  const auto *I = cast<Instruction>(U.getUser());

  auto ToCharPtr = [&](const SCEV *V) {
    auto *PtrTy = PointerType::getUnqual(SE.getContext());
    return SE.getTruncateOrZeroExtend(V, PtrTy);
  };

  const SCEV *AddrExp = ToCharPtr(SE.getSCEV(U.get()));
  const SCEV *BaseExp = ToCharPtr(SE.getSCEV(AI));
  const SCEV *Diff = SE.getMinusSCEV(AddrExp, BaseExp);
  if (isa<SCEVCouldNotCompute>(Diff))
    return false;

  auto Size = getStaticAllocaSizeRange(*AI);

  auto *CalculationTy = IntegerType::getIntNTy(SE.getContext(), PointerSize);
  auto ToDiffTy = [&](const SCEV *V) {
    return SE.getTruncateOrZeroExtend(V, CalculationTy);
  };
  const SCEV *Min = ToDiffTy(SE.getConstant(Size.getLower()));
  const SCEV *Max = SE.getMinusSCEV(ToDiffTy(SE.getConstant(Size.getUpper())),
                                    ToDiffTy(AccessSize));
  return SE.evaluatePredicateAt(ICmpInst::Predicate::ICMP_SGE, Diff, Min, I)
             .value_or(false) &&
         SE.evaluatePredicateAt(ICmpInst::Predicate::ICMP_SLE, Diff, Max, I)
             .value_or(false);
}

/// The function analyzes all local uses of Ptr (alloca or argument) and
/// calculates local access range and all function calls where it was used.
void StackSafetyLocalAnalysis::analyzeAllUses(Value *Ptr,
                                              UseInfo<GlobalValue> &US,
                                              const StackLifetime &SL) {
  SmallPtrSet<const Value *, 16> Visited;
  SmallVector<const Value *, 8> WorkList;
  WorkList.push_back(Ptr);
  AllocaInst *AI = dyn_cast<AllocaInst>(Ptr);

  // A DFS search through all uses of the alloca in bitcasts/PHI/GEPs/etc.
  while (!WorkList.empty()) {
    const Value *V = WorkList.pop_back_val();
    for (const Use &UI : V->uses()) {
      const auto *I = cast<Instruction>(UI.getUser());
      if (!SL.isReachable(I))
        continue;

      assert(V == UI.get());

      auto RecordStore = [&](const Value *StoredVal) {
        if (V == StoredVal) {
          // Stored the pointer - conservatively assume it may be unsafe.
          US.addRange(I, UnknownRange, /*IsSafe=*/false);
          return;
        }
        if (AI && !SL.isAliveAfter(AI, I)) {
          US.addRange(I, UnknownRange, /*IsSafe=*/false);
          return;
        }
        auto TypeSize = DL.getTypeStoreSize(StoredVal->getType());
        auto AccessRange = getAccessRange(UI, Ptr, TypeSize);
        bool Safe = isSafeAccess(UI, AI, TypeSize);
        US.addRange(I, AccessRange, Safe);
        return;
      };

      switch (I->getOpcode()) {
      case Instruction::Load: {
        if (AI && !SL.isAliveAfter(AI, I)) {
          US.addRange(I, UnknownRange, /*IsSafe=*/false);
          break;
        }
        auto TypeSize = DL.getTypeStoreSize(I->getType());
        auto AccessRange = getAccessRange(UI, Ptr, TypeSize);
        bool Safe = isSafeAccess(UI, AI, TypeSize);
        US.addRange(I, AccessRange, Safe);
        break;
      }

      case Instruction::VAArg:
        // "va-arg" from a pointer is safe.
        break;
      case Instruction::Store:
        RecordStore(cast<StoreInst>(I)->getValueOperand());
        break;
      case Instruction::AtomicCmpXchg:
        RecordStore(cast<AtomicCmpXchgInst>(I)->getNewValOperand());
        break;
      case Instruction::AtomicRMW:
        RecordStore(cast<AtomicRMWInst>(I)->getValOperand());
        break;

      case Instruction::Ret:
        // Information leak.
        // FIXME: Process parameters correctly. This is a leak only if we return
        // alloca.
        US.addRange(I, UnknownRange, /*IsSafe=*/false);
        break;

      case Instruction::Call:
      case Instruction::Invoke: {
        if (I->isLifetimeStartOrEnd())
          break;

        if (AI && !SL.isAliveAfter(AI, I)) {
          US.addRange(I, UnknownRange, /*IsSafe=*/false);
          break;
        }
        if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
          auto AccessRange = getMemIntrinsicAccessRange(MI, UI, Ptr);
          bool Safe = false;
          if (const auto *MTI = dyn_cast<MemTransferInst>(MI)) {
            if (MTI->getRawSource() != UI && MTI->getRawDest() != UI)
              Safe = true;
          } else if (MI->getRawDest() != UI) {
            Safe = true;
          }
          Safe = Safe || isSafeAccess(UI, AI, MI->getLength());
          US.addRange(I, AccessRange, Safe);
          break;
        }

        const auto &CB = cast<CallBase>(*I);
        if (CB.getReturnedArgOperand() == V) {
          if (Visited.insert(I).second)
            WorkList.push_back(cast<const Instruction>(I));
        }

        if (!CB.isArgOperand(&UI)) {
          US.addRange(I, UnknownRange, /*IsSafe=*/false);
          break;
        }

        unsigned ArgNo = CB.getArgOperandNo(&UI);
        if (CB.isByValArgument(ArgNo)) {
          auto TypeSize = DL.getTypeStoreSize(CB.getParamByValType(ArgNo));
          auto AccessRange = getAccessRange(UI, Ptr, TypeSize);
          bool Safe = isSafeAccess(UI, AI, TypeSize);
          US.addRange(I, AccessRange, Safe);
          break;
        }

        // FIXME: consult devirt?
        // Do not follow aliases, otherwise we could inadvertently follow
        // dso_preemptable aliases or aliases with interposable linkage.
        const GlobalValue *Callee =
            dyn_cast<GlobalValue>(CB.getCalledOperand()->stripPointerCasts());
        if (!Callee) {
          US.addRange(I, UnknownRange, /*IsSafe=*/false);
          break;
        }

        assert(isa<Function>(Callee) || isa<GlobalAlias>(Callee));
        ConstantRange Offsets = offsetFrom(UI, Ptr);
        auto Insert =
            US.Calls.emplace(CallInfo<GlobalValue>(Callee, ArgNo), Offsets);
        if (!Insert.second)
          Insert.first->second = Insert.first->second.unionWith(Offsets);
        break;
      }

      default:
        if (Visited.insert(I).second)
          WorkList.push_back(cast<const Instruction>(I));
      }
    }
  }
}

FunctionInfo<GlobalValue> StackSafetyLocalAnalysis::run() {
  FunctionInfo<GlobalValue> Info;
  assert(!F.isDeclaration() &&
         "Can't run StackSafety on a function declaration");

  LLVM_DEBUG(dbgs() << "[StackSafety] " << F.getName() << "\n");

  SmallVector<AllocaInst *, 64> Allocas;
  for (auto &I : instructions(F))
    if (auto *AI = dyn_cast<AllocaInst>(&I))
      Allocas.push_back(AI);
  StackLifetime SL(F, Allocas, StackLifetime::LivenessType::Must);
  SL.run();

  for (auto *AI : Allocas) {
    auto &UI = Info.Allocas.emplace(AI, PointerSize).first->second;
    analyzeAllUses(AI, UI, SL);
  }

  for (Argument &A : F.args()) {
    // Non-pointer and byval arguments are not going to be used in any global
    // processing.
    if (A.getType()->isPointerTy() && !A.hasByValAttr()) {
      auto &UI = Info.Params.emplace(A.getArgNo(), PointerSize).first->second;
      analyzeAllUses(&A, UI, SL);
    }
  }

  LLVM_DEBUG(Info.print(dbgs(), F.getName(), &F));
  LLVM_DEBUG(dbgs() << "\n[StackSafety] done\n");
  return Info;
}

template <typename CalleeTy> class StackSafetyDataFlowAnalysis {
  using FunctionMap = std::map<const CalleeTy *, FunctionInfo<CalleeTy>>;

  FunctionMap Functions;
  const ConstantRange UnknownRange;

  // Callee-to-Caller multimap.
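  // updateOneNode() pushes the callers of a function back onto the work list
  // whenever that function's parameter ranges grow, driving the data flow to a
  // fixed point.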
  DenseMap<const CalleeTy *, SmallVector<const CalleeTy *, 4>> Callers;
  SetVector<const CalleeTy *> WorkList;

  bool updateOneUse(UseInfo<CalleeTy> &US, bool UpdateToFullSet);
  void updateOneNode(const CalleeTy *Callee, FunctionInfo<CalleeTy> &FS);
  void updateOneNode(const CalleeTy *Callee) {
    updateOneNode(Callee, Functions.find(Callee)->second);
  }
  void updateAllNodes() {
    for (auto &F : Functions)
      updateOneNode(F.first, F.second);
  }
  void runDataFlow();
#ifndef NDEBUG
  void verifyFixedPoint();
#endif

public:
  StackSafetyDataFlowAnalysis(uint32_t PointerBitWidth, FunctionMap Functions)
      : Functions(std::move(Functions)),
        UnknownRange(ConstantRange::getFull(PointerBitWidth)) {}

  const FunctionMap &run();

  ConstantRange getArgumentAccessRange(const CalleeTy *Callee, unsigned ParamNo,
                                       const ConstantRange &Offsets) const;
};

template <typename CalleeTy>
ConstantRange StackSafetyDataFlowAnalysis<CalleeTy>::getArgumentAccessRange(
    const CalleeTy *Callee, unsigned ParamNo,
    const ConstantRange &Offsets) const {
  auto FnIt = Functions.find(Callee);
  // Unknown callee (outside of LTO domain or an indirect call).
  if (FnIt == Functions.end())
    return UnknownRange;
  auto &FS = FnIt->second;
  auto ParamIt = FS.Params.find(ParamNo);
  if (ParamIt == FS.Params.end())
    return UnknownRange;
  auto &Access = ParamIt->second.Range;
  if (Access.isEmptySet())
    return Access;
  if (Access.isFullSet())
    return UnknownRange;
  return addOverflowNever(Access, Offsets);
}

template <typename CalleeTy>
bool StackSafetyDataFlowAnalysis<CalleeTy>::updateOneUse(UseInfo<CalleeTy> &US,
                                                         bool UpdateToFullSet) {
  bool Changed = false;
  for (auto &KV : US.Calls) {
    assert(!KV.second.isEmptySet() &&
           "Param range can't be empty-set, invalid offset range");

    ConstantRange CalleeRange =
        getArgumentAccessRange(KV.first.Callee, KV.first.ParamNo, KV.second);
    if (!US.Range.contains(CalleeRange)) {
      Changed = true;
      if (UpdateToFullSet)
        US.Range = UnknownRange;
      else
        US.updateRange(CalleeRange);
    }
  }
  return Changed;
}

template <typename CalleeTy>
void StackSafetyDataFlowAnalysis<CalleeTy>::updateOneNode(
    const CalleeTy *Callee, FunctionInfo<CalleeTy> &FS) {
  bool UpdateToFullSet = FS.UpdateCount > StackSafetyMaxIterations;
  bool Changed = false;
  for (auto &KV : FS.Params)
    Changed |= updateOneUse(KV.second, UpdateToFullSet);

  if (Changed) {
    LLVM_DEBUG(dbgs() << "=== update [" << FS.UpdateCount
                      << (UpdateToFullSet ? ", full-set" : "") << "] " << &FS
                      << "\n");
    // Callers of this function may need updating.
    for (auto &CallerID : Callers[Callee])
      WorkList.insert(CallerID);

    ++FS.UpdateCount;
  }
}

template <typename CalleeTy>
void StackSafetyDataFlowAnalysis<CalleeTy>::runDataFlow() {
  SmallVector<const CalleeTy *, 16> Callees;
  for (auto &F : Functions) {
    Callees.clear();
    auto &FS = F.second;
    for (auto &KV : FS.Params)
      for (auto &CS : KV.second.Calls)
        Callees.push_back(CS.first.Callee);

    llvm::sort(Callees);
    Callees.erase(std::unique(Callees.begin(), Callees.end()), Callees.end());

    for (auto &Callee : Callees)
      Callers[Callee].push_back(F.first);
  }

  updateAllNodes();

  while (!WorkList.empty()) {
    const CalleeTy *Callee = WorkList.pop_back_val();
    updateOneNode(Callee);
  }
}

#ifndef NDEBUG
template <typename CalleeTy>
void StackSafetyDataFlowAnalysis<CalleeTy>::verifyFixedPoint() {
  WorkList.clear();
  updateAllNodes();
  assert(WorkList.empty());
}
#endif

template <typename CalleeTy>
const typename StackSafetyDataFlowAnalysis<CalleeTy>::FunctionMap &
StackSafetyDataFlowAnalysis<CalleeTy>::run() {
  runDataFlow();
  LLVM_DEBUG(verifyFixedPoint());
  return Functions;
}

FunctionSummary *findCalleeFunctionSummary(ValueInfo VI, StringRef ModuleId) {
  if (!VI)
    return nullptr;
  auto SummaryList = VI.getSummaryList();
  GlobalValueSummary *S = nullptr;
  for (const auto &GVS : SummaryList) {
    if (!GVS->isLive())
      continue;
    if (const AliasSummary *AS = dyn_cast<AliasSummary>(GVS.get()))
      if (!AS->hasAliasee())
        continue;
    if (!isa<FunctionSummary>(GVS->getBaseObject()))
      continue;
    if (GlobalValue::isLocalLinkage(GVS->linkage())) {
      if (GVS->modulePath() == ModuleId) {
        S = GVS.get();
        break;
      }
    } else if (GlobalValue::isExternalLinkage(GVS->linkage())) {
      if (S) {
        ++NumIndexCalleeMultipleExternal;
        return nullptr;
      }
      S = GVS.get();
    } else if (GlobalValue::isWeakLinkage(GVS->linkage())) {
      if (S) {
        ++NumIndexCalleeMultipleWeak;
        return nullptr;
      }
      S = GVS.get();
    } else if (GlobalValue::isAvailableExternallyLinkage(GVS->linkage()) ||
               GlobalValue::isLinkOnceLinkage(GVS->linkage())) {
      if (SummaryList.size() == 1)
        S = GVS.get();
      // According to thinLTOResolvePrevailingGUID, these are unlikely to be
      // prevailing.
    } else {
      ++NumIndexCalleeUnhandled;
    }
  }
  while (S) {
    if (!S->isLive() || !S->isDSOLocal())
      return nullptr;
    if (FunctionSummary *FS = dyn_cast<FunctionSummary>(S))
      return FS;
    AliasSummary *AS = dyn_cast<AliasSummary>(S);
    if (!AS || !AS->hasAliasee())
      return nullptr;
    S = AS->getBaseObject();
    if (S == AS)
      return nullptr;
  }
  return nullptr;
}

const Function *findCalleeInModule(const GlobalValue *GV) {
  while (GV) {
    if (GV->isDeclaration() || GV->isInterposable() || !GV->isDSOLocal())
      return nullptr;
    if (const Function *F = dyn_cast<Function>(GV))
      return F;
    const GlobalAlias *A = dyn_cast<GlobalAlias>(GV);
    if (!A)
      return nullptr;
    GV = A->getAliaseeObject();
    if (GV == A)
      return nullptr;
  }
  return nullptr;
}

const ConstantRange *findParamAccess(const FunctionSummary &FS,
                                     uint32_t ParamNo) {
  assert(FS.isLive());
  assert(FS.isDSOLocal());
  for (const auto &PS : FS.paramAccesses())
    if (ParamNo == PS.ParamNo)
      return &PS.Use;
  return nullptr;
}

void resolveAllCalls(UseInfo<GlobalValue> &Use,
                     const ModuleSummaryIndex *Index) {
  ConstantRange FullSet(Use.Range.getBitWidth(), true);
  // Move Use.Calls to temporary storage and repopulate - don't use std::move,
  // as it would leave Use.Calls in a valid but unspecified state.
  UseInfo<GlobalValue>::CallsTy TmpCalls;
  std::swap(TmpCalls, Use.Calls);
  for (const auto &C : TmpCalls) {
    const Function *F = findCalleeInModule(C.first.Callee);
    if (F) {
      Use.Calls.emplace(CallInfo<GlobalValue>(F, C.first.ParamNo), C.second);
      continue;
    }

    if (!Index)
      return Use.updateRange(FullSet);
    FunctionSummary *FS =
        findCalleeFunctionSummary(Index->getValueInfo(C.first.Callee->getGUID()),
                                  C.first.Callee->getParent()->getModuleIdentifier());
    ++NumModuleCalleeLookupTotal;
    if (!FS) {
      ++NumModuleCalleeLookupFailed;
      return Use.updateRange(FullSet);
    }
    const ConstantRange *Found = findParamAccess(*FS, C.first.ParamNo);
    if (!Found || Found->isFullSet())
      return Use.updateRange(FullSet);
    ConstantRange Access = Found->sextOrTrunc(Use.Range.getBitWidth());
    if (!Access.isEmptySet())
      Use.updateRange(addOverflowNever(Access, C.second));
  }
}

GVToSSI createGlobalStackSafetyInfo(
    std::map<const GlobalValue *, FunctionInfo<GlobalValue>> Functions,
    const ModuleSummaryIndex *Index) {
  GVToSSI SSI;
  if (Functions.empty())
    return SSI;

  // FIXME: Simplify printing and remove copying here.
  auto Copy = Functions;

  for (auto &FnKV : Copy)
    for (auto &KV : FnKV.second.Params) {
      resolveAllCalls(KV.second, Index);
      if (KV.second.Range.isFullSet())
        KV.second.Calls.clear();
    }

  uint32_t PointerSize =
      Copy.begin()->first->getParent()->getDataLayout().getPointerSizeInBits();
  StackSafetyDataFlowAnalysis<GlobalValue> SSDFA(PointerSize, std::move(Copy));

  for (const auto &F : SSDFA.run()) {
    auto FI = F.second;
    auto &SrcF = Functions[F.first];
    for (auto &KV : FI.Allocas) {
      auto &A = KV.second;
      resolveAllCalls(A, Index);
      for (auto &C : A.Calls) {
        A.updateRange(SSDFA.getArgumentAccessRange(C.first.Callee,
                                                   C.first.ParamNo, C.second));
      }
      // FIXME: This is needed only to preserve calls in print() results.
      A.Calls = SrcF.Allocas.find(KV.first)->second.Calls;
    }
    for (auto &KV : FI.Params) {
      auto &P = KV.second;
      P.Calls = SrcF.Params.find(KV.first)->second.Calls;
    }
    SSI[F.first] = std::move(FI);
  }

  return SSI;
}

} // end anonymous namespace

StackSafetyInfo::StackSafetyInfo() = default;

StackSafetyInfo::StackSafetyInfo(Function *F,
                                 std::function<ScalarEvolution &()> GetSE)
    : F(F), GetSE(GetSE) {}

StackSafetyInfo::StackSafetyInfo(StackSafetyInfo &&) = default;

StackSafetyInfo &StackSafetyInfo::operator=(StackSafetyInfo &&) = default;

StackSafetyInfo::~StackSafetyInfo() = default;

const StackSafetyInfo::InfoTy &StackSafetyInfo::getInfo() const {
  if (!Info) {
    StackSafetyLocalAnalysis SSLA(*F, GetSE());
    Info.reset(new InfoTy{SSLA.run()});
  }
  return *Info;
}

void StackSafetyInfo::print(raw_ostream &O) const {
  getInfo().Info.print(O, F->getName(), dyn_cast<Function>(F));
  O << "\n";
}

const StackSafetyGlobalInfo::InfoTy &StackSafetyGlobalInfo::getInfo() const {
  if (!Info) {
    std::map<const GlobalValue *, FunctionInfo<GlobalValue>> Functions;
    for (auto &F : M->functions()) {
      if (!F.isDeclaration()) {
        auto FI = GetSSI(F).getInfo().Info;
        Functions.emplace(&F, std::move(FI));
      }
    }
    Info.reset(new InfoTy{
        createGlobalStackSafetyInfo(std::move(Functions), Index), {}, {}});

    for (auto &FnKV : Info->Info) {
      for (auto &KV : FnKV.second.Allocas) {
        ++NumAllocaTotal;
        const AllocaInst *AI = KV.first;
        auto AIRange = getStaticAllocaSizeRange(*AI);
        if (AIRange.contains(KV.second.Range)) {
          Info->SafeAllocas.insert(AI);
          ++NumAllocaStackSafe;
        }
        Info->UnsafeAccesses.insert(KV.second.UnsafeAccesses.begin(),
                                    KV.second.UnsafeAccesses.end());
      }
    }

    if (StackSafetyPrint)
      print(errs());
  }
  return *Info;
}

std::vector<FunctionSummary::ParamAccess>
StackSafetyInfo::getParamAccesses(ModuleSummaryIndex &Index) const {
  // Transforms the internal representation of parameter information into the
  // FunctionSummary format.
  std::vector<FunctionSummary::ParamAccess> ParamAccesses;
  for (const auto &KV : getInfo().Info.Params) {
    auto &PS = KV.second;
    // A parameter accessed at an arbitrary or unknown offset is represented as
    // FullSet by StackSafety and is treated the same as a parameter for which
    // we have no StackSafety info at all, so drop it to reduce summary size.
    if (PS.Range.isFullSet())
      continue;

    ParamAccesses.emplace_back(KV.first, PS.Range);
    FunctionSummary::ParamAccess &Param = ParamAccesses.back();

    Param.Calls.reserve(PS.Calls.size());
    for (const auto &C : PS.Calls) {
      // A parameter forwarded into another function at an arbitrary or unknown
      // offset makes ParamAccess::Range FullSet anyway, so we can drop the
      // entire parameter as we did above.
      // TODO(vitalybuka): Return already filtered parameters from getInfo().
      if (C.second.isFullSet()) {
        ParamAccesses.pop_back();
        break;
      }
      Param.Calls.emplace_back(C.first.ParamNo,
                               Index.getOrInsertValueInfo(C.first.Callee),
                               C.second);
    }
  }
  for (FunctionSummary::ParamAccess &Param : ParamAccesses) {
    sort(Param.Calls, [](const FunctionSummary::ParamAccess::Call &L,
                         const FunctionSummary::ParamAccess::Call &R) {
      return std::tie(L.ParamNo, L.Callee) < std::tie(R.ParamNo, R.Callee);
    });
  }
  return ParamAccesses;
}

StackSafetyGlobalInfo::StackSafetyGlobalInfo() = default;

StackSafetyGlobalInfo::StackSafetyGlobalInfo(
    Module *M, std::function<const StackSafetyInfo &(Function &F)> GetSSI,
    const ModuleSummaryIndex *Index)
    : M(M), GetSSI(GetSSI), Index(Index) {
  if (StackSafetyRun)
    getInfo();
}

StackSafetyGlobalInfo::StackSafetyGlobalInfo(StackSafetyGlobalInfo &&) =
    default;

StackSafetyGlobalInfo &
StackSafetyGlobalInfo::operator=(StackSafetyGlobalInfo &&) = default;

StackSafetyGlobalInfo::~StackSafetyGlobalInfo() = default;

bool StackSafetyGlobalInfo::isSafe(const AllocaInst &AI) const {
  const auto &Info = getInfo();
  return Info.SafeAllocas.count(&AI);
}

bool StackSafetyGlobalInfo::stackAccessIsSafe(const Instruction &I) const {
  const auto &Info = getInfo();
  return Info.UnsafeAccesses.find(&I) == Info.UnsafeAccesses.end();
}

void StackSafetyGlobalInfo::print(raw_ostream &O) const {
  auto &SSI = getInfo().Info;
  if (SSI.empty())
    return;
  const Module &M = *SSI.begin()->first->getParent();
  for (const auto &F : M.functions()) {
    if (!F.isDeclaration()) {
      SSI.find(&F)->second.print(O, F.getName(), &F);
      O << "    safe accesses:"
        << "\n";
      for (const auto &I : instructions(F)) {
        const CallInst *Call = dyn_cast<CallInst>(&I);
        if ((isa<StoreInst>(I) || isa<LoadInst>(I) || isa<MemIntrinsic>(I) ||
             isa<AtomicCmpXchgInst>(I) || isa<AtomicRMWInst>(I) ||
             (Call && Call->hasByValArgument())) &&
            stackAccessIsSafe(I)) {
          O << "     " << I << "\n";
        }
      }
      O << "\n";
    }
  }
}

LLVM_DUMP_METHOD void StackSafetyGlobalInfo::dump() const { print(dbgs()); }

AnalysisKey StackSafetyAnalysis::Key;

StackSafetyInfo StackSafetyAnalysis::run(Function &F,
                                         FunctionAnalysisManager &AM) {
  return StackSafetyInfo(&F, [&AM, &F]() -> ScalarEvolution & {
    return AM.getResult<ScalarEvolutionAnalysis>(F);
  });
}

PreservedAnalyses StackSafetyPrinterPass::run(Function &F,
                                              FunctionAnalysisManager &AM) {
  OS << "'Stack Safety Local Analysis' for function '" << F.getName() << "'\n";
  AM.getResult<StackSafetyAnalysis>(F).print(OS);
  return PreservedAnalyses::all();
}

char StackSafetyInfoWrapperPass::ID = 0;

StackSafetyInfoWrapperPass::StackSafetyInfoWrapperPass() : FunctionPass(ID) {
  initializeStackSafetyInfoWrapperPassPass(*PassRegistry::getPassRegistry());
}

void StackSafetyInfoWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequiredTransitive<ScalarEvolutionWrapperPass>();
  AU.setPreservesAll();
}

void StackSafetyInfoWrapperPass::print(raw_ostream &O, const Module *M) const {
  SSI.print(O);
}

bool StackSafetyInfoWrapperPass::runOnFunction(Function &F) {
  auto *SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE();
  SSI = {&F, [SE]() -> ScalarEvolution & { return *SE; }};
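  // This is an analysis wrapper; the IR is never modified.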
  return false;
}

AnalysisKey StackSafetyGlobalAnalysis::Key;

StackSafetyGlobalInfo
StackSafetyGlobalAnalysis::run(Module &M, ModuleAnalysisManager &AM) {
  // FIXME: Lookup Module Summary.
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  return {&M,
          [&FAM](Function &F) -> const StackSafetyInfo & {
            return FAM.getResult<StackSafetyAnalysis>(F);
          },
          nullptr};
}

PreservedAnalyses StackSafetyGlobalPrinterPass::run(Module &M,
                                                    ModuleAnalysisManager &AM) {
  OS << "'Stack Safety Analysis' for module '" << M.getName() << "'\n";
  AM.getResult<StackSafetyGlobalAnalysis>(M).print(OS);
  return PreservedAnalyses::all();
}

char StackSafetyGlobalInfoWrapperPass::ID = 0;

StackSafetyGlobalInfoWrapperPass::StackSafetyGlobalInfoWrapperPass()
    : ModulePass(ID) {
  initializeStackSafetyGlobalInfoWrapperPassPass(
      *PassRegistry::getPassRegistry());
}

StackSafetyGlobalInfoWrapperPass::~StackSafetyGlobalInfoWrapperPass() = default;

void StackSafetyGlobalInfoWrapperPass::print(raw_ostream &O,
                                             const Module *M) const {
  SSGI.print(O);
}

void StackSafetyGlobalInfoWrapperPass::getAnalysisUsage(
    AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequired<StackSafetyInfoWrapperPass>();
}

bool StackSafetyGlobalInfoWrapperPass::runOnModule(Module &M) {
  const ModuleSummaryIndex *ImportSummary = nullptr;
  if (auto *IndexWrapperPass =
          getAnalysisIfAvailable<ImmutableModuleSummaryIndexWrapperPass>())
    ImportSummary = IndexWrapperPass->getIndex();

  SSGI = {&M,
          [this](Function &F) -> const StackSafetyInfo & {
            return getAnalysis<StackSafetyInfoWrapperPass>(F).getResult();
          },
          ImportSummary};
  return false;
}

bool llvm::needsParamAccessSummary(const Module &M) {
  if (StackSafetyRun)
    return true;
  for (const auto &F : M.functions())
    if (F.hasFnAttribute(Attribute::SanitizeMemTag))
      return true;
  return false;
}

void llvm::generateParamAccessSummary(ModuleSummaryIndex &Index) {
  if (!Index.hasParamAccess())
    return;
  const ConstantRange FullSet(FunctionSummary::ParamAccess::RangeWidth, true);

  auto CountParamAccesses = [&](auto &Stat) {
    if (!AreStatisticsEnabled())
      return;
    for (auto &GVS : Index)
      for (auto &GV : GVS.second.SummaryList)
        if (FunctionSummary *FS = dyn_cast<FunctionSummary>(GV.get()))
          Stat += FS->paramAccesses().size();
  };

  CountParamAccesses(NumCombinedParamAccessesBefore);

  std::map<const FunctionSummary *, FunctionInfo<FunctionSummary>> Functions;

  // Convert the ModuleSummaryIndex to a FunctionMap.
  for (auto &GVS : Index) {
    for (auto &GV : GVS.second.SummaryList) {
      FunctionSummary *FS = dyn_cast<FunctionSummary>(GV.get());
      if (!FS || FS->paramAccesses().empty())
        continue;
      if (FS->isLive() && FS->isDSOLocal()) {
        FunctionInfo<FunctionSummary> FI;
        for (const auto &PS : FS->paramAccesses()) {
          auto &US =
              FI.Params
                  .emplace(PS.ParamNo, FunctionSummary::ParamAccess::RangeWidth)
                  .first->second;
          US.Range = PS.Use;
          for (const auto &Call : PS.Calls) {
            assert(!Call.Offsets.isFullSet());
            FunctionSummary *S =
                findCalleeFunctionSummary(Call.Callee, FS->modulePath());
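            // A failed lookup in the combined index forces the conservative
            // full-set range below.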
            ++NumCombinedCalleeLookupTotal;
            if (!S) {
              ++NumCombinedCalleeLookupFailed;
              US.Range = FullSet;
              US.Calls.clear();
              break;
            }
            US.Calls.emplace(CallInfo<FunctionSummary>(S, Call.ParamNo),
                             Call.Offsets);
          }
        }
        Functions.emplace(FS, std::move(FI));
      }
      // Reset data for all summaries. Alive and DSO local ones will be set
      // back from the data flow results below. Anything else will not be
      // accessed by the ThinLTO backend, so we can save on bitcode size.
      FS->setParamAccesses({});
    }
  }
  NumCombinedDataFlowNodes += Functions.size();
  StackSafetyDataFlowAnalysis<FunctionSummary> SSDFA(
      FunctionSummary::ParamAccess::RangeWidth, std::move(Functions));
  for (const auto &KV : SSDFA.run()) {
    std::vector<FunctionSummary::ParamAccess> NewParams;
    NewParams.reserve(KV.second.Params.size());
    for (const auto &Param : KV.second.Params) {
      // It's not needed, as FullSet is processed the same as a missing value.
      if (Param.second.Range.isFullSet())
        continue;
      NewParams.emplace_back();
      FunctionSummary::ParamAccess &New = NewParams.back();
      New.ParamNo = Param.first;
      New.Use = Param.second.Range; // Only the range is needed.
    }
    const_cast<FunctionSummary *>(KV.first)->setParamAccesses(
        std::move(NewParams));
  }

  CountParamAccesses(NumCombinedParamAccessesAfter);
}

static const char LocalPassArg[] = "stack-safety-local";
static const char LocalPassName[] = "Stack Safety Local Analysis";
INITIALIZE_PASS_BEGIN(StackSafetyInfoWrapperPass, LocalPassArg, LocalPassName,
                      false, true)
INITIALIZE_PASS_DEPENDENCY(ScalarEvolutionWrapperPass)
INITIALIZE_PASS_END(StackSafetyInfoWrapperPass, LocalPassArg, LocalPassName,
                    false, true)

static const char GlobalPassName[] = "Stack Safety Analysis";
INITIALIZE_PASS_BEGIN(StackSafetyGlobalInfoWrapperPass, DEBUG_TYPE,
                      GlobalPassName, false, true)
INITIALIZE_PASS_DEPENDENCY(StackSafetyInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(ImmutableModuleSummaryIndexWrapperPass)
INITIALIZE_PASS_END(StackSafetyGlobalInfoWrapperPass, DEBUG_TYPE,
                    GlobalPassName, false, true)