Lines Matching full:retain

134 // The second retain and autorelease can be deleted.
160 // TODO: Delete release+retain pairs (rare).
166 "retain+autoreleases eliminated");
167 STATISTIC(NumRRs, "Number of retain+release paths eliminated");
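
These matches appear to come from LLVM's ObjCARC optimizer, and the two STATISTIC fragments above name its headline transforms: eliminated retain+autoreleases (line 166) and eliminated retain+release paths (NumRRs, line 167). As a rough source-level picture of the second transform, the self-contained sketch below uses toy_* stand-ins for the real ARC entry points objc_retain and objc_release (with void * in place of id) so it compiles and runs without the Objective-C runtime; the pass itself matches the equivalent calls in LLVM IR.

    // Toy stand-ins for the ARC runtime entry points objc_retain/objc_release,
    // so the example compiles and runs without libobjc.  'void *' replaces 'id'.
    #include <cstdio>

    static int RefCountAdjust = 0;                  // net retain/release balance
    static void *toy_objc_retain(void *P)  { ++RefCountAdjust; return P; }
    static void  toy_objc_release(void *P) { --RefCountAdjust; (void)P; }

    // Pattern targeted by the retain+release path elimination (NumRRs): a retain
    // and a release on the same reference-count identity with nothing in between
    // that can observe or change the count.  Both calls can be deleted.
    void redundantPair(void *Obj) {
      void *Tmp = toy_objc_retain(Obj);   // +1
      // ... code that cannot release or inspect Obj's reference count ...
      toy_objc_release(Tmp);              // -1: the pair nets to zero
    }

    int main() {
      int Dummy;
      redundantPair(&Dummy);
      std::printf("net adjustment: %d\n", RefCountAdjust); // prints 0
    }
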
497 /// retain/release pairs should be performed.
554 Instruction *Retain,
643 Function *NewDecl = EP.get(ARCRuntimeEntryPointKind::Retain); in OptimizeRetainRVCall()
1057 // Keep track of which of retain, release, autorelease, and retain_block in OptimizeIndividualCallImpl()
1107 case ARCInstKind::Retain: in OptimizeIndividualCallImpl()
1112 // These can't be moved across things that care about the retain in OptimizeIndividualCallImpl()
1201 llvm_unreachable("bottom-up pointer in retain state!"); in CheckForUseCFGHazard()
1229 llvm_unreachable("bottom-up pointer in retain state!"); in CheckForCanReleaseCFGHazard()
1348 case ARCInstKind::Retain: in VisitInstructionBottomUp()
1353 // Don't do retain+release tracking for ARCInstKind::RetainRV, because in VisitInstructionBottomUp()
1361 // A retain moving bottom up can be a use. in VisitInstructionBottomUp()
1470 Instruction *Retain = cast<Instruction>(P.first); in collectReleaseInsertPts() local
1471 Value *Root = GetRCIdentityRoot(Retain->getOperand(0)); in collectReleaseInsertPts()
1523 // a retain can be a potential use. in VisitInstructionTopDown()
1525 case ARCInstKind::Retain: in VisitInstructionTopDown()
1530 // A retain can be a potential use; proceed to the generic checking in VisitInstructionTopDown()
1540 // If we succeed, copy S's RRInfo into the Release -> {Retain Set in VisitInstructionTopDown()
1606 // prevents retain calls that live outside a loop from being moved into the in VisitTopDown()
1725 // will be well behaved, i.e. they won't repeatedly call retain on a single in Visit()
1771 // Insert the new retain and release calls. in MoveCalls()
1776 Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain); in MoveCalls()
1784 LLVM_DEBUG(dbgs() << "Inserting new Retain: " << *Call in MoveCalls()
1811 // Delete the original retain and release calls. in MoveCalls()
1815 LLVM_DEBUG(dbgs() << "Deleting retain: " << *OrigRetain << "\n"); in MoveCalls()
1828 Instruction *Retain, in PairUpRetainsAndReleases() argument
1847 for (SmallVector<Instruction *, 4> NewRetains{Retain};;) { in PairUpRetainsAndReleases()
1861 // If the release does not have a reference to the retain as well, in PairUpRetainsAndReleases()
1933 // If the retain does not have a reference to the release as well, in PairUpRetainsAndReleases()
1987 // balance of retain and release calls through the program. in PairUpRetainsAndReleases()
2003 // Determine whether the original call points are balanced in the retain and in PairUpRetainsAndReleases()
2032 // Visit each retain. in PerformCodePlacement()
2039 Instruction *Retain = cast<Instruction>(V); in PerformCodePlacement() local
2041 LLVM_DEBUG(dbgs() << "Visiting: " << *Retain << "\n"); in PerformCodePlacement()
2043 Value *Arg = GetArgRCIdentityRoot(Retain); in PerformCodePlacement()
2064 BBStates, Retains, Releases, M, Retain, DeadInsts, in PerformCodePlacement()
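
The MoveCalls(), PairUpRetainsAndReleases(), and PerformCodePlacement() matches above cover the machinery that pairs each retain with the releases it balances, picks new insertion points, inserts fresh calls there, and deletes the originals. The sketch below is only a conceptual picture of why that is profitable, written at the source level with toy_* stand-ins for objc_retain/objc_release and a hypothetical useObject helper; whether the pass performs this exact motion for a given function depends on its bottom-up/top-down dataflow results.

    // Toy stand-ins so the example compiles without the Objective-C runtime.
    static void *toy_objc_retain(void *P)  { return P; }  // stands for objc_retain
    static void  toy_objc_release(void *P) { (void)P; }   // stands for objc_release
    static void  useObject(void *P)        { (void)P; }   // hypothetical helper

    // Before placement: every path pays for the retain/release pair.
    void beforePlacement(void *Obj, bool SlowPath) {
      void *Tmp = toy_objc_retain(Obj);
      if (SlowPath)
        useObject(Tmp);        // only this path actually needs the extra reference
      toy_objc_release(Tmp);
    }

    // After pairing and placement: the pair is confined to the path that needs
    // it, and on the fast path the calls disappear entirely.
    void afterPlacement(void *Obj, bool SlowPath) {
      if (SlowPath) {
        void *Tmp = toy_objc_retain(Obj);
        useObject(Tmp);
        toy_objc_release(Tmp);
      }
    }
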
2131 // If the load has a builtin retain, insert a plain retain for it. in OptimizeWeakCalls()
2133 Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain); in OptimizeWeakCalls()
2161 // If the load has a builtin retain, insert a plain retain for it. in OptimizeWeakCalls()
2163 Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain); in OptimizeWeakCalls()
2272 /// between the Retain and the call that can affect the reference count of their
2273 /// shared pointer argument. Note that Retain need not be in BB.
2275 Instruction *Retain, in HasSafePathToPredecessorCall() argument
2278 CanChangeRetainCount, Arg, Retain->getParent(), Retain, PA)); in HasSafePathToPredecessorCall()
2291 /// Find a dependent retain that precedes the given autorelease for which there
2298 auto *Retain = dyn_cast_or_null<CallInst>( in FindPredecessorRetainWithSafePath() local
2301 // Check that we found a retain with the same argument. in FindPredecessorRetainWithSafePath()
2302 if (!Retain || !IsRetain(GetBasicARCInstKind(Retain)) || in FindPredecessorRetainWithSafePath()
2303 GetArgRCIdentityRoot(Retain) != Arg) { in FindPredecessorRetainWithSafePath()
2307 return Retain; in FindPredecessorRetainWithSafePath()
2339 /// And delete the retain and autorelease.
2364 CallInst *Retain = FindPredecessorRetainWithSafePath( in OptimizeReturns() local
2367 if (!Retain) in OptimizeReturns()
2371 // between the retain and the call. Note that Retain need not be in BB. in OptimizeReturns()
2372 CallInst *Call = HasSafePathToPredecessorCall(Arg, Retain, PA); in OptimizeReturns()
2377 GetBasicARCInstKind(Retain) == ARCInstKind::RetainRV && in OptimizeReturns()
2381 // If so, we can zap the retain and autorelease. in OptimizeReturns()
2384 LLVM_DEBUG(dbgs() << "Erasing: " << *Retain << "\nErasing: " << *Autorelease in OptimizeReturns()
2386 BundledInsts->eraseInst(Retain); in OptimizeReturns()
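
The OptimizeReturns() matches above, together with the header comment at line 134 ("The second retain and autorelease can be deleted"), describe the return-value pattern: find the autorelease feeding a return, find a preceding retain of the same reference-count identity with no intervening instruction that can alter the count, check the safe path back to the producing call, and erase both. The self-contained sketch below shows that pattern at the source level; the toy_* functions are stand-ins for objc_retainAutoreleasedReturnValue and objc_autoreleaseReturnValue, and makeObject is a hypothetical callee.

    // Stand-ins so the example compiles and runs anywhere; 'void *' stands in
    // for 'id', and each toy_* function names the real ARC entry point it models.
    static void *toy_retainAutoreleasedReturnValue(void *P) { return P; } // objc_retainAutoreleasedReturnValue
    static void *toy_autoreleaseReturnValue(void *P)        { return P; } // objc_autoreleaseReturnValue
    static void *makeObject() { static int Storage; return &Storage; }    // hypothetical callee

    void *forwardReturnValue() {
      void *Obj = makeObject();
      // +1: pairs with the callee autoreleasing its result.
      Obj = toy_retainAutoreleasedReturnValue(Obj);
      // -1: hand the value back to our own caller autoreleased.  With nothing
      // in between that can affect Obj's reference count, the retain and the
      // autorelease cancel, and OptimizeReturns() erases both calls.
      return toy_autoreleaseReturnValue(Obj);
    }
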
2404 case ARCInstKind::Retain: in GatherStatistics()
2474 // Optimizations for retain+release pairs. in run()
2475 if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Retain)) | in run()
2480 // no retain+release pair nesting is detected. in run()
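
Finally, the run() matches show how the expensive stages are gated: the pass records which ARCInstKinds occur in the function as bits in UsedInThisFunction and only attempts the retain+release pair optimizations when the relevant bits are set. Below is a minimal model of that idiom; the enum, the note lambda, and the exact set of kinds checked are assumptions for illustration, not the pass's actual declarations.

    #include <cstdio>

    // Hypothetical, simplified instruction-kind enum (not LLVM's ARCInstKind).
    enum class Kind : unsigned { Retain, RetainRV, Release, Autorelease };

    int main() {
      unsigned UsedInThisFunction = 0;
      auto note = [&](Kind K) { UsedInThisFunction |= 1u << unsigned(K); };

      // Pretend we scanned a function containing a retain and a release.
      note(Kind::Retain);
      note(Kind::Release);

      // Gate the retain+release pair optimizations on the kinds being present.
      if (UsedInThisFunction & ((1u << unsigned(Kind::Retain)) |
                                (1u << unsigned(Kind::RetainRV))))
        if (UsedInThisFunction & (1u << unsigned(Kind::Release)))
          std::puts("run retain+release pair optimizations");
    }
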