//===- Loads.cpp - Local load analysis ------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines simple local analyses for load instructions.
//
//===----------------------------------------------------------------------===//
// In isAligned(): the base pointer's known alignment, checked against the
// required alignment together with the accumulated constant offset.
Align BA = Base->getPointerAlignment(DL);

/// Test if V is always a pointer to allocated and suitably aligned memory for
/// a simple load or store.
// Fragments of the recursive worker isDereferenceableAndAlignedPointer():
assert(V->getType()->isPointerTy() && "Base must be pointer");

// Bound the recursion depth.
if (MaxDepth-- == 0)
  return false;

// A GEP with a constant, non-negative offset recurses on its base pointer.
const Value *Base = GEP->getPointerOperand();
APInt Offset(DL.getIndexTypeSizeInBits(GEP->getType()), 0);
if (!GEP->accumulateConstantOffset(DL, Offset) || Offset.isNegative() ||
    /* ... */)

// bitcast instructions are no-ops as far as dereferenceability is concerned.
if (BC->getSrcTy()->isPointerTy())
  return isDereferenceableAndAlignedPointer(
      BC->getOperand(0), Alignment, Size, DL, CtxI, AC, DT, TLI,
      /* ... */);

// A select is dereferenceable and aligned only if both arms are.
return isDereferenceableAndAlignedPointer(Sel->getTrueValue(), Alignment,
                                          /* ... */) &&
       isDereferenceableAndAlignedPointer(Sel->getFalseValue(), Alignment,
                                          /* ... */);

// Dereferenceable bytes known from attributes on V itself.
V->getPointerDereferenceableBytes(DL, CheckForNonNull,
                                  /* ... */);
APInt Offset(DL.getTypeStoreSizeInBits(V->getType()), 0);

// Look through calls that return one of their own arguments. Note
// that we still need to prove the result non-null at point of use.
if (const auto *Call = dyn_cast<CallBase>(V)) {
  if (auto *RP = getArgumentAliasingToReturnedPointer(Call, true))

// Allocation-site reasoning also requires that the object cannot be freed
// between the check and the use:
    !V->canBeFreed()) {
  APInt Offset(DL.getTypeStoreSizeInBits(V->getType()), 0);

// gc.relocate recurses on the derived pointer, addrspacecast on its operand.
return isDereferenceableAndAlignedPointer(RelocateInst->getDerivedPtr(),
                                          /* ... */);
return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Alignment,
                                          /* ... */);

// Dereferenceability facts attached via llvm.assume operand bundles.
[&](RetainedKnowledge RK, Instruction *Assume, auto) {

// In the public, Type-based overload: unsized and scalable types bail out.
if (!Ty->isSized() || Ty->isScalableTy())
  return false;
APInt AccessSize(DL.getPointerTypeSizeInBits(V->getType()),
                 DL.getTypeStoreSize(Ty).getFixedValue());
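// A minimal caller sketch (not part of this file; LI, AC and DT are assumed
// to be in scope). A pass that wants to speculate a load could query the
// Type-based overload declared in llvm/Analysis/Loads.h roughly like so:
//
//   const DataLayout &DL = LI->getDataLayout();
//   if (isDereferenceableAndAlignedPointer(LI->getPointerOperand(),
//                                          LI->getType(), LI->getAlign(), DL,
//                                          /*CtxI=*/LI, &AC, &DT))
//     ; // the load can be executed speculatively at LI's position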
/// From the doc comment of AreEquivalentAddressValues():
///   %t2 = load i32, ptr %t1

// In AreEquivalentAddressValues():
if (cast<Instruction>(A)->isIdenticalToWhenDefined(BI))
// Fragments of isDereferenceableAndAlignedInLoop():
auto &DL = LI->getDataLayout();
Value *Ptr = LI->getPointerOperand();

APInt EltSize(DL.getIndexTypeSizeInBits(Ptr->getType()),
              DL.getTypeStoreSize(LI->getType()).getFixedValue());
const Align Alignment = LI->getAlign();

Instruction *HeaderFirstNonPHI = L->getHeader()->getFirstNonPHI();

// If given a uniform (i.e. non-varying) address, see if we can prove the
// access is safe within the loop w/o needing predication.
if (L->isLoopInvariant(Ptr))

// Otherwise the pointer must be a loop-varying, affine AddRec in this loop.
auto *AddRec = dyn_cast<SCEVAddRecExpr>(SE.getSCEV(Ptr));
if (!AddRec || AddRec->getLoop() != L || !AddRec->isAffine())
  return false;
auto *Step = dyn_cast<SCEVConstant>(AddRec->getStepRecurrence(SE));

auto TC = SE.getSmallConstantMaxTripCount(L);

// TODO: Handle overlapping accesses.
// We should be computing AccessSize as (TC - 1) * Step + EltSize.
if (EltSize.sgt(Step->getAPInt()))
  return false;

APInt AccessSize = TC * Step->getAPInt();

assert(SE.isLoopInvariant(AddRec->getStart(), L) &&
       "implied by addrec definition");

Value *Base = nullptr;
if (auto *StartS = dyn_cast<SCEVUnknown>(AddRec->getStart())) {
  Base = StartS->getValue();
} else if (auto *StartS = dyn_cast<SCEVAddExpr>(AddRec->getStart())) {
  // Handle a start of the form (NewBase + constant offset).
  const auto *Offset = dyn_cast<SCEVConstant>(StartS->getOperand(0));
  const auto *NewBase = dyn_cast<SCEVUnknown>(StartS->getOperand(1));
  if (StartS->getNumOperands() == 2 && Offset && NewBase) {
    // GEP offsets are signed, so e.g. with an i8 index of -1
    // the offset will be treated as (i8 -1) and sign-extended to (i64 -1).
    if (Offset->getAPInt().isNegative())
      return false;

    // For the moment, restrict ourselves to the case where the offset is a
    // multiple of the requested alignment and the base is aligned.
    if (Offset->getAPInt().urem(Alignment.value()) != 0)
      return false;
    Base = NewBase->getValue();
    bool Overflow = false;
    AccessSize = AccessSize.uadd_ov(Offset->getAPInt(), Overflow);
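// Worked example of the bound above (illustrative numbers, not from the
// source): with TC = 4, Step = 8 bytes and EltSize = 4 bytes, the exact
// footprint is (4 - 1) * 8 + 4 = 28 bytes, while AccessSize = TC * Step = 32
// bytes; the EltSize.sgt(Step) early-out is what guarantees the conservative
// product still covers the final element.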
/// Check if executing a load of this pointer value cannot trap.
///
/// If DT and ScanFrom are specified this method performs context-sensitive
/// analysis and returns true if it is safe to load immediately before ScanFrom.
///
/// If it is not obviously safe to load from the specified pointer, we do
/// a quick local scan of the basic block containing ScanFrom, to determine
/// if the address is already accessed.
///
/// This uses the pointee type to determine how many bytes need to be safe to
/// load from the pointer.
// Fragments of isSafeToLoadUnconditionally():

// If DT is not specified we can't make context-sensitive query
const Instruction *CtxI = DT ? ScanFrom : nullptr;

// Scan the local block to see if the pointer is already being loaded or stored
// from/to. If so, the previous load or store would have already trapped,
// so there is no harm doing an extra load (also, CSE will later eliminate
// the load entirely).
BasicBlock::iterator BBI = ScanFrom->getIterator(),
                     E = ScanFrom->getParent()->begin();

V = V->stripPointerCasts();

while (BBI != E) {
  --BBI;

  // A call that may write to memory could free the object; give up.
  if (isa<CallInst>(BBI) && BBI->mayWriteToMemory() &&
      /* ... */)
    return false;

  if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
    // Ignore volatile loads. The execution of a volatile load cannot
    // be used to prove an address is backed by regular memory; it can,
    // for example, point to an MMIO register.
    if (LI->isVolatile())
      continue;
    AccessedPtr = LI->getPointerOperand();
    AccessedTy = LI->getType();
    AccessedAlign = LI->getAlign();
  } else if (StoreInst *SI = dyn_cast<StoreInst>(BBI)) {
    if (SI->isVolatile())
      continue;
    AccessedPtr = SI->getPointerOperand();
    AccessedTy = SI->getValueOperand()->getType();
    AccessedAlign = SI->getAlign();
  }

  // The earlier access must be to an equivalent address, and at least as
  // wide and as aligned as the proposed load.
  if (AreEquivalentAddressValues(AccessedPtr->stripPointerCasts(), V) &&
      /* ... */)
}

// In the Type-based overload:
APInt Size(DL.getIndexTypeSizeInBits(V->getType()), TySize.getFixedValue());
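// Hedged usage sketch (hypothetical caller; Ptr, Ty, Alignment, DL, InsertPt,
// AC and DT are assumed to be in scope). A SimplifyCFG-style speculation
// would ask:
//
//   if (isSafeToLoadUnconditionally(Ptr, Ty, Alignment, DL,
//                                   /*ScanFrom=*/InsertPt, &AC, &DT))
//     ; // an unconditional load at InsertPt cannot introduce a trap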
/// DefMaxInstsToScan - the default number of maximum instructions
/// to scan in the block, used by FindAvailableLoadedValue().
cl::opt<unsigned>
llvm::DefMaxInstsToScan("available-load-scan-limit", cl::init(6), cl::Hidden,
  cl::desc("Use this to specify the default maximum number of instructions "
           "to scan backward from a given instruction, when searching for "
           "available loaded value"));
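// Being a hidden cl::opt, the limit can be raised from the command line when
// experimenting (a sketch; the exact pass pipeline is up to the user):
//
//   opt -passes=instcombine -available-load-scan-limit=12 input.ll -S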
Value *llvm::FindAvailableLoadedValue(LoadInst *Load, BasicBlock *ScanBB,
                                      /* ... */) {
  // Don't CSE a load that is volatile or anything stronger than unordered.
  if (!Load->isUnordered())
    return nullptr;

  MemoryLocation Loc = MemoryLocation::get(Load);
  return findAvailablePtrLoadStore(Loc, Load->getType(), Load->isAtomic(),
                                   /* ... */);
}
// Check if the load and the store have the same base, constant offsets and
// non-overlapping access ranges.
// In areNonOverlapSameBaseLoadAndStore():
APInt LoadOffset(DL.getIndexTypeSizeInBits(LoadPtr->getType()), 0);
APInt StoreOffset(DL.getIndexTypeSizeInBits(StorePtr->getType()), 0);
const Value *LoadBase = LoadPtr->stripAndAccumulateConstantOffsets(
    DL, LoadOffset, /* AllowNonInbounds */ false);
const Value *StoreBase = StorePtr->stripAndAccumulateConstantOffsets(
    DL, StoreOffset, /* AllowNonInbounds */ false);

auto LoadAccessSize = LocationSize::precise(DL.getTypeStoreSize(LoadTy));
auto StoreAccessSize = LocationSize::precise(DL.getTypeStoreSize(StoreTy));
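// Illustrative case (numbers invented for this note): a load of i32 at
// (base + 4) and a store of i32 at (base + 8) share a stripped base, both
// offsets are constant, and [4, 8) does not intersect [8, 12), so the scan
// in findAvailablePtrLoadStore() further below may safely step over the
// store.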
// Fragments of getAvailableLoadStore():

// If this is a load of Ptr, the loaded value is available.
// (This is true even if the load is volatile or atomic, although
// those cases are unlikely.)
if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
  // We can value forward from an atomic to a non-atomic, but not the
  // other way around.
  if (LI->isAtomic() < AtLeastAtomic)
    return nullptr;

  Value *LoadPtr = LI->getPointerOperand()->stripPointerCasts();
  /* ... */
  if (CastInst::isBitOrNoopPointerCastable(LI->getType(), AccessTy, DL)) {

// Likewise for a store through Ptr:
  // We can value forward from an atomic to a non-atomic, but not the
  // other way around.
  if (SI->isAtomic() < AtLeastAtomic)
    return nullptr;

  Value *StorePtr = SI->getPointerOperand()->stripPointerCasts();
  /* ... */
  Value *Val = SI->getValueOperand();
  if (CastInst::isBitOrNoopPointerCastable(Val->getType(), AccessTy, DL))
    return Val;

  TypeSize StoreSize = DL.getTypeSizeInBits(Val->getType());
  /* ... */
  if (auto *C = dyn_cast<Constant>(Val))

if (auto *MSI = dyn_cast<MemSetInst>(Inst)) {
  // Don't forward from (non-atomic) memset to atomic load.
  if (AtLeastAtomic)
    return nullptr;

  // Only handle constant memsets.
  auto *Val = dyn_cast<ConstantInt>(MSI->getValue());
  auto *Len = dyn_cast<ConstantInt>(MSI->getLength());
  if (!Val || !Len)
    return nullptr;

  Value *Dst = MSI->getDest();
  /* ... */
  // Make sure the read bytes are contained in the memset.
  if ((Len->getValue() * 8).ult(LoadSize))
    return nullptr;

  APInt Splat = LoadSize >= 8 ? APInt::getSplat(LoadSize, Val->getValue())
                              : Val->getValue().trunc(LoadSize);
  ConstantInt *SplatC = ConstantInt::get(MSI->getContext(), Splat);
  if (CastInst::isBitOrNoopPointerCastable(SplatC->getType(), AccessTy, DL))
    return SplatC;
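// Worked example of the splat above (illustrative): a 16-bit load fed by a
// memset of byte 0xAB yields APInt::getSplat(16, APInt(8, 0xAB)) == 0xABAB;
// loads narrower than one byte take the trunc path instead.
//
//   APInt Byte(8, 0xAB);
//   APInt Sixteen = APInt::getSplat(16, Byte); // 0xABAB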
// Fragments of findAvailablePtrLoadStore():
const DataLayout &DL = ScanBB->getDataLayout();
const Value *StrippedPtr = Loc.Ptr->stripPointerCasts();

while (ScanFrom != ScanBB->begin()) {
  // Debug and pseudo instructions must not affect the scan count.
  Instruction *Inst = &*--ScanFrom;
  if (Inst->isDebugOrPseudoInst())
    continue;

  // Don't scan huge blocks.
  if (MaxInstsToScan-- == 0)
    return nullptr;

  --ScanFrom;

  // For a store, check whether it clobbers the queried location.
  Value *StorePtr = SI->getPointerOperand()->stripPointerCasts();

  // When AA isn't available, but if the load and the store have the same
  // base, constant offsets and non-overlapping access ranges, ignore the
  // store.
  if (areNonOverlapSameBaseLoadAndStore(
          Loc.Ptr, AccessTy, SI->getPointerOperand(),
          SI->getValueOperand()->getType(), DL))
    continue;

  // With AA, ask whether the store can modify the loaded location.
  if (!isModSet(AA->getModRefInfo(SI, Loc)))
    continue;

  // Don't scan past any other instruction that may write memory.
  if (Inst->mayWriteToMemory()) {
    // If alias analysis claims that it really won't modify the load,
    // ignore it.
    if (AA && !isModSet(AA->getModRefInfo(Inst, Loc)))
      continue;
Value *llvm::FindAvailableLoadedValue(LoadInst *Load, BatchAAResults &AA,
                                      bool *IsLoadCSE,
                                      unsigned MaxInstsToScan) {
  const DataLayout &DL = Load->getDataLayout();
  Value *StrippedPtr = Load->getPointerOperand()->stripPointerCasts();
  BasicBlock *ScanBB = Load->getParent();
  Type *AccessTy = Load->getType();
  bool AtLeastAtomic = Load->isAtomic();

  if (!Load->isUnordered())
    return nullptr;

  // Try to find an available value first, and delay expensive alias analysis
  // queries until later.
  for (Instruction &Inst : make_range(++Load->getReverseIterator(),
                                      ScanBB->rend())) {
    /* ... */
    if (MaxInstsToScan-- == 0)
      return nullptr;

  /* ... */
  MemoryLocation Loc = MemoryLocation::get(Load);
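// Hedged usage sketch (hypothetical caller; AA is an AAResults in scope).
// This mirrors the pattern InstCombine-style load folds use:
//
//   BatchAAResults BatchAA(AA);
//   bool IsLoadCSE = false;
//   if (Value *V = FindAvailableLoadedValue(Load, BatchAA, &IsLoadCSE))
//     ; // V can replace Load; IsLoadCSE says whether V came from a prior load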
// In isPointerUseReplacable(): walk the user graph with a fixed budget.
while (!Worklist.empty() && --Limit) {
  auto *User = Worklist.pop_back_val();
  /* ... */
  Worklist.append(User->user_begin(), User->user_end());

// In isPointerAlwaysReplaceable():
    isDereferenceablePointer(To, Type::getInt8Ty(To->getContext()), DL))

// In canReplacePointersInUseIfEqual():
assert(U->getType() == To->getType() && "values must have matching types");
// Non-pointer values are always replaceable.
if (!To->getType()->isPointerTy())
  return true;

// In canReplacePointersIfEqual():
assert(From->getType() == To->getType() && "values must have matching types");
if (!From->getType()->isPointerTy())
  return true;
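// Hedged usage sketch: a pass that has proven `From == To` (say, from a
// dominating icmp) would guard the rewrite like this:
//
//   if (canReplacePointersIfEqual(From, To, DL))
//     From->replaceAllUsesWith(To); // respects pointer-provenance rules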
// In isDereferenceableReadOnlyLoop(): every load must be provably
// dereferenceable within the loop, and nothing else may touch memory.
for (BasicBlock *BB : L->blocks()) {
  for (Instruction &I : *BB) {
    if (auto *LI = dyn_cast<LoadInst>(&I)) {