/freebsd/contrib/llvm-project/llvm/lib/Target/X86/ |
H A D | X86AvoidStoreForwardingBlocks.cpp |
    102   void breakBlockedCopies(MachineInstr *LoadInst, MachineInstr *StoreInst,
    105   void buildCopies(int Size, MachineInstr *LoadInst, int64_t LdDispImm,
    109   void buildCopy(MachineInstr *LoadInst, unsigned NLoadOpcode, int64_t LoadDisp,
    339   findPotentialBlockers(MachineInstr *LoadInst) {  in findPotentialBlockers() argument
    343   for (auto PBInst = std::next(MachineBasicBlock::reverse_iterator(LoadInst)),  in findPotentialBlockers()
    344   E = LoadInst->getParent()->rend();  in findPotentialBlockers()
    361   MachineBasicBlock *MBB = LoadInst->getParent();  in findPotentialBlockers()
    380   void X86AvoidSFBPass::buildCopy(MachineInstr *LoadInst, unsigned NLoadOpcode,  in buildCopy() argument
    385   MachineOperand &LoadBase = getBaseOperand(LoadInst);  in buildCopy()
    387   MachineBasicBlock *MBB = LoadInst->getParent();  in buildCopy()
    [all …]
|
/freebsd/contrib/llvm-project/llvm/lib/Target/ARM/ |
H A D | ARMParallelDSP.cpp |
    59    using MemInstList = SmallVectorImpl<LoadInst*>;
    70    SmallVector<LoadInst*, 2> VecLd; // Container for loads to widen.
    76    return isa<LoadInst>(LHS) && isa<LoadInst>(RHS);  in HasTwoLoadInputs()
    79    LoadInst *getBaseLoad() const {  in getBaseLoad()
    196   LoadInst *NewLd = nullptr;
    197   SmallVector<LoadInst*, 4> Loads;
    200   WidenedLoad(SmallVectorImpl<LoadInst*> &Lds, LoadInst *Wide)  in WidenedLoad()
    204   LoadInst *getLoad() {  in getLoad()
    216   std::map<LoadInst*, LoadInst*> LoadPairs;
    217   SmallPtrSet<LoadInst*, 4> OffsetLoads;
    [all …]
|
/freebsd/contrib/llvm-project/llvm/include/llvm/Transforms/Scalar/ |
H A D | GVN.h |
    46    class LoadInst;  variable
    325   bool processLoad(LoadInst *L);
    326   bool processNonLocalLoad(LoadInst *L);
    332   AnalyzeLoadAvailability(LoadInst *Load, MemDepResult DepInfo, Value *Address);
    337   void AnalyzeLoadAvailability(LoadInst *Load, LoadDepVect &Deps,
    343   LoadInst *findLoadToHoistIntoPred(BasicBlock *Pred, BasicBlock *LoadBB,
    344   LoadInst *Load);
    346   bool PerformLoadPRE(LoadInst *Load, AvailValInBlkVect &ValuesPerBlock,
    352   bool performLoopLoadPRE(LoadInst *Load, AvailValInBlkVect &ValuesPerBlock,
    358   LoadInst *Load, AvailValInBlkVect &ValuesPerBlock,
    [all …]
|
H A D | MemCpyOptimizer.h |
    31    class LoadInst;  variable
    65    bool processStoreOfLoad(StoreInst *SI, LoadInst *LI, const DataLayout &DL,
    84    bool moveUp(StoreInst *SI, Instruction *P, const LoadInst *LI);
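The processStoreOfLoad entry above points at the store-of-a-load pattern that MemCpyOpt rewrites into a memcpy. A minimal sketch of how such a pair is typically recognized in IR; the helper name getLoadFeedingStore is illustrative and not part of MemCpyOptimizer:

    #include "llvm/IR/Instructions.h"
    #include "llvm/Support/Casting.h"

    using namespace llvm;

    // Illustrative helper: return the load feeding a simple store, if any.
    static LoadInst *getLoadFeedingStore(StoreInst *SI) {
      auto *LI = dyn_cast<LoadInst>(SI->getValueOperand());
      // Only plain, non-volatile, non-atomic accesses are candidates here.
      if (LI && LI->isSimple() && SI->isSimple())
        return LI;
      return nullptr;
    }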
|
/freebsd/contrib/llvm-project/llvm/include/llvm/Transforms/Utils/ |
H A D | Local.h |
    44    class LoadInst;  variable
    272   LoadInst *LI, DIBuilder &Builder);
    273   void ConvertDebugDeclareToDebugValue(DbgVariableRecord *DVR, LoadInst *LI,
    424   void copyMetadataForLoad(LoadInst &Dest, const LoadInst &Source);
    470   void copyNonnullMetadata(const LoadInst &OldLI, MDNode *N, LoadInst &NewLI);
    476   void copyRangeMetadata(const DataLayout &DL, const LoadInst &OldLI, MDNode *N,
    477   LoadInst &NewLI);
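The copyMetadataForLoad/copyNonnullMetadata/copyRangeMetadata helpers declared here exist so that a replacement load keeps whatever metadata remains valid. A hedged sketch of the usual call pattern when a pass re-creates a load through IRBuilder; variable names are illustrative, and a real pass would also carry over volatility and atomic ordering:

    #include "llvm/IR/IRBuilder.h"
    #include "llvm/IR/Instructions.h"
    #include "llvm/Transforms/Utils/Local.h"

    using namespace llvm;

    // Re-create OldLI at the same point and preserve its load metadata.
    static LoadInst *cloneLoadWithMetadata(LoadInst *OldLI) {
      IRBuilder<> B(OldLI);                       // insert before the old load
      LoadInst *NewLI = B.CreateAlignedLoad(OldLI->getType(),
                                            OldLI->getPointerOperand(),
                                            MaybeAlign(OldLI->getAlign()),
                                            OldLI->getName());
      copyMetadataForLoad(*NewLI, *OldLI);        // declared in Local.h (line 424 above)
      return NewLI;
    }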
|
/freebsd/contrib/llvm-project/llvm/lib/Target/AMDGPU/ |
H A D | AMDGPUPromoteKernelArguments.cpp |
    45    bool promoteLoad(LoadInst *LI);
    77    LoadInst *LD = cast<LoadInst>(U);  in enqueueUsers()
    97    LoadInst *LI = dyn_cast<LoadInst>(Ptr);  in promotePointer()
    130   bool AMDGPUPromoteKernelArguments::promoteLoad(LoadInst *LI) {  in promoteLoad()
|
/freebsd/contrib/llvm-project/llvm/lib/Target/NVPTX/ |
H A D | NVPTXLowerAggrCopies.cpp |
    60    SmallVector<LoadInst *, 4> AggrLoads;  in runOnFunction()
    71    if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {  in runOnFunction()
    104   for (LoadInst *LI : AggrLoads) {  in runOnFunction()
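The NVPTX pass above shows the canonical collect-then-rewrite shape: gather interesting LoadInsts into a SmallVector during the scan, then transform them afterwards so the traversal's iterators stay valid. A generic sketch of that shape, with no filtering predicate applied:

    #include "llvm/ADT/SmallVector.h"
    #include "llvm/IR/Function.h"
    #include "llvm/IR/InstIterator.h"
    #include "llvm/IR/Instructions.h"

    using namespace llvm;

    static void collectLoads(Function &F, SmallVectorImpl<LoadInst *> &Loads) {
      for (Instruction &I : instructions(F))
        if (auto *LI = dyn_cast<LoadInst>(&I))
          Loads.push_back(LI);   // rewrite later, outside the traversal
    }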
|
H A D | NVPTXLowerArgs.cpp |
    232   if (auto *LI = dyn_cast<LoadInst>(I.OldInstruction)) {  in INITIALIZE_PASS_DEPENDENCY()
    350   LoadInst *Inst;  in adjustByValArgAlignment()
    369   if (auto *I = dyn_cast<LoadInst>(CurUser)) {  in adjustByValArgAlignment()
    423   if (isa<GetElementPtrInst>(V) || isa<BitCastInst>(V) || isa<LoadInst>(V))  in handleByValParam()
    447   if (!isa<LoadInst>(V) && !isa<CallInst>(V) && !isa<StoreInst>(V) &&  in handleByValParam()
    519   LoadInst *LI =  in handleByValParam()
    520   new LoadInst(StructType, ArgInParam, Arg->getName(),  in handleByValParam()
    572   if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {  in runOnKernelFunction()
|
/freebsd/contrib/llvm-project/llvm/include/llvm/Analysis/ |
H A D | Loads.h |
    26    class LoadInst;  variable
    85    bool isDereferenceableAndAlignedInLoop(LoadInst *LI, Loop *L,
    137   Value *FindAvailableLoadedValue(LoadInst *Load, BasicBlock *ScanBB,
    148   Value *FindAvailableLoadedValue(LoadInst *Load, BatchAAResults &AA,
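FindAvailableLoadedValue asks whether the value a load would produce is already available just above it (for example from an earlier store or an identical load). A minimal sketch of calling the iterator-based overload shown at line 137; the optional trailing parameters have shifted across LLVM versions, so this only passes the leading ones:

    #include "llvm/Analysis/Loads.h"
    #include "llvm/IR/BasicBlock.h"
    #include "llvm/IR/Instructions.h"

    using namespace llvm;

    // Scan backwards from the load within its own block, returning an
    // equivalent available value or nullptr if none is found in range.
    static Value *lookForEarlierValue(LoadInst *LI) {
      BasicBlock::iterator ScanFrom = LI->getIterator();
      return FindAvailableLoadedValue(LI, LI->getParent(), ScanFrom,
                                      /*MaxInstsToScan=*/16);
    }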
|
/freebsd/contrib/llvm-project/llvm/lib/Transforms/Utils/ |
H A D | SSAUpdater.cpp |
    377   if (const LoadInst *LI = dyn_cast<LoadInst>(Insts[0]))  in LoadAndStorePromoter()
    399   SmallVector<LoadInst *, 32> LiveInLoads;  in run()
    418   LiveInLoads.push_back(cast<LoadInst>(User));  in run()
    437   LiveInLoads.push_back(cast<LoadInst>(I));  in run()
    449   if (LoadInst *L = dyn_cast<LoadInst>(&I)) {  in run()
    483   for (LoadInst *ALoad : LiveInLoads) {  in run()
    519   replaceLoadWithValue(cast<LoadInst>(User), NewVal);  in run()
|
H A D | PromoteMemoryToRegister.cpp |
    68    if (const LoadInst *LI = dyn_cast<LoadInst>(U)) {  in isAllocaPromotable()
    254   LoadInst *LI = cast<LoadInst>(User);  in AnalyzeAlloca()
    312   return (isa<LoadInst>(I) && isa<AllocaInst>(I->getOperand(0))) ||  in isInterestingInstruction()
    446   static void addAssumeNonNull(AssumptionCache *AC, LoadInst *LI) {  in addAssumeNonNull()
    457   static void convertMetadataToAssumes(LoadInst *LI, Value *Val,  in convertMetadataToAssumes()
    485   if (isa<LoadInst>(I) || isa<StoreInst>(I))  in removeIntrinsicUsers()
    545   LoadInst *LI = cast<LoadInst>(UserInst);  in rewriteSingleStoreAlloca()
    662   LoadInst *LI = dyn_cast<LoadInst>(U);  in promoteSingleBlockAlloca()
    1032  if (LoadInst *LI = dyn_cast<LoadInst>(I))  in ComputeLiveInBlocks()
    1174  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {  in RenamePass()
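isAllocaPromotable (line 68 above) walks an alloca's users and only accepts plain loads from the slot and plain stores to it. A stripped-down sketch of that user scan; real mem2reg additionally tolerates debug intrinsics, lifetime markers, and a few other benign uses, so this is illustrative only:

    #include "llvm/IR/Instructions.h"

    using namespace llvm;

    // Reduced promotability check: every user must be a non-volatile load of
    // the alloca or a non-volatile store *to* it (never a store of its address).
    static bool onlySimpleLoadsAndStores(const AllocaInst *AI) {
      for (const User *U : AI->users()) {
        if (const auto *LI = dyn_cast<LoadInst>(U)) {
          if (LI->isVolatile())
            return false;
        } else if (const auto *SI = dyn_cast<StoreInst>(U)) {
          if (SI->isVolatile() || SI->getValueOperand() == AI)
            return false;
        } else {
          return false;
        }
      }
      return true;
    }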
|
H A D | RelLookupTableConverter.cpp |
    42    LoadInst *Load = dyn_cast<LoadInst>(GEP->use_begin()->getUser());  in shouldConvertToRelLookupTable()
    133   LoadInst *Load = cast<LoadInst>(GEP->use_begin()->getUser());  in convertToRelLookupTable()
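Both hits above fetch the first user of a GEP and expect it to be a load. A hedged sketch of the underlying "GEP with exactly one load user" check; the helper name is illustrative:

    #include "llvm/IR/Instructions.h"

    using namespace llvm;

    // Return the load that is the sole user of GEP, or nullptr otherwise.
    static LoadInst *getSingleLoadUser(GetElementPtrInst *GEP) {
      if (!GEP->hasOneUse())
        return nullptr;
      return dyn_cast<LoadInst>(GEP->user_back());
    }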
|
H A D | GlobalStatus.cpp |
    92    if (const LoadInst *LI = dyn_cast<LoadInst>(I)) {  in analyzeGlobalAux()
    129   } else if (isa<LoadInst>(StoredVal) &&  in analyzeGlobalAux()
    130   cast<LoadInst>(StoredVal)->getOperand(0) == GV) {  in analyzeGlobalAux()
|
/freebsd/contrib/llvm-project/llvm/lib/Transforms/Scalar/ |
H A D | LowerAtomicPass.cpp |
    30    static bool LowerLoadInst(LoadInst *LI) {  in LowerLoadInst()
    49    else if (LoadInst *LI = dyn_cast<LoadInst>(&Inst)) {  in runOnBasicBlock()
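LowerLoadInst here strips the atomic ordering from a load so later passes treat it as a plain access. A sketch of that idea under the same assumption the pass makes (the code is only legal when atomicity is known to be unnecessary); consult the file itself for the authoritative version:

    #include "llvm/IR/Instructions.h"
    #include "llvm/Support/AtomicOrdering.h"

    using namespace llvm;

    // Demote an atomic load to an ordinary one; assumes the caller has proven
    // that no other thread can observe the difference.
    static bool demoteAtomicLoad(LoadInst *LI) {
      if (!LI->isAtomic())
        return false;
      LI->setAtomic(AtomicOrdering::NotAtomic);
      return true;
    }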
|
H A D | LoopLoadElimination.cpp |
    85    LoadInst *Load;
    88    StoreToLoadForwardingCandidate(LoadInst *Load, StoreInst *Store)  in StoreToLoadForwardingCandidate()
    163   static bool isLoadConditional(LoadInst *Load, Loop *L) {  in isLoadConditional()
    203   if (isa<LoadInst>(Source))  in findStoreToLoadDependences()
    205   if (isa<LoadInst>(Destination))  in findStoreToLoadDependences()
    221   auto *Load = dyn_cast<LoadInst>(Destination);  in findStoreToLoadDependences()
    273   DenseMap<LoadInst *, const StoreToLoadForwardingCandidate *>;  in removeDependencesFromMultipleStores()
    354   LoadInst *LastLoad =  in findPointersWrittenOnForwardingPath()
    448   new LoadInst(Cand.Load->getType(), InitialPtr, "load_initial",  in propagateStoredValueToLoadUsers()
|
H A D | SROA.cpp |
    146   PointerIntPair<LoadInst *, 2, SelectHandSpeculativity>;
    1108  void visitLoadInst(LoadInst &LI) {  in visitLoadInst()
    1291  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {  in hasUnsafePHIOrSelectUse()
    1479  if (LoadInst *LI = dyn_cast<LoadInst>(U->getUser())) {  in findCommonType()
    1541  LoadInst *LI = dyn_cast<LoadInst>(U);  in isSafePHIToSpeculate()
    1606  LoadInst *SomeLoad = cast<LoadInst>(PN.user_back());  in speculatePHINodeLoads()
    1619  LoadInst *LI = cast<LoadInst>(PN.user_back());  in speculatePHINodeLoads()
    1642  LoadInst *Load = IRB.CreateAlignedLoad(  in speculatePHINodeLoads()
    1684  isSafeLoadOfSelectToSpeculate(LoadInst &LI, SelectInst &SI, bool PreserveCFG) {  in isSafeLoadOfSelectToSpeculate()
    1717  auto *LI = dyn_cast<LoadInst>(U);  in isSafeSelectToSpeculate()
    [all …]
|
H A D | GVN.cpp |
    228   static AvailableValue getLoad(LoadInst *Load, unsigned Offset = 0) {  in getLoad()
    265   LoadInst *getCoercedLoadValue() const {  in getCoercedLoadValue()
    267   return cast<LoadInst>(Val);  in getCoercedLoadValue()
    282   Value *MaterializeAdjustedValue(LoadInst *Load, Instruction *InsertPt,
    318   Value *MaterializeAdjustedValue(LoadInst *Load, GVNPass &gvn) const {  in MaterializeAdjustedValue()
    1027  ConstructSSAForLoadSet(LoadInst *Load,  in ConstructSSAForLoadSet()
    1070  Value *AvailableValue::MaterializeAdjustedValue(LoadInst *Load,  in MaterializeAdjustedValue()
    1087  LoadInst *CoercedLoad = getCoercedLoadValue();  in MaterializeAdjustedValue()
    1151  static void reportMayClobberedLoad(LoadInst *Load, MemDepResult DepInfo,  in reportMayClobberedLoad()
    1163  if (U != Load && (isa<LoadInst>(U) || isa<StoreInst>(U))) {  in reportMayClobberedLoad()
    [all …]
|
/freebsd/contrib/llvm-project/llvm/lib/CodeGen/ |
H A D | InterleavedLoadCombinePass.cpp |
    94    LoadInst *findFirstLoad(const std::set<LoadInst *> &LIs);
    651   LoadInst *LI;
    653   ElementInfo(Polynomial Offset = Polynomial(), LoadInst *LI = nullptr)  in ElementInfo()
    664   std::set<LoadInst *> LIs;
    717   LoadInst *LI = dyn_cast<LoadInst>(V);  in compute()
    869   static bool computeFromLI(LoadInst *LI, VectorInfo &Result,  in computeFromLI()
    1105  LoadInst *
    1106  InterleavedLoadCombineImpl::findFirstLoad(const std::set<LoadInst *> &LIs) {  in findFirstLoad()
    1115  return cast<LoadInst>(FLI);  in findFirstLoad()
    1125  LoadInst *InsertionPoint = InterleavedLoad.front().EI[0].LI;  in combine()
    [all …]
|
H A D | InterleavedAccessPass.cpp |
    103   bool lowerInterleavedLoad(LoadInst *LI,
    134   LoadInst *LI);
    253   LoadInst *LI, SmallVector<Instruction *, 32> &DeadInsts) {  in lowerInterleavedLoad()
    360   SmallVectorImpl<ShuffleVectorInst *> &Shuffles, LoadInst *LI) {  in replaceBinOpShuffles()
    484   LoadInst *LI = dyn_cast<LoadInst>(DI->getOperand(0));  in lowerDeinterleaveIntrinsic()
    529   if (auto *LI = dyn_cast<LoadInst>(&I))  in runOnFunction()
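lowerInterleavedLoad matches a wide vector load whose users are shufflevector de-interleaving masks. A rough sketch of how such shuffle users might be gathered from a candidate load; the names are illustrative and the real pass applies further mask and target checks:

    #include "llvm/ADT/SmallVector.h"
    #include "llvm/IR/Instructions.h"

    using namespace llvm;

    // Collect shufflevector users of a candidate wide load; bail out if any
    // user is something else, since the load then cannot be lowered this way.
    static bool collectShuffleUsers(LoadInst *LI,
                                    SmallVectorImpl<ShuffleVectorInst *> &Shuffles) {
      if (!LI->isSimple() || !LI->getType()->isVectorTy())
        return false;
      for (User *U : LI->users()) {
        auto *SVI = dyn_cast<ShuffleVectorInst>(U);
        if (!SVI)
          return false;
        Shuffles.push_back(SVI);
      }
      return !Shuffles.empty();
    }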
|
/freebsd/contrib/llvm-project/llvm/lib/SandboxIR/ |
H A D | SandboxIR.cpp |
    503   LoadInst *LoadInst::create(Type *Ty, Value *Ptr, MaybeAlign Align,  in create()
    515   LoadInst *LoadInst::create(Type *Ty, Value *Ptr, MaybeAlign Align,  in create()
    526   bool LoadInst::classof(const Value *From) {  in classof()
    530   Value *LoadInst::getPointerOperand() const {  in getPointerOperand()
    531   return Ctx.getValue(cast<llvm::LoadInst>(Val)->getPointerOperand());  in getPointerOperand()
    535   void LoadInst::dump(raw_ostream &OS) const {  in dump()
    540   void LoadInst::dump() const {  in dump()
    762   auto *LLVMLd = cast<llvm::LoadInst>(LLVMV);  in getOrCreateValueInternal()
    763   It->second = std::unique_ptr<LoadInst>(new LoadInst(LLVMLd, *this));  in getOrCreateValueInternal()
    799   LoadInst *Context::createLoadInst(llvm::LoadInst *LI) {  in createLoadInst()
    [all …]
|
/freebsd/contrib/llvm-project/llvm/lib/Transforms/IPO/ |
H A D | GlobalOpt.cpp |
    168   if (isa<LoadInst>(V) || isa<InvokeInst>(V) || isa<Argument>(V) ||  in IsSafeComputationToRemove()
    296   else if (auto *LI = dyn_cast<LoadInst>(U)) {  in CleanupConstantGlobalUsers()
    418   It->second.IsLoaded |= isa<LoadInst>(V);  in collectSRATypes()
    624   if (auto *LI = dyn_cast<LoadInst>(V)) {  in SRAGlobal()
    661   if (isa<LoadInst>(U)) {  in AllUsesOfValueWillTrapIfNull()
    687   isa<LoadInst>(U->getOperand(0)) &&  in AllUsesOfValueWillTrapIfNull()
    689   assert(isa<GlobalValue>(cast<LoadInst>(U->getOperand(0))  in AllUsesOfValueWillTrapIfNull()
    712   if (auto *LI = dyn_cast<LoadInst>(U)) {  in allUsesOfLoadedValueWillTrapIfNull()
    748   assert((isa<LoadInst>(U) || isa<StoreInst>(U)) &&  in allUsesOfLoadAndStores()
    763   if (LoadInst *LI = dyn_cast<LoadInst>(I)) {  in OptimizeAwayTrappingUsesOfValue()
    [all …]
|
/freebsd/contrib/llvm-project/llvm/include/llvm/SandboxIR/ |
H A D | SandboxIR.h |
    79    class LoadInst;  variable
    182   friend class LoadInst; // For getting `Val`.  variable
    507   friend class LoadInst; // For getTopmostLLVMInstruction().  variable
    620   class LoadInst final : public Instruction {
    622   LoadInst(llvm::LoadInst *LI, Context &Ctx)  in LoadInst() function
    638   static LoadInst *create(Type *Ty, Value *Ptr, MaybeAlign Align,
    641   static LoadInst *create(Type *Ty, Value *Ptr, MaybeAlign Align,
    647   Align getAlign() const { return cast<llvm::LoadInst>(Val)->getAlign(); }  in getAlign()
    648   bool isUnordered() const { return cast<llvm::LoadInst>(Val)->isUnordered(); }  in isUnordered()
    649   bool isSimple() const { return cast<llvm::LoadInst>(Val)->isSimple(); }  in isSimple()
    [all …]
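The SandboxIR LoadInst is a thin facade: each accessor forwards to the wrapped llvm::LoadInst stored in Val, as lines 647-649 show. A generic, simplified sketch of that forwarding idiom; this is not the actual SandboxIR class, just an illustration of the pattern:

    #include "llvm/IR/Instructions.h"
    #include "llvm/Support/Alignment.h"

    namespace sketch {

    // Facade over an llvm::LoadInst: expose a narrow API while the underlying
    // IR instruction remains the single source of truth.
    class LoadHandle {
      llvm::LoadInst *Val;   // wrapped instruction

    public:
      explicit LoadHandle(llvm::LoadInst *LI) : Val(LI) {}
      llvm::Align getAlign() const { return Val->getAlign(); }
      bool isSimple() const { return Val->isSimple(); }
      llvm::Value *getPointerOperand() const { return Val->getPointerOperand(); }
    };

    } // namespace sketch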
|
/freebsd/contrib/llvm-project/llvm/lib/Analysis/ |
H A D | MemoryDependenceAnalysis.cpp |
    110   if (const LoadInst *LI = dyn_cast<LoadInst>(Inst)) {  in GetLocation()
    246   if (auto *LI = dyn_cast<LoadInst>(QueryInst)) {  in getPointerDependencyFrom()
    277   MemoryDependenceResults::getInvariantGroupPointerDependency(LoadInst *LI,  in getInvariantGroupPointerDependency()
    340   if ((isa<LoadInst>(U) ||  in getInvariantGroupPointerDependency()
    382   auto *LI = dyn_cast<LoadInst>(SI->getValueOperand());  in canSkipClobberingStore()
    441   if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {  in getSimplePointerDependencyFrom()
    453   if (auto *LI = dyn_cast<LoadInst>(I))  in getSimplePointerDependencyFrom()
    511   if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {  in getSimplePointerDependencyFrom()
    874   bool isLoad = isa<LoadInst>(QueryInst);  in getNonLocalPointerDependency()
    901   if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {  in getNonLocalPointerDependency()
    [all …]
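GetLocation (line 110) turns a load into a MemoryLocation so the analysis can ask alias queries about it. A small sketch of the same idea using AAResults directly; this is illustrative and not the pass's own code:

    #include "llvm/Analysis/AliasAnalysis.h"
    #include "llvm/Analysis/MemoryLocation.h"
    #include "llvm/IR/Instructions.h"

    using namespace llvm;

    // Does Store possibly write the memory this Load reads?
    static bool storeMayClobberLoad(AAResults &AA, StoreInst *Store, LoadInst *Load) {
      MemoryLocation LoadLoc = MemoryLocation::get(Load);
      return isModSet(AA.getModRefInfo(Store, LoadLoc));
    }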
|
H A D | ObjCARCAnalysisUtils.cpp | 39 if (const LoadInst *LI = dyn_cast<LoadInst>(Op)) in IsPotentialRetainableObjPtr()
|
/freebsd/contrib/llvm-project/llvm/lib/Transforms/InstCombine/ |
H A D | InstCombineLoadStoreAlloca.cpp |
    76    if (auto *LI = dyn_cast<LoadInst>(I)) {  in isOnlyCopiedFromConstantMemory()
    299   if (auto *Load = dyn_cast<LoadInst>(Inst)) {  in collectUsersRecursive()
    366   if (auto *LT = dyn_cast<LoadInst>(I)) {  in replace()
    369   auto *NewI = new LoadInst(LT->getType(), V, "", LT->isVolatile(),  in replace()
    562   LoadInst *InstCombinerImpl::combineLoadToNewType(LoadInst &LI, Type *NewTy,  in combineLoadToNewType()
    567   LoadInst *NewLoad =  in combineLoadToNewType()
    649   LoadInst &Load) {  in combineLoadToOperationType()
    680   LoadInst *NewLoad = IC.combineLoadToNewType(Load, DestTy);  in combineLoadToOperationType()
    693   static Instruction *unpackLoadToAggregate(InstCombinerImpl &IC, LoadInst &LI) {  in unpackLoadToAggregate()
    709   LoadInst *NewLoad = IC.combineLoadToNewType(LI, ST->getTypeAtIndex(0U),  in unpackLoadToAggregate()
    [all …]
|