/freebsd/contrib/llvm-project/llvm/lib/Target/AMDGPU/

AMDGPULowerKernelAttributes.cpp (all matches in processUse()):
    113  auto *Load = dyn_cast<LoadInst>(U); // Load from ImplicitArgPtr/DispatchPtr?
    115  if (!Load && !BCI) {
    118  Load = dyn_cast<LoadInst>(*U->user_begin()); // Load from GEP?
    125  Load = dyn_cast<LoadInst>(*BCI->user_begin()); // Load from BCI?
    128  if (!Load || !Load->isSimple())
    131  unsigned LoadSize = DL.getTypeStoreSize(Load->getType());
    138  BlockCounts[0] = Load;
    142  BlockCounts[1] = Load;
    146  BlockCounts[2] = Load;
    150  GroupSizes[0] = Load;
    [all …]

AMDGPULowerKernelArguments.cpp (all matches in lowerKernelArguments()):
    224  LoadInst *Load =
    226  Load->setMetadata(LLVMContext::MD_invariant_load, MDNode::get(Ctx, {}));
    232  Load->setMetadata(LLVMContext::MD_nonnull, MDNode::get(Ctx, {}));
    236  Load->setMetadata(
    245  Load->setMetadata(
    253  Load->setMetadata(
    264  Load : Builder.CreateLShr(Load, OffsetDiff * 8);
    272  Value *Shuf = Builder.CreateShuffleVector(Load, ArrayRef<int>{0, 1, 2},
    276  Load->setName(Arg.getName() + ".load");
    277  Arg.replaceAllUsesWith(Load);

/freebsd/contrib/llvm-project/llvm/lib/Transforms/Scalar/

LoopLoadElimination.cpp:
  in struct StoreToLoadForwardingCandidate:
     85  LoadInst *Load;
     88  StoreToLoadForwardingCandidate(LoadInst *Load, StoreInst *Store)
     89      : Load(Load), Store(Store) {}
  in isDependenceDistanceOfOne():
     96  Value *LoadPtr = Load->getPointerOperand();
     98  Type *LoadType = getLoadStoreType(Load);
     99  auto &DL = Load->getDataLayout();
  in getLoadPtr():
    137  Value *getLoadPtr() const { return Load->getPointerOperand(); }
  in operator<<():
    143  OS.indent(2) << *Cand.Load << "\n";
  in isLoadConditional():
    163  static bool isLoadConditional(LoadInst *Load, Loop *L) {
    164  return Load->getParent() != L->getHeader();
  [all …]
  (A hedged example of the loop pattern this pass targets follows below.)

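The LoopLoadElimination hits above belong to the store-to-load forwarding candidate bookkeeping: the pass forwards a value stored in one iteration to a load of the same location in the next iteration (a dependence distance of exactly one), replacing the reload with a PHI. A minimal C++ example of a loop with that shape (hypothetical function and array names, purely illustrative):

    #include <cstddef>

    // The load of a[i - 1] always reads the value stored to a[i] one iteration
    // earlier, so the stored value can be carried in a register across
    // iterations instead of being reloaded from memory.
    void forwardable(float *a, const float *b, float *c, std::size_t n) {
      for (std::size_t i = 1; i < n; ++i) {
        a[i] = b[i] + 1.0f;      // store in iteration i
        c[i] = a[i - 1] * 2.0f;  // load of the location stored in iteration i - 1
      }
    }
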
GVN.cpp:
  in getLoad():
    228  static AvailableValue getLoad(LoadInst *Load, unsigned Offset = 0) {
    230  Res.Val = Load;
  declaration:
    282  Value *MaterializeAdjustedValue(LoadInst *Load, Instruction *InsertPt,
  in MaterializeAdjustedValue():
    318  Value *MaterializeAdjustedValue(LoadInst *Load, GVNPass &gvn) const {
    319  return AV.MaterializeAdjustedValue(Load, BB->getTerminator(), gvn);
  in ConstructSSAForLoadSet():
   1027  ConstructSSAForLoadSet(LoadInst *Load,
   1034  Load->getParent())) {
   1037  return ValuesPerBlock[0].MaterializeAdjustedValue(Load, gvn);
   1043  SSAUpdate.Initialize(Load->getType(), Load->getName());
   1058  if (BB == Load->getParent() &&
  [all …]

/freebsd/contrib/llvm-project/llvm/lib/Object/

MachOObjectFile.cpp:
  in parseSegmentLoadCommand():
    286  const MachOObjectFile &Obj, const MachOObjectFile::LoadCommandInfo &Load,
    291  if (Load.C.cmdsize < SegmentLoadSize)
    294  if (auto SegOrErr = getStructOrErr<Segment>(Obj, Load.Ptr)) {
    299  S.nsects * SectionSize > Load.C.cmdsize - SegmentLoadSize)
    304  const char *Sec = getSectionPtr(Obj, Load, J);
  in checkSymtabCommand():
    410  const MachOObjectFile::LoadCommandInfo &Load,
    414  if (Load.C.cmdsize < sizeof(MachO::symtab_command))
    419  auto SymtabOrErr = getStructOrErr<MachO::symtab_command>(Obj, Load.Ptr);
    463  *SymtabLoadCmd = Load.Ptr;
  in checkDysymtabCommand():
    468  const MachOObjectFile::LoadCommandInfo &Load,
  [all …]

/freebsd/contrib/llvm-project/compiler-rt/lib/hwasan/

hwasan.cpp:
    498  CheckAddressSized<ErrorAction::Abort, AccessType::Load>(p, sz);    in __hwasan_loadN()
    501  CheckAddress<ErrorAction::Abort, AccessType::Load, 0>(p);          in __hwasan_load1()
    504  CheckAddress<ErrorAction::Abort, AccessType::Load, 1>(p);          in __hwasan_load2()
    507  CheckAddress<ErrorAction::Abort, AccessType::Load, 2>(p);          in __hwasan_load4()
    510  CheckAddress<ErrorAction::Abort, AccessType::Load, 3>(p);          in __hwasan_load8()
    513  CheckAddress<ErrorAction::Abort, AccessType::Load, 4>(p);          in __hwasan_load16()
    517  CheckAddressSized<ErrorAction::Recover, AccessType::Load>(p, sz);  in __hwasan_loadN_noabort()
    520  CheckAddress<ErrorAction::Recover, AccessType::Load, 0>(p);        in __hwasan_load1_noabort()
    523  CheckAddress<ErrorAction::Recover, AccessType::Load, 1>(p);        in __hwasan_load2_noabort()
    526  CheckAddress<ErrorAction::Recover, AccessType::Load, 2>(p);        in __hwasan_load4_noabort()
    [all …]
  (A self-contained sketch of this dispatch pattern follows below.)

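The __hwasan_load{1,2,4,8,16} entry points above differ only in the last template argument, which is the log2 of the access size (0 for 1 byte up to 4 for 16 bytes); the *_noabort variants swap ErrorAction::Abort for ErrorAction::Recover. A self-contained sketch of that dispatch shape, with hypothetical stand-in names rather than the real hwasan internals:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    enum class ErrorAction { Abort, Recover };
    enum class AccessType { Load, Store };

    // Stand-in for the real shadow-tag check: it only reports what would be checked.
    template <ErrorAction EA, AccessType AT, unsigned LogSize>
    void CheckAddress(std::uintptr_t p) {
      std::printf("check %s of %u bytes at 0x%zx (%s)\n",
                  AT == AccessType::Load ? "load" : "store", 1u << LogSize,
                  static_cast<std::size_t>(p),
                  EA == ErrorAction::Abort ? "abort" : "recover");
    }

    // Mirrors how __hwasan_load4 and __hwasan_load4_noabort differ only in ErrorAction.
    void demo_load4(std::uintptr_t p) {
      CheckAddress<ErrorAction::Abort, AccessType::Load, 2>(p);    // 4-byte load, abort
    }
    void demo_load4_noabort(std::uintptr_t p) {
      CheckAddress<ErrorAction::Recover, AccessType::Load, 2>(p);  // 4-byte load, recover
    }
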
hwasan_memintrinsics.cpp:
     33  CheckAddressSized<ErrorAction::Recover, AccessType::Load>(  in __hwasan_memcpy()
     41  CheckAddressSized<ErrorAction::Recover, AccessType::Load>(  in __hwasan_memmove()
     60  CheckAddressSized<ErrorAction::Recover, AccessType::Load>(  in __hwasan_memcpy_match_all()
     71  CheckAddressSized<ErrorAction::Recover, AccessType::Load>(  in __hwasan_memmove_match_all()

/freebsd/contrib/llvm-project/llvm/lib/Transforms/Utils/

RelLookupTableConverter.cpp:
  in shouldConvertToRelLookupTable():
     42  LoadInst *Load = dyn_cast<LoadInst>(GEP->use_begin()->getUser());
     43  if (!Load || !Load->hasOneUse() ||
     44  Load->getType() != GEP->getResultElementType())
  in convertToRelLookupTable():
    133  LoadInst *Load = cast<LoadInst>(GEP->use_begin()->getUser());
    153  Builder.SetInsertPoint(Load);
    163  Load->replaceAllUsesWith(Result);
    165  Load->eraseFromParent();
  (A hedged C++ model of the relative lookup this produces follows below.)

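shouldConvertToRelLookupTable only fires when the table GEP's single user is a simple load of the table's element type; convertToRelLookupTable then replaces that load with a relative lookup in which the table stores 32-bit offsets from its own base instead of absolute pointers (emitted via the llvm.load.relative intrinsic). A hedged C++ model of what the converted access computes, not the actual IR:

    #include <cstdint>

    // Relative table: each slot holds the signed 32-bit distance from the table
    // base to the referenced object, so the table itself needs no dynamic
    // relocations in position-independent code.
    const void *relative_lookup(const std::int32_t *table, unsigned index) {
      const char *base = reinterpret_cast<const char *>(table);
      return base + table[index];  // base + sign-extended 32-bit offset
    }
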
LowerMemIntrinsics.cpp:
  in createMemCpyLoopKnownSize():
     77  LoadInst *Load = LoopBuilder.CreateAlignedLoad(LoopOpType, SrcGEP,
     81  Load->setMetadata(LLVMContext::MD_alias_scope,
     87  Load, DstGEP, PartDstAlign, DstIsVolatile);
     93  Load->setAtomic(AtomicOrdering::Unordered);
    133  LoadInst *Load =
    137  Load->setMetadata(LLVMContext::MD_alias_scope,
    142  StoreInst *Store = RBuilder.CreateAlignedStore(Load, DstGEP, PartDstAlign,
    149  Load->setAtomic(AtomicOrdering::Unordered);
  in createMemCpyLoopUnknownSize():
    234  LoadInst *Load = LoopBuilder.CreateAlignedLoad(LoopOpType, SrcGEP,
    238  Load->setMetadata(LLVMContext::MD_alias_scope, MDNode::get(Ctx, NewScope));
  [all …]
  (A rough C++ model of the emitted copy loop follows below.)

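createMemCpyLoopKnownSize expands a fixed-length memcpy into an explicit copy loop plus residual accesses for the bytes that do not fill a whole loop operand; the loads and stores get fresh alias-scope metadata, and for element-wise atomic memcpy they are marked unordered. A rough C++ model of the generated structure (a 4-byte loop operand is assumed for illustration; the real pass picks the type from target and alignment information):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Shape of the expansion: a counted main loop copying one 4-byte "loop op"
    // per iteration, then byte-sized residual copies for the tail.
    void expanded_memcpy(std::uint8_t *dst, const std::uint8_t *src, std::size_t len) {
      std::size_t i = 0;
      for (; i + 4 <= len; i += 4) {
        std::uint32_t v;
        std::memcpy(&v, src + i, 4);  // load  (would carry !alias.scope metadata)
        std::memcpy(dst + i, &v, 4);  // store (would carry !noalias metadata)
      }
      for (; i < len; ++i)            // residual copies for the remaining bytes
        dst[i] = src[i];
    }
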
/freebsd/share/examples/bootforth/

loader.rc:
      6  \ Load configuration file words
     12  \ Load the screen manipulation words
     17  \ Load frame support
     21  \ Load our little menu

boot.4th:
      4  \ Load the screen manipulation words
     11  \ Load frame support
     15  \ Load our little menu

/freebsd/contrib/llvm-project/llvm/lib/Transforms/ObjCARC/

ObjCARCContract.cpp:
  in findSafeStoreForStoreStrongContraction():
    194  static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
    202  MemoryLocation Loc = MemoryLocation::get(Load);
    206  for (auto I = std::next(BasicBlock::iterator(Load)),
    207  E = Load->getParent()->end();
    233  if (!CanUse(Inst, Load, PA, Class)) {
  in tryToContractReleaseIntoStoreStrong():
    339  auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
    340  if (!Load || !Load->isSimple())
    345  if (Load->getParent() != BB)
    351  findSafeStoreForStoreStrongContraction(Load, Release, PA, AA);
    377  << " Load: " << *Load << "\n");
  [all …]

/freebsd/contrib/llvm-project/llvm/lib/Target/AArch64/

SVEIntrinsicOpts.cpp (all matches in optimizePredicateLoad()):
    375  auto *Load = dyn_cast<LoadInst>(IntrI->getOperand(1));
    376  if (!Load || !Load->isSimple())
    380  if (Load->getType() != FixedPredType)
    384  Builder.SetInsertPoint(Load);
    386  auto *LoadPred = Builder.CreateLoad(PredType, Load->getPointerOperand());
    392  if (Load->getNumUses() == 0)
    393  Load->eraseFromParent();

AArch64SchedAmpere1B.td:
    570  } // Load from base addr plus immediate offset
    582  } // Load from a register index (maybe scaled).
    687  // -- Load 1-element structure to one/all lanes
    694  // -- Load 1-element structure to one/all lanes, 1D size
    697  // -- Load 1-element structures to 1 register
    700  // -- Load 1-element structures to 2 registers
    703  // -- Load 1-element structures to 3 registers
    706  // -- Load 1-element structures to 4 registers
    709  // -- Load 2-element structure to all lanes of 2 registers, 1D size
    712  // -- Load 2-element structure to all lanes of 2 registers, other sizes
    [all …]

AArch64SchedAmpere1.td:
    614  } // Load from base addr plus immediate offset
    626  } // Load from a register index (maybe scaled).
    721  // -- Load 1-element structure to one/all lanes
    728  // -- Load 1-element structure to one/all lanes, 1D size
    731  // -- Load 1-element structures to 1 register
    734  // -- Load 1-element structures to 2 registers
    737  // -- Load 1-element structures to 3 registers
    740  // -- Load 1-element structures to 4 registers
    743  // -- Load 2-element structure to all lanes of 2 registers, 1D size
    746  // -- Load 2-element structure to all lanes of 2 registers, other sizes
    [all …]

/freebsd/contrib/llvm-project/llvm/lib/Target/BPF/

BPFPreserveStaticOffset.cpp:
  in makeGEPAndLoad():
    225  LoadInst *Load) {
    227  fillCommonArgs(M->getContext(), Args, GEP, Load);
    229  {Load->getType()}, Args);
    231  Call->applyMergedLocation(mergeDILocations(GEP.Members), Load->getDebugLoc());
    233  if (Load->isUnordered()) {
    240  Call->setAAMetadata(Load->getAAMetadata());
  in reconstructLoad():
    307  auto *Load = new LoadInst(ReturnType, GEP, "",
    310  reconstructCommon(Call, GEP, Load, 0);
    311  return std::pair{GEP, Load};
  in tryToReplaceWithGEPBuiltin():
    423  if (auto *Load = dyn_cast<LoadInst>(LoadOrStoreTemplate)) {
  [all …]

/freebsd/contrib/llvm-project/llvm/lib/Analysis/

ValueLatticeUtils.cpp (all matches in canTrackGlobalVariableInterprocedurally()):
     37  if (auto *Load = dyn_cast<LoadInst>(U))
     38  return !Load->isVolatile() && Load->getType() == GV->getValueType();

Loads.cpp (all matches in the two FindAvailableLoadedValue() overloads):
    462  Value *llvm::FindAvailableLoadedValue(LoadInst *Load, BasicBlock *ScanBB,
    468  if (!Load->isUnordered())
    471  MemoryLocation Loc = MemoryLocation::get(Load);
    472  return findAvailablePtrLoadStore(Loc, Load->getType(), Load->isAtomic(),
    675  Value *llvm::FindAvailableLoadedValue(LoadInst *Load, BatchAAResults &AA,
    678  const DataLayout &DL = Load->getDataLayout();
    679  Value *StrippedPtr = Load->getPointerOperand()->stripPointerCasts();
    680  BasicBlock *ScanBB = Load->getParent();
    681  Type *AccessTy = Load->getType();
    682  bool AtLeastAtomic = Load->isAtomic();
  [all …]
  (A hedged usage sketch follows below.)

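FindAvailableLoadedValue scans backwards from a load for an earlier store or load that already makes the value available; the first overload scans an explicit block from a caller-supplied iterator, the second starts from the load itself and takes a BatchAAResults. A hedged sketch of calling the second overload from a pass, assuming the declaration in llvm/Analysis/Loads.h (defaults and exact parameters can differ between LLVM versions):

    #include "llvm/Analysis/AliasAnalysis.h"
    #include "llvm/Analysis/Loads.h"
    #include "llvm/IR/Instructions.h"

    using namespace llvm;

    // Try to replace Load with a value that is already available before it.
    // Sketch only: real callers (e.g. InstCombine) also coerce mismatched types
    // with a bit-or-pointer cast and merge metadata when IsLoadCSE is set.
    static bool tryForwardLoad(LoadInst *Load, AAResults &AAR) {
      BatchAAResults BatchAA(AAR);
      bool IsLoadCSE = false;
      Value *Avail = FindAvailableLoadedValue(Load, BatchAA, &IsLoadCSE);
      if (!Avail || Avail->getType() != Load->getType())
        return false;
      Load->replaceAllUsesWith(Avail);
      Load->eraseFromParent();
      return true;
    }
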
/freebsd/contrib/llvm-project/llvm/lib/Target/SystemZ/

SystemZISelDAGToDAG.cpp:
  declaration:
    332  bool canUseBlockOperation(StoreSDNode *Store, LoadSDNode *Load) const;
  in tryRISBGZero():
   1022  else if (auto *Load = dyn_cast<LoadSDNode>(RISBG.Input)) {
   1023  if (Load->getMemoryVT() == MVT::i32 &&
   1024  (Load->getExtensionType() == ISD::EXTLOAD ||
   1025  Load->getExtensionType() == ISD::ZEXTLOAD) &&
  in tryRxSBG():
   1116  if (auto *Load = dyn_cast<LoadSDNode>(Op0.getNode()))
   1117  if (Load->getMemoryVT() == MVT::i8)
  in tryGather():
   1246  auto *Load = dyn_cast<LoadSDNode>(N->getOperand(1));
   1247  if (!Load || !Load->hasNUsesOfValue(1, 0))
   1249  if (Load->getMemoryVT().getSizeInBits() !=
  [all …]

/freebsd/contrib/llvm-project/llvm/lib/Transforms/Vectorize/

VPlanSLP.cpp:
  in areVectorizable():
    113  if (Opcode == Instruction::Load) {
    120  if (VPI->getOpcode() == Instruction::Load &&
  in getOperands():
    176  case Instruction::Load:
  in areConsecutiveOrMatch():
    207  if (A->getOpcode() != Instruction::Load &&
  in getBest():
    241  assert((Mode == OpMode::Load || Mode == OpMode::Opcode) &&
  in reorderMultiNodeOps():
    307  Instruction::Load)
    308  Mode.push_back(OpMode::Load);
  in buildGraph():
    437  if (ValuesOpcode == Instruction::Load)
    447  case Instruction::Load:

/freebsd/contrib/llvm-project/llvm/tools/llvm-readobj/

MachODumper.cpp:
  in printMachODataInCode():
    814  for (const auto &Load : Obj->load_commands()) {
    815  if (Load.C.cmd == MachO::LC_DATA_IN_CODE) {
    816  MachO::linkedit_data_command LLC = Obj->getLinkeditDataLoadCommand(Load);
  in printMachOVersionMin():
    836  for (const auto &Load : Obj->load_commands()) {
    838  switch (Load.C.cmd) {
    860  if (Load.C.cmd == MachO::LC_BUILD_VERSION) {
    861  MachO::build_version_command BVC = Obj->getBuildVersionLoadCommand(Load);
    874  MachO::version_min_command VMC = Obj->getVersionMinLoadCommand(Load);
  in printMachODysymtab():
    899  for (const auto &Load : Obj->load_commands()) {
    900  if (Load
  The same "for (const auto &Load : Obj->load_commands())" loop also opens
  printMachOSegment() (line 924), printMachOIndirectSymbols() (945), and
  printMachOLinkerOptions() (961).
  [all …]
  (A hedged sketch of this load-command iteration follows below.)

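All of these printers walk the same public interface: MachOObjectFile::load_commands() yields LoadCommandInfo entries, Load.C.cmd identifies the command kind, and typed accessors such as getLinkeditDataLoadCommand decode the payload. A hedged sketch of that iteration using the llvm::object API visible in the hits, with error handling omitted:

    #include "llvm/Object/MachO.h"
    #include "llvm/Support/raw_ostream.h"

    using namespace llvm;
    using namespace llvm::object;

    // Report the LC_DATA_IN_CODE payload of an already-parsed Mach-O file,
    // mirroring the loop in printMachODataInCode(). Sketch only.
    static void dumpDataInCode(const MachOObjectFile &Obj) {
      for (const MachOObjectFile::LoadCommandInfo &Load : Obj.load_commands()) {
        if (Load.C.cmd == MachO::LC_DATA_IN_CODE) {
          MachO::linkedit_data_command LLC = Obj.getLinkeditDataLoadCommand(Load);
          outs() << "LC_DATA_IN_CODE: offset " << LLC.dataoff
                 << ", size " << LLC.datasize << "\n";
        }
      }
    }
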
/freebsd/contrib/llvm-project/llvm/lib/Target/ARM/

MVEGatherScatterLowering.cpp (all matches in lowerGather()):
    427  Instruction *Load = tryCreateIncrementingGatScat(I, Ptr, Builder);
    428  if (!Load)
    429  Load = tryCreateMaskedGatherOffset(I, Ptr, Root, Builder);
    430  if (!Load)
    431  Load = tryCreateMaskedGatherBase(I, Ptr, Builder);
    432  if (!Load)
    438  Load = SelectInst::Create(Mask, Load, PassThru);
    439  Builder.Insert(Load);
    442  Root->replaceAllUsesWith(Load);
    450  << *Load << "\n");
    [all …]

/freebsd/contrib/llvm-project/llvm/lib/Target/PowerPC/

P9InstrResources.td:
     29  // - Four Load/Store Queues. P9_LS_*
    709  // 6 Cycle Load uses a single slice.
    715  // 5 Cycle Load uses a single slice.
    734  // 4 Cycle Load uses a single slice.
    772  // Cracked Load Instructions.
    773  // Load instructions that can be done in parallel.
    785  // Cracked Load Instruction.
    786  // Requires Load and ALU pieces totaling 6 cycles. The Load and ALU
    795  // Requires Load and ALU pieces totaling 6 cycles. The Load and ALU
    813  // Cracked Load instruction.
    [all …]

/freebsd/contrib/llvm-project/compiler-rt/lib/sanitizer_common/

sanitizer_allocator_secondary.h:
  in GetBlockBegin():
    186  Header *const *chunks = AddressSpaceView::Load(chunks_, n_chunks_);
    197  AddressSpaceView::Load(reinterpret_cast<Header *>(nearest_chunk));
  in GetBlockBeginFastLocked():
    224  Header *const *chunks = AddressSpaceView::Load(chunks_, n_chunks_);
    227  AddressSpaceView::Load(chunks[n - 1])->map_size;
    248  const Header *h = AddressSpaceView::Load(chunks[beg]);
  in ForEachChunk():
    278  const Header *const *chunks = AddressSpaceView::Load(chunks_, n_chunks_);
    284  CHECK_EQ(AddressSpaceView::Load(chunks[i])->chunk_idx, i);
  (A small stand-alone model of the AddressSpaceView abstraction follows below.)

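AddressSpaceView::Load is how the secondary allocator reads its chunk headers generically: for the in-process view the load simply returns the pointer, while an out-of-process view (used when inspecting another process's heap) copies the object into local memory first. A tiny stand-alone model of that abstraction with hypothetical names, not the sanitizer_common classes:

    #include <cstddef>

    // In-process view: target objects are directly addressable, so Load(p) is p.
    struct LocalView {
      template <typename T>
      static const T *Load(const T *target_ptr, std::size_t count = 1) {
        (void)count;  // a remote view would copy `count` objects into local memory
        return target_ptr;
      }
    };

    struct ChunkHeader {
      std::size_t map_size;
      unsigned chunk_idx;
    };

    // Generic code written against a view template works for either case.
    template <typename AddressSpaceView>
    std::size_t headerMapSize(const ChunkHeader *target_header) {
      return AddressSpaceView::Load(target_header)->map_size;
    }

    // Usage: std::size_t s = headerMapSize<LocalView>(header_in_target_process);
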
/freebsd/contrib/llvm-project/llvm/lib/Target/Mips/

MipsEVAInstrInfo.td:
     19  // Memory Load/Store EVA encodings
     36  // Load-linked EVA, Store-conditional EVA encodings
     52  // Memory Load/Store EVA descriptions
     88  // Load/Store Left/Right EVA descriptions
    123  // Load-linked EVA, Store-conditional EVA descriptions
    189  /// Load and Store EVA Instructions
    205  /// Load-linked EVA, Store-conditional EVA