
Searched for refs:VL (results 1–25 of 85), sorted by relevance.


/freebsd/contrib/llvm-project/llvm/lib/CodeGen/LiveDebugValues/
VarLocBasedImpl.cpp
477 VarLoc VL(MI); in CreateEntryLoc() local
478 assert(VL.Locs.size() == 1 && in CreateEntryLoc()
479 VL.Locs[0].Kind == MachineLocKind::RegisterKind); in CreateEntryLoc()
480 VL.EVKind = EntryValueLocKind::EntryValueKind; in CreateEntryLoc()
481 VL.Expr = EntryExpr; in CreateEntryLoc()
482 VL.Locs[0].Value.RegNo = Reg; in CreateEntryLoc()
483 return VL; in CreateEntryLoc()
492 VarLoc VL(MI); in CreateEntryBackupLoc() local
493 assert(VL.Locs.size() == 1 && in CreateEntryBackupLoc()
494 VL.Locs[0].Kind == MachineLocKind::RegisterKind); in CreateEntryBackupLoc()
[all …]
/freebsd/contrib/llvm-project/clang/lib/AST/
DeclOpenMP.cpp
31 ArrayRef<Expr *> VL) { in Create() argument
33 C, DC, std::nullopt, VL.size(), L); in Create()
34 D->setVars(VL); in Create()
45 void OMPThreadPrivateDecl::setVars(ArrayRef<Expr *> VL) { in setVars() argument
46 assert(VL.size() == Data->getNumChildren() && in setVars()
48 llvm::copy(VL, getVars().begin()); in setVars()
58 SourceLocation L, ArrayRef<Expr *> VL, in Create() argument
61 C, DC, CL, VL.size(), L); in Create()
62 D->setVars(VL); in Create()
74 void OMPAllocateDecl::setVars(ArrayRef<Expr *> VL) { in setVars() argument
[all …]
OpenMPClause.cpp
427 void OMPPrivateClause::setPrivateCopies(ArrayRef<Expr *> VL) { in setPrivateCopies() argument
428 assert(VL.size() == varlist_size() && in setPrivateCopies()
430 std::copy(VL.begin(), VL.end(), varlist_end()); in setPrivateCopies()
436 ArrayRef<Expr *> VL, ArrayRef<Expr *> PrivateVL) { in Create() argument
438 void *Mem = C.Allocate(totalSizeToAlloc<Expr *>(2 * VL.size())); in Create()
440 new (Mem) OMPPrivateClause(StartLoc, LParenLoc, EndLoc, VL.size()); in Create()
441 Clause->setVarRefs(VL); in Create()
452 void OMPFirstprivateClause::setPrivateCopies(ArrayRef<Expr *> VL) { in setPrivateCopies() argument
453 assert(VL.size() == varlist_size() && in setPrivateCopies()
455 std::copy(VL.begin(), VL.end(), varlist_end()); in setPrivateCopies()
[all …]
/freebsd/contrib/llvm-project/llvm/lib/Transforms/Vectorize/
SLPVectorizer.cpp
297 static std::string shortBundleName(ArrayRef<Value *> VL) { in shortBundleName() argument
300 OS << "n=" << VL.size() << " [" << *VL.front() << ", ..]"; in shortBundleName()
306 /// \returns true if all of the instructions in \p VL are in the same block or
308 static bool allSameBlock(ArrayRef<Value *> VL) { in allSameBlock() argument
309 Instruction *I0 = dyn_cast<Instruction>(VL[0]); in allSameBlock()
312 if (all_of(VL, isVectorLikeInstWithConstOps)) in allSameBlock()
316 for (int I = 1, E = VL.size(); I < E; I++) { in allSameBlock()
317 auto *II = dyn_cast<Instruction>(VL[I]); in allSameBlock()
327 /// \returns True if all of the values in \p VL ar
329 allConstant(ArrayRef<Value * > VL) allConstant() argument
337 isSplat(ArrayRef<Value * > VL) isSplat() argument
550 isFixedVectorShuffle(ArrayRef<Value * > VL,SmallVectorImpl<int> & Mask) isFixedVectorShuffle() argument
753 getSameOpcode(ArrayRef<Value * > VL,const TargetLibraryInfo & TLI,unsigned BaseIndex) getSameOpcode() argument
915 allSameType(ArrayRef<Value * > VL) allSameType() argument
1033 getAltInstrMask(ArrayRef<Value * > VL,unsigned Opcode0,unsigned Opcode1) getAltInstrMask() argument
1111 doesNotNeedToSchedule(ArrayRef<Value * > VL) doesNotNeedToSchedule() argument
2132 appendOperandsOfVL(ArrayRef<Value * > VL) appendOperandsOfVL() argument
2606 analyzedReductionVals(ArrayRef<Value * > VL) analyzedReductionVals() argument
2666 ArrayRef<Value *> VL = UserTE->getOperand(OpIdx); getVectorizedOperand() local
3201 newTreeEntry(ArrayRef<Value * > VL,std::optional<ScheduleData * > Bundle,const InstructionsState & S,const EdgeInfo & UserTreeIdx,ArrayRef<int> ReuseShuffleIndices=std::nullopt,ArrayRef<unsigned> ReorderIndices=std::nullopt) newTreeEntry() argument
3213 newTreeEntry(ArrayRef<Value * > VL,TreeEntry::EntryState EntryState,std::optional<ScheduleData * > Bundle,const InstructionsState & S,const EdgeInfo & UserTreeIdx,ArrayRef<int> ReuseShuffleIndices=std::nullopt,ArrayRef<unsigned> ReorderIndices=std::nullopt) newTreeEntry() argument
4406 computeCommonAlignment(ArrayRef<Value * > VL) computeCommonAlignment() argument
4547 canVectorizeLoads(ArrayRef<Value * > VL,const Value * VL0,SmallVectorImpl<unsigned> & Order,SmallVectorImpl<Value * > & PointerOps,bool TryRecursiveCheck) const canVectorizeLoads() argument
4788 clusterSortPtrAccesses(ArrayRef<Value * > VL,Type * ElemTy,const DataLayout & DL,ScalarEvolution & SE,SmallVectorImpl<unsigned> & SortedIndices) clusterSortPtrAccesses() argument
6073 needToScheduleSingleInstruction(ArrayRef<Value * > VL) needToScheduleSingleInstruction() argument
6290 getScalarsVectorizationState(InstructionsState & S,ArrayRef<Value * > VL,bool IsScatterVectorizeUserTE,OrdersType & CurrentOrder,SmallVectorImpl<Value * > & PointerOps) const getScalarsVectorizationState() argument
6687 buildTree_rec(ArrayRef<Value * > VL,unsigned Depth,const EdgeInfo & UserTreeIdx) buildTree_rec() argument
6821 __anon07b2d7746302(ArrayRef<Value *> VL) buildTree_rec() argument
7502 canReuseExtract(ArrayRef<Value * > VL,Value * OpValue,SmallVectorImpl<unsigned> & CurrentOrder,bool ResizeAllowed) const canReuseExtract() argument
8253 getBuildVectorCost(ArrayRef<Value * > VL,Value * Root) getBuildVectorCost() argument
8457 computeExtractCost(ArrayRef<Value * > VL,ArrayRef<int> Mask,ArrayRef<std::optional<TTI::ShuffleKind>> ShuffleKinds,unsigned NumParts) computeExtractCost() argument
8905 ArrayRef<Value *> VL = E->Scalars; adjustExtracts() local
9118 gather(ArrayRef<Value * > VL,unsigned MaskVF=0,Value * Root=nullptr) gather() argument
9251 ArrayRef<Value *> VL = E->Scalars; getEntryCost() local
10804 tryToGatherSingleRegisterExtractElements(MutableArrayRef<Value * > VL,SmallVectorImpl<int> & Mask) const tryToGatherSingleRegisterExtractElements() argument
10901 tryToGatherExtractElements(SmallVectorImpl<Value * > & VL,SmallVectorImpl<int> & Mask,unsigned NumParts) const tryToGatherExtractElements() argument
10928 isGatherShuffledSingleRegisterEntry(const TreeEntry * TE,ArrayRef<Value * > VL,MutableArrayRef<int> Mask,SmallVectorImpl<const TreeEntry * > & Entries,unsigned Part,bool ForOrder) isGatherShuffledSingleRegisterEntry() argument
11299 isGatherShuffledEntry(const TreeEntry * TE,ArrayRef<Value * > VL,SmallVectorImpl<int> & Mask,SmallVectorImpl<SmallVector<const TreeEntry * >> & Entries,unsigned NumParts,bool ForOrder) isGatherShuffledEntry() argument
11354 getGatherCost(ArrayRef<Value * > VL,bool ForPoisonSrc,Type * ScalarTy) const getGatherCost() argument
11409 reorderInputsAccordingToOpcode(ArrayRef<Value * > VL,SmallVectorImpl<Value * > & Left,SmallVectorImpl<Value * > & Right,const BoUpSLP & R) reorderInputsAccordingToOpcode() argument
11595 gather(ArrayRef<Value * > VL,Value * Root,Type * ScalarTy) gather() argument
11924 ArrayRef<Value *> VL = adjustExtracts() local
12137 gather(ArrayRef<Value * > VL,unsigned MaskVF=0,Value * Root=nullptr) gather() argument
12201 ValueList &VL = E->getOperand(NodeIdx); vectorizeOperand() local
14453 buildBundle(ArrayRef<Value * > VL) buildBundle() argument
14482 tryScheduleBundle(ArrayRef<Value * > VL,BoUpSLP * SLP,const InstructionsState & S) tryScheduleBundle() argument
14578 cancelScheduling(ArrayRef<Value * > VL,Value * OpValue) cancelScheduling() argument
16444 tryToVectorizeList(ArrayRef<Value * > VL,BoUpSLP & R,bool MaxVFOnly) tryToVectorizeList() argument
17430 ArrayRef<Value *> VL(std::next(Candidates.begin(), Pos), ReduxWidth); tryToReduce() local
17938 ArrayRef<Value *> VL = R.getRootNodeScalars(); emitReusedOps() local
18414 SmallVector<T *> VL; tryToVectorizeSequence() local
18480 SmallVector<T *> VL; tryToVectorizeSequence() local
[all...]
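
Throughout SLPVectorizer.cpp, VL is the conventional name for a bundle of scalars: an ArrayRef<Value *> that the SLP vectorizer considers packing into a single vector, as in the allSameBlock, allConstant, and allSameType hits above. The sketch below, written against public LLVM headers, only mirrors that convention; the helper names bundleHasUniformType and bundleIsSingleBlock are hypothetical and merely approximate the in-tree checks.

// Illustrative sketch only (not the in-tree implementation): helpers in the
// SLPVectorizer style, where VL is a candidate bundle of scalar values.
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/Casting.h"

using namespace llvm;

// True if every value in the bundle VL has the same type as the first one
// (roughly what allSameType checks before vectorizing a bundle).
static bool bundleHasUniformType(ArrayRef<Value *> VL) {
  return VL.empty() ||
         all_of(VL, [&](Value *V) { return V->getType() == VL.front()->getType(); });
}

// True if every value in VL is an Instruction placed in the same basic block
// (roughly the allSameBlock precondition).
static bool bundleIsSingleBlock(ArrayRef<Value *> VL) {
  if (VL.empty())
    return false;
  auto *I0 = dyn_cast<Instruction>(VL.front());
  if (!I0)
    return false;
  return all_of(VL, [I0](Value *V) {
    auto *I = dyn_cast<Instruction>(V);
    return I && I->getParent() == I0->getParent();
  });
}

The real allSameBlock listed above additionally tolerates vector-like instructions with constant operands; this sketch leaves that case out.
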
LoopIdiomVectorize.cpp
511 Value *VL = Builder.CreateIntrinsic(Intrinsic::experimental_get_vector_length, in createPredicatedFindMismatch() local
522 {VectorLhsGep, AllTrueMask, VL}, nullptr, "lhs.load"); in createPredicatedFindMismatch()
528 {VectorRhsGep, AllTrueMask, VL}, nullptr, "rhs.load"); in createPredicatedFindMismatch()
535 {VectorLhsLoad, VectorRhsLoad, Pred, AllTrueMask, VL}, nullptr, in createPredicatedFindMismatch()
540 VL}); in createPredicatedFindMismatch()
541 Value *MismatchFound = Builder.CreateICmpNE(CTZ, VL); in createPredicatedFindMismatch()
554 Value *VL64 = Builder.CreateZExt(VL, I64Type); in createPredicatedFindMismatch()
/freebsd/contrib/llvm-project/llvm/lib/Target/RISCV/
RISCVInstrFormatsV.td
71 let Defs = [VTYPE, VL];
87 let Defs = [VTYPE, VL];
104 let Defs = [VTYPE, VL];
123 let Uses = [VTYPE, VL];
143 let Uses = [VTYPE, VL];
162 let Uses = [VTYPE, VL];
182 let Uses = [VTYPE, VL];
201 let Uses = [VTYPE, VL];
223 let Uses = [VTYPE, VL];
245 let Uses = [VTYPE, VL];
[all …]
RISCVISelLowering.cpp
2761 /// vector length VL. . in getAllOnesMask() argument
2762 static SDValue getAllOnesMask(MVT VecVT, SDValue VL, const SDLoc &DL, in getAllOnesMask()
2765 return DAG.getNode(RISCVISD::VMSET_VL, DL, MaskVT, VL); in getAllOnesMask()
2770 // If we know the exact VLEN, and our VL is exactly equal to VLMAX, in getVLOp()
2785 SDValue VL = DAG.getRegister(RISCV::X0, Subtarget.getXLenVT()); in getDefaultScalableVLOps()
2786 SDValue Mask = getAllOnesMask(VecVT, VL, DL, DAG); in getDefaultScalableVLOps()
2787 return {Mask, VL}; in getDefaultScalableVLOps()
2794 SDValue VL = getVLOp(NumElts, ContainerVT, DL, DAG, Subtarget); in getDefaultVLOps()
2795 SDValue Mask = getAllOnesMask(ContainerVT, VL, DL, DAG); in getDefaultVLOps()
2796 return {Mask, VL}; in getDefaultVLOps()
2784 SDValue VL = DAG.getRegister(RISCV::X0, Subtarget.getXLenVT()); getDefaultScalableVLOps() local
2793 SDValue VL = getVLOp(NumElts, ContainerVT, DL, DAG, Subtarget); getDefaultVLOps() local
3054 SDValue Mask, VL; lowerVectorFTRUNC_FCEIL_FFLOOR_FROUND() local
3304 getVSlidedown(SelectionDAG & DAG,const RISCVSubtarget & Subtarget,const SDLoc & DL,EVT VT,SDValue Merge,SDValue Op,SDValue Offset,SDValue Mask,SDValue VL,unsigned Policy=RISCVII::TAIL_UNDISTURBED_MASK_UNDISTURBED) getVSlidedown() argument
3316 getVSlideup(SelectionDAG & DAG,const RISCVSubtarget & Subtarget,const SDLoc & DL,EVT VT,SDValue Merge,SDValue Op,SDValue Offset,SDValue Mask,SDValue VL,unsigned Policy=RISCVII::TAIL_UNDISTURBED_MASK_UNDISTURBED) getVSlideup() argument
4238 splatPartsI64WithVL(const SDLoc & DL,MVT VT,SDValue Passthru,SDValue Lo,SDValue Hi,SDValue VL,SelectionDAG & DAG) splatPartsI64WithVL() argument
4294 splatSplitI64WithVL(const SDLoc & DL,MVT VT,SDValue Passthru,SDValue Scalar,SDValue VL,SelectionDAG & DAG) splatSplitI64WithVL() argument
4305 lowerScalarSplat(SDValue Passthru,SDValue Scalar,SDValue VL,MVT VT,const SDLoc & DL,SelectionDAG & DAG,const RISCVSubtarget & Subtarget) lowerScalarSplat() argument
4343 lowerScalarInsert(SDValue Scalar,SDValue VL,MVT VT,const SDLoc & DL,SelectionDAG & DAG,const RISCVSubtarget & Subtarget) lowerScalarInsert() argument
4722 SDValue VL = DAG.getConstant(NumSubElts + Index, DL, XLenVT); lowerVECTOR_SHUFFLEAsVSlideup() local
5410 SDValue Mask, VL; lowerCTLZ_CTTZ_ZERO_UNDEF() local
5903 SDValue Mask, VL; lowerFMAXIMUM_FMINIMUM() local
6425 SDValue VL = getDefaultVLOps(VT, ContainerVT, DL, DAG, Subtarget).second; LowerOperation() local
8060 SDValue VL = getDefaultScalableVLOps(VT, DL, DAG, Subtarget).second; lowerVectorMaskSplat() local
8064 SDValue VL = getDefaultScalableVLOps(VT, DL, DAG, Subtarget).second; lowerVectorMaskSplat() local
8094 auto VL = getDefaultVLOps(VecVT, ContainerVT, DL, DAG, Subtarget).second; lowerSPLAT_VECTOR_PARTS() local
8130 SDValue VL = getDefaultVLOps(VecVT, ContainerVT, DL, DAG, Subtarget).second; lowerVectorMaskExt() local
8187 SDValue Mask, VL; lowerVectorMaskTruncLike() local
8256 SDValue Mask, VL; lowerVectorTruncLike() local
8370 SDValue Mask, VL; lowerVectorFPExtendOrRoundLike() local
8881 SDValue VL = DAG.getNode(ISD::INTRINSIC_WO_CHAIN, DL, XLenVT, SETVL, AVL, lowerVectorIntrinsicScalars() local
8935 SDValue VL = getVLOperand(Op); lowerVectorIntrinsicScalars() local
9248 SDValue VL = getVLOperand(Op); LowerINTRINSIC_WO_CHAIN() local
9431 SDValue VL = getDefaultVLOps(VT, ContainerVT, DL, DAG, Subtarget).second; LowerINTRINSIC_W_CHAIN() local
9501 SDValue VL = getVLOp(VT.getVectorNumElements(), ContainerVT, DL, DAG, LowerINTRINSIC_W_CHAIN() local
9583 SDValue VL = getDefaultVLOps(VT, ContainerVT, DL, DAG, Subtarget).second; LowerINTRINSIC_VOID() local
9622 SDValue VL = getVLOp(VT.getVectorNumElements(), ContainerVT, DL, DAG, LowerINTRINSIC_VOID() local
9731 SDValue Mask, VL; lowerVectorMaskVecReduction() local
9802 lowerReductionSeq(unsigned RVVOpcode,MVT ResVT,SDValue StartValue,SDValue Vec,SDValue Mask,SDValue VL,const SDLoc & DL,SelectionDAG & DAG,const RISCVSubtarget & Subtarget) lowerReductionSeq() argument
9981 SDValue VL = Op.getOperand(3); lowerVPREDUCE() local
10089 SDValue VL = getVLOp(EndIndex, ContainerVT, DL, DAG, Subtarget); lowerINSERT_SUBVECTOR() local
10326 SDValue VL = getVLOp(SubVecVT.getVectorNumElements(), ContainerVT, DL, DAG, lowerEXTRACT_SUBVECTOR() local
10537 SDValue VL = DAG.getRegister(RISCV::X0, XLenVT); lowerVECTOR_INTERLEAVE() local
10783 SDValue VL = getVLOp(VT.getVectorNumElements(), ContainerVT, DL, DAG, Subtarget); lowerFixedLengthVectorLoadToRVV() local
10843 SDValue VL = getVLOp(VT.getVectorNumElements(), ContainerVT, DL, DAG, lowerFixedLengthVectorStoreToRVV() local
10866 SDValue Mask, PassThru, VL; lowerMaskedLoad() local
10929 SDValue Val, Mask, VL; lowerMaskedStore() local
11113 SDValue Mask, VL; lowerABS() local
11173 SDValue VL = getDefaultVLOps(VT, ContainerVT, DL, DAG, Subtarget).second; lowerFixedLengthVectorSelectToRVV() local
11297 SDValue VL = Op.getOperand(2); lowerVPExtMaskOp() local
11332 SDValue VL = Op.getOperand(4); lowerVPSetCCMaskOp() local
11409 SDValue VL = Op.getOperand(2); lowerVPFPIntConvOp() local
11625 SDValue VL = Op.getOperand(2); lowerVPSplatExperimental() local
11773 SDValue VL = Op->getOperand(3); lowerLogicVPOp() local
11891 SDValue Index, Mask, PassThru, VL; lowerMaskedGather() local
11988 SDValue Index, Mask, Val, VL; lowerMaskedScatter() local
14767 SDValue Mask, VL, Merge; materialize() local
15513 SDValue VL = N->getOperand(4 + Offset); combineVFMADD_VLWithVFNEG_VL() local
15559 SDValue VL = N->getOperand(4); performVFMADD_VLCombine() local
16472 SDValue VL = N->getOperand(2); combineTruncOfSraSext() local
16531 SDValue VL = N->getOperand(2); combineTruncToVnclip() local
17255 SDValue VL = N->getOperand(4); PerformDAGCombine() local
17446 SDValue VL = N->getOperand(2); PerformDAGCombine() local
17457 SDValue VL = N->getOperand(2); PerformDAGCombine() local
17500 SDValue VL = N->getOperand(2); PerformDAGCombine() local
17590 SDValue VL = N->getOperand(2); PerformDAGCombine() local
21709 Value *VL = ConstantInt::get(XLenTy, VTy->getNumElements()); lowerInterleavedLoad() local
21775 Value *VL = ConstantInt::get(XLenTy, VTy->getNumElements()); lowerInterleavedStore() local
21803 Value *VL; lowerDeinterleaveIntrinsicToLoad() local
21853 Value *VL; lowerInterleaveIntrinsicToStore() local
[all...]
RISCVCodeGenPrepare.cpp
172 Value *BasePtr, *VL; in expandVPStrideLoad() local
176 m_Value(BasePtr), m_Zero(), m_AllOnes(), m_Value(VL)))) in expandVPStrideLoad()
184 if (!isKnownNonZero(VL, {*DL, DT, nullptr, &II})) in expandVPStrideLoad()
193 {Val, II.getOperand(2), VL}); in expandVPStrideLoad()
RISCVRegisterInfo.cpp
138 markSuperRegs(Reserved, RISCV::VL); in getReservedRegs()
342 Register VL = MRI.createVirtualRegister(&RISCV::GPRRegClass); in lowerVSPILL() local
347 STI.getInstrInfo()->movImm(MBB, II, DL, VL, Offset); in lowerVSPILL()
349 BuildMI(MBB, II, DL, TII->get(RISCV::PseudoReadVLENB), VL); in lowerVSPILL()
352 BuildMI(MBB, II, DL, TII->get(RISCV::SLLI), VL) in lowerVSPILL()
353 .addReg(VL) in lowerVSPILL()
374 .addReg(VL, getKillRegState(I == NF - 2)); in lowerVSPILL()
419 Register VL = MRI.createVirtualRegister(&RISCV::GPRRegClass); in lowerVRELOAD() local
424 STI.getInstrInfo()->movImm(MBB, II, DL, VL, Offset); in lowerVRELOAD()
426 BuildMI(MBB, II, DL, TII->get(RISCV::PseudoReadVLENB), VL); in lowerVRELOAD()
[all …]
RISCVVectorPeephole.cpp
84 MachineOperand &VL = MI.getOperand(RISCVII::getVLOpNum(MI.getDesc())); in convertToVLMAX() local
85 if (!VL.isReg()) in convertToVLMAX()
87 MachineInstr *Def = MRI->getVRegDef(VL.getReg()); in convertToVLMAX()
126 VL.ChangeToImmediate(RISCV::VLMaxSentinel); in convertToVLMAX()
RISCVISelDAGToDAG.cpp
67 SDValue VL = CurDAG->getRegister(RISCV::X0, Subtarget->getXLenVT()); in PreprocessISelDAG() local
72 Result = CurDAG->getNode(Opc, DL, VT, CurDAG->getUNDEF(VT), Src, VL); in PreprocessISelDAG()
84 SDValue VL = N->getOperand(3); in PreprocessISelDAG() local
115 VL}; in PreprocessISelDAG()
315 SDValue VL; in addVectorLoadStoreOperands() local
316 selectVLOp(Node->getOperand(CurOp++), VL); in addVectorLoadStoreOperands()
317 Operands.push_back(VL); in addVectorLoadStoreOperands()
1640 SDValue VL; in Select() local
1641 selectVLOp(Node->getOperand(3), VL); in Select()
1645 ReplaceNode(Node, CurDAG->getMachineNode(VMSetOpcode, DL, VT, VL, SEW)); in Select()
[all …]
RISCVInsertVSETVLI.cpp
396 MI.readsRegister(RISCV::VL, /*TRI=*/nullptr)) in getDemanded()
1088 .addReg(RISCV::VL, RegState::Implicit); in insertVSETVLI()
1106 .addReg(RISCV::VL, RegState::Implicit); in insertVSETVLI()
1287 MI.modifiesRegister(RISCV::VL, /*TRI=*/nullptr) || in transferAfter()
1422 assert(MI.getOperand(3).getReg() == RISCV::VL && in emitVSETVLIs()
1474 MI.addOperand(MachineOperand::CreateReg(RISCV::VL, /*isDef*/ false, in emitVSETVLIs()
1482 MI.modifiesRegister(RISCV::VL, /*TRI=*/nullptr) || in emitVSETVLIs()
1670 MI.modifiesRegister(RISCV::VL, /*TRI=*/nullptr) || in coalesceVSETVLIs()
/freebsd/contrib/llvm-project/llvm/lib/Target/RISCV/GISel/
RISCVLegalizerInfo.cpp
665 static MachineInstrBuilder buildAllOnesMask(LLT VecTy, const SrcOp &VL, in buildAllOnesMask() argument
669 return MIB.buildInstr(RISCV::G_VMSET_VL, {MaskTy}, {VL}); in buildAllOnesMask()
679 Register VL(RISCV::X0); in buildDefaultVLOps() local
680 MachineInstrBuilder Mask = buildAllOnesMask(VecTy, VL, MIB, MRI); in buildDefaultVLOps()
681 return {Mask, VL}; in buildDefaultVLOps()
686 Register Hi, Register VL, MachineIRBuilder &MIB, in buildSplatPartsS64WithVL() argument
697 {Passthru, Lo, Hi, VL}); in buildSplatPartsS64WithVL()
702 const SrcOp &Scalar, Register VL, in buildSplatSplitS64WithVL() argument
707 Unmerge.getReg(1), VL, MIB, MRI); in buildSplatSplitS64WithVL()
729 auto [_, VL] = buildDefaultVLOps(Dst, MIB, MRI); in legalizeSplatVector()
[all …]
/freebsd/sys/contrib/device-tree/src/arm/marvell/
kirkwood-linkstation-lsvl.dts
3 * Device Tree file for Buffalo Linkstation LS-VL
13 model = "Buffalo Linkstation LS-VL";
/freebsd/contrib/llvm-project/llvm/include/llvm/CodeGen/PBQP/
ReductionRules.h
152 unsigned VL = V.getLength(); in hasRegisterOptions() local
155 if (VL <= 1) in hasRegisterOptions()
160 for (unsigned i = 1; i < VL; ++i) in hasRegisterOptions()
/freebsd/sys/contrib/device-tree/Bindings/sound/
cs43130.txt
10 - VA-supply, VP-supply, VL-supply, VCP-supply, VD-supply:
57 VL-supply = <&dummy_vreg>;
cs42l42.txt
9 - VP-supply, VCP-supply, VD_FILT-supply, VL-supply, VA-supply :
102 VL-supply = <&dummy_vreg>;
/freebsd/contrib/llvm-project/clang/include/clang/Basic/
riscv_vector.td
257 (Address0, ..., Address{NF - 1}, Ptr, VL)
259 (Address0, ..., Address{NF - 1}, Mask, Ptr, VL)
262 Ptr, VL)
265 Ptr, VL)
273 (Address0, ..., Address{NF - 1}, Ptr, NewVL, VL)
275 (Address0, ..., Address{NF - 1}, Mask, Ptr, NewVL, VL)
278 Ptr, NewVL, VL)
281 Ptr, NewVL, VL)
286 (Address0, ..., Address{NF - 1}, Ptr, Stride, VL)
288 (Address0, ..., Address{NF - 1}, Mask, Ptr, Stride, VL)
[all …]
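
The riscv_vector.td fragments above document the operand order of the clang builtins behind the RVV segment load/store intrinsics, with the explicit vector length VL always passed last (after the pointer, stride, or NewVL operands). As a hedged illustration of that trailing-VL convention at the source level (not the builtin interface itself), a strip-mined copy loop using the standard RVV C intrinsics passes the value returned by vsetvl as the final vl argument of each load and store; this sketch assumes clang's riscv_vector.h and a target with the V extension, and the buffer names are made up.

// Sketch only: assumes a clang toolchain with the RVV C intrinsics
// (riscv_vector.h) and a target implementing the V extension.
#include <riscv_vector.h>
#include <stddef.h>
#include <stdint.h>

// Copy n 32-bit elements, strip-mined by the runtime vector length.
void copy_i32(int32_t *dst, const int32_t *src, size_t n) {
  for (size_t i = 0; i < n;) {
    size_t vl = __riscv_vsetvl_e32m1(n - i);            // elements handled this trip
    vint32m1_t v = __riscv_vle32_v_i32m1(src + i, vl);  // VL is the trailing operand
    __riscv_vse32_v_i32m1(dst + i, v, vl);              // ...for the store as well
    i += vl;
  }
}
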
/freebsd/contrib/llvm-project/clang/include/clang/AST/
DeclOpenMP.h
129 void setVars(ArrayRef<Expr *> VL);
134 ArrayRef<Expr *> VL);
493 void setVars(ArrayRef<Expr *> VL);
497 SourceLocation L, ArrayRef<Expr *> VL,
OpenMPClause.h
303 void setVarRefs(ArrayRef<Expr *> VL) { in setVarRefs() argument
304 assert(VL.size() == NumVars && in setVarRefs()
306 std::copy(VL.begin(), VL.end(), in setVarRefs()
484 SourceLocation EndLoc, ArrayRef<Expr *> VL);
843 void setSizesRefs(ArrayRef<Expr *> VL) { in setSizesRefs() argument
844 assert(VL.size() == NumSizes); in setSizesRefs()
845 std::copy(VL.begin(), VL.end(), in setSizesRefs()
2675 void setPrivateCopies(ArrayRef<Expr *> VL);
2697 SourceLocation EndLoc, ArrayRef<Expr *> VL,
2784 void setPrivateCopies(ArrayRef<Expr *> VL);
[all …]
/freebsd/contrib/llvm-project/llvm/lib/Analysis/
VectorUtils.cpp
841 Instruction *llvm::propagateMetadata(Instruction *Inst, ArrayRef<Value *> VL) { in propagateMetadata() argument
842 if (VL.empty()) in propagateMetadata()
844 Instruction *I0 = cast<Instruction>(VL[0]); in propagateMetadata()
853 for (int J = 1, E = VL.size(); MD && J != E; ++J) { in propagateMetadata()
854 const Instruction *IJ = cast<Instruction>(VL[J]); in propagateMetadata()
1539 SmallVector<Value *, 4> VL; in addMetadata() local
1540 std::transform(Members.begin(), Members.end(), std::back_inserter(VL), in addMetadata()
1542 propagateMetadata(NewInst, VL); in addMetadata()
/freebsd/contrib/llvm-project/llvm/lib/Target/VE/
VEInstrFormats.td
46 /// VLIndex is the index of VL register in MI's operands. The HW instruction
48 /// For example, the index of VL of (VST $sy, $sz, $sx, $vl) is 3 (beginning
49 /// from 0), and the index of VL of (VST $sy, $sz, $sx, $vm, $vl) is 4. We
/freebsd/contrib/llvm-project/llvm/include/llvm/IR/
Dominators.h
87 BBDomTree::VerificationLevel VL);
89 BBPostDomTree::VerificationLevel VL);
/freebsd/contrib/llvm-project/llvm/include/llvm/CodeGen/
MachinePostDominators.h
39 MBBPostDomTree::VerificationLevel VL);
/freebsd/contrib/llvm-project/llvm/lib/CodeGen/
MachinePostDominators.cpp
36 MBBPostDomTree::VerificationLevel VL);
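
Unlike the RISC-V and SLP hits, VL in the Dominators.h and MachinePostDominators entries is a VerificationLevel: a knob for how thoroughly a (post)dominator tree self-check runs, not a vector length. Below is a minimal sketch of the IR-level form of that API, assuming the usual public LLVM headers; the two-block function it builds is made up for the example.

// Sketch only: builds a trivial function, computes its dominator tree, and
// verifies it at the cheap verification level.
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"

using namespace llvm;

int main() {
  LLVMContext Ctx;
  Module M("demo", Ctx);

  // void f(): entry -> exit, return void.
  FunctionType *FT = FunctionType::get(Type::getVoidTy(Ctx), /*isVarArg=*/false);
  Function *F = Function::Create(FT, Function::ExternalLinkage, "f", &M);
  BasicBlock *Entry = BasicBlock::Create(Ctx, "entry", F);
  BasicBlock *Exit = BasicBlock::Create(Ctx, "exit", F);
  IRBuilder<> B(Entry);
  B.CreateBr(Exit);
  B.SetInsertPoint(Exit);
  B.CreateRetVoid();

  // The VL argument picks how expensive the self-check is: Fast, Basic, or Full.
  DominatorTree DT(*F);
  return DT.verify(DominatorTree::VerificationLevel::Fast) ? 0 : 1;
}
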
