/freebsd/contrib/llvm-project/llvm/lib/Target/RISCV/
RISCVInstrInfoZb.td
   54  def shfl_uimm : RISCVOp, ImmLeaf<XLenVT, [{
   85  def BCLRMask : ImmLeaf<XLenVT, [{
   92  def SingleBitSetMask : ImmLeaf<XLenVT, [{
  230  def Shifted32OnesMask : IntImmLeaf<XLenVT, [{
  239  def sh1add_op : ComplexPattern<XLenVT, 1, "selectSHXADDOp<1>", [], [], 6>;
  240  def sh2add_op : ComplexPattern<XLenVT, 1, "selectSHXADDOp<2>", [], [], 6>;
  241  def sh3add_op : ComplexPattern<XLenVT, 1, "selectSHXADDOp<3>", [], [], 6>;
  243  def sh1add_uw_op : ComplexPattern<XLenVT, 1, "selectSHXADD_UWOp<1>", [], [], 6>;
  244  def sh2add_uw_op : ComplexPattern<XLenVT, 1, "selectSHXADD_UWOp<2>", [], [], 6>;
  245  def sh3add_uw_op : ComplexPattern<XLenVT, 1, "selectSHXADD_UWOp<3>", [], [], 6>;
  [all …]
RISCVInstrInfoXTHead.td
   20  SDTCisVT<3, XLenVT>]>;
   24  SDTCisVT<3, XLenVT>]>;
  539  def : Pat<(add (XLenVT GPR:$rs1), (shl GPR:$rs2, uimm2:$uimm2)),
  541  def : Pat<(XLenVT (riscv_shl_add GPR:$rs1, uimm2:$uimm2, GPR:$rs2)),
  545  def : Pat<(add_non_imm12 sh1add_op:$rs1, (XLenVT GPR:$rs2)),
  547  def : Pat<(add_non_imm12 sh2add_op:$rs1, (XLenVT GPR:$rs2)),
  549  def : Pat<(add_non_imm12 sh3add_op:$rs1, (XLenVT GPR:$rs2)),
  552  def : Pat<(add (XLenVT GPR:$r), CSImm12MulBy4:$i),
  553  … (TH_ADDSL GPR:$r, (XLenVT (ADDI (XLenVT X0), (SimmShiftRightBy2XForm CSImm12MulBy4:$i))), 2)>;
  554  def : Pat<(add (XLenVT GPR:$r), CSImm12MulBy8:$i),
  [all …]
RISCVInstrInfoZa.td
   71  multiclass AMOCASPat<string AtomicOp, string BaseInst, ValueType vt = XLenVT,
  191  defm : AMOPat<"atomic_swap_i8", "AMOSWAP_B", XLenVT, [HasStdExtZabha]>;
  192  defm : AMOPat<"atomic_load_add_i8", "AMOADD_B", XLenVT, [HasStdExtZabha]>;
  193  defm : AMOPat<"atomic_load_and_i8", "AMOAND_B", XLenVT, [HasStdExtZabha]>;
  194  defm : AMOPat<"atomic_load_or_i8", "AMOOR_B", XLenVT, [HasStdExtZabha]>;
  195  defm : AMOPat<"atomic_load_xor_i8", "AMOXOR_B", XLenVT, [HasStdExtZabha]>;
  196  defm : AMOPat<"atomic_load_max_i8", "AMOMAX_B", XLenVT, [HasStdExtZabha]>;
  197  defm : AMOPat<"atomic_load_min_i8", "AMOMIN_B", XLenVT, [HasStdExtZabha]>;
  198  defm : AMOPat<"atomic_load_umax_i8", "AMOMAXU_B", XLenVT, [HasStdExtZabha]>;
  199  defm : AMOPat<"atomic_load_umin_i8", "AMOMINU_B", XLenVT, [HasStdExtZabha]>;
  [all …]
RISCVInstrInfoZicbo.td
   20  ImmLeaf<XLenVT, [{return isShiftedInt<7, 5>(Imm);}]> {
   79  def : Pat<(prefetch (AddrRegImmLsb00000 (XLenVT GPR:$rs1), simm12_lsb00000:$imm12),
   82  def : Pat<(prefetch (AddrRegImmLsb00000 (XLenVT GPR:$rs1), simm12_lsb00000:$imm12),
   85  def : Pat<(prefetch (AddrRegImmLsb00000 (XLenVT GPR:$rs1), simm12_lsb00000:$imm12),
RISCVInstrInfoSFB.td
   21  (riscv_selectcc_frag:$cc (XLenVT GPR:$lhs),
   23  (XLenVT GPR:$truev),
   40  (riscv_selectcc_frag:$cc (XLenVT GPR:$lhs),
   41  (XLenVT GPR:$rhs),
   42  cond, (XLenVT GPRNoX0:$truev),
   43  (XLenVT GPRNoX0:$falsev)))]>,
  197  def : Pat<(XLenVT (abs GPR:$rs1)),
  198  (PseudoCCSUB (XLenVT GPR:$rs1), (XLenVT X0), /* COND_LT */ 2,
  199  (XLenVT GPR:$rs1), (XLenVT X0), (XLenVT GPR:$rs1))>;
RISCVInstrInfo.td
   24  def SDT_RISCVCall : SDTypeProfile<0, -1, [SDTCisVT<0, XLenVT>]>;
  150  class RISCVOp<ValueType vt = XLenVT> : Operand<vt> {
  161  RISCVUImmOp<bitsNum>, ImmLeaf<XLenVT, "return isUInt<" # bitsNum # ">(Imm);">;
  171  RISCVSImmOp<bitsNum>, ImmLeaf<XLenVT, "return isInt<" # bitsNum # ">(Imm);">;
  192  def uimmlog2xlen : RISCVOp, ImmLeaf<XLenVT, [{
  249  def simm12_no6 : ImmLeaf<XLenVT, [{
  326  def bare_symbol : Operand<XLenVT> {
  338  def call_symbol : Operand<XLenVT> {
  350  def pseudo_jump_symbol : Operand<XLenVT> {
  362  def tprel_add_symbol : Operand<XLenVT> {
  [all …]
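Aside: the RISCVUImmOp/RISCVSImmOp operands listed above wrap ImmLeaf<XLenVT, ...> predicates built on LLVM's isUInt<N>/isInt<N> helpers, so an immediate only matches an operand such as simm12 or a 5-bit shift amount when it fits the encoded field. A minimal, self-contained C++ sketch of those range checks follows; fitsSInt/fitsUInt are local stand-ins written here for illustration (the backend itself uses llvm::isInt/llvm::isUInt from llvm/Support/MathExtras.h):

#include <cstdint>
#include <iostream>

// Local stand-ins mirroring llvm::isInt<N>/llvm::isUInt<N>: does the value
// fit in N signed (two's complement) or N unsigned bits?  These are the
// checks the ImmLeaf<XLenVT, ...> fragments above run on a candidate
// immediate before the corresponding operand is allowed to match.
template <unsigned N> constexpr bool fitsSInt(int64_t x) {
  return x >= -(int64_t(1) << (N - 1)) && x < (int64_t(1) << (N - 1));
}
template <unsigned N> constexpr bool fitsUInt(uint64_t x) {
  return x < (uint64_t(1) << N);
}

int main() {
  std::cout << fitsSInt<12>(2047) << '\n'; // 1: largest value a 12-bit signed immediate encodes
  std::cout << fitsSInt<12>(2048) << '\n'; // 0: out of range, needs a longer materialization sequence
  std::cout << fitsUInt<5>(31) << '\n';    // 1: valid 5-bit unsigned field
  std::cout << fitsUInt<5>(32) << '\n';    // 0: rejected by the predicate
}

The same shape, with isShiftedInt/isShiftedUInt for scaled fields, appears in the compressed-instruction operands of RISCVInstrInfoC.td further down.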
RISCVInstrInfoXSf.td
   32  class PayloadOp<int bitsNum> : RISCVOp, TImmLeaf<XLenVT, "return isUInt<" # bitsNum # ">(Imm);"> {
   42  def tsimm5 : Operand<XLenVT>, TImmLeaf<XLenVT, [{return isInt<5>(Imm);}]> {
  464  SDTCisVT<1, XLenVT>,
  472  SDTCisVT<0, XLenVT>]>;
  475  SDTCisVT<1, XLenVT>,
  479  def SDT_SF_VC_XVV : SDTypeProfile<0, 5, [SDTCisVT<0, XLenVT>,
  485  SDTCisVT<1, XLenVT>,
  490  def SDT_SF_VC_XVW : SDTypeProfile<0, 5, [SDTCisVT<0, XLenVT>,
  495  SDTCisVT<1, XLenVT>,
  [all …]
RISCVInstrInfoVVLPatterns.td
   29  SDTCisVT<4, XLenVT>]>;
   37  SDTCisVT<5, XLenVT>]>;
   48  SDTCisVT<5, XLenVT>,
   49  SDTCisVT<6, XLenVT>]>;
   55  SDTCisVT<3, XLenVT>]>;
   62  SDTCisVT<5, XLenVT>]>;
   70  SDTCisVT<5, XLenVT>]>;
   76  SDTCisVT<3, XLenVT>]>>;
   80  SDTCisVT<2, XLenVT>,
   81  SDTCisVT<3, XLenVT>]>>;
  [all …]
RISCVInstrInfoD.td
  412  def : Pat<(XLenVT (strict_fsetccs FPR64:$rs1, FPR64:$rs2, SETEQ)),
  413  (AND (XLenVT (FLE_D $rs1, $rs2)),
  414  (XLenVT (FLE_D $rs2, $rs1)))>;
  415  def : Pat<(XLenVT (strict_fsetccs FPR64:$rs1, FPR64:$rs2, SETOEQ)),
  416  (AND (XLenVT (FLE_D $rs1, $rs2)),
  417  (XLenVT (FLE_D $rs2, $rs1)))>;
  419  def : Pat<(XLenVT (strict_fsetccs FPR64:$rs1, FPR64:$rs1, SETEQ)),
  421  def : Pat<(XLenVT (strict_fsetccs FPR64:$rs1, FPR64:$rs1, SETOEQ)),
  432  def : Pat<(XLenVT (strict_fsetccs (f64 FPR64INX:$rs1), FPR64INX:$rs2, SETEQ)),
  433  (AND (XLenVT (FLE_D_INX $rs1, $rs2)),
  [all …]
RISCVISelLowering.cpp
   118  MVT XLenVT = Subtarget.getXLenVT(); in RISCVTargetLowering() local
   121  addRegisterClass(XLenVT, &RISCV::GPRRegClass); in RISCVTargetLowering()
   235  setLoadExtAction({ISD::EXTLOAD, ISD::SEXTLOAD, ISD::ZEXTLOAD}, XLenVT, in RISCVTargetLowering()
   242  setOperationAction(ISD::DYNAMIC_STACKALLOC, XLenVT, Expand); in RISCVTargetLowering()
   245  setOperationAction(ISD::BR_CC, XLenVT, Expand); in RISCVTargetLowering()
   249  setOperationAction(ISD::SELECT_CC, XLenVT, Expand); in RISCVTargetLowering()
   254  setCondCodeAction(ISD::SETLE, XLenVT, Expand); in RISCVTargetLowering()
   255  setCondCodeAction(ISD::SETGT, XLenVT, Custom); in RISCVTargetLowering()
   256  setCondCodeAction(ISD::SETGE, XLenVT, Expand); in RISCVTargetLowering()
   258  setCondCodeAction(ISD::SETULE, XLenVT, Expand); in RISCVTargetLowering()
  3093  MVT XLenVT = Subtarget.getXLenVT(); lowerVectorFTRUNC_FCEIL_FFLOOR_FROUND() local
  3201  MVT XLenVT = Subtarget.getXLenVT(); lowerVectorStrictFTRUNC_FCEIL_FFLOOR_FROUND() local
  3522  MVT XLenVT = Subtarget.getXLenVT(); lowerBuildVectorViaDominantValues() local
  3629  MVT XLenVT = Subtarget.getXLenVT(); lowerBuildVectorOfConstants() local
  3962  MVT XLenVT = Subtarget.getXLenVT(); lowerBuildVectorViaPacking() local
  4028  MVT XLenVT = Subtarget.getXLenVT(); lowerBUILD_VECTOR() local
  4314  MVT XLenVT = Subtarget.getXLenVT(); lowerScalarSplat() local
  4348  const MVT XLenVT = Subtarget.getXLenVT(); lowerScalarInsert() local
  4668  MVT XLenVT = Subtarget.getXLenVT(); lowerVECTOR_SHUFFLEAsVSlidedown() local
  4709  MVT XLenVT = Subtarget.getXLenVT(); lowerVECTOR_SHUFFLEAsVSlideup() local
  5082  MVT XLenVT = Subtarget.getXLenVT(); lowerVECTOR_SHUFFLE() local
  5521  MVT XLenVT = Subtarget.getXLenVT(); lowerVPCttzElements() local
  5761  MVT XLenVT = Subtarget.getXLenVT(); LowerIS_FPCLASS() local
  5868  MVT XLenVT = Subtarget.getXLenVT(); lowerFMAXIMUM_FMINIMUM() local
  6300  MVT XLenVT = Subtarget.getXLenVT(); LowerOperation() local
  6434  MVT XLenVT = Subtarget.getXLenVT(); LowerOperation() local
  7409  MVT XLenVT = Subtarget.getXLenVT(); getStaticTLSAddr() local
  7697  MVT XLenVT = Subtarget.getXLenVT(); lowerSELECT() local
  7872  MVT XLenVT = Subtarget.getXLenVT(); lowerBRCOND() local
  7936  MVT XLenVT = Subtarget.getXLenVT(); lowerRETURNADDR() local
  8132  MVT XLenVT = Subtarget.getXLenVT(); lowerVectorMaskExt() local
  8504  MVT XLenVT = Subtarget.getXLenVT(); lowerINSERT_VECTOR_ELT() local
  8619  MVT XLenVT = Subtarget.getXLenVT(); lowerEXTRACT_VECTOR_ELT() local
  8790  MVT XLenVT = Subtarget.getXLenVT(); lowerVectorIntrinsicScalars() local
  8953  MVT XLenVT = Subtarget.getXLenVT(); lowerGetVectorLength() local
  8995  MVT XLenVT = Subtarget.getXLenVT(); lowerCttzElts() local
  9030  MVT XLenVT = Subtarget.getXLenVT(); promoteVCIXScalar() local
  9081  MVT XLenVT = Subtarget.getXLenVT(); LowerINTRINSIC_WO_CHAIN() local
  9409  MVT XLenVT = Subtarget.getXLenVT(); LowerINTRINSIC_W_CHAIN() local
  9497  MVT XLenVT = Subtarget.getXLenVT(); LowerINTRINSIC_W_CHAIN() local
  9563  MVT XLenVT = Subtarget.getXLenVT(); LowerINTRINSIC_VOID() local
  9618  MVT XLenVT = Subtarget.getXLenVT(); LowerINTRINSIC_VOID() local
  9723  MVT XLenVT = Subtarget.getXLenVT(); lowerVectorMaskVecReduction() local
  9806  const MVT XLenVT = Subtarget.getXLenVT(); lowerReductionSeq() local
  9949  MVT XLenVT = Subtarget.getXLenVT(); lowerFPVECREDUCE() local
  9966  MVT XLenVT = Subtarget.getXLenVT(); lowerVPREDUCE() local
 10017  MVT XLenVT = Subtarget.getXLenVT(); lowerINSERT_SUBVECTOR() local
 10253  MVT XLenVT = Subtarget.getXLenVT(); lowerEXTRACT_SUBVECTOR() local
 10536  MVT XLenVT = Subtarget.getXLenVT(); lowerVECTOR_INTERLEAVE() local
 10620  MVT XLenVT = Subtarget.getXLenVT(); lowerSTEP_VECTOR() local
 10693  MVT XLenVT = Subtarget.getXLenVT(); lowerVECTOR_REVERSE() local
 10724  MVT XLenVT = Subtarget.getXLenVT(); lowerVECTOR_SPLICE() local
 10765  MVT XLenVT = Subtarget.getXLenVT(); lowerFixedLengthVectorLoadToRVV() local
 10815  MVT XLenVT = Subtarget.getXLenVT(); lowerFixedLengthVectorStoreToRVV() local
 10879  MVT XLenVT = Subtarget.getXLenVT(); lowerMaskedLoad() local
 10947  MVT XLenVT = Subtarget.getXLenVT(); lowerMaskedStore() local
 11306  MVT XLenVT = Subtarget.getXLenVT(); lowerVPExtMaskOp() local
 11437  MVT XLenVT = Subtarget.getXLenVT(); lowerVPFPIntConvOp() local
 11505  MVT XLenVT = Subtarget.getXLenVT(); lowerVPFPIntConvOp() local
 11548  const MVT XLenVT = Subtarget.getXLenVT(); lowerVPSpliceExperimental() local
 11648  MVT XLenVT = Subtarget.getXLenVT(); lowerVPReverseExperimental() local
 11793  MVT XLenVT = Subtarget.getXLenVT(); lowerVPStridedLoad() local
 11839  MVT XLenVT = Subtarget.getXLenVT(); lowerVPStridedStore() local
 11910  MVT XLenVT = Subtarget.getXLenVT(); lowerMaskedGather() local
 12008  MVT XLenVT = Subtarget.getXLenVT(); lowerMaskedScatter() local
 12060  const MVT XLenVT = Subtarget.getXLenVT(); lowerGET_ROUNDING() local
 12091  const MVT XLenVT = Subtarget.getXLenVT(); lowerSET_ROUNDING() local
 12297  MVT XLenVT = Subtarget.getXLenVT(); ReplaceNodeResults() local
 12351  MVT XLenVT = Subtarget.getXLenVT(); ReplaceNodeResults() local
 12605  MVT XLenVT = Subtarget.getXLenVT(); ReplaceNodeResults() local
 12643  MVT XLenVT = Subtarget.getXLenVT(); ReplaceNodeResults() local
 12685  MVT XLenVT = Subtarget.getXLenVT(); ReplaceNodeResults() local
 12840  MVT XLenVT = Subtarget.getXLenVT(); ReplaceNodeResults() local
 15150  MVT XLenVT = Subtarget.getXLenVT(); tryMemPairCombine() local
 15286  MVT XLenVT = Subtarget.getXLenVT(); performFP_TO_INTCombine() local
 15389  MVT XLenVT = Subtarget.getXLenVT(); performFP_TO_INT_SATCombine() local
 16362  const MVT XLenVT = legalizeScatterGatherIndexType() local
 16657  const MVT XLenVT = Subtarget.getXLenVT(); PerformDAGCombine() local
 18998  MVT XLenVT = XLen == 32 ? MVT::i32 : MVT::i64; CC_RISCV() local
 19696  MVT XLenVT = Subtarget.getXLenVT(); LowerFormalArguments() local
 19903  MVT XLenVT = Subtarget.getXLenVT(); LowerCall() local
  [all …]
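Aside: nearly every hit in this file follows one idiom, fetch the subtarget's native scalar type once with Subtarget.getXLenVT() and key node creation and legalization on it; the CC_RISCV hit spells the choice out (MVT::i32 on RV32, MVT::i64 on RV64). A toy, LLVM-free sketch of just that selection, with ToySubtarget and ToyVT as illustrative names rather than backend API:

#include <iostream>

// Illustrative stand-in for MVT: the backend only ever picks between two
// scalar integer types for GPR-sized values.
enum class ToyVT { i32, i64 };

struct ToySubtarget {
  unsigned XLen; // 32 for RV32, 64 for RV64
  // Mirrors the ternary visible in the CC_RISCV hit above.
  ToyVT getXLenVT() const { return XLen == 32 ? ToyVT::i32 : ToyVT::i64; }
};

static const char *name(ToyVT vt) { return vt == ToyVT::i32 ? "i32" : "i64"; }

int main() {
  ToySubtarget rv32{32}, rv64{64};
  // In the real constructor this one type is registered for the GPR class and
  // then used in the setOperationAction/setCondCodeAction calls shown in the
  // RISCVTargetLowering() hits above.
  std::cout << "RV32 XLenVT = " << name(rv32.getXLenVT()) << '\n'; // i32
  std::cout << "RV64 XLenVT = " << name(rv64.getXLenVT()) << '\n'; // i64
}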
RISCVInstrInfoZimop.td
   70  def : Pat<(XLenVT (riscv_mopr GPR:$rs1, (XLenVT i))),
   75  def : Pat<(XLenVT (riscv_moprr GPR:$rs1, GPR:$rs2, (XLenVT i))),
RISCVInstrInfoVSDPatterns.td
   25  (xor node:$in, (riscv_vmset_vl (XLenVT srcvalue)))>;
  133  (vop_type (SplatPatKind (XLenVT xop_kind:$rs2))))),
  318  (vti.Vector (SplatPatKind (XLenVT xop_kind:$rs2))), cc)),
  320  def : Pat<(vti.Mask (setcc (vti.Vector (SplatPatKind (XLenVT xop_kind:$rs2))),
  502  (wti.Vector (extop2 (vti.Vector (SplatPat (XLenVT GPR:$rs1)))))),
  523  (wti.Vector (extop (vti.Vector (SplatPat (XLenVT GPR:$rs1)))))),
  560  (mul_oneuse (wti.Vector (extop1 (vti.Vector (SplatPat (XLenVT GPR:$rs1))))),
  577  (vti.Mask true_mask), (XLenVT srcvalue))),
  580  (vti.Mask true_mask), (XLenVT srcvalue)))),
  586  (vti.Mask true_mask), (XLenVT srcvalue))),
  [all …]
RISCVInstrInfoF.td
   26  : SDTypeProfile<1, 2, [SDTCisVT<0, XLenVT>, SDTCisFP<1>,
   27  SDTCisVT<2, XLenVT>]>;
   31  SDTCisVT<3, XLenVT>]>;
   33  : SDTypeProfile<1, 1, [SDTCisVT<0, XLenVT>, SDTCisFP<1>]>;
  129  def frmarg : Operand<XLenVT> {
  149  def frmarglegacy : Operand<XLenVT> {
  482  : Pat<(XLenVT (OpNode (vt Ty:$rs1), Ty:$rs2, Cond)), (Inst $rs1, $rs2)>;
  619  def : Pat<(XLenVT (strict_fsetccs FPR32:$rs1, FPR32:$rs2, SETEQ)),
  620  (AND (XLenVT (FLE_S $rs1, $rs2)),
  621  (XLenVT (FLE_S $rs2, $rs1)))>;
  [all …]
RISCVInstrInfoZk.td
   23  def SDT_RISCVZkByteSelect : SDTypeProfile<1, 3, [SDTCisVT<0, XLenVT>,
   24  SDTCisVT<1, XLenVT>,
   25  SDTCisVT<2, XLenVT>,
  148  : Pat<(XLenVT (OpNode (XLenVT GPR:$rs1), (XLenVT GPR:$rs2), byteselect:$imm)),
RISCVInstrInfoA.td
  129  def : StPat<atomic_store_8, SB, GPR, XLenVT>;
  130  def : StPat<atomic_store_16, SH, GPR, XLenVT>;
  131  def : StPat<atomic_store_32, SW, GPR, XLenVT>;
  141  multiclass AMOPat<string AtomicOp, string BaseInst, ValueType vt = XLenVT,
  246  def : Pat<(XLenVT (atomic_load_nand_i32_monotonic GPR:$addr, GPR:$incr)),
  248  def : Pat<(XLenVT (atomic_load_nand_i32_acquire GPR:$addr, GPR:$incr)),
  250  def : Pat<(XLenVT (atomic_load_nand_i32_release GPR:$addr, GPR:$incr)),
  252  def : Pat<(XLenVT (atomic_load_nand_i32_acq_rel GPR:$addr, GPR:$incr)),
  254  def : Pat<(XLenVT (atomic_load_nand_i32_seq_cst GPR:$addr, GPR:$incr)),
  342  ValueType vt = XLenVT> {
  [all …]
RISCVInstrInfoZfh.td
   19  : SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVT<1, XLenVT>]>;
   21  : SDTypeProfile<1, 1, [SDTCisVT<0, XLenVT>, SDTCisFP<1>]>;
  368  def : Pat<(XLenVT (strict_fsetccs (f16 FPR16:$rs1), FPR16:$rs2, SETEQ)),
  369  (AND (XLenVT (FLE_H $rs1, $rs2)),
  370  (XLenVT (FLE_H $rs2, $rs1)))>;
  371  def : Pat<(XLenVT (strict_fsetccs (f16 FPR16:$rs1), FPR16:$rs2, SETOEQ)),
  372  (AND (XLenVT (FLE_H $rs1, $rs2)),
  373  (XLenVT (FLE_H $rs2, $rs1)))>;
  375  def : Pat<(XLenVT (strict_fsetccs (f16 FPR16:$rs1), (f16 FPR16:$rs1), SETEQ)),
  377  def : Pat<(XLenVT (strict_fsetccs (f16 FPR16:$rs1), (f16 FPR16:$rs1), SETOEQ)),
  [all …]
RISCVInstrInfoXVentana.td
RISCVInstrInfoZicond.td
   37  def : Pat<(XLenVT (riscv_czero_eqz GPR:$rs1, GPR:$rc)),
   39  def : Pat<(XLenVT (riscv_czero_nez GPR:$rs1, GPR:$rc)),
   42  def : Pat<(XLenVT (riscv_czero_eqz GPR:$rs1, (riscv_setne (XLenVT GPR:$rc)))),
   44  def : Pat<(XLenVT (riscv_czero_eqz GPR:$rs1, (riscv_seteq (XLenVT GPR:$rc)))),
   46  def : Pat<(XLenVT (riscv_czero_nez GPR:$rs1, (riscv_setne (XLenVT GPR:$rc)))),
   48  def : Pat<(XLenVT (riscv_czero_nez GPR:$rs1, (riscv_seteq (XLenVT GP
  [all …]
RISCVInstrInfoZvk.td
   18  def tuimm5 : RISCVOp, TImmLeaf<XLenVT, [{return isUInt<5>(Imm);}]>;
  669  (wti.Vector (Low8BitsSplatPat (XLenVT GPR:$rs1)))),
  802  (wti.Vector (Low8BitsSplatPat (XLenVT GPR:$rs1))),
  813  (wti.Vector (Low8BitsSplatPat (XLenVT GPR:$rs1))),
  851  (vti.Vector (Low8BitsSplatPat (XLenVT GPR:$rs1))),
  885  VLOpFrag, (XLenVT timm:$policy))),
  889  GPR:$vl, sew, (XLenVT timm:$policy))>;
  904  VLOpFrag, (XLenVT timm:$policy))),
  908  GPR:$vl, sew, (XLenVT timm:$policy))>;
  947  vti.Vector, vti.Vector, XLenVT,
  [all …]
RISCVISelDAGToDAG.cpp
  319  MVT XLenVT = Subtarget->getXLenVT(); in addVectorLoadStoreOperands() local
  320  SDValue SEWOp = CurDAG->getTargetConstant(Log2SEW, DL, XLenVT); in addVectorLoadStoreOperands()
  330  SDValue PolicyOp = CurDAG->getTargetConstant(Policy, DL, XLenVT); in addVectorLoadStoreOperands()
  383  MVT XLenVT = Subtarget->getXLenVT(); in selectVLSEGFF() local
  404  XLenVT, MVT::Other, Operands); in selectVLSEGFF()
  553  MVT XLenVT = Subtarget->getXLenVT(); in selectVSETVLI() local
  574  SDValue VTypeIOp = CurDAG->getTargetConstant(VTypeI, DL, XLenVT); in selectVSETVLI()
  584  VLOperand = CurDAG->getRegister(RISCV::X0, XLenVT); in selectVSETVLI()
  592  SDValue VLImm = CurDAG->getTargetConstant(AVL, DL, XLenVT); in selectVSETVLI()
  594  XLenVT, VLImm, VTypeIOp)); in selectVSETVLI()
  [all …]
RISCVInstrInfoVPseudos.td
    77  SDTypeProfile<1, 0, [SDTCisVT<0, XLenVT>]>>;
    92  def VLOp : ComplexPattern<XLenVT, 1, "selectVLOp">;
   232  def VLOpFrag : PatFrag<(ops), (XLenVT (VLOp (XLenVT AVL:$vl)))>;
   237  def VLMax : OutPatFrag<(ops), (XLenVT -1)>;
   256  ValueType Scal = XLenVT, RegisterClass ScalarReg = GPR> {
   269  string ScalarSuffix = !cond(!eq(Scal, XLenVT) : "X",
   277  LMULInfo M, ValueType Scal = XLenVT,
  3949  (XLenVT timm:$round),
  3957  (XLenVT tim
  [all …]
RISCVInstrInfoC.td
   21  def uimmlog2xlennonzero : RISCVOp, ImmLeaf<XLenVT, [{
   50  ImmLeaf<XLenVT, [{return (Imm != 0) && isInt<6>(Imm);}]> {
   64  ImmLeaf<XLenVT, [{return (Imm == 0);}]> {
   82  ImmLeaf<XLenVT, [{return (Imm != 0) &&
  100  ImmLeaf<XLenVT, [{return isShiftedUInt<5, 2>(Imm);}]> {
  115  ImmLeaf<XLenVT, [{return isShiftedUInt<6, 2>(Imm);}]> {
  130  ImmLeaf<XLenVT, [{return isShiftedUInt<5, 3>(Imm);}]> {
  145  ImmLeaf<XLenVT, [{return isShiftedInt<8, 1>(Imm);}]> {
  162  ImmLeaf<XLenVT, [{return isShiftedUInt<6, 3>(Imm);}]> {
  178  ImmLeaf<XLenVT,
  [all …]
RISCVInstrInfoXCV.td
  668  : Pat<(XLenVT (LoadOp CVrr:$regreg)),
  672  : Pat<(StoreOp (XLenVT GPR:$rs2), GPR:$rs1, simm12:$imm12),
  676  : Pat<(StoreOp (XLenVT GPR:$rs2), GPR:$rs1, GPR:$rs3),
  680  : Pat<(StoreOp (XLenVT GPR:$rs2), CVrr:$regreg),
  705  def cv_tuimm2 : TImmLeaf<XLenVT, [{return isUInt<2>(Imm);}]>;
  706  def cv_tuimm5 : TImmLeaf<XLenVT, [{return isUInt<5>(Imm);}]>;
  707  def cv_uimm10 : ImmLeaf<XLenVT, [{return isUInt<10>(Imm);}]>;
  719  def powerOf2Minus1 : ImmLeaf<XLenVT, [{ return isPowerOf2_32(Imm+1); }]>;
  755  def : Pat<(bitreverse (XLenVT GPR:$rs)), (CV_BITREV GPR:$rs, 0, 0)>;
  767  def : Pat<(intr (XLenVT GP
  [all …]
RISCVInstrInfoXwch.td
   48  ImmLeaf<XLenVT, [{return isShiftedUInt<4, 1>(Imm);}]> {
   63  ImmLeaf<XLenVT, [{return isShiftedUInt<5, 1>(Imm);}]> {
/freebsd/contrib/llvm-project/llvm/lib/Target/RISCV/GISel/
RISCVCallLowering.cpp
  484  const MVT XLenVT = Subtarget.getXLenVT(); in saveVarArgRegisters() local
  489  CCValAssign::getReg(I + MF.getFunction().getNumOperands(), XLenVT, in saveVarArgRegisters()
  490  ArgRegs[I], XLenVT, CCValAssign::Full)); in saveVarArgRegisters()