Lines matching refs:LLT (references to the LLT type across the LegalizerHelper routines listed below)
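Before the listing, here is a minimal hand-written sketch (not taken from the file itself) of the LLT factory and query calls that recur in the hits: LLT::scalar, LLT::pointer, LLT::fixed_vector, and MRI.getType. The enclosing function name lltExamples is an illustrative assumption, and the LowLevelType.h include path differs between LLVM releases. A second sketch of the NarrowTy-plus-leftover split used by several narrowScalar hits follows the listing.

```cpp
// Illustrative only: the function and its parameters are assumptions, and the
// header lives at llvm/CodeGen/LowLevelType.h in older trees and
// llvm/CodeGenTypes/LowLevelType.h in newer ones.
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGenTypes/LowLevelType.h"
using namespace llvm;

void lltExamples(MachineRegisterInfo &MRI, Register Reg) {
  LLT S1    = LLT::scalar(1);                        // 1-bit scalar: carry/condition bits
  LLT S64   = LLT::scalar(64);                       // 64-bit scalar
  LLT P0    = LLT::pointer(0, 64);                   // pointer, address space 0, 64 bits
  LLT V4S32 = LLT::fixed_vector(4, LLT::scalar(32)); // <4 x s32>

  LLT RegTy  = MRI.getType(Reg);     // LLT of a generic virtual register
  bool IsVec = RegTy.isVector();     // vector vs. scalar/pointer query
  (void)V4S32.getElementType();      // s32
  (void)V4S32.getSizeInBits();       // 128 bits total
  (void)S1; (void)S64; (void)P0; (void)IsVec;
}
```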

56 getNarrowTypeBreakDown(LLT OrigTy, LLT NarrowTy, LLT &LeftoverTy) {  in getNarrowTypeBreakDown()
73 LLT::scalarOrVector(ElementCount::getFixed(LeftoverSize / EltSize), in getNarrowTypeBreakDown()
76 LeftoverTy = LLT::scalar(LeftoverSize); in getNarrowTypeBreakDown()
83 static Type *getFloatTypeForLLT(LLVMContext &Ctx, LLT Ty) { in getFloatTypeForLLT()
163 LLT ResultTy, LLT PartTy, in insertParts()
165 LLT LeftoverTy, in insertParts()
192 LLT GCDTy = getGCDType(getGCDType(ResultTy, LeftoverTy), PartTy); in insertParts()
195 LLT ResultLCMTy = buildLCMMergePieces(ResultTy, LeftoverTy, GCDTy, GCDRegs); in insertParts()
201 LLT Ty = MRI.getType(Reg); in appendVectorElts()
237 LLT GCDTy, Register SrcReg) { in extractGCDType()
238 LLT SrcTy = MRI.getType(SrcReg); in extractGCDType()
250 LLT LegalizerHelper::extractGCDType(SmallVectorImpl<Register> &Parts, LLT DstTy, in extractGCDType()
251 LLT NarrowTy, Register SrcReg) { in extractGCDType()
252 LLT SrcTy = MRI.getType(SrcReg); in extractGCDType()
253 LLT GCDTy = getGCDType(getGCDType(SrcTy, NarrowTy), DstTy); in extractGCDType()
258 LLT LegalizerHelper::buildLCMMergePieces(LLT DstTy, LLT NarrowTy, LLT GCDTy, in buildLCMMergePieces()
261 LLT LCMTy = getLCMType(DstTy, NarrowTy); in buildLCMMergePieces()
281 MIRBuilder.buildConstant(LLT::scalar(64), GCDTy.getSizeInBits() - 1); in buildLCMMergePieces()
349 void LegalizerHelper::buildWidenedRemergeToDst(Register DstReg, LLT LCMTy, in buildWidenedRemergeToDst()
351 LLT DstTy = MRI.getType(DstReg); in buildWidenedRemergeToDst()
650 LLT OpLLT = MRI.getType(Reg); in createMemLibcall()
733 LLT MemType = MMO.getMemoryType(); in getOutlineAtomicLibcall()
788 LLT SuccessLLT; in createAtomicLibcall()
934 LLT StateTy = MRI.getType(Dst); in createGetStateLibcall()
974 LLT StateTy = MRI.getType(Src); in createSetStateLibcall()
1012 LLT MemTy = LLT::pointer(AddrSpace, PtrSize); in createResetStateLibcall()
1013 auto DefValue = MIRBuilder.buildConstant(LLT::scalar(PtrSize), -1LL); in createResetStateLibcall()
1037 LLT LLTy = MRI.getType(MI.getOperand(0).getReg()); in libcall()
1076 LLT LLTy = MRI.getType(MI.getOperand(0).getReg()); in libcall()
1090 LLT LLTy = MRI.getType(MI.getOperand(1).getReg()); in libcall()
1109 LLT LLTy = MRI.getType(MI.getOperand(0).getReg()); in libcall()
1222 LLT NarrowTy) { in narrowScalar()
1231 LLT DstTy = MRI.getType(DstReg); in narrowScalar()
1240 LLT ImplicitTy = NarrowTy; in narrowScalar()
1242 ImplicitTy = LLT::vector(DstTy.getElementCount(), ImplicitTy); in narrowScalar()
1265 LLT Ty = MRI.getType(MI.getOperand(0).getReg()); in narrowScalar()
1279 LLT LeftoverTy; in narrowScalar()
1283 LeftoverTy = LLT::scalar(LeftoverBits); in narrowScalar()
1320 LLT Ty = MRI.getType(MI.getOperand(0).getReg()); in narrowScalar()
1358 LLT DstTy = MRI.getType(DstReg); in narrowScalar()
1403 LLT SrcTy = MRI.getType(SrcReg); in narrowScalar()
1526 LLT SrcTy = MRI.getType(LHS); in narrowScalar()
1535 LLT LeftoverTy; // Example: s88 -> s64 (NarrowTy) + s24 (leftover) in narrowScalar()
1541 LLT Unused; // Matches LeftoverTy; G_ICMP LHS and RHS are the same type. in narrowScalar()
1550 LLT ResTy = MRI.getType(Dst); in narrowScalar()
1571 LLT GCDTy = extractGCDType(WidenedXors, NarrowTy, LeftoverTy, Xor); in narrowScalar()
1746 LLT Ty = MRI.getType(Dst); in narrowScalar()
1762 LLT Ty = MRI.getType(Val); in coerceToScalar()
1767 LLT NewTy = LLT::scalar(Ty.getSizeInBits()); in coerceToScalar()
1782 void LegalizerHelper::widenScalarSrc(MachineInstr &MI, LLT WideTy, in widenScalarSrc()
1789 void LegalizerHelper::narrowScalarSrc(MachineInstr &MI, LLT NarrowTy, in narrowScalarSrc()
1796 void LegalizerHelper::widenScalarDst(MachineInstr &MI, LLT WideTy, in widenScalarDst()
1805 void LegalizerHelper::narrowScalarDst(MachineInstr &MI, LLT NarrowTy, in narrowScalarDst()
1814 void LegalizerHelper::moreElementsVectorDst(MachineInstr &MI, LLT WideTy, in moreElementsVectorDst()
1824 void LegalizerHelper::moreElementsVectorSrc(MachineInstr &MI, LLT MoreTy, in moreElementsVectorSrc()
1831 void LegalizerHelper::bitcastSrc(MachineInstr &MI, LLT CastTy, unsigned OpIdx) { in bitcastSrc()
1836 void LegalizerHelper::bitcastDst(MachineInstr &MI, LLT CastTy, unsigned OpIdx) { in bitcastDst()
1846 LLT WideTy) { in widenScalarMergeValues()
1854 LLT SrcTy = MRI.getType(Src1Reg); in widenScalarMergeValues()
1872 assert(MRI.getType(SrcReg) == LLT::scalar(PartSize)); in widenScalarMergeValues()
1916 LLT GCDTy = LLT::scalar(GCD); in widenScalarMergeValues()
1921 LLT WideDstTy = LLT::scalar(NumMerge * WideSize); in widenScalarMergeValues()
1967 LLT WideTy) { in widenScalarUnmergeValues()
1973 LLT SrcTy = MRI.getType(SrcReg); in widenScalarUnmergeValues()
1978 LLT DstTy = MRI.getType(Dst0Reg); in widenScalarUnmergeValues()
1991 SrcTy = LLT::scalar(SrcTy.getSizeInBits()); in widenScalarUnmergeValues()
2019 LLT LCMTy = getLCMType(SrcTy, WideTy); in widenScalarUnmergeValues()
2049 const LLT GCDTy = getGCDType(WideTy, DstTy); in widenScalarUnmergeValues()
2096 LLT WideTy) { in widenScalarExtract()
2112 LLT SrcAsIntTy = LLT::scalar(SrcTy.getSizeInBits()); in widenScalarExtract()
2129 LLT ShiftTy = SrcTy; in widenScalarExtract()
2170 LLT WideTy) { in widenScalarInsert()
2182 LLT WideTy) { in widenScalarAddSubOverflow()
2244 LLT CarryOutTy = MRI.getType(MI.getOperand(1).getReg()); in widenScalarAddSubOverflow()
2252 LLT OrigTy = MRI.getType(MI.getOperand(0).getReg()); in widenScalarAddSubOverflow()
2265 LLT WideTy) { in widenScalarAddSubShlSat()
2309 LLT WideTy) { in widenScalarMulo()
2319 LLT SrcTy = MRI.getType(LHS); in widenScalarMulo()
2320 LLT OverflowTy = MRI.getType(OriginalOverflow); in widenScalarMulo()
2375 LegalizerHelper::widenScalar(MachineInstr &MI, unsigned TypeIdx, LLT WideTy) { in widenScalar()
2465 LLT CurTy = MRI.getType(SrcReg); in widenScalar()
2516 LLT Ty = MRI.getType(DstReg); in widenScalar()
2529 LLT Ty = MRI.getType(DstReg); in widenScalar()
2738 LLT Ty = MRI.getType(MI.getOperand(0).getReg()); in widenScalar()
2845 LLT VecTy = MRI.getType(VecReg); in widenScalar()
2849 MI, LLT::vector(VecTy.getElementCount(), WideTy.getSizeInBits()), 1, in widenScalar()
2868 const LLT WideEltTy = WideTy.getElementType(); in widenScalar()
2881 LLT VecTy = MRI.getType(VecReg); in widenScalar()
2882 LLT WideVecTy = LLT::vector(VecTy.getElementCount(), WideTy); in widenScalar()
3007 const LLT WideEltTy = TypeIdx == 1 ? WideTy : WideTy.getElementType(); in widenScalar()
3049 LLT VecTy = MRI.getType(VecReg); in widenScalar()
3050 LLT WideVecTy = VecTy.isVector() in widenScalar()
3051 ? LLT::vector(VecTy.getElementCount(), WideTy) in widenScalar()
3083 MachineIRBuilder &B, Register Src, LLT Ty) { in getUnmergePieces()
3095 LLT AddrPtrTy = LLT::pointer(AddrSpace, DL.getPointerSizeInBits(AddrSpace)); in emitLoadFromConstantPool()
3096 LLT DstLLT = MRI.getType(DstReg); in emitLoadFromConstantPool()
3137 LLT SrcEltTy = SrcTy.getElementType(); in lowerBitcast()
3144 LLT DstEltTy = DstTy.getElementType(); in lowerBitcast()
3145 LLT DstCastTy = DstEltTy; // Intermediate bitcast result type in lowerBitcast()
3146 LLT SrcPartTy = SrcEltTy; // Original unmerge result type. in lowerBitcast()
3159 DstCastTy = LLT::fixed_vector(NumDstElt / NumSrcElt, DstEltTy); in lowerBitcast()
3171 SrcPartTy = LLT::fixed_vector(NumSrcElt / NumDstElt, SrcEltTy); in lowerBitcast()
3209 LLT IdxTy = B.getMRI()->getType(Idx); in getBitcastWiderVectorElementOffset()
3227 LLT CastTy) { in bitcastExtractVectorElt()
3233 LLT SrcEltTy = SrcVecTy.getElementType(); in bitcastExtractVectorElt()
3237 LLT NewEltTy = CastTy.isVector() ? CastTy.getElementType() : CastTy; in bitcastExtractVectorElt()
3258 LLT MidTy = in bitcastExtractVectorElt()
3259 LLT::scalarOrVector(ElementCount::getFixed(NewEltsPerOldElt), NewEltTy); in bitcastExtractVectorElt()
3335 LLT TargetTy = B.getMRI()->getType(TargetReg); in buildBitFieldInsert()
3336 LLT InsertTy = B.getMRI()->getType(InsertReg); in buildBitFieldInsert()
3363 LLT CastTy) { in bitcastInsertVectorElt()
3369 LLT VecTy = DstTy; in bitcastInsertVectorElt()
3371 LLT VecEltTy = VecTy.getElementType(); in bitcastInsertVectorElt()
3372 LLT NewEltTy = CastTy.isVector() ? CastTy.getElementType() : CastTy; in bitcastInsertVectorElt()
3436 LLT CastTy) { in bitcastConcatVector()
3445 LLT SrcScalTy = LLT::scalar(SrcTy.getSizeInBits()); in bitcastConcatVector()
3473 LLT DstTy = MRI.getType(DstReg); in lowerLoad()
3475 LLT MemTy = MMO.getMemoryType(); in lowerLoad()
3487 LLT WideMemTy = LLT::scalar(MemStoreSizeInBits); in lowerLoad()
3492 LLT LoadTy = DstTy; in lowerLoad()
3572 LLT PtrTy = MRI.getType(PtrReg); in lowerLoad()
3574 LLT AnyExtTy = LLT::scalar(AnyExtSize); in lowerLoad()
3578 auto OffsetCst = MIRBuilder.buildConstant(LLT::scalar(PtrTy.getSizeInBits()), in lowerLoad()
3615 LLT SrcTy = MRI.getType(SrcReg); in lowerStore()
3618 LLT MemTy = MMO.getMemoryType(); in lowerStore()
3630 LLT WideTy = LLT::scalar(StoreSizeInBits); in lowerStore()
3675 const LLT NewSrcTy = LLT::scalar(AnyExtSize); in lowerStore()
3678 const LLT IntPtrTy = LLT::scalar(SrcTy.getSizeInBits()); in lowerStore()
3689 LLT PtrTy = MRI.getType(PtrReg); in lowerStore()
3691 LLT::scalar(PtrTy.getSizeInBits()), LargeSplitSize / 8); in lowerStore()
3706 LegalizerHelper::bitcast(MachineInstr &MI, unsigned TypeIdx, LLT CastTy) { in bitcast()
3788 LegalizerHelper::lower(MachineInstr &MI, unsigned TypeIdx, LLT LowerHintTy) { in lower()
3800 LLT Ty = MRI.getType(MI.getOperand(0).getReg()); in lower()
3821 LLT Ty = MRI.getType(Res); in lower()
3852 LLT Ty = MRI.getType(Res); in lower()
3867 LLT Ty = MRI.getType(Res); in lower()
3929 const LLT CondTy = MRI.getType(CarryOut); in lower()
3930 const LLT Ty = MRI.getType(Res); in lower()
3968 const LLT CondTy = MRI.getType(BorrowOut); in lower()
3969 const LLT Ty = MRI.getType(Res); in lower()
4026 LLT DstTy = MRI.getType(DstReg); in lower()
4064 LLT Ty = MRI.getType(MI.getOperand(0).getReg()); in lower()
4071 LLT Ty = MRI.getType(MI.getOperand(0).getReg()); in lower()
4117 Align LegalizerHelper::getStackTemporaryAlignment(LLT Ty, in getStackTemporaryAlignment()
4135 LLT FramePtrTy = LLT::pointer(AddrSpace, DL.getPointerSizeInBits(AddrSpace)); in createStackTemporary()
4142 LLT VecTy) { in clampVectorIndex()
4143 LLT IdxTy = B.getMRI()->getType(IdxReg); in clampVectorIndex()
4162 Register LegalizerHelper::getVectorElementPointer(Register VecPtr, LLT VecTy, in getVectorElementPointer()
4164 LLT EltTy = VecTy.getElementType(); in getVectorElementPointer()
4177 LLT IdxTy = MRI.getType(Index).changeElementSize(IndexSizeInBits); in getVectorElementPointer()
4184 LLT PtrTy = MRI.getType(VecPtr); in getVectorElementPointer()
4197 LLT VecTy = MRI.getType(MI.getReg(0)); in hasSameNumEltsOnAllVectorOperands()
4210 LLT Ty = MRI.getType(Op.getReg()); in hasSameNumEltsOnAllVectorOperands()
4229 static void makeDstOps(SmallVectorImpl<DstOp> &DstOps, LLT Ty, in makeDstOps()
4231 LLT LeftoverTy; in makeDstOps()
4233 LLT EltTy = Ty.getElementType(); in makeDstOps()
4234 LLT NarrowTy = (NumElts == 1) ? EltTy : LLT::fixed_vector(NumElts, EltTy); in makeDstOps()
4405 LLT NarrowTy) { in fewerElementsVectorUnmergeValues()
4408 LLT DstTy = MRI.getType(MI.getOperand(0).getReg()); in fewerElementsVectorUnmergeValues()
4409 LLT SrcTy = MRI.getType(SrcReg); in fewerElementsVectorUnmergeValues()
4453 LLT NarrowTy) { in fewerElementsVectorMerge()
4483 LLT EltTy = MRI.getType(MI.getOperand(1).getReg()).getScalarType(); in fewerElementsVectorMerge()
4540 LLT NarrowVecTy) { in fewerElementsVectorExtractInsertVectorElt()
4555 LLT VecTy = MRI.getType(SrcVec); in fewerElementsVectorExtractInsertVectorElt()
4571 LLT GCDTy = extractGCDType(VecParts, VecTy, NarrowVecTy, SrcVec); in fewerElementsVectorExtractInsertVectorElt()
4574 LLT LCMTy = buildLCMMergePieces(VecTy, NarrowVecTy, GCDTy, VecParts, in fewerElementsVectorExtractInsertVectorElt()
4579 LLT IdxTy = MRI.getType(Idx); in fewerElementsVectorExtractInsertVectorElt()
4585 LLT PartTy = MRI.getType(VecParts[PartIdx]); in fewerElementsVectorExtractInsertVectorElt()
4613 LLT NarrowTy) { in reduceLoadStoreWidth()
4626 LLT ValTy = MRI.getType(ValReg); in reduceLoadStoreWidth()
4636 LLT LeftoverTy; in reduceLoadStoreWidth()
4651 LLT PtrTy = MRI.getType(AddrReg); in reduceLoadStoreWidth()
4652 const LLT OffsetTy = LLT::scalar(PtrTy.getSizeInBits()); in reduceLoadStoreWidth()
4662 auto splitTypePieces = [=](LLT PartTy, SmallVectorImpl<Register> &ValRegs, in reduceLoadStoreWidth()
4708 LLT NarrowTy) { in fewerElementsVector()
4876 LLT NarrowTy) { in fewerElementsBitcast()
4886 LLT SrcNarrowTy = in fewerElementsBitcast()
4887 LLT::fixed_vector(NarrowTy.getSizeInBits() / SrcScalSize, SrcScalSize); in fewerElementsBitcast()
4906 MachineInstr &MI, unsigned int TypeIdx, LLT NarrowTy) { in fewerElementsVectorShuffle()
4992 LLT EltTy = NarrowTy.getElementType(); in fewerElementsVectorShuffle()
5016 MIRBuilder.buildConstant(LLT::scalar(32), Idx)) in fewerElementsVectorShuffle()
5044 MachineInstr &MI, unsigned int TypeIdx, LLT NarrowTy) { in fewerElementsVectorReductions()
5132 LLT NarrowTy) { in fewerElementsVectorSeqReductions()
5161 LLT SrcTy, LLT NarrowTy, in tryNarrowPow2Reduction()
5191 const LLT HalfTy, const LLT AmtTy) { in narrowScalarShiftByConstant()
5203 LLT NVT = HalfTy; in narrowScalarShiftByConstant()
5280 LLT RequestedTy) { in narrowScalarShift()
5289 LLT DstTy = MRI.getType(DstReg); in narrowScalarShift()
5294 LLT ShiftAmtTy = MRI.getType(Amt); in narrowScalarShift()
5303 const LLT HalfTy = LLT::scalar(NewBitSize); in narrowScalarShift()
5304 const LLT CondTy = LLT::scalar(1); in narrowScalarShift()
5389 LLT MoreTy) { in moreElementsVectorPhi()
5407 unsigned Opcode, MachineIRBuilder &MIRBuilder, LLT Ty) { in getNeutralElementForVecReduce()
5445 LLT MoreTy) { in moreElementsVector()
5604 LLT SrcExtTy; in moreElementsVector()
5605 LLT DstExtTy; in moreElementsVector()
5608 SrcExtTy = LLT::fixed_vector( in moreElementsVector()
5612 DstExtTy = LLT::fixed_vector( in moreElementsVector()
5630 LLT CondTy = LLT::fixed_vector( in moreElementsVector()
5641 LLT SrcTy = MRI.getType(MI.getOperand(1).getReg()); in moreElementsVector()
5642 LLT DstTy = MRI.getType(MI.getOperand(0).getReg()); in moreElementsVector()
5650 LLT NewTy = SrcTy.changeElementCount( in moreElementsVector()
5669 LLT OrigTy = MRI.getType(MI.getOperand(1).getReg()); in moreElementsVector()
5675 LLT IdxTy(TLI.getVectorIdxTy(MIRBuilder.getDataLayout())); in moreElementsVector()
5700 LLT DestEltTy = DstTy.getElementType(); in equalizeVectorShuffleLengths()
5724 LLT PaddedTy = LLT::fixed_vector(PaddedMaskNumElts, DestEltTy); in equalizeVectorShuffleLengths()
5768 unsigned int TypeIdx, LLT MoreTy) { in moreElementsVectorShuffle()
5812 LLT NarrowTy) { in multiplyRegisters()
5849 B.buildUAddo(NarrowTy, LLT::scalar(1), Factors[0], Factors[1]); in multiplyRegisters()
5854 B.buildUAddo(NarrowTy, LLT::scalar(1), FactorSum, Factors[i]); in multiplyRegisters()
5874 LLT NarrowTy) { in narrowScalarAddSub()
5879 LLT DstType = MRI.getType(DstReg); in narrowScalarAddSub()
5923 LLT RegTy = MRI.getType(MI.getOperand(0).getReg()); in narrowScalarAddSub()
5924 LLT LeftoverTy, DummyTy; in narrowScalarAddSub()
5941 Register CarryOut = MRI.createGenericVirtualRegister(LLT::scalar(1)); in narrowScalarAddSub()
5969 LegalizerHelper::narrowScalarMul(MachineInstr &MI, LLT NarrowTy) { in narrowScalarMul()
5972 LLT Ty = MRI.getType(DstReg); in narrowScalarMul()
6000 LLT NarrowTy) { in narrowScalarFPTOI()
6007 LLT SrcTy = MRI.getType(Src); in narrowScalarFPTOI()
6012 if (SrcTy.getScalarType() != LLT::scalar(16) || in narrowScalarFPTOI()
6025 LLT NarrowTy) { in narrowScalarExtract()
6073 SegReg = MRI.createGenericVirtualRegister(LLT::scalar(SegSize)); in narrowScalarExtract()
6093 LLT NarrowTy) { in narrowScalarInsert()
6100 LLT RegTy = MRI.getType(MI.getOperand(0).getReg()); in narrowScalarInsert()
6101 LLT LeftoverTy; in narrowScalarInsert()
6153 SegReg = MRI.createGenericVirtualRegister(LLT::scalar(SegSize)); in narrowScalarInsert()
6165 Register MergeReg = MRI.createGenericVirtualRegister(LLT::scalar(WideSize)); in narrowScalarInsert()
6177 LLT NarrowTy) { in narrowScalarBasic()
6179 LLT DstTy = MRI.getType(DstReg); in narrowScalarBasic()
6186 LLT LeftoverTy; in narrowScalarBasic()
6191 LLT Unused; in narrowScalarBasic()
6218 LLT NarrowTy) { in narrowScalarExt()
6224 LLT DstTy = MRI.getType(DstReg); in narrowScalarExt()
6229 LLT GCDTy = extractGCDType(Parts, DstTy, NarrowTy, SrcReg); in narrowScalarExt()
6230 LLT LCMTy = buildLCMMergePieces(DstTy, NarrowTy, GCDTy, Parts, MI.getOpcode()); in narrowScalarExt()
6239 LLT NarrowTy) { in narrowScalarSelect()
6244 LLT CondTy = MRI.getType(CondReg); in narrowScalarSelect()
6249 LLT DstTy = MRI.getType(DstReg); in narrowScalarSelect()
6254 LLT LeftoverTy; in narrowScalarSelect()
6259 LLT Unused; in narrowScalarSelect()
6285 LLT NarrowTy) { in narrowScalarCTLZ()
6299 auto HiIsZero = B.buildICmp(CmpInst::ICMP_EQ, LLT::scalar(1), in narrowScalarCTLZ()
6318 LLT NarrowTy) { in narrowScalarCTTZ()
6332 auto LoIsZero = B.buildICmp(CmpInst::ICMP_EQ, LLT::scalar(1), in narrowScalarCTTZ()
6351 LLT NarrowTy) { in narrowScalarCTPOP()
6374 LLT NarrowTy) { in narrowScalarFLDEXP()
6380 LLT ExpTy = MRI.getType(ExpReg); in narrowScalarFLDEXP()
6502 LLT Ty = MRI.getType(SrcReg); in lowerBitCount()
6548 auto IsMulSupported = [this](const LLT Ty) { in lowerBitCount()
6586 LLT Ty = MRI.getType(Dst); in lowerFunnelShiftWithInverse()
6587 LLT ShTy = MRI.getType(Z); in lowerFunnelShiftWithInverse()
6625 LLT Ty = MRI.getType(Dst); in lowerFunnelShiftAsShifts()
6626 LLT ShTy = MRI.getType(Z); in lowerFunnelShiftAsShifts()
6684 LLT Ty = MRI.getType(Dst); in lowerFunnelShift()
6685 LLT ShTy = MRI.getType(MI.getOperand(3).getReg()); in lowerFunnelShift()
6703 LLT DstTy = MRI.getType(Dst); in lowerEXT()
6704 LLT SrcTy = MRI.getType(Src); in lowerEXT()
6717 LLT MidTy = SrcTy.changeElementSize(SrcTyScalarSize * 2); in lowerEXT()
6722 LLT EltTy = MidTy.changeElementCount( in lowerEXT()
6727 LLT ZExtResTy = DstTy.changeElementCount( in lowerEXT()
6758 LLT DstTy = MRI.getType(DstReg); in lowerTRUNC()
6759 LLT SrcTy = MRI.getType(SrcReg); in lowerTRUNC()
6766 LLT SplitSrcTy = SrcTy.changeElementCount( in lowerTRUNC()
6774 LLT InterTy; in lowerTRUNC()
6884 const LLT S64 = LLT::scalar(64); in lowerU64ToF32BitOps()
6885 const LLT S32 = LLT::scalar(32); in lowerU64ToF32BitOps()
6886 const LLT S1 = LLT::scalar(1); in lowerU64ToF32BitOps()
6940 if (SrcTy == LLT::scalar(1)) { in lowerUITOFP()
6948 if (SrcTy != LLT::scalar(64)) in lowerUITOFP()
6951 if (DstTy == LLT::scalar(32)) { in lowerUITOFP()
6965 const LLT S64 = LLT::scalar(64); in lowerSITOFP()
6966 const LLT S32 = LLT::scalar(32); in lowerSITOFP()
6967 const LLT S1 = LLT::scalar(1); in lowerSITOFP()
7007 const LLT S64 = LLT::scalar(64); in lowerFPTOUI()
7008 const LLT S32 = LLT::scalar(32); in lowerFPTOUI()
7035 const LLT S1 = LLT::scalar(1); in lowerFPTOUI()
7047 const LLT S64 = LLT::scalar(64); in lowerFPTOSI()
7048 const LLT S32 = LLT::scalar(32); in lowerFPTOSI()
7088 const LLT S1 = LLT::scalar(1); in lowerFPTOSI()
7112 const LLT S1 = LLT::scalar(1); in lowerFPTRUNC_F64_TO_F16()
7113 const LLT S32 = LLT::scalar(32); in lowerFPTRUNC_F64_TO_F16()
7116 assert(MRI.getType(Dst).getScalarType() == LLT::scalar(16) && in lowerFPTRUNC_F64_TO_F16()
7117 MRI.getType(Src).getScalarType() == LLT::scalar(64)); in lowerFPTRUNC_F64_TO_F16()
7228 const LLT S64 = LLT::scalar(64); in lowerFPTRUNC()
7229 const LLT S16 = LLT::scalar(16); in lowerFPTRUNC()
7239 LLT Ty = MRI.getType(Dst); in lowerFPOWI()
7266 LLT CmpType = MRI.getType(Dst).changeElementSize(1); in lowerMinMax()
7280 LLT DstTy = MRI.getType(Dst); in lowerThreewayCompare()
7281 LLT CmpTy = DstTy.changeElementSize(1); in lowerThreewayCompare()
7353 LLT Ty = MRI.getType(Dst); in lowerFMinNumMaxNum()
7379 LLT Ty = MRI.getType(DstReg); in lowerFMad()
7393 const LLT Ty = MRI.getType(DstReg); in lowerIntrinsicRound()
7394 const LLT CondTy = Ty.changeElementSize(1); in lowerIntrinsicRound()
7426 LLT Ty = MRI.getType(DstReg); in lowerFFloor()
7427 const LLT CondTy = Ty.changeElementSize(1); in lowerFFloor()
7454 LLT WideTy = LLT::scalar(DstTy.getSizeInBits()); in lowerMergeValues()
7491 LLT DstTy = MRI.getType(Dst0Reg); in lowerUnmergeValues()
7500 LLT IntTy = MRI.getType(SrcReg); in lowerUnmergeValues()
7536 LLT VecTy = MRI.getType(SrcVec); in lowerExtractInsertVectorElt()
7537 LLT EltTy = VecTy.getElementType(); in lowerExtractInsertVectorElt()
7602 LLT IdxTy = LLT::scalar(32); in lowerShuffleVector()
7607 LLT EltTy = DstTy.getScalarType(); in lowerShuffleVector()
7654 LLT IdxTy = LLT::scalar(32); in lowerVECTOR_COMPRESS()
7655 LLT ValTy = VecTy.getElementType(); in lowerVECTOR_COMPRESS()
7676 {LLT::scalar(32)}, {Popcount}); in lowerVECTOR_COMPRESS()
7693 LLT MaskITy = MaskTy.getElementType(); in lowerVECTOR_COMPRESS()
7696 MaskI = MIRBuilder.buildTrunc(LLT::scalar(1), MaskI); in lowerVECTOR_COMPRESS()
7705 CmpInst::ICMP_UGT, LLT::scalar(1), OutPos, EndOfVector); in lowerVECTOR_COMPRESS()
7727 LLT PtrTy) { in getDynStackAllocTargetPtr()
7728 LLT IntPtrTy = LLT::scalar(PtrTy.getSizeInBits()); in getDynStackAllocTargetPtr()
7758 LLT PtrTy = MRI.getType(Dst); in lowerDynStackAlloc()
7826 LLT SrcIntTy = SrcTy; in lowerExtract()
7828 SrcIntTy = LLT::scalar(SrcTy.getSizeInBits()); in lowerExtract()
7851 LLT DstTy = MRI.getType(Src); in lowerInsert()
7852 LLT InsertTy = MRI.getType(InsertSrc); in lowerInsert()
7856 LLT EltTy = DstTy.getElementType(); in lowerInsert()
7906 LLT IntDstTy = DstTy; in lowerInsert()
7909 IntDstTy = LLT::scalar(DstTy.getSizeInBits()); in lowerInsert()
7914 const LLT IntInsertTy = LLT::scalar(InsertTy.getSizeInBits()); in lowerInsert()
7942 LLT Ty = Dst0Ty; in lowerSADDO_SSUBO()
7943 LLT BoolTy = Dst1Ty; in lowerSADDO_SSUBO()
7978 LLT Ty = MRI.getType(Res); in lowerAddSubSatToMinMax()
8053 LLT Ty = MRI.getType(Res); in lowerAddSubSatToAddoSubo()
8054 LLT BoolTy = Ty.changeElementSize(1); in lowerAddSubSatToAddoSubo()
8123 LLT Ty = MRI.getType(Res); in lowerShlSat()
8124 LLT BoolTy = Ty.changeElementSize(1); in lowerShlSat()
8150 const LLT Ty = MRI.getType(Src); in lowerBswap()
8184 const LLT Ty = Dst.getLLTTy(*B.getMRI()); in SwapN()
8195 const LLT Ty = MRI.getType(Src); in lowerBitreverse()
8255 const LLT Ty = MRI.getType(ValReg); in lowerReadWriteRegister()
8277 LLT OrigTy = MRI.getType(Result); in lowerSMULH_UMULH()
8279 LLT WideTy = OrigTy.changeElementSize(SizeInBits * 2); in lowerSMULH_UMULH()
8316 LLT IntTy = LLT::scalar(BitSize); in lowerISFPCLASS()
8318 IntTy = LLT::vector(SrcTy.getElementCount(), IntTy); in lowerISFPCLASS()
8343 LLT DstTyCopy = DstTy; in lowerISFPCLASS()
8475 LLT ScalarPtrTy = LLT::scalar(DstTy.getScalarSizeInBits()); in lowerSelect()
8476 LLT NewTy = DstTy.changeElementType(ScalarPtrTy); in lowerSelect()
8489 if (MaskTy != LLT::scalar(1)) in lowerSelect()
8548 LLT DstTy = MRI.getType(MI.getOperand(0).getReg()); in lowerAbsToAddXor()
8566 LLT Ty = MRI.getType(SrcReg); in lowerAbsToMaxNeg()
8578 LLT Ty = MRI.getType(SrcReg), IType = LLT::scalar(1); in lowerAbsToCNeg()
8590 LLT SrcTy = MRI.getType(SrcReg); in lowerVectorReduction()
8591 LLT DstTy = MRI.getType(SrcReg); in lowerVectorReduction()
8611 LLT PtrTy = MRI.getType(ListPtr); in lowerVAArg()
8621 LLT PtrTyAsScalarTy = LLT::scalar(PtrTy.getSizeInBits()); in lowerVAArg()
8634 LLT LLTTy = MRI.getType(Dst); in lowerVAArg()
8664 static bool findGISelOptimalMemOpLowering(std::vector<LLT> &MemOps, in findGISelOptimalMemOpLowering()
8672 LLT Ty = TLI.getOptimalMemOpLLT(Op, FuncAttributes); in findGISelOptimalMemOpLowering()
8674 if (Ty == LLT()) { in findGISelOptimalMemOpLowering()
8678 Ty = LLT::scalar(64); in findGISelOptimalMemOpLowering()
8682 Ty = LLT::scalar(Ty.getSizeInBytes()); in findGISelOptimalMemOpLowering()
8693 LLT NewTy = Ty; in findGISelOptimalMemOpLowering()
8697 NewTy = NewTy.getSizeInBits() > 64 ? LLT::scalar(64) : LLT::scalar(32); in findGISelOptimalMemOpLowering()
8698 NewTy = LLT::scalar(llvm::bit_floor(NewTy.getSizeInBits() - 1)); in findGISelOptimalMemOpLowering()
8730 static Register getMemsetValue(Register Val, LLT Ty, MachineIRBuilder &MIB) { in getMemsetValue()
8747 LLT ExtType = Ty.getScalarType(); in getMemsetValue()
8782 std::vector<LLT> MemOps; in lowerMemset()
8814 LLT LargestTy = MemOps[0]; in lowerMemset()
8829 LLT PtrTy = MRI.getType(Dst); in lowerMemset()
8833 LLT Ty = MemOps[I]; in lowerMemset()
8862 MIB.buildConstant(LLT::scalar(PtrTy.getSizeInBits()), DstOff); in lowerMemset()
8938 std::vector<LLT> MemOps; in lowerMemcpy()
9000 LLT SrcTy = MRI.getType(Src); in lowerMemcpy()
9001 Offset = MIB.buildConstant(LLT::scalar(SrcTy.getSizeInBits()), CurrOffset) in lowerMemcpy()
9010 LLT DstTy = MRI.getType(Dst); in lowerMemcpy()
9043 std::vector<LLT> MemOps; in lowerMemmove()
9098 LLT SrcTy = MRI.getType(Src); in lowerMemmove()
9100 MIB.buildConstant(LLT::scalar(SrcTy.getSizeInBits()), CurrOffset); in lowerMemmove()
9109 LLT CopyTy = MemOps[I]; in lowerMemmove()
9116 LLT DstTy = MRI.getType(Dst); in lowerMemmove()
9118 MIB.buildConstant(LLT::scalar(DstTy.getSizeInBits()), CurrOffset); in lowerMemmove()
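
The narrowScalar hits above repeatedly split a wide scalar into NarrowTy pieces plus a smaller leftover (see the quoted comment "Example: s88 -> s64 (NarrowTy) + s24 (leftover)" at source line 1535). Below is a minimal sketch of that arithmetic using only the public LLT API; breakDownScalar is a hypothetical helper for illustration, not LLVM's getNarrowTypeBreakDown, and it handles scalars only.

```cpp
#include "llvm/CodeGenTypes/LowLevelType.h" // include path varies by LLVM version
#include <utility>
using namespace llvm;

// Hypothetical helper: how many full NarrowTy pieces fit in OrigTy, and what
// scalar type (if any) is left over. LLVM's real helper also handles vectors.
static std::pair<unsigned, LLT> breakDownScalar(LLT OrigTy, LLT NarrowTy) {
  unsigned OrigSize     = OrigTy.getSizeInBits();
  unsigned NarrowSize   = NarrowTy.getSizeInBits();
  unsigned NumParts     = OrigSize / NarrowSize;            // full NarrowTy pieces
  unsigned LeftoverSize = OrigSize - NumParts * NarrowSize; // remaining bits
  LLT LeftoverTy = LeftoverSize ? LLT::scalar(LeftoverSize) : LLT();
  return {NumParts, LeftoverTy};
}

// Usage: breakDownScalar(LLT::scalar(88), LLT::scalar(64)) yields {1, s24},
// matching the s88 -> s64 + s24 comment quoted in the listing.
```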