Lines Matching refs:RetVT in llvm/lib/Target/AArch64/AArch64FastISel.cpp
199 bool optimizeIntExtLoad(const Instruction *I, MVT RetVT, MVT SrcVT);
204 unsigned emitAddSub(bool UseAdd, MVT RetVT, const Value *LHS,
207 unsigned emitAddSub_rr(bool UseAdd, MVT RetVT, unsigned LHSReg,
210 unsigned emitAddSub_ri(bool UseAdd, MVT RetVT, unsigned LHSReg,
213 unsigned emitAddSub_rs(bool UseAdd, MVT RetVT, unsigned LHSReg,
217 unsigned emitAddSub_rx(bool UseAdd, MVT RetVT, unsigned LHSReg,
225 bool emitICmp(MVT RetVT, const Value *LHS, const Value *RHS, bool IsZExt);
226 bool emitICmp_ri(MVT RetVT, unsigned LHSReg, uint64_t Imm);
227 bool emitFCmp(MVT RetVT, const Value *LHS, const Value *RHS);
236 unsigned emitAdd(MVT RetVT, const Value *LHS, const Value *RHS,
240 unsigned emitSub(MVT RetVT, const Value *LHS, const Value *RHS,
243 unsigned emitSubs_rr(MVT RetVT, unsigned LHSReg, unsigned RHSReg,
245 unsigned emitSubs_rs(MVT RetVT, unsigned LHSReg, unsigned RHSReg,
248 unsigned emitLogicalOp(unsigned ISDOpc, MVT RetVT, const Value *LHS,
250 unsigned emitLogicalOp_ri(unsigned ISDOpc, MVT RetVT, unsigned LHSReg,
252 unsigned emitLogicalOp_rs(unsigned ISDOpc, MVT RetVT, unsigned LHSReg,
254 unsigned emitAnd_ri(MVT RetVT, unsigned LHSReg, uint64_t Imm);
255 unsigned emitMul_rr(MVT RetVT, unsigned Op0, unsigned Op1);
256 unsigned emitSMULL_rr(MVT RetVT, unsigned Op0, unsigned Op1);
257 unsigned emitUMULL_rr(MVT RetVT, unsigned Op0, unsigned Op1);
258 unsigned emitLSL_rr(MVT RetVT, unsigned Op0Reg, unsigned Op1Reg);
259 unsigned emitLSL_ri(MVT RetVT, MVT SrcVT, unsigned Op0Reg, uint64_t Imm,
261 unsigned emitLSR_rr(MVT RetVT, unsigned Op0Reg, unsigned Op1Reg);
262 unsigned emitLSR_ri(MVT RetVT, MVT SrcVT, unsigned Op0Reg, uint64_t Imm,
264 unsigned emitASR_rr(MVT RetVT, unsigned Op0Reg, unsigned Op1Reg);
265 unsigned emitASR_ri(MVT RetVT, MVT SrcVT, unsigned Op0Reg, uint64_t Imm,
1165 unsigned AArch64FastISel::emitAddSub(bool UseAdd, MVT RetVT, const Value *LHS, in emitAddSub() argument
1170 switch (RetVT.SimpleTy) { in emitAddSub()
1188 MVT SrcVT = RetVT; in emitAddSub()
1189 RetVT.SimpleTy = std::max(RetVT.SimpleTy, MVT::i32); in emitAddSub()
1214 LHSReg = emitIntExt(SrcVT, LHSReg, RetVT, IsZExt); in emitAddSub()
1220 ResultReg = emitAddSub_ri(!UseAdd, RetVT, LHSReg, -Imm, SetFlags, in emitAddSub()
1223 ResultReg = emitAddSub_ri(UseAdd, RetVT, LHSReg, Imm, SetFlags, in emitAddSub()
1227 ResultReg = emitAddSub_ri(UseAdd, RetVT, LHSReg, 0, SetFlags, WantResult); in emitAddSub()
1238 return emitAddSub_rx(UseAdd, RetVT, LHSReg, RHSReg, ExtendType, 0, in emitAddSub()
1257 ResultReg = emitAddSub_rs(UseAdd, RetVT, LHSReg, RHSReg, AArch64_AM::LSL, in emitAddSub()
1280 ResultReg = emitAddSub_rs(UseAdd, RetVT, LHSReg, RHSReg, ShiftType, in emitAddSub()
1294 RHSReg = emitIntExt(SrcVT, RHSReg, RetVT, IsZExt); in emitAddSub()
1296 return emitAddSub_rr(UseAdd, RetVT, LHSReg, RHSReg, SetFlags, WantResult); in emitAddSub()
1299 unsigned AArch64FastISel::emitAddSub_rr(bool UseAdd, MVT RetVT, unsigned LHSReg, in emitAddSub_rr() argument
1308 if (RetVT != MVT::i32 && RetVT != MVT::i64) in emitAddSub_rr()
1317 bool Is64Bit = RetVT == MVT::i64; in emitAddSub_rr()
1336 unsigned AArch64FastISel::emitAddSub_ri(bool UseAdd, MVT RetVT, unsigned LHSReg, in emitAddSub_ri() argument
1341 if (RetVT != MVT::i32 && RetVT != MVT::i64) in emitAddSub_ri()
1359 bool Is64Bit = RetVT == MVT::i64; in emitAddSub_ri()
1381 unsigned AArch64FastISel::emitAddSub_rs(bool UseAdd, MVT RetVT, unsigned LHSReg, in emitAddSub_rs() argument
1390 if (RetVT != MVT::i32 && RetVT != MVT::i64) in emitAddSub_rs()
1394 if (ShiftImm >= RetVT.getSizeInBits()) in emitAddSub_rs()
1403 bool Is64Bit = RetVT == MVT::i64; in emitAddSub_rs()
1423 unsigned AArch64FastISel::emitAddSub_rx(bool UseAdd, MVT RetVT, unsigned LHSReg, in emitAddSub_rx() argument
1432 if (RetVT != MVT::i32 && RetVT != MVT::i64) in emitAddSub_rx()
1444 bool Is64Bit = RetVT == MVT::i64; in emitAddSub_rx()
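
The group above is the add/sub family: emitAddSub (lines 1165-1296) clamps sub-32-bit result types up to MVT::i32 (lines 1188-1189), extends the narrow operands to match (lines 1214, 1294), and then dispatches on the shape of the RHS, while the suffixed helpers that follow implement the four AArch64 operand forms (_rr register-register, _ri register-immediate, _rs shifted register, _rx extended register). The sketch below mirrors only that dispatch order in standalone C++; Operand, Form, and classifyRhs are invented names for illustration, not LLVM API.

    #include <cstdint>
    #include <iostream>
    #include <optional>

    // Hypothetical stand-ins for the IR values FastISel inspects; for
    // illustration only.
    struct Operand {
      std::optional<int64_t> constant; // RHS is a ConstantInt
      bool zeroOrSignExtended = false; // RHS comes from a zext/sext
      unsigned shiftAmount = 0;        // RHS comes from a constant shl
    };

    enum class Form { RI, RX, RS, RR };

    // Mirrors the order of checks in emitAddSub: immediate first (flipping
    // add/sub and negating a negative immediate, lines 1220-1223), then the
    // extended-register form (line 1238), then the shifted-register form
    // (lines 1257, 1280), then plain register-register (line 1296).
    Form classifyRhs(const Operand &rhs) {
      if (rhs.constant)
        return Form::RI;
      if (rhs.zeroOrSignExtended)
        return Form::RX;
      if (rhs.shiftAmount != 0)
        return Form::RS;
      return Form::RR;
    }

    int main() {
      Operand imm;
      imm.constant = 42;
      Operand shifted;
      shifted.shiftAmount = 3;
      std::cout << (classifyRhs(imm) == Form::RI) << '\n';     // prints 1
      std::cout << (classifyRhs(shifted) == Form::RS) << '\n'; // prints 1
    }
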
1489 bool AArch64FastISel::emitICmp(MVT RetVT, const Value *LHS, const Value *RHS, in emitICmp() argument
1491 return emitSub(RetVT, LHS, RHS, /*SetFlags=*/true, /*WantResult=*/false, in emitICmp()
1495 bool AArch64FastISel::emitICmp_ri(MVT RetVT, unsigned LHSReg, uint64_t Imm) { in emitICmp_ri() argument
1496 return emitAddSub_ri(/*UseAdd=*/false, RetVT, LHSReg, Imm, in emitICmp_ri()
1500 bool AArch64FastISel::emitFCmp(MVT RetVT, const Value *LHS, const Value *RHS) { in emitFCmp() argument
1501 if (RetVT != MVT::f32 && RetVT != MVT::f64) in emitFCmp()
1516 unsigned Opc = (RetVT == MVT::f64) ? AArch64::FCMPDri : AArch64::FCMPSri; in emitFCmp()
1526 unsigned Opc = (RetVT == MVT::f64) ? AArch64::FCMPDrr : AArch64::FCMPSrr; in emitFCmp()
1533 unsigned AArch64FastISel::emitAdd(MVT RetVT, const Value *LHS, const Value *RHS, in emitAdd() argument
1535 return emitAddSub(/*UseAdd=*/true, RetVT, LHS, RHS, SetFlags, WantResult, in emitAdd()
1562 unsigned AArch64FastISel::emitSub(MVT RetVT, const Value *LHS, const Value *RHS, in emitSub() argument
1564 return emitAddSub(/*UseAdd=*/false, RetVT, LHS, RHS, SetFlags, WantResult, in emitSub()
1568 unsigned AArch64FastISel::emitSubs_rr(MVT RetVT, unsigned LHSReg, in emitSubs_rr() argument
1570 return emitAddSub_rr(/*UseAdd=*/false, RetVT, LHSReg, RHSReg, in emitSubs_rr()
1574 unsigned AArch64FastISel::emitSubs_rs(MVT RetVT, unsigned LHSReg, in emitSubs_rs() argument
1578 return emitAddSub_rs(/*UseAdd=*/false, RetVT, LHSReg, RHSReg, ShiftType, in emitSubs_rs()
1582 unsigned AArch64FastISel::emitLogicalOp(unsigned ISDOpc, MVT RetVT, in emitLogicalOp() argument
1606 ResultReg = emitLogicalOp_ri(ISDOpc, RetVT, LHSReg, Imm); in emitLogicalOp()
1627 ResultReg = emitLogicalOp_rs(ISDOpc, RetVT, LHSReg, RHSReg, ShiftVal); in emitLogicalOp()
1641 ResultReg = emitLogicalOp_rs(ISDOpc, RetVT, LHSReg, RHSReg, ShiftVal); in emitLogicalOp()
1651 MVT VT = std::max(MVT::i32, RetVT.SimpleTy); in emitLogicalOp()
1653 if (RetVT >= MVT::i8 && RetVT <= MVT::i16) { in emitLogicalOp()
1654 uint64_t Mask = (RetVT == MVT::i8) ? 0xff : 0xffff; in emitLogicalOp()
1660 unsigned AArch64FastISel::emitLogicalOp_ri(unsigned ISDOpc, MVT RetVT, in emitLogicalOp_ri() argument
1672 switch (RetVT.SimpleTy) { in emitLogicalOp_ri()
1698 if (RetVT >= MVT::i8 && RetVT <= MVT::i16 && ISDOpc != ISD::AND) { in emitLogicalOp_ri()
1699 uint64_t Mask = (RetVT == MVT::i8) ? 0xff : 0xffff; in emitLogicalOp_ri()
1705 unsigned AArch64FastISel::emitLogicalOp_rs(unsigned ISDOpc, MVT RetVT, in emitLogicalOp_rs() argument
1717 if (ShiftImm >= RetVT.getSizeInBits()) in emitLogicalOp_rs()
1722 switch (RetVT.SimpleTy) { in emitLogicalOp_rs()
1740 if (RetVT >= MVT::i8 && RetVT <= MVT::i16) { in emitLogicalOp_rs()
1741 uint64_t Mask = (RetVT == MVT::i8) ? 0xff : 0xffff; in emitLogicalOp_rs()
1747 unsigned AArch64FastISel::emitAnd_ri(MVT RetVT, unsigned LHSReg, in emitAnd_ri() argument
1749 return emitLogicalOp_ri(ISD::AND, RetVT, LHSReg, Imm); in emitAnd_ri()
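
emitLogicalOp and its _ri/_rs forms handle i8/i16 by computing in a 32-bit register (line 1651 clamps the type to at least MVT::i32) and then ANDing the result with 0xff or 0xffff to bring it back to the narrow width (lines 1653-1654, 1698-1699, 1740-1741); line 1698 skips that remask for ISD::AND. A plain C++ model of the widen-then-mask pattern, not the FastISel code:

    #include <cassert>
    #include <cstdint>

    // Perform an i8/i16 logical op in a 32-bit "register", then mask the
    // result back to the narrow width, as emitLogicalOp_ri/_rs do for OR and
    // XOR; logicalOpNarrow is an invented name.
    uint32_t logicalOpNarrow(char op, uint32_t lhs, uint32_t rhs, unsigned bits) {
      uint32_t wide = 0;
      switch (op) {
      case '&': wide = lhs & rhs; break; // line 1698 skips the remask for AND
      case '|': wide = lhs | rhs; break;
      case '^': wide = lhs ^ rhs; break;
      default: assert(false && "unexpected op");
      }
      uint32_t mask = (bits == 8) ? 0xff : 0xffff; // constants from line 1699
      return (op == '&') ? wide : (wide & mask);
    }

    int main() {
      // The 32-bit register may hold a sign-extended i8 (high bits set); the
      // final AND re-establishes a zero-extended narrow result.
      assert(logicalOpNarrow('|', 0xffffff80u, 0x01u, 8) == 0x81u);
      assert(logicalOpNarrow('^', 0xffffff80u, 0xffu, 8) == 0x7fu);
    }
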
1752 unsigned AArch64FastISel::emitLoad(MVT VT, MVT RetVT, Address Addr, in emitLoad() argument
1828 bool IsRet64Bit = RetVT == MVT::i64; in emitLoad()
1877 if (WantZExt && RetVT == MVT::i64 && VT <= MVT::i32) { in emitLoad()
1975 MVT RetVT = VT; in selectLoad() local
1979 if (isTypeSupported(ZE->getType(), RetVT)) in selectLoad()
1982 RetVT = VT; in selectLoad()
1984 if (isTypeSupported(SE->getType(), RetVT)) in selectLoad()
1987 RetVT = VT; in selectLoad()
1993 emitLoad(VT, RetVT, Addr, WantZExt, createMachineMemOperandFor(I)); in selectLoad()
2016 if (RetVT == MVT::i64 && VT <= MVT::i32) { in selectLoad()
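
selectLoad starts with RetVT equal to the load's own type (line 1975) and widens it to the destination type of a following zext/sext when that extension can be folded into the load (lines 1979-1987), with emitLoad and line 2016 handling the widened 64-bit case. A rough standalone model of that type selection; Ty, ExtUser, Pick, and pickLoadRetVT are invented for illustration:

    #include <cassert>

    // Invented, simplified stand-ins for the load's type and its single user;
    // used only to model how selectLoad picks the type it asks emitLoad for.
    enum class Ty { i8, i16, i32, i64 };
    enum class ExtUser { None, ZExt, SExt };

    struct Pick {
      Ty retVT;
      bool wantZExt;
    };

    // Model of lines 1975-1987: start with the load's own type and widen to
    // the extend's destination type when the only user is a zext or sext, so
    // an extending load form (e.g. ldrb/ldrsb) makes the extend redundant.
    Pick pickLoadRetVT(Ty loadVT, ExtUser onlyUser, Ty userVT) {
      Pick p{loadVT, /*wantZExt=*/true};
      if (onlyUser == ExtUser::ZExt) {
        p.retVT = userVT;
      } else if (onlyUser == ExtUser::SExt) {
        p.retVT = userVT;
        p.wantZExt = false;
      }
      return p;
    }

    int main() {
      Pick p = pickLoadRetVT(Ty::i8, ExtUser::SExt, Ty::i64);
      assert(p.retVT == Ty::i64 && !p.wantZExt); // load i8 + sext to i64
    }
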
3363 MVT RetVT; in foldXALUIntrinsic() local
3367 if (!isTypeLegal(RetTy, RetVT)) in foldXALUIntrinsic()
3370 if (RetVT != MVT::i32 && RetVT != MVT::i64) in foldXALUIntrinsic()
3544 MVT RetVT; in fastLowerIntrinsicCall() local
3545 if (!isTypeLegal(II->getType(), RetVT)) in fastLowerIntrinsicCall()
3548 if (RetVT != MVT::f32 && RetVT != MVT::f64) in fastLowerIntrinsicCall()
3557 bool Is64Bit = RetVT == MVT::f64; in fastLowerIntrinsicCall()
4047 unsigned AArch64FastISel::emitMul_rr(MVT RetVT, unsigned Op0, unsigned Op1) { in emitMul_rr() argument
4049 switch (RetVT.SimpleTy) { in emitMul_rr()
4054 RetVT = MVT::i32; in emitMul_rr()
4061 (RetVT == MVT::i64) ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass; in emitMul_rr()
4065 unsigned AArch64FastISel::emitSMULL_rr(MVT RetVT, unsigned Op0, unsigned Op1) { in emitSMULL_rr() argument
4066 if (RetVT != MVT::i64) in emitSMULL_rr()
4073 unsigned AArch64FastISel::emitUMULL_rr(MVT RetVT, unsigned Op0, unsigned Op1) { in emitUMULL_rr() argument
4074 if (RetVT != MVT::i64) in emitUMULL_rr()
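
emitMul_rr treats i8/i16 multiplies as i32 (line 4054) and only distinguishes the 32- and 64-bit register classes (line 4061), while emitSMULL_rr/emitUMULL_rr exist for the 32x32->64-bit widening multiplies and therefore require RetVT == MVT::i64 (lines 4066, 4074). A short worked example in plain C++ of why the widening forms need the full 64-bit result:

    #include <cassert>
    #include <cstdint>

    int main() {
      uint32_t a = 0xffffffffu, b = 0xffffffffu;

      // A 32-bit multiply keeps only the low 32 bits of the product...
      uint32_t low = a * b;
      assert(low == 1u);

      // ...whereas a UMULL-style widening multiply produces all 64 bits.
      uint64_t full = static_cast<uint64_t>(a) * b;
      assert(full == 0xfffffffe00000001ull);

      // Signed counterpart (SMULL): sign-extend both operands, then multiply.
      int32_t sa = -3, sb = 7;
      int64_t sfull = static_cast<int64_t>(sa) * sb;
      assert(sfull == -21);
    }
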
4081 unsigned AArch64FastISel::emitLSL_rr(MVT RetVT, unsigned Op0Reg, in emitLSL_rr() argument
4086 switch (RetVT.SimpleTy) { in emitLSL_rr()
4095 (RetVT == MVT::i64) ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass; in emitLSL_rr()
4105 unsigned AArch64FastISel::emitLSL_ri(MVT RetVT, MVT SrcVT, unsigned Op0, in emitLSL_ri() argument
4107 assert(RetVT.SimpleTy >= SrcVT.SimpleTy && in emitLSL_ri()
4112 assert((RetVT == MVT::i8 || RetVT == MVT::i16 || RetVT == MVT::i32 || in emitLSL_ri()
4113 RetVT == MVT::i64) && "Unexpected return value type."); in emitLSL_ri()
4115 bool Is64Bit = (RetVT == MVT::i64); in emitLSL_ri()
4117 unsigned DstBits = RetVT.getSizeInBits(); in emitLSL_ri()
4124 if (RetVT == SrcVT) { in emitLSL_ri()
4131 return emitIntExt(SrcVT, Op0, RetVT, IsZExt); in emitLSL_ri()
4171 if (SrcVT.SimpleTy <= MVT::i32 && RetVT == MVT::i64) { in emitLSL_ri()
4183 unsigned AArch64FastISel::emitLSR_rr(MVT RetVT, unsigned Op0Reg, in emitLSR_rr() argument
4188 switch (RetVT.SimpleTy) { in emitLSR_rr()
4197 (RetVT == MVT::i64) ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass; in emitLSR_rr()
4208 unsigned AArch64FastISel::emitLSR_ri(MVT RetVT, MVT SrcVT, unsigned Op0, in emitLSR_ri() argument
4210 assert(RetVT.SimpleTy >= SrcVT.SimpleTy && in emitLSR_ri()
4215 assert((RetVT == MVT::i8 || RetVT == MVT::i16 || RetVT == MVT::i32 || in emitLSR_ri()
4216 RetVT == MVT::i64) && "Unexpected return value type."); in emitLSR_ri()
4218 bool Is64Bit = (RetVT == MVT::i64); in emitLSR_ri()
4220 unsigned DstBits = RetVT.getSizeInBits(); in emitLSR_ri()
4227 if (RetVT == SrcVT) { in emitLSR_ri()
4234 return emitIntExt(SrcVT, Op0, RetVT, IsZExt); in emitLSR_ri()
4267 return materializeInt(ConstantInt::get(*Context, APInt(RegSize, 0)), RetVT); in emitLSR_ri()
4272 Op0 = emitIntExt(SrcVT, Op0, RetVT, IsZExt); in emitLSR_ri()
4275 SrcVT = RetVT; in emitLSR_ri()
4287 if (SrcVT.SimpleTy <= MVT::i32 && RetVT == MVT::i64) { in emitLSR_ri()
4299 unsigned AArch64FastISel::emitASR_rr(MVT RetVT, unsigned Op0Reg, in emitASR_rr() argument
4304 switch (RetVT.SimpleTy) { in emitASR_rr()
4313 (RetVT == MVT::i64) ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass; in emitASR_rr()
4315 Op0Reg = emitIntExt(RetVT, Op0Reg, MVT::i32, /*isZExt=*/false); in emitASR_rr()
4324 unsigned AArch64FastISel::emitASR_ri(MVT RetVT, MVT SrcVT, unsigned Op0, in emitASR_ri() argument
4326 assert(RetVT.SimpleTy >= SrcVT.SimpleTy && in emitASR_ri()
4331 assert((RetVT == MVT::i8 || RetVT == MVT::i16 || RetVT == MVT::i32 || in emitASR_ri()
4332 RetVT == MVT::i64) && "Unexpected return value type."); in emitASR_ri()
4334 bool Is64Bit = (RetVT == MVT::i64); in emitASR_ri()
4336 unsigned DstBits = RetVT.getSizeInBits(); in emitASR_ri()
4343 if (RetVT == SrcVT) { in emitASR_ri()
4350 return emitIntExt(SrcVT, Op0, RetVT, IsZExt); in emitASR_ri()
4383 return materializeInt(ConstantInt::get(*Context, APInt(RegSize, 0)), RetVT); in emitASR_ri()
4392 if (SrcVT.SimpleTy <= MVT::i32 && RetVT == MVT::i64) { in emitASR_ri()
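
The immediate-shift helpers (emitLSL_ri at 4105, emitLSR_ri at 4208, emitASR_ri at 4324) accept a SrcVT narrower than RetVT, fold a pending zero/sign extension into the shift, and materialize a plain zero when a right shift of a zero-extended value consumes every source bit (lines 4267, 4383); emitASR_rr sign-extends a narrow first operand before the register shift (line 4315). On AArch64, constant shifts of this kind are the UBFM/SBFM bitfield-move aliases, and the self-contained sketch below checks the usual immr/imms choices for the LSR and LSL aliases against ordinary C++ shifts; ubfm32 is a simplified model written for this note, not emitter code:

    #include <cassert>
    #include <cstdint>

    // Minimal 32-bit model of the AArch64 UBFM instruction, just enough to
    // check the LSR/LSL-by-immediate aliases; written for this note, not
    // taken from LLVM.
    uint32_t ubfm32(uint32_t src, unsigned immr, unsigned imms) {
      if (imms >= immr) {
        // Extract bits [imms:immr] and place them at bit 0 (LSR/UBFX shape).
        unsigned width = imms - immr + 1;
        uint64_t mask = (width >= 32) ? 0xffffffffull : ((1ull << width) - 1);
        return static_cast<uint32_t>((src >> immr) & mask);
      }
      // Take bits [imms:0] and place them at bit (32 - immr) (LSL/UBFIZ shape).
      uint64_t mask = (1ull << (imms + 1)) - 1;
      return static_cast<uint32_t>((src & mask) << (32 - immr));
    }

    int main() {
      uint32_t x = 0x12345678u;
      for (unsigned sh = 1; sh < 32; ++sh) {
        // LSR Wd, Wn, #sh  is  UBFM Wd, Wn, #sh, #31
        assert(ubfm32(x, sh, 31) == (x >> sh));
        // LSL Wd, Wn, #sh  is  UBFM Wd, Wn, #(32 - sh), #(31 - sh)
        assert(ubfm32(x, 32 - sh, 31 - sh) == (x << sh));
      }
    }
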
4513 bool AArch64FastISel::optimizeIntExtLoad(const Instruction *I, MVT RetVT, in optimizeIntExtLoad() argument
4542 if (RetVT != MVT::i64 || SrcVT > MVT::i32) { in optimizeIntExtLoad()
4570 MVT RetVT; in selectIntExt() local
4572 if (!isTypeSupported(I->getType(), RetVT)) in selectIntExt()
4579 if (optimizeIntExtLoad(I, RetVT, SrcVT)) in selectIntExt()
4590 if (RetVT == MVT::i64 && SrcVT != MVT::i64) { in selectIntExt()
4605 unsigned ResultReg = emitIntExt(SrcVT, SrcReg, RetVT, IsZExt); in selectIntExt()
4725 MVT RetVT; in selectShift() local
4726 if (!isTypeSupported(I->getType(), RetVT, /*IsVectorAllowed=*/true)) in selectShift()
4729 if (RetVT.isVector()) in selectShift()
4735 MVT SrcVT = RetVT; in selectShift()
4765 ResultReg = emitLSL_ri(RetVT, SrcVT, Op0Reg, ShiftVal, IsZExt); in selectShift()
4768 ResultReg = emitASR_ri(RetVT, SrcVT, Op0Reg, ShiftVal, IsZExt); in selectShift()
4771 ResultReg = emitLSR_ri(RetVT, SrcVT, Op0Reg, ShiftVal, IsZExt); in selectShift()
4793 ResultReg = emitLSL_rr(RetVT, Op0Reg, Op1Reg); in selectShift()
4796 ResultReg = emitASR_rr(RetVT, Op0Reg, Op1Reg); in selectShift()
4799 ResultReg = emitLSR_rr(RetVT, Op0Reg, Op1Reg); in selectShift()
4811 MVT RetVT, SrcVT; in selectBitCast() local
4815 if (!isTypeLegal(I->getType(), RetVT)) in selectBitCast()
4819 if (RetVT == MVT::f32 && SrcVT == MVT::i32) in selectBitCast()
4821 else if (RetVT == MVT::f64 && SrcVT == MVT::i64) in selectBitCast()
4823 else if (RetVT == MVT::i32 && SrcVT == MVT::f32) in selectBitCast()
4825 else if (RetVT == MVT::i64 && SrcVT == MVT::f64) in selectBitCast()
4831 switch (RetVT.SimpleTy) { in selectBitCast()
4851 MVT RetVT; in selectFRem() local
4852 if (!isTypeLegal(I->getType(), RetVT)) in selectFRem()
4856 switch (RetVT.SimpleTy) { in selectFRem()
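
selectFRem only checks here that the type is a legal scalar FP type (lines 4851-4856); the switch body, which this listing does not show, is expected to pick the runtime routine (fmodf for f32, fmod for f64), since AArch64 has no floating-point remainder instruction and LLVM defines frem with libm fmod semantics. A minimal illustration of those semantics:

    #include <cassert>
    #include <cmath>

    int main() {
      // LLVM's frem has libm fmod semantics: the result takes the sign of the
      // dividend and has magnitude smaller than the divisor.
      float f = std::fmod(5.5f, 2.0f);  // what 'frem float 5.5, 2.0' yields
      double d = std::fmod(-7.0, 3.0);  // 'frem double -7.0, 3.0'
      assert(f == 1.5f);
      assert(d == -1.0);
    }
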