Lines matching refs: AMDGPU
46 using namespace llvm::AMDGPU;
279 return isRegOrImmWithInputMods(AMDGPU::VS_32RegClassID, MVT::i16); in isRegOrImmWithInt16InputMods()
283 return isRegOrImmWithInputMods(AMDGPU::VS_16RegClassID, MVT::i16); in isRegOrImmWithIntT16InputMods()
287 return isRegOrImmWithInputMods(AMDGPU::VS_32RegClassID, MVT::i32); in isRegOrImmWithInt32InputMods()
291 return isRegOrInline(AMDGPU::VS_32RegClassID, MVT::i16); in isRegOrInlineImmWithInt16InputMods()
295 return isRegOrInline(AMDGPU::VS_32RegClassID, MVT::i32); in isRegOrInlineImmWithInt32InputMods()
299 return isRegOrImmWithInputMods(AMDGPU::VS_64RegClassID, MVT::i64); in isRegOrImmWithInt64InputMods()
303 return isRegOrImmWithInputMods(AMDGPU::VS_32RegClassID, MVT::f16); in isRegOrImmWithFP16InputMods()
307 return isRegOrImmWithInputMods(AMDGPU::VS_16RegClassID, MVT::f16); in isRegOrImmWithFPT16InputMods()
311 return isRegOrImmWithInputMods(AMDGPU::VS_32RegClassID, MVT::f32); in isRegOrImmWithFP32InputMods()
315 return isRegOrImmWithInputMods(AMDGPU::VS_64RegClassID, MVT::f64); in isRegOrImmWithFP64InputMods()
320 IsFake16 ? AMDGPU::VS_32RegClassID : AMDGPU::VS_16RegClassID, MVT::f16); in isRegOrInlineImmWithFP16InputMods()
324 return isRegOrInline(AMDGPU::VS_32RegClassID, MVT::f32); in isRegOrInlineImmWithFP32InputMods()
328 return isRegOrImmWithInputMods(AMDGPU::VS_32RegClassID, MVT::v2f16); in isPackedFP16InputMods()
332 return isRegClass(AMDGPU::VGPR_32RegClassID) || in isVReg()
333 isRegClass(AMDGPU::VReg_64RegClassID) || in isVReg()
334 isRegClass(AMDGPU::VReg_96RegClassID) || in isVReg()
335 isRegClass(AMDGPU::VReg_128RegClassID) || in isVReg()
336 isRegClass(AMDGPU::VReg_160RegClassID) || in isVReg()
337 isRegClass(AMDGPU::VReg_192RegClassID) || in isVReg()
338 isRegClass(AMDGPU::VReg_256RegClassID) || in isVReg()
339 isRegClass(AMDGPU::VReg_512RegClassID) || in isVReg()
340 isRegClass(AMDGPU::VReg_1024RegClassID); in isVReg()
344 return isRegClass(AMDGPU::VGPR_32RegClassID); in isVReg32()
352 return isRegKind() && getReg() == AMDGPU::SGPR_NULL; in isNull()
419 return isRegOrInlineNoMods(AMDGPU::SReg_32RegClassID, MVT::i16); in isSCSrcB16()
427 return isRegOrInlineNoMods(AMDGPU::SReg_32RegClassID, MVT::i32); in isSCSrc_b32()
431 return isRegOrInlineNoMods(AMDGPU::SReg_64RegClassID, MVT::i64); in isSCSrc_b64()
437 return isRegOrInlineNoMods(AMDGPU::SReg_32RegClassID, MVT::f16); in isSCSrcF16()
445 return isRegOrInlineNoMods(AMDGPU::SReg_32RegClassID, MVT::f32); in isSCSrcF32()
449 return isRegOrInlineNoMods(AMDGPU::SReg_64RegClassID, MVT::f64); in isSCSrcF64()
505 return isRegOrInlineNoMods(AMDGPU::SRegOrLds_32RegClassID, MVT::i32) || in isSSrcOrLds_b32()
510 return isRegOrInlineNoMods(AMDGPU::VS_32RegClassID, MVT::i32); in isVCSrc_b32()
514 return isRegOrInlineNoMods(AMDGPU::VS_64RegClassID, MVT::i64); in isVCSrcB64()
518 return isRegOrInlineNoMods(AMDGPU::VS_16RegClassID, MVT::i16); in isVCSrcTB16()
522 return isRegOrInlineNoMods(AMDGPU::VS_16_Lo128RegClassID, MVT::i16); in isVCSrcTB16_Lo128()
526 return isRegOrInlineNoMods(AMDGPU::VS_32_Lo128RegClassID, MVT::i16); in isVCSrcFake16B16_Lo128()
530 return isRegOrInlineNoMods(AMDGPU::VS_32RegClassID, MVT::i16); in isVCSrc_b16()
536 return isRegOrInlineNoMods(AMDGPU::VS_32RegClassID, MVT::f32); in isVCSrc_f32()
540 return isRegOrInlineNoMods(AMDGPU::VS_64RegClassID, MVT::f64); in isVCSrcF64()
544 return isRegOrInlineNoMods(AMDGPU::VS_16RegClassID, MVT::bf16); in isVCSrcTBF16()
548 return isRegOrInlineNoMods(AMDGPU::VS_16RegClassID, MVT::f16); in isVCSrcTF16()
552 return isRegOrInlineNoMods(AMDGPU::VS_16_Lo128RegClassID, MVT::bf16); in isVCSrcTBF16_Lo128()
556 return isRegOrInlineNoMods(AMDGPU::VS_16_Lo128RegClassID, MVT::f16); in isVCSrcTF16_Lo128()
560 return isRegOrInlineNoMods(AMDGPU::VS_32_Lo128RegClassID, MVT::bf16); in isVCSrcFake16BF16_Lo128()
564 return isRegOrInlineNoMods(AMDGPU::VS_32_Lo128RegClassID, MVT::f16); in isVCSrcFake16F16_Lo128()
568 return isRegOrInlineNoMods(AMDGPU::VS_32RegClassID, MVT::bf16); in isVCSrc_bf16()
572 return isRegOrInlineNoMods(AMDGPU::VS_32RegClassID, MVT::f16); in isVCSrc_f16()
648 return isRegOrInlineNoMods(AMDGPU::VGPR_32RegClassID, MVT::i32); in isVISrcB32()
652 return isRegOrInlineNoMods(AMDGPU::VGPR_32RegClassID, MVT::i16); in isVISrcB16()
660 return isRegOrInlineNoMods(AMDGPU::VGPR_32RegClassID, MVT::f32); in isVISrcF32()
664 return isRegOrInlineNoMods(AMDGPU::VGPR_32RegClassID, MVT::f16); in isVISrcF16()
672 return isRegOrInlineNoMods(AMDGPU::VReg_64RegClassID, MVT::bf16); in isVISrc_64_bf16()
676 return isRegOrInlineNoMods(AMDGPU::VReg_64RegClassID, MVT::f16); in isVISrc_64_f16()
680 return isRegOrInlineNoMods(AMDGPU::VReg_64RegClassID, MVT::i32); in isVISrc_64_b32()
684 return isRegOrInlineNoMods(AMDGPU::VReg_64RegClassID, MVT::i64); in isVISrc_64B64()
688 return isRegOrInlineNoMods(AMDGPU::VReg_64RegClassID, MVT::f64); in isVISrc_64_f64()
692 return isRegOrInlineNoMods(AMDGPU::VReg_64RegClassID, MVT::f32); in isVISrc_64V2FP32()
696 return isRegOrInlineNoMods(AMDGPU::VReg_64RegClassID, MVT::i32); in isVISrc_64V2INT32()
700 return isRegOrInlineNoMods(AMDGPU::VReg_256RegClassID, MVT::i32); in isVISrc_256_b32()
704 return isRegOrInlineNoMods(AMDGPU::VReg_256RegClassID, MVT::f32); in isVISrc_256_f32()
708 return isRegOrInlineNoMods(AMDGPU::VReg_256RegClassID, MVT::i64); in isVISrc_256B64()
712 return isRegOrInlineNoMods(AMDGPU::VReg_256RegClassID, MVT::f64); in isVISrc_256_f64()
716 return isRegOrInlineNoMods(AMDGPU::VReg_128RegClassID, MVT::i16); in isVISrc_128B16()
724 return isRegOrInlineNoMods(AMDGPU::VReg_128RegClassID, MVT::i32); in isVISrc_128_b32()
728 return isRegOrInlineNoMods(AMDGPU::VReg_128RegClassID, MVT::f32); in isVISrc_128_f32()
732 return isRegOrInlineNoMods(AMDGPU::VReg_256RegClassID, MVT::f32); in isVISrc_256V2FP32()
736 return isRegOrInlineNoMods(AMDGPU::VReg_256RegClassID, MVT::i32); in isVISrc_256V2INT32()
740 return isRegOrInlineNoMods(AMDGPU::VReg_512RegClassID, MVT::i32); in isVISrc_512_b32()
744 return isRegOrInlineNoMods(AMDGPU::VReg_512RegClassID, MVT::i16); in isVISrc_512B16()
752 return isRegOrInlineNoMods(AMDGPU::VReg_512RegClassID, MVT::f32); in isVISrc_512_f32()
756 return isRegOrInlineNoMods(AMDGPU::VReg_512RegClassID, MVT::f16); in isVISrc_512F16()
764 return isRegOrInlineNoMods(AMDGPU::VReg_1024RegClassID, MVT::i32); in isVISrc_1024_b32()
768 return isRegOrInlineNoMods(AMDGPU::VReg_1024RegClassID, MVT::i16); in isVISrc_1024B16()
776 return isRegOrInlineNoMods(AMDGPU::VReg_1024RegClassID, MVT::f32); in isVISrc_1024_f32()
780 return isRegOrInlineNoMods(AMDGPU::VReg_1024RegClassID, MVT::f16); in isVISrc_1024F16()
788 return isRegOrInlineNoMods(AMDGPU::AGPR_32RegClassID, MVT::i32); in isAISrcB32()
792 return isRegOrInlineNoMods(AMDGPU::AGPR_32RegClassID, MVT::i16); in isAISrcB16()
800 return isRegOrInlineNoMods(AMDGPU::AGPR_32RegClassID, MVT::f32); in isAISrcF32()
804 return isRegOrInlineNoMods(AMDGPU::AGPR_32RegClassID, MVT::f16); in isAISrcF16()
812 return isRegOrInlineNoMods(AMDGPU::AReg_64RegClassID, MVT::i64); in isAISrc_64B64()
816 return isRegOrInlineNoMods(AMDGPU::AReg_64RegClassID, MVT::f64); in isAISrc_64_f64()
820 return isRegOrInlineNoMods(AMDGPU::AReg_128RegClassID, MVT::i32); in isAISrc_128_b32()
824 return isRegOrInlineNoMods(AMDGPU::AReg_128RegClassID, MVT::i16); in isAISrc_128B16()
832 return isRegOrInlineNoMods(AMDGPU::AReg_128RegClassID, MVT::f32); in isAISrc_128_f32()
836 return isRegOrInlineNoMods(AMDGPU::AReg_128RegClassID, MVT::f16); in isAISrc_128F16()
844 return isRegOrInlineNoMods(AMDGPU::VReg_128RegClassID, MVT::bf16); in isVISrc_128_bf16()
848 return isRegOrInlineNoMods(AMDGPU::VReg_128RegClassID, MVT::f16); in isVISrc_128_f16()
856 return isRegOrInlineNoMods(AMDGPU::AReg_256RegClassID, MVT::i64); in isAISrc_256B64()
860 return isRegOrInlineNoMods(AMDGPU::AReg_256RegClassID, MVT::f64); in isAISrc_256_f64()
864 return isRegOrInlineNoMods(AMDGPU::AReg_512RegClassID, MVT::i32); in isAISrc_512_b32()
868 return isRegOrInlineNoMods(AMDGPU::AReg_512RegClassID, MVT::i16); in isAISrc_512B16()
876 return isRegOrInlineNoMods(AMDGPU::AReg_512RegClassID, MVT::f32); in isAISrc_512_f32()
880 return isRegOrInlineNoMods(AMDGPU::AReg_512RegClassID, MVT::f16); in isAISrc_512F16()
888 return isRegOrInlineNoMods(AMDGPU::AReg_1024RegClassID, MVT::i32); in isAISrc_1024_b32()
892 return isRegOrInlineNoMods(AMDGPU::AReg_1024RegClassID, MVT::i16); in isAISrc_1024B16()
900 return isRegOrInlineNoMods(AMDGPU::AReg_1024RegClassID, MVT::f32); in isAISrc_1024_f32()
904 return isRegOrInlineNoMods(AMDGPU::AReg_1024RegClassID, MVT::f16); in isAISrc_1024F16()
1410 if (!FB[AMDGPU::FeatureWavefrontSize64] && in AMDGPUAsmParser()
1411 !FB[AMDGPU::FeatureWavefrontSize32]) { in AMDGPUAsmParser()
1415 copySTI().ToggleFeature(AMDGPU::FeatureWavefrontSize32); in AMDGPUAsmParser()
1420 AMDGPU::IsaVersion ISA = AMDGPU::getIsaVersion(getSTI().getCPU()); in AMDGPUAsmParser()
1436 for (auto [Symbol, Code] : AMDGPU::UCVersion::getGFXVersions()) in AMDGPUAsmParser()
1445 return AMDGPU::hasMIMG_R128(getSTI()); in hasMIMG_R128()
1449 return AMDGPU::hasPackedD16(getSTI()); in hasPackedD16()
1452 bool hasA16() const { return AMDGPU::hasA16(getSTI()); } in hasA16()
1454 bool hasG16() const { return AMDGPU::hasG16(getSTI()); } in hasG16()
1456 bool hasGDS() const { return AMDGPU::hasGDS(getSTI()); } in hasGDS()
1459 return AMDGPU::isSI(getSTI()); in isSI()
1463 return AMDGPU::isCI(getSTI()); in isCI()
1467 return AMDGPU::isVI(getSTI()); in isVI()
1471 return AMDGPU::isGFX9(getSTI()); in isGFX9()
1476 return AMDGPU::isGFX90A(getSTI()); in isGFX90A()
1480 return AMDGPU::isGFX940(getSTI()); in isGFX940()
1484 return AMDGPU::isGFX9Plus(getSTI()); in isGFX9Plus()
1488 return AMDGPU::isGFX10(getSTI()); in isGFX10()
1491 bool isGFX10Plus() const { return AMDGPU::isGFX10Plus(getSTI()); } in isGFX10Plus()
1494 return AMDGPU::isGFX11(getSTI()); in isGFX11()
1498 return AMDGPU::isGFX11Plus(getSTI()); in isGFX11Plus()
1501 bool isGFX12() const { return AMDGPU::isGFX12(getSTI()); } in isGFX12()
1503 bool isGFX12Plus() const { return AMDGPU::isGFX12Plus(getSTI()); } in isGFX12Plus()
1505 bool isGFX10_AEncoding() const { return AMDGPU::isGFX10_AEncoding(getSTI()); } in isGFX10_AEncoding()
1508 return AMDGPU::isGFX10_BEncoding(getSTI()); in isGFX10_BEncoding()
1512 return getFeatureBits()[AMDGPU::FeatureInv2PiInlineImm]; in hasInv2PiInlineImm()
1516 return getFeatureBits()[AMDGPU::FeatureFlatInstOffsets]; in hasFlatOffsets()
1520 return getFeatureBits()[AMDGPU::FeatureArchitectedFlatScratch]; in hasArchitectedFlatScratch()
1530 return getFeatureBits()[AMDGPU::FeatureIntClamp]; in hasIntClamp()
1534 return getFeatureBits()[AMDGPU::FeaturePartialNSAEncoding]; in hasPartialNSAEncoding()
1538 return AMDGPU::getNSAMaxSize(getSTI(), HasSampler); in getNSAMaxSize()
1542 return AMDGPU::getMaxNumUserSGPRs(getSTI()); in getMaxNumUserSGPRs()
1545 bool hasKernargPreload() const { return AMDGPU::hasKernargPreload(getSTI()); } in hasKernargPreload()
1921 case AMDGPU::OPERAND_REG_IMM_INT16: in getOpFltSemantics()
1922 case AMDGPU::OPERAND_REG_INLINE_C_INT16: in getOpFltSemantics()
1923 case AMDGPU::OPERAND_REG_INLINE_AC_INT16: in getOpFltSemantics()
1924 case AMDGPU::OPERAND_REG_IMM_INT32: in getOpFltSemantics()
1925 case AMDGPU::OPERAND_REG_IMM_FP32: in getOpFltSemantics()
1926 case AMDGPU::OPERAND_REG_IMM_FP32_DEFERRED: in getOpFltSemantics()
1927 case AMDGPU::OPERAND_REG_INLINE_C_INT32: in getOpFltSemantics()
1928 case AMDGPU::OPERAND_REG_INLINE_C_FP32: in getOpFltSemantics()
1929 case AMDGPU::OPERAND_REG_INLINE_AC_INT32: in getOpFltSemantics()
1930 case AMDGPU::OPERAND_REG_INLINE_AC_FP32: in getOpFltSemantics()
1931 case AMDGPU::OPERAND_REG_INLINE_C_V2FP32: in getOpFltSemantics()
1932 case AMDGPU::OPERAND_REG_IMM_V2FP32: in getOpFltSemantics()
1933 case AMDGPU::OPERAND_REG_INLINE_C_V2INT32: in getOpFltSemantics()
1934 case AMDGPU::OPERAND_REG_IMM_V2INT32: in getOpFltSemantics()
1935 case AMDGPU::OPERAND_REG_IMM_V2INT16: in getOpFltSemantics()
1936 case AMDGPU::OPERAND_REG_INLINE_C_V2INT16: in getOpFltSemantics()
1937 case AMDGPU::OPERAND_REG_INLINE_AC_V2INT16: in getOpFltSemantics()
1938 case AMDGPU::OPERAND_KIMM32: in getOpFltSemantics()
1939 case AMDGPU::OPERAND_INLINE_SPLIT_BARRIER_INT32: in getOpFltSemantics()
1941 case AMDGPU::OPERAND_REG_IMM_INT64: in getOpFltSemantics()
1942 case AMDGPU::OPERAND_REG_IMM_FP64: in getOpFltSemantics()
1943 case AMDGPU::OPERAND_REG_INLINE_C_INT64: in getOpFltSemantics()
1944 case AMDGPU::OPERAND_REG_INLINE_C_FP64: in getOpFltSemantics()
1945 case AMDGPU::OPERAND_REG_INLINE_AC_FP64: in getOpFltSemantics()
1947 case AMDGPU::OPERAND_REG_IMM_FP16: in getOpFltSemantics()
1948 case AMDGPU::OPERAND_REG_IMM_FP16_DEFERRED: in getOpFltSemantics()
1949 case AMDGPU::OPERAND_REG_INLINE_C_FP16: in getOpFltSemantics()
1950 case AMDGPU::OPERAND_REG_INLINE_C_V2FP16: in getOpFltSemantics()
1951 case AMDGPU::OPERAND_REG_INLINE_AC_FP16: in getOpFltSemantics()
1952 case AMDGPU::OPERAND_REG_INLINE_AC_V2FP16: in getOpFltSemantics()
1953 case AMDGPU::OPERAND_REG_IMM_V2FP16: in getOpFltSemantics()
1954 case AMDGPU::OPERAND_KIMM16: in getOpFltSemantics()
1956 case AMDGPU::OPERAND_REG_IMM_BF16: in getOpFltSemantics()
1957 case AMDGPU::OPERAND_REG_IMM_BF16_DEFERRED: in getOpFltSemantics()
1958 case AMDGPU::OPERAND_REG_INLINE_C_BF16: in getOpFltSemantics()
1959 case AMDGPU::OPERAND_REG_INLINE_C_V2BF16: in getOpFltSemantics()
1960 case AMDGPU::OPERAND_REG_INLINE_AC_BF16: in getOpFltSemantics()
1961 case AMDGPU::OPERAND_REG_INLINE_AC_V2BF16: in getOpFltSemantics()
1962 case AMDGPU::OPERAND_REG_IMM_V2BF16: in getOpFltSemantics()
2000 return AMDGPU::isInlinableLiteralFP16(Val, HasInv2Pi); in isInlineableLiteralOp16()
2004 return AMDGPU::isInlinableLiteralBF16(Val, HasInv2Pi); in isInlineableLiteralOp16()
2029 return AMDGPU::isInlinableLiteral64(Imm.Val, in isInlinableImm()
2065 return AMDGPU::isInlinableLiteral32( in isInlinableImm()
2072 return AMDGPU::isInlinableLiteral64(Imm.Val, in isInlinableImm()
2086 return AMDGPU::isInlinableLiteral32( in isInlinableImm()
2149 return isRegClass(AMDGPU::VGPR_32RegClassID) || in isVRegWithInputMods()
2151 (isRegClass(AMDGPU::VReg_64RegClassID) && in isVRegWithInputMods()
2152 AsmParser->getFeatureBits()[AMDGPU::FeatureDPALU_DPP]); in isVRegWithInputMods()
2156 return isRegClass(IsFake16 ? AMDGPU::VGPR_32_Lo128RegClassID in isT16VRegWithInputMods()
2157 : AMDGPU::VGPR_16_Lo128RegClassID); in isT16VRegWithInputMods()
2164 return isRegClass(AMDGPU::VS_32RegClassID) || isInlinableImm(type); in isSDWAOperand()
2186 return isReg() && ((FB[AMDGPU::FeatureWavefrontSize64] && isSCSrc_b64()) || in isBoolReg()
2187 (FB[AMDGPU::FeatureWavefrontSize32] && isSCSrc_b32())); in isBoolReg()
2213 if (AMDGPU::isSISrcOperand(AsmParser->getMII()->get(Inst.getOpcode()), in addImmOperands()
2229 assert(AMDGPU::isSISrcOperand(InstDesc, OpNum)); in addLiteralImmOperand()
2232 assert(AMDGPU::isSISrcFPOperand(InstDesc, OpNum)); in addLiteralImmOperand()
2242 case AMDGPU::OPERAND_REG_IMM_INT64: in addLiteralImmOperand()
2243 case AMDGPU::OPERAND_REG_IMM_FP64: in addLiteralImmOperand()
2244 case AMDGPU::OPERAND_REG_INLINE_C_INT64: in addLiteralImmOperand()
2245 case AMDGPU::OPERAND_REG_INLINE_C_FP64: in addLiteralImmOperand()
2246 case AMDGPU::OPERAND_REG_INLINE_AC_FP64: in addLiteralImmOperand()
2247 if (AMDGPU::isInlinableLiteral64(Literal.getZExtValue(), in addLiteralImmOperand()
2255 if (AMDGPU::isSISrcFPOperand(InstDesc, OpNum)) { // Expected 64-bit fp operand in addLiteralImmOperand()
2274 case AMDGPU::OPERAND_REG_IMM_BF16: in addLiteralImmOperand()
2275 case AMDGPU::OPERAND_REG_IMM_BF16_DEFERRED: in addLiteralImmOperand()
2276 case AMDGPU::OPERAND_REG_INLINE_C_BF16: in addLiteralImmOperand()
2277 case AMDGPU::OPERAND_REG_INLINE_C_V2BF16: in addLiteralImmOperand()
2278 case AMDGPU::OPERAND_REG_INLINE_AC_BF16: in addLiteralImmOperand()
2279 case AMDGPU::OPERAND_REG_INLINE_AC_V2BF16: in addLiteralImmOperand()
2280 case AMDGPU::OPERAND_REG_IMM_V2BF16: in addLiteralImmOperand()
2292 case AMDGPU::OPERAND_REG_IMM_INT32: in addLiteralImmOperand()
2293 case AMDGPU::OPERAND_REG_IMM_FP32: in addLiteralImmOperand()
2294 case AMDGPU::OPERAND_REG_IMM_FP32_DEFERRED: in addLiteralImmOperand()
2295 case AMDGPU::OPERAND_REG_INLINE_C_INT32: in addLiteralImmOperand()
2296 case AMDGPU::OPERAND_REG_INLINE_C_FP32: in addLiteralImmOperand()
2297 case AMDGPU::OPERAND_REG_INLINE_AC_INT32: in addLiteralImmOperand()
2298 case AMDGPU::OPERAND_REG_INLINE_AC_FP32: in addLiteralImmOperand()
2299 case AMDGPU::OPERAND_REG_IMM_INT16: in addLiteralImmOperand()
2300 case AMDGPU::OPERAND_REG_IMM_FP16: in addLiteralImmOperand()
2301 case AMDGPU::OPERAND_REG_IMM_FP16_DEFERRED: in addLiteralImmOperand()
2302 case AMDGPU::OPERAND_REG_INLINE_C_INT16: in addLiteralImmOperand()
2303 case AMDGPU::OPERAND_REG_INLINE_C_FP16: in addLiteralImmOperand()
2304 case AMDGPU::OPERAND_REG_INLINE_C_V2INT16: in addLiteralImmOperand()
2305 case AMDGPU::OPERAND_REG_INLINE_C_V2FP16: in addLiteralImmOperand()
2306 case AMDGPU::OPERAND_REG_INLINE_AC_INT16: in addLiteralImmOperand()
2307 case AMDGPU::OPERAND_REG_INLINE_AC_FP16: in addLiteralImmOperand()
2308 case AMDGPU::OPERAND_REG_INLINE_AC_V2INT16: in addLiteralImmOperand()
2309 case AMDGPU::OPERAND_REG_INLINE_AC_V2FP16: in addLiteralImmOperand()
2310 case AMDGPU::OPERAND_REG_IMM_V2INT16: in addLiteralImmOperand()
2311 case AMDGPU::OPERAND_REG_IMM_V2FP16: in addLiteralImmOperand()
2312 case AMDGPU::OPERAND_REG_INLINE_C_V2FP32: in addLiteralImmOperand()
2313 case AMDGPU::OPERAND_REG_IMM_V2FP32: in addLiteralImmOperand()
2314 case AMDGPU::OPERAND_REG_INLINE_C_V2INT32: in addLiteralImmOperand()
2315 case AMDGPU::OPERAND_REG_IMM_V2INT32: in addLiteralImmOperand()
2316 case AMDGPU::OPERAND_KIMM32: in addLiteralImmOperand()
2317 case AMDGPU::OPERAND_KIMM16: in addLiteralImmOperand()
2318 case AMDGPU::OPERAND_INLINE_SPLIT_BARRIER_INT32: { in addLiteralImmOperand()
2329 if (OpTy == AMDGPU::OPERAND_KIMM32 || OpTy == AMDGPU::OPERAND_KIMM16) { in addLiteralImmOperand()
2346 case AMDGPU::OPERAND_REG_IMM_INT32: in addLiteralImmOperand()
2347 case AMDGPU::OPERAND_REG_IMM_FP32: in addLiteralImmOperand()
2348 case AMDGPU::OPERAND_REG_IMM_FP32_DEFERRED: in addLiteralImmOperand()
2349 case AMDGPU::OPERAND_REG_INLINE_C_INT32: in addLiteralImmOperand()
2350 case AMDGPU::OPERAND_REG_INLINE_C_FP32: in addLiteralImmOperand()
2351 case AMDGPU::OPERAND_REG_INLINE_AC_INT32: in addLiteralImmOperand()
2352 case AMDGPU::OPERAND_REG_INLINE_AC_FP32: in addLiteralImmOperand()
2353 case AMDGPU::OPERAND_REG_IMM_V2INT16: in addLiteralImmOperand()
2354 case AMDGPU::OPERAND_REG_IMM_V2BF16: in addLiteralImmOperand()
2355 case AMDGPU::OPERAND_REG_IMM_V2FP16: in addLiteralImmOperand()
2356 case AMDGPU::OPERAND_REG_IMM_V2FP32: in addLiteralImmOperand()
2357 case AMDGPU::OPERAND_REG_INLINE_C_V2FP32: in addLiteralImmOperand()
2358 case AMDGPU::OPERAND_REG_IMM_V2INT32: in addLiteralImmOperand()
2359 case AMDGPU::OPERAND_REG_INLINE_C_V2INT32: in addLiteralImmOperand()
2360 case AMDGPU::OPERAND_INLINE_SPLIT_BARRIER_INT32: in addLiteralImmOperand()
2362 AMDGPU::isInlinableLiteral32(static_cast<int32_t>(Val), in addLiteralImmOperand()
2373 case AMDGPU::OPERAND_REG_IMM_INT64: in addLiteralImmOperand()
2374 case AMDGPU::OPERAND_REG_IMM_FP64: in addLiteralImmOperand()
2375 case AMDGPU::OPERAND_REG_INLINE_C_INT64: in addLiteralImmOperand()
2376 case AMDGPU::OPERAND_REG_INLINE_C_FP64: in addLiteralImmOperand()
2377 case AMDGPU::OPERAND_REG_INLINE_AC_FP64: in addLiteralImmOperand()
2378 if (AMDGPU::isInlinableLiteral64(Val, AsmParser->hasInv2PiInlineImm())) { in addLiteralImmOperand()
2384 Val = AMDGPU::isSISrcFPOperand(InstDesc, OpNum) ? (uint64_t)Val << 32 in addLiteralImmOperand()
2391 case AMDGPU::OPERAND_REG_IMM_INT16: in addLiteralImmOperand()
2392 case AMDGPU::OPERAND_REG_INLINE_C_INT16: in addLiteralImmOperand()
2393 case AMDGPU::OPERAND_REG_INLINE_AC_INT16: in addLiteralImmOperand()
2395 AMDGPU::isInlinableIntLiteral(static_cast<int16_t>(Val))) { in addLiteralImmOperand()
2405 case AMDGPU::OPERAND_REG_INLINE_C_FP16: in addLiteralImmOperand()
2406 case AMDGPU::OPERAND_REG_IMM_FP16: in addLiteralImmOperand()
2407 case AMDGPU::OPERAND_REG_IMM_FP16_DEFERRED: in addLiteralImmOperand()
2408 case AMDGPU::OPERAND_REG_INLINE_AC_FP16: in addLiteralImmOperand()
2410 AMDGPU::isInlinableLiteralFP16(static_cast<int16_t>(Val), in addLiteralImmOperand()
2421 case AMDGPU::OPERAND_REG_IMM_BF16: in addLiteralImmOperand()
2422 case AMDGPU::OPERAND_REG_IMM_BF16_DEFERRED: in addLiteralImmOperand()
2423 case AMDGPU::OPERAND_REG_INLINE_C_BF16: in addLiteralImmOperand()
2424 case AMDGPU::OPERAND_REG_INLINE_AC_BF16: in addLiteralImmOperand()
2426 AMDGPU::isInlinableLiteralBF16(static_cast<int16_t>(Val), in addLiteralImmOperand()
2437 case AMDGPU::OPERAND_REG_INLINE_C_V2INT16: in addLiteralImmOperand()
2438 case AMDGPU::OPERAND_REG_INLINE_AC_V2INT16: { in addLiteralImmOperand()
2440 assert(AMDGPU::isInlinableIntLiteral(static_cast<int16_t>(Val))); in addLiteralImmOperand()
2444 case AMDGPU::OPERAND_REG_INLINE_C_V2FP16: in addLiteralImmOperand()
2445 case AMDGPU::OPERAND_REG_INLINE_AC_V2FP16: { in addLiteralImmOperand()
2447 assert(AMDGPU::isInlinableLiteralFP16(static_cast<int16_t>(Val), in addLiteralImmOperand()
2454 case AMDGPU::OPERAND_REG_INLINE_C_V2BF16: in addLiteralImmOperand()
2455 case AMDGPU::OPERAND_REG_INLINE_AC_V2BF16: { in addLiteralImmOperand()
2457 assert(AMDGPU::isInlinableLiteralBF16(static_cast<int16_t>(Val), in addLiteralImmOperand()
2464 case AMDGPU::OPERAND_KIMM32: in addLiteralImmOperand()
2468 case AMDGPU::OPERAND_KIMM16: in addLiteralImmOperand()
2478 Inst.addOperand(MCOperand::createReg(AMDGPU::getMCReg(getReg(), AsmParser->getSTI()))); in addRegOperands()
2504 return AMDGPU::VGPR_32RegClassID; in getRegClass()
2506 return AMDGPU::VReg_64RegClassID; in getRegClass()
2508 return AMDGPU::VReg_96RegClassID; in getRegClass()
2510 return AMDGPU::VReg_128RegClassID; in getRegClass()
2512 return AMDGPU::VReg_160RegClassID; in getRegClass()
2514 return AMDGPU::VReg_192RegClassID; in getRegClass()
2516 return AMDGPU::VReg_224RegClassID; in getRegClass()
2518 return AMDGPU::VReg_256RegClassID; in getRegClass()
2520 return AMDGPU::VReg_288RegClassID; in getRegClass()
2522 return AMDGPU::VReg_320RegClassID; in getRegClass()
2524 return AMDGPU::VReg_352RegClassID; in getRegClass()
2526 return AMDGPU::VReg_384RegClassID; in getRegClass()
2528 return AMDGPU::VReg_512RegClassID; in getRegClass()
2530 return AMDGPU::VReg_1024RegClassID; in getRegClass()
2536 return AMDGPU::TTMP_32RegClassID; in getRegClass()
2538 return AMDGPU::TTMP_64RegClassID; in getRegClass()
2540 return AMDGPU::TTMP_128RegClassID; in getRegClass()
2542 return AMDGPU::TTMP_256RegClassID; in getRegClass()
2544 return AMDGPU::TTMP_512RegClassID; in getRegClass()
2550 return AMDGPU::SGPR_32RegClassID; in getRegClass()
2552 return AMDGPU::SGPR_64RegClassID; in getRegClass()
2554 return AMDGPU::SGPR_96RegClassID; in getRegClass()
2556 return AMDGPU::SGPR_128RegClassID; in getRegClass()
2558 return AMDGPU::SGPR_160RegClassID; in getRegClass()
2560 return AMDGPU::SGPR_192RegClassID; in getRegClass()
2562 return AMDGPU::SGPR_224RegClassID; in getRegClass()
2564 return AMDGPU::SGPR_256RegClassID; in getRegClass()
2566 return AMDGPU::SGPR_288RegClassID; in getRegClass()
2568 return AMDGPU::SGPR_320RegClassID; in getRegClass()
2570 return AMDGPU::SGPR_352RegClassID; in getRegClass()
2572 return AMDGPU::SGPR_384RegClassID; in getRegClass()
2574 return AMDGPU::SGPR_512RegClassID; in getRegClass()
2580 return AMDGPU::AGPR_32RegClassID; in getRegClass()
2582 return AMDGPU::AReg_64RegClassID; in getRegClass()
2584 return AMDGPU::AReg_96RegClassID; in getRegClass()
2586 return AMDGPU::AReg_128RegClassID; in getRegClass()
2588 return AMDGPU::AReg_160RegClassID; in getRegClass()
2590 return AMDGPU::AReg_192RegClassID; in getRegClass()
2592 return AMDGPU::AReg_224RegClassID; in getRegClass()
2594 return AMDGPU::AReg_256RegClassID; in getRegClass()
2596 return AMDGPU::AReg_288RegClassID; in getRegClass()
2598 return AMDGPU::AReg_320RegClassID; in getRegClass()
2600 return AMDGPU::AReg_352RegClassID; in getRegClass()
2602 return AMDGPU::AReg_384RegClassID; in getRegClass()
2604 return AMDGPU::AReg_512RegClassID; in getRegClass()
2606 return AMDGPU::AReg_1024RegClassID; in getRegClass()
2614 .Case("exec", AMDGPU::EXEC) in getSpecialRegForName()
2615 .Case("vcc", AMDGPU::VCC) in getSpecialRegForName()
2616 .Case("flat_scratch", AMDGPU::FLAT_SCR) in getSpecialRegForName()
2617 .Case("xnack_mask", AMDGPU::XNACK_MASK) in getSpecialRegForName()
2618 .Case("shared_base", AMDGPU::SRC_SHARED_BASE) in getSpecialRegForName()
2619 .Case("src_shared_base", AMDGPU::SRC_SHARED_BASE) in getSpecialRegForName()
2620 .Case("shared_limit", AMDGPU::SRC_SHARED_LIMIT) in getSpecialRegForName()
2621 .Case("src_shared_limit", AMDGPU::SRC_SHARED_LIMIT) in getSpecialRegForName()
2622 .Case("private_base", AMDGPU::SRC_PRIVATE_BASE) in getSpecialRegForName()
2623 .Case("src_private_base", AMDGPU::SRC_PRIVATE_BASE) in getSpecialRegForName()
2624 .Case("private_limit", AMDGPU::SRC_PRIVATE_LIMIT) in getSpecialRegForName()
2625 .Case("src_private_limit", AMDGPU::SRC_PRIVATE_LIMIT) in getSpecialRegForName()
2626 .Case("pops_exiting_wave_id", AMDGPU::SRC_POPS_EXITING_WAVE_ID) in getSpecialRegForName()
2627 .Case("src_pops_exiting_wave_id", AMDGPU::SRC_POPS_EXITING_WAVE_ID) in getSpecialRegForName()
2628 .Case("lds_direct", AMDGPU::LDS_DIRECT) in getSpecialRegForName()
2629 .Case("src_lds_direct", AMDGPU::LDS_DIRECT) in getSpecialRegForName()
2630 .Case("m0", AMDGPU::M0) in getSpecialRegForName()
2631 .Case("vccz", AMDGPU::SRC_VCCZ) in getSpecialRegForName()
2632 .Case("src_vccz", AMDGPU::SRC_VCCZ) in getSpecialRegForName()
2633 .Case("execz", AMDGPU::SRC_EXECZ) in getSpecialRegForName()
2634 .Case("src_execz", AMDGPU::SRC_EXECZ) in getSpecialRegForName()
2635 .Case("scc", AMDGPU::SRC_SCC) in getSpecialRegForName()
2636 .Case("src_scc", AMDGPU::SRC_SCC) in getSpecialRegForName()
2637 .Case("tba", AMDGPU::TBA) in getSpecialRegForName()
2638 .Case("tma", AMDGPU::TMA) in getSpecialRegForName()
2639 .Case("flat_scratch_lo", AMDGPU::FLAT_SCR_LO) in getSpecialRegForName()
2640 .Case("flat_scratch_hi", AMDGPU::FLAT_SCR_HI) in getSpecialRegForName()
2641 .Case("xnack_mask_lo", AMDGPU::XNACK_MASK_LO) in getSpecialRegForName()
2642 .Case("xnack_mask_hi", AMDGPU::XNACK_MASK_HI) in getSpecialRegForName()
2643 .Case("vcc_lo", AMDGPU::VCC_LO) in getSpecialRegForName()
2644 .Case("vcc_hi", AMDGPU::VCC_HI) in getSpecialRegForName()
2645 .Case("exec_lo", AMDGPU::EXEC_LO) in getSpecialRegForName()
2646 .Case("exec_hi", AMDGPU::EXEC_HI) in getSpecialRegForName()
2647 .Case("tma_lo", AMDGPU::TMA_LO) in getSpecialRegForName()
2648 .Case("tma_hi", AMDGPU::TMA_HI) in getSpecialRegForName()
2649 .Case("tba_lo", AMDGPU::TBA_LO) in getSpecialRegForName()
2650 .Case("tba_hi", AMDGPU::TBA_HI) in getSpecialRegForName()
2651 .Case("pc", AMDGPU::PC_REG) in getSpecialRegForName()
2652 .Case("null", AMDGPU::SGPR_NULL) in getSpecialRegForName()
2653 .Default(AMDGPU::NoRegister); in getSpecialRegForName()
2689 if (Reg == AMDGPU::EXEC_LO && Reg1 == AMDGPU::EXEC_HI) { in AddNextRegisterToList()
2690 Reg = AMDGPU::EXEC; in AddNextRegisterToList()
2694 if (Reg == AMDGPU::FLAT_SCR_LO && Reg1 == AMDGPU::FLAT_SCR_HI) { in AddNextRegisterToList()
2695 Reg = AMDGPU::FLAT_SCR; in AddNextRegisterToList()
2699 if (Reg == AMDGPU::XNACK_MASK_LO && Reg1 == AMDGPU::XNACK_MASK_HI) { in AddNextRegisterToList()
2700 Reg = AMDGPU::XNACK_MASK; in AddNextRegisterToList()
2704 if (Reg == AMDGPU::VCC_LO && Reg1 == AMDGPU::VCC_HI) { in AddNextRegisterToList()
2705 Reg = AMDGPU::VCC; in AddNextRegisterToList()
2709 if (Reg == AMDGPU::TBA_LO && Reg1 == AMDGPU::TBA_HI) { in AddNextRegisterToList()
2710 Reg = AMDGPU::TBA; in AddNextRegisterToList()
2714 if (Reg == AMDGPU::TMA_LO && Reg1 == AMDGPU::TMA_HI) { in AddNextRegisterToList()
2715 Reg = AMDGPU::TMA; in AddNextRegisterToList()
2799 return getSpecialRegForName(Str) != AMDGPU::NoRegister; in isRegister()
2822 return AMDGPU::NoRegister; in getRegularReg()
2829 return AMDGPU::NoRegister; in getRegularReg()
2836 return AMDGPU::NoRegister; in getRegularReg()
2919 return AMDGPU::NoRegister; in ParseRegularReg()
2934 SubReg = AMDGPU::lo16; in ParseRegularReg()
2936 SubReg = AMDGPU::hi16; in ParseRegularReg()
2941 return AMDGPU::NoRegister; in ParseRegularReg()
2947 return AMDGPU::NoRegister; in ParseRegularReg()
2956 unsigned Reg = AMDGPU::NoRegister; in ParseRegList()
2961 return AMDGPU::NoRegister; in ParseRegList()
2968 return AMDGPU::NoRegister; in ParseRegList()
2971 return AMDGPU::NoRegister; in ParseRegList()
2982 return AMDGPU::NoRegister; in ParseRegList()
2986 return AMDGPU::NoRegister; in ParseRegList()
2990 return AMDGPU::NoRegister; in ParseRegList()
2993 return AMDGPU::NoRegister; in ParseRegList()
2998 return AMDGPU::NoRegister; in ParseRegList()
3011 Reg = AMDGPU::NoRegister; in ParseAMDGPURegister()
3015 if (Reg == AMDGPU::NoRegister) in ParseAMDGPURegister()
3022 if (Reg == AMDGPU::NoRegister) { in ParseAMDGPURegister()
3028 if (Reg == AMDGPU::SGPR_NULL) { in ParseAMDGPURegister()
3042 Reg = AMDGPU::NoRegister; in ParseAMDGPURegister()
3079 if (AMDGPU::getIsaVersion(getSTI().getCPU()).Major < 6) in updateGprCountSymbols()
3467 if (Inst.getOpcode() == AMDGPU::V_MAC_F32_sdwa_vi || in checkTargetMatchPredicate()
3468 Inst.getOpcode() == AMDGPU::V_MAC_F16_sdwa_vi) { in checkTargetMatchPredicate()
3471 AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::dst_sel); in checkTargetMatchPredicate()
3473 if (!Op.isImm() || Op.getImm() != AMDGPU::SDWA::SdwaSel::DWORD) { in checkTargetMatchPredicate()
3544 case AMDGPU::FLAT_SCR: in findImplicitSGPRReadInVOP()
3545 case AMDGPU::VCC: in findImplicitSGPRReadInVOP()
3546 case AMDGPU::VCC_LO: in findImplicitSGPRReadInVOP()
3547 case AMDGPU::VCC_HI: in findImplicitSGPRReadInVOP()
3548 case AMDGPU::M0: in findImplicitSGPRReadInVOP()
3554 return AMDGPU::NoRegister; in findImplicitSGPRReadInVOP()
3565 if (!AMDGPU::isSISrcOperand(Desc, OpIdx) || in isInlineConstant()
3566 AMDGPU::isKImmOperand(Desc, OpIdx)) { in isInlineConstant()
3573 auto OpSize = AMDGPU::getOperandSize(Desc, OpIdx); in isInlineConstant()
3577 return AMDGPU::isInlinableLiteral64(Val, hasInv2PiInlineImm()); in isInlineConstant()
3579 return AMDGPU::isInlinableLiteral32(Val, hasInv2PiInlineImm()); in isInlineConstant()
3582 if (OperandType == AMDGPU::OPERAND_REG_IMM_INT16 || in isInlineConstant()
3583 OperandType == AMDGPU::OPERAND_REG_INLINE_C_INT16 || in isInlineConstant()
3584 OperandType == AMDGPU::OPERAND_REG_INLINE_AC_INT16) in isInlineConstant()
3585 return AMDGPU::isInlinableLiteralI16(Val, hasInv2PiInlineImm()); in isInlineConstant()
3587 if (OperandType == AMDGPU::OPERAND_REG_INLINE_C_V2INT16 || in isInlineConstant()
3588 OperandType == AMDGPU::OPERAND_REG_INLINE_AC_V2INT16 || in isInlineConstant()
3589 OperandType == AMDGPU::OPERAND_REG_IMM_V2INT16) in isInlineConstant()
3590 return AMDGPU::isInlinableLiteralV2I16(Val); in isInlineConstant()
3592 if (OperandType == AMDGPU::OPERAND_REG_INLINE_C_V2FP16 || in isInlineConstant()
3593 OperandType == AMDGPU::OPERAND_REG_INLINE_AC_V2FP16 || in isInlineConstant()
3594 OperandType == AMDGPU::OPERAND_REG_IMM_V2FP16) in isInlineConstant()
3595 return AMDGPU::isInlinableLiteralV2F16(Val); in isInlineConstant()
3597 if (OperandType == AMDGPU::OPERAND_REG_INLINE_C_V2BF16 || in isInlineConstant()
3598 OperandType == AMDGPU::OPERAND_REG_INLINE_AC_V2BF16 || in isInlineConstant()
3599 OperandType == AMDGPU::OPERAND_REG_IMM_V2BF16) in isInlineConstant()
3600 return AMDGPU::isInlinableLiteralV2BF16(Val); in isInlineConstant()
3602 if (OperandType == AMDGPU::OPERAND_REG_IMM_FP16 || in isInlineConstant()
3603 OperandType == AMDGPU::OPERAND_REG_INLINE_C_FP16 || in isInlineConstant()
3604 OperandType == AMDGPU::OPERAND_REG_INLINE_AC_FP16 || in isInlineConstant()
3605 OperandType == AMDGPU::OPERAND_REG_IMM_FP16_DEFERRED) in isInlineConstant()
3606 return AMDGPU::isInlinableLiteralFP16(Val, hasInv2PiInlineImm()); in isInlineConstant()
3608 if (OperandType == AMDGPU::OPERAND_REG_IMM_BF16 || in isInlineConstant()
3609 OperandType == AMDGPU::OPERAND_REG_INLINE_C_BF16 || in isInlineConstant()
3610 OperandType == AMDGPU::OPERAND_REG_INLINE_AC_BF16 || in isInlineConstant()
3611 OperandType == AMDGPU::OPERAND_REG_IMM_BF16_DEFERRED) in isInlineConstant()
3612 return AMDGPU::isInlinableLiteralBF16(Val, hasInv2PiInlineImm()); in isInlineConstant()
3627 case AMDGPU::V_LSHLREV_B64_e64: in getConstantBusLimit()
3628 case AMDGPU::V_LSHLREV_B64_gfx10: in getConstantBusLimit()
3629 case AMDGPU::V_LSHLREV_B64_e64_gfx11: in getConstantBusLimit()
3630 case AMDGPU::V_LSHLREV_B64_e32_gfx12: in getConstantBusLimit()
3631 case AMDGPU::V_LSHLREV_B64_e64_gfx12: in getConstantBusLimit()
3632 case AMDGPU::V_LSHRREV_B64_e64: in getConstantBusLimit()
3633 case AMDGPU::V_LSHRREV_B64_gfx10: in getConstantBusLimit()
3634 case AMDGPU::V_LSHRREV_B64_e64_gfx11: in getConstantBusLimit()
3635 case AMDGPU::V_LSHRREV_B64_e64_gfx12: in getConstantBusLimit()
3636 case AMDGPU::V_ASHRREV_I64_e64: in getConstantBusLimit()
3637 case AMDGPU::V_ASHRREV_I64_gfx10: in getConstantBusLimit()
3638 case AMDGPU::V_ASHRREV_I64_e64_gfx11: in getConstantBusLimit()
3639 case AMDGPU::V_ASHRREV_I64_e64_gfx12: in getConstantBusLimit()
3640 case AMDGPU::V_LSHL_B64_e64: in getConstantBusLimit()
3641 case AMDGPU::V_LSHR_B64_e64: in getConstantBusLimit()
3642 case AMDGPU::V_ASHR_I64_e64: in getConstantBusLimit()
3713 unsigned LastSGPR = AMDGPU::NoRegister; in validateConstantBusLimitations()
3728 if (AMDGPU::hasNamedOperand(Opcode, AMDGPU::OpName::imm)) { in validateConstantBusLimitations()
3735 if (SGPRUsed != AMDGPU::NoRegister) { in validateConstantBusLimitations()
3773 unsigned Size = AMDGPU::getOperandSize(Desc, OpIdx); in validateConstantBusLimitations()
3815 bool SkipSrc = Opcode == AMDGPU::V_DUAL_MOV_B32_e32_X_MOV_B32_e32_gfx12; in validateVOPDRegBankConstraints()
3847 int ClampIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::clamp); in validateIntClampSupported()
3867 int VDataIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::vdata); in validateMIMGDataSize()
3868 int DMaskIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::dmask); in validateMIMGDataSize()
3869 int TFEIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::tfe); in validateMIMGDataSize()
3877 unsigned VDataSize = AMDGPU::getRegOperandSize(getMRI(), Desc, VDataIdx); in validateMIMGDataSize()
3887 int D16Idx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::d16); in validateMIMGDataSize()
3914 const AMDGPU::MIMGInfo *Info = AMDGPU::getMIMGInfo(Opc); in validateMIMGAddrSize()
3916 const AMDGPU::MIMGBaseOpcodeInfo *BaseOpcode = in validateMIMGAddrSize()
3917 AMDGPU::getMIMGBaseOpcodeInfo(Info->BaseOpcode); in validateMIMGAddrSize()
3918 int VAddr0Idx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::vaddr0); in validateMIMGAddrSize()
3919 int RSrcOpName = (Desc.TSFlags & SIInstrFlags::MIMG) ? AMDGPU::OpName::srsrc in validateMIMGAddrSize()
3920 : AMDGPU::OpName::rsrc; in validateMIMGAddrSize()
3921 int SrsrcIdx = AMDGPU::getNamedOperandIdx(Opc, RSrcOpName); in validateMIMGAddrSize()
3922 int DimIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::dim); in validateMIMGAddrSize()
3923 int A16Idx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::a16); in validateMIMGAddrSize()
3938 const AMDGPU::MIMGDimInfo *DimInfo = AMDGPU::getMIMGDimInfoByEncoding(Dim); in validateMIMGAddrSize()
3942 : AMDGPU::getRegOperandSize(getMRI(), Desc, VAddr0Idx) / 4; in validateMIMGAddrSize()
3945 AMDGPU::getAddrSizeMIMGOp(BaseOpcode, DimInfo, IsA16, hasG16()); in validateMIMGAddrSize()
3953 AMDGPU::getRegOperandSize(getMRI(), Desc, VAddrLastIdx) / 4; in validateMIMGAddrSize()
3985 int DMaskIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::dmask); in validateMIMGAtomicDMask()
4003 int DMaskIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::dmask); in validateMIMGGatherDMask()
4021 const AMDGPU::MIMGInfo *Info = AMDGPU::getMIMGInfo(Opc); in validateMIMGMSAA()
4022 const AMDGPU::MIMGBaseOpcodeInfo *BaseOpcode = in validateMIMGMSAA()
4023 AMDGPU::getMIMGBaseOpcodeInfo(Info->BaseOpcode); in validateMIMGMSAA()
4028 int DimIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::dim); in validateMIMGMSAA()
4032 const AMDGPU::MIMGDimInfo *DimInfo = AMDGPU::getMIMGDimInfoByEncoding(Dim); in validateMIMGMSAA()
4040 case AMDGPU::V_MOVRELS_B32_sdwa_gfx10: in IsMovrelsSDWAOpcode()
4041 case AMDGPU::V_MOVRELSD_B32_sdwa_gfx10: in IsMovrelsSDWAOpcode()
4042 case AMDGPU::V_MOVRELSD_2_B32_sdwa_gfx10: in IsMovrelsSDWAOpcode()
4061 const int Src0Idx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::src0); in validateMovrels()
4085 if (Opc != AMDGPU::V_ACCVGPR_WRITE_B32_vi) in validateMAIAccWrite()
4088 const int Src0Idx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::src0); in validateMAIAccWrite()
4136 const int Src2Idx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::src2); in validateMFMA()
4177 for (auto Name : {AMDGPU::OpName::src0_modifiers, in validateDivScale()
4178 AMDGPU::OpName::src2_modifiers, in validateDivScale()
4179 AMDGPU::OpName::src2_modifiers}) { in validateDivScale()
4180 if (Inst.getOperand(AMDGPU::getNamedOperandIdx(Inst.getOpcode(), Name)) in validateDivScale()
4198 int D16Idx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::d16); in validateMIMGD16()
4210 case AMDGPU::V_SUBREV_F32_e32: in IsRevOpcode()
4211 case AMDGPU::V_SUBREV_F32_e64: in IsRevOpcode()
4212 case AMDGPU::V_SUBREV_F32_e32_gfx10: in IsRevOpcode()
4213 case AMDGPU::V_SUBREV_F32_e32_gfx6_gfx7: in IsRevOpcode()
4214 case AMDGPU::V_SUBREV_F32_e32_vi: in IsRevOpcode()
4215 case AMDGPU::V_SUBREV_F32_e64_gfx10: in IsRevOpcode()
4216 case AMDGPU::V_SUBREV_F32_e64_gfx6_gfx7: in IsRevOpcode()
4217 case AMDGPU::V_SUBREV_F32_e64_vi: in IsRevOpcode()
4219 case AMDGPU::V_SUBREV_CO_U32_e32: in IsRevOpcode()
4220 case AMDGPU::V_SUBREV_CO_U32_e64: in IsRevOpcode()
4221 case AMDGPU::V_SUBREV_I32_e32_gfx6_gfx7: in IsRevOpcode()
4222 case AMDGPU::V_SUBREV_I32_e64_gfx6_gfx7: in IsRevOpcode()
4224 case AMDGPU::V_SUBBREV_U32_e32: in IsRevOpcode()
4225 case AMDGPU::V_SUBBREV_U32_e64: in IsRevOpcode()
4226 case AMDGPU::V_SUBBREV_U32_e32_gfx6_gfx7: in IsRevOpcode()
4227 case AMDGPU::V_SUBBREV_U32_e32_vi: in IsRevOpcode()
4228 case AMDGPU::V_SUBBREV_U32_e64_gfx6_gfx7: in IsRevOpcode()
4229 case AMDGPU::V_SUBBREV_U32_e64_vi: in IsRevOpcode()
4231 case AMDGPU::V_SUBREV_U32_e32: in IsRevOpcode()
4232 case AMDGPU::V_SUBREV_U32_e64: in IsRevOpcode()
4233 case AMDGPU::V_SUBREV_U32_e32_gfx9: in IsRevOpcode()
4234 case AMDGPU::V_SUBREV_U32_e32_vi: in IsRevOpcode()
4235 case AMDGPU::V_SUBREV_U32_e64_gfx9: in IsRevOpcode()
4236 case AMDGPU::V_SUBREV_U32_e64_vi: in IsRevOpcode()
4238 case AMDGPU::V_SUBREV_F16_e32: in IsRevOpcode()
4239 case AMDGPU::V_SUBREV_F16_e64: in IsRevOpcode()
4240 case AMDGPU::V_SUBREV_F16_e32_gfx10: in IsRevOpcode()
4241 case AMDGPU::V_SUBREV_F16_e32_vi: in IsRevOpcode()
4242 case AMDGPU::V_SUBREV_F16_e64_gfx10: in IsRevOpcode()
4243 case AMDGPU::V_SUBREV_F16_e64_vi: in IsRevOpcode()
4245 case AMDGPU::V_SUBREV_U16_e32: in IsRevOpcode()
4246 case AMDGPU::V_SUBREV_U16_e64: in IsRevOpcode()
4247 case AMDGPU::V_SUBREV_U16_e32_vi: in IsRevOpcode()
4248 case AMDGPU::V_SUBREV_U16_e64_vi: in IsRevOpcode()
4250 case AMDGPU::V_SUBREV_CO_U32_e32_gfx9: in IsRevOpcode()
4251 case AMDGPU::V_SUBREV_CO_U32_e64_gfx10: in IsRevOpcode()
4252 case AMDGPU::V_SUBREV_CO_U32_e64_gfx9: in IsRevOpcode()
4254 case AMDGPU::V_SUBBREV_CO_U32_e32_gfx9: in IsRevOpcode()
4255 case AMDGPU::V_SUBBREV_CO_U32_e64_gfx9: in IsRevOpcode()
4257 case AMDGPU::V_SUBREV_NC_U32_e32_gfx10: in IsRevOpcode()
4258 case AMDGPU::V_SUBREV_NC_U32_e64_gfx10: in IsRevOpcode()
4260 case AMDGPU::V_SUBREV_CO_CI_U32_e32_gfx10: in IsRevOpcode()
4261 case AMDGPU::V_SUBREV_CO_CI_U32_e64_gfx10: in IsRevOpcode()
4263 case AMDGPU::V_LSHRREV_B32_e32: in IsRevOpcode()
4264 case AMDGPU::V_LSHRREV_B32_e64: in IsRevOpcode()
4265 case AMDGPU::V_LSHRREV_B32_e32_gfx6_gfx7: in IsRevOpcode()
4266 case AMDGPU::V_LSHRREV_B32_e64_gfx6_gfx7: in IsRevOpcode()
4267 case AMDGPU::V_LSHRREV_B32_e32_vi: in IsRevOpcode()
4268 case AMDGPU::V_LSHRREV_B32_e64_vi: in IsRevOpcode()
4269 case AMDGPU::V_LSHRREV_B32_e32_gfx10: in IsRevOpcode()
4270 case AMDGPU::V_LSHRREV_B32_e64_gfx10: in IsRevOpcode()
4272 case AMDGPU::V_ASHRREV_I32_e32: in IsRevOpcode()
4273 case AMDGPU::V_ASHRREV_I32_e64: in IsRevOpcode()
4274 case AMDGPU::V_ASHRREV_I32_e32_gfx10: in IsRevOpcode()
4275 case AMDGPU::V_ASHRREV_I32_e32_gfx6_gfx7: in IsRevOpcode()
4276 case AMDGPU::V_ASHRREV_I32_e32_vi: in IsRevOpcode()
4277 case AMDGPU::V_ASHRREV_I32_e64_gfx10: in IsRevOpcode()
4278 case AMDGPU::V_ASHRREV_I32_e64_gfx6_gfx7: in IsRevOpcode()
4279 case AMDGPU::V_ASHRREV_I32_e64_vi: in IsRevOpcode()
4281 case AMDGPU::V_LSHLREV_B32_e32: in IsRevOpcode()
4282 case AMDGPU::V_LSHLREV_B32_e64: in IsRevOpcode()
4283 case AMDGPU::V_LSHLREV_B32_e32_gfx10: in IsRevOpcode()
4284 case AMDGPU::V_LSHLREV_B32_e32_gfx6_gfx7: in IsRevOpcode()
4285 case AMDGPU::V_LSHLREV_B32_e32_vi: in IsRevOpcode()
4286 case AMDGPU::V_LSHLREV_B32_e64_gfx10: in IsRevOpcode()
4287 case AMDGPU::V_LSHLREV_B32_e64_gfx6_gfx7: in IsRevOpcode()
4288 case AMDGPU::V_LSHLREV_B32_e64_vi: in IsRevOpcode()
4290 case AMDGPU::V_LSHLREV_B16_e32: in IsRevOpcode()
4291 case AMDGPU::V_LSHLREV_B16_e64: in IsRevOpcode()
4292 case AMDGPU::V_LSHLREV_B16_e32_vi: in IsRevOpcode()
4293 case AMDGPU::V_LSHLREV_B16_e64_vi: in IsRevOpcode()
4294 case AMDGPU::V_LSHLREV_B16_gfx10: in IsRevOpcode()
4296 case AMDGPU::V_LSHRREV_B16_e32: in IsRevOpcode()
4297 case AMDGPU::V_LSHRREV_B16_e64: in IsRevOpcode()
4298 case AMDGPU::V_LSHRREV_B16_e32_vi: in IsRevOpcode()
4299 case AMDGPU::V_LSHRREV_B16_e64_vi: in IsRevOpcode()
4300 case AMDGPU::V_LSHRREV_B16_gfx10: in IsRevOpcode()
4302 case AMDGPU::V_ASHRREV_I16_e32: in IsRevOpcode()
4303 case AMDGPU::V_ASHRREV_I16_e64: in IsRevOpcode()
4304 case AMDGPU::V_ASHRREV_I16_e32_vi: in IsRevOpcode()
4305 case AMDGPU::V_ASHRREV_I16_e64_vi: in IsRevOpcode()
4306 case AMDGPU::V_ASHRREV_I16_gfx10: in IsRevOpcode()
4308 case AMDGPU::V_LSHLREV_B64_e64: in IsRevOpcode()
4309 case AMDGPU::V_LSHLREV_B64_gfx10: in IsRevOpcode()
4310 case AMDGPU::V_LSHLREV_B64_vi: in IsRevOpcode()
4312 case AMDGPU::V_LSHRREV_B64_e64: in IsRevOpcode()
4313 case AMDGPU::V_LSHRREV_B64_gfx10: in IsRevOpcode()
4314 case AMDGPU::V_LSHRREV_B64_vi: in IsRevOpcode()
4316 case AMDGPU::V_ASHRREV_I64_e64: in IsRevOpcode()
4317 case AMDGPU::V_ASHRREV_I64_gfx10: in IsRevOpcode()
4318 case AMDGPU::V_ASHRREV_I64_vi: in IsRevOpcode()
4320 case AMDGPU::V_PK_LSHLREV_B16: in IsRevOpcode()
4321 case AMDGPU::V_PK_LSHLREV_B16_gfx10: in IsRevOpcode()
4322 case AMDGPU::V_PK_LSHLREV_B16_vi: in IsRevOpcode()
4324 case AMDGPU::V_PK_LSHRREV_B16: in IsRevOpcode()
4325 case AMDGPU::V_PK_LSHRREV_B16_gfx10: in IsRevOpcode()
4326 case AMDGPU::V_PK_LSHRREV_B16_vi: in IsRevOpcode()
4327 case AMDGPU::V_PK_ASHRREV_I16: in IsRevOpcode()
4328 case AMDGPU::V_PK_ASHRREV_I16_gfx10: in IsRevOpcode()
4329 case AMDGPU::V_PK_ASHRREV_I16_vi: in IsRevOpcode()
4382 auto OpNum = AMDGPU::getNamedOperandIdx(Opcode, AMDGPU::OpName::offset); in validateOffset()
4420 auto OpNum = AMDGPU::getNamedOperandIdx(Opcode, AMDGPU::OpName::offset); in validateFlatOffset()
4432 unsigned OffsetSize = AMDGPU::getNumFlatOffsetBits(getSTI()); in validateFlatOffset()
4467 auto OpNum = AMDGPU::getNamedOperandIdx(Opcode, AMDGPU::OpName::offset); in validateSMEMOffset()
4476 bool IsBuffer = AMDGPU::getSMEMIsBuffer(Opcode); in validateSMEMOffset()
4477 if (AMDGPU::isLegalSMRDEncodedUnsignedOffset(getSTI(), Offset) || in validateSMEMOffset()
4478 AMDGPU::isLegalSMRDEncodedSignedOffset(getSTI(), Offset, IsBuffer)) in validateSMEMOffset()
4495 const int Src0Idx = AMDGPU::getNamedOperandIdx(Opcode, AMDGPU::OpName::src0); in validateSOPLiteral()
4496 const int Src1Idx = AMDGPU::getNamedOperandIdx(Opcode, AMDGPU::OpName::src1); in validateSOPLiteral()
4509 if (AMDGPU::isSISrcOperand(Desc, OpIdx)) { in validateSOPLiteral()
4528 int OpSelIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::op_sel); in validateOpSel()
4538 int OpSelIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::op_sel); in validateOpSel()
4543 int OpSelHiIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::op_sel_hi); in validateOpSel()
4553 int OpSelIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::op_sel); in validateOpSel()
4563 assert(OpName == AMDGPU::OpName::neg_lo || OpName == AMDGPU::OpName::neg_hi); in validateNeg()
4576 int NegIdx = AMDGPU::getNamedOperandIdx(Opc, OpName); in validateNeg()
4587 int SrcMods[3] = {AMDGPU::OpName::src0_modifiers, in validateNeg()
4588 AMDGPU::OpName::src1_modifiers, in validateNeg()
4589 AMDGPU::OpName::src2_modifiers}; in validateNeg()
4592 if (!AMDGPU::hasNamedOperand(Opc, SrcMods[i])) { in validateNeg()
4604 int DppCtrlIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::dpp_ctrl); in validateDPP()
4608 if (!AMDGPU::isLegalDPALU_DPPControl(DppCtrl) && in validateDPP()
4609 AMDGPU::isDPALU_DPP(MII.get(Opc))) { in validateDPP()
4617 int Dpp8Idx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::dpp8); in validateDPP()
4621 int Src1Idx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::src1); in validateDPP()
4645 return (FB[AMDGPU::FeatureWavefrontSize64] && Reg == AMDGPU::VCC) || in validateVccOperand()
4646 (FB[AMDGPU::FeatureWavefrontSize32] && Reg == AMDGPU::VCC_LO); in validateVccOperand()
4677 bool IsFP64 = AMDGPU::isSISrcFPOperand(Desc, OpIdx) && in validateVOPLiteral()
4678 AMDGPU::getOperandSize(Desc.operands()[OpIdx]) == 8; in validateVOPLiteral()
4679 bool IsValid32Op = AMDGPU::isValid32BitLiteral(Value, IsFP64); in validateVOPLiteral()
4718 int OpIdx = AMDGPU::getNamedOperandIdx(Inst.getOpcode(), NameIdx); in IsAGPROperand()
4726 unsigned Sub = MRI->getSubReg(Op.getReg(), AMDGPU::sub0); in IsAGPROperand()
4728 const MCRegisterClass &AGPR32 = MRI->getRegClass(AMDGPU::AGPR_32RegClassID); in IsAGPROperand()
4739 uint16_t DataNameIdx = (TSFlags & SIInstrFlags::DS) ? AMDGPU::OpName::data0 in validateAGPRLdSt()
4740 : AMDGPU::OpName::vdata; in validateAGPRLdSt()
4743 int DstAreg = IsAGPROperand(Inst, AMDGPU::OpName::vdst, MRI); in validateAGPRLdSt()
4747 int Data2Areg = IsAGPROperand(Inst, AMDGPU::OpName::data1, MRI); in validateAGPRLdSt()
4753 if (FB[AMDGPU::FeatureGFX90AInsts]) { in validateAGPRLdSt()
4764 if (!FB[AMDGPU::FeatureGFX90AInsts]) in validateVGPRAlign()
4768 const MCRegisterClass &VGPR32 = MRI->getRegClass(AMDGPU::VGPR_32RegClassID); in validateVGPRAlign()
4769 const MCRegisterClass &AGPR32 = MRI->getRegClass(AMDGPU::AGPR_32RegClassID); in validateVGPRAlign()
4775 unsigned Sub = MRI->getSubReg(Op.getReg(), AMDGPU::sub0); in validateVGPRAlign()
4779 if (VGPR32.contains(Sub) && ((Sub - AMDGPU::VGPR0) & 1)) in validateVGPRAlign()
4781 if (AGPR32.contains(Sub) && ((Sub - AMDGPU::AGPR0) & 1)) in validateVGPRAlign()
4800 int BlgpIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::blgp); in validateBLGP()
4809 if (FB[AMDGPU::FeatureGFX940Insts]) { in validateBLGP()
4811 case AMDGPU::V_MFMA_F64_16X16X4F64_gfx940_acd: in validateBLGP()
4812 case AMDGPU::V_MFMA_F64_16X16X4F64_gfx940_vcd: in validateBLGP()
4813 case AMDGPU::V_MFMA_F64_4X4X4F64_gfx940_acd: in validateBLGP()
4814 case AMDGPU::V_MFMA_F64_4X4X4F64_gfx940_vcd: in validateBLGP()
4835 if (Opc != AMDGPU::S_WAITCNT_EXPCNT_gfx11 && in validateWaitCnt()
4836 Opc != AMDGPU::S_WAITCNT_LGKMCNT_gfx11 && in validateWaitCnt()
4837 Opc != AMDGPU::S_WAITCNT_VMCNT_gfx11 && in validateWaitCnt()
4838 Opc != AMDGPU::S_WAITCNT_VSCNT_gfx11) in validateWaitCnt()
4841 int Src0Idx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::sdst); in validateWaitCnt()
4844 if (Reg == AMDGPU::SGPR_NULL) in validateWaitCnt()
4863 AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::gds); in validateDS()
4879 if (!getFeatureBits()[AMDGPU::FeatureGFX90AInsts]) in validateGWS()
4883 if (Opc != AMDGPU::DS_GWS_INIT_vi && Opc != AMDGPU::DS_GWS_BARRIER_vi && in validateGWS()
4884 Opc != AMDGPU::DS_GWS_SEMA_BR_vi) in validateGWS()
4888 const MCRegisterClass &VGPR32 = MRI->getRegClass(AMDGPU::VGPR_32RegClassID); in validateGWS()
4890 AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::data0); in validateGWS()
4893 auto RegIdx = Reg - (VGPR32.contains(Reg) ? AMDGPU::VGPR0 : AMDGPU::AGPR0); in validateGWS()
4906 int CPolPos = AMDGPU::getNamedOperandIdx(Inst.getOpcode(), in validateCoherencyBits()
4907 AMDGPU::OpName::cpol); in validateCoherencyBits()
4923 if (CPol & ~(AMDGPU::CPol::GLC | AMDGPU::CPol::DLC)) { in validateCoherencyBits()
4970 const unsigned TH = CPol & AMDGPU::CPol::TH; in validateTHAndScopeBits()
4971 const unsigned Scope = CPol & AMDGPU::CPol::SCOPE; in validateTHAndScopeBits()
4984 (!(TH & AMDGPU::CPol::TH_ATOMIC_RETURN))) in validateTHAndScopeBits()
4991 ((TH == AMDGPU::CPol::TH_NT_RT) || (TH == AMDGPU::CPol::TH_RT_NT) || in validateTHAndScopeBits()
4992 (TH == AMDGPU::CPol::TH_NT_HT))) in validateTHAndScopeBits()
4995 if (TH == AMDGPU::CPol::TH_BYPASS) { in validateTHAndScopeBits()
4996 if ((Scope != AMDGPU::CPol::SCOPE_SYS && in validateTHAndScopeBits()
4997 CPol & AMDGPU::CPol::TH_REAL_BYPASS) || in validateTHAndScopeBits()
4998 (Scope == AMDGPU::CPol::SCOPE_SYS && in validateTHAndScopeBits()
4999 !(CPol & AMDGPU::CPol::TH_REAL_BYPASS))) in validateTHAndScopeBits()
5008 if (!(CPol & AMDGPU::CPol::TH_TYPE_ATOMIC)) in validateTHAndScopeBits()
5011 if (!(CPol & AMDGPU::CPol::TH_TYPE_STORE)) in validateTHAndScopeBits()
5014 if (!(CPol & AMDGPU::CPol::TH_TYPE_LOAD)) in validateTHAndScopeBits()
5083 if (!validateNeg(Inst, AMDGPU::OpName::neg_lo)) { in validateInstruction()
5088 if (!validateNeg(Inst, AMDGPU::OpName::neg_hi)) { in validateInstruction()
5142 Error(IDLoc, getFeatureBits()[AMDGPU::FeatureGFX90AInsts] in validateInstruction()
5224 if (isGFX10Plus() && getFeatureBits()[AMDGPU::FeatureWavefrontSize64] && in checkUnsupportedInstruction()
5225 !getFeatureBits()[AMDGPU::FeatureWavefrontSize32]) { in checkUnsupportedInstruction()
5228 FeaturesWS32.flip(AMDGPU::FeatureWavefrontSize64) in checkUnsupportedInstruction()
5229 .flip(AMDGPU::FeatureWavefrontSize32); in checkUnsupportedInstruction()
5437 AMDGPU::MCKernelDescriptor KD = in ParseDirectiveAMDHSAKernel()
5438 AMDGPU::MCKernelDescriptor::getDefaultAmdhsaKernelDescriptor( in ParseDirectiveAMDHSAKernel()
5500 AMDGPU::MCKernelDescriptor::bits_set(FIELD, VALUE, ENTRY##_SHIFT, ENTRY, \ in ParseDirectiveAMDHSAKernel()
5821 AMDGPU::MCKernelDescriptor::bits_set( in ParseDirectiveAMDHSAKernel()
5831 AMDGPU::MCKernelDescriptor::bits_set( in ParseDirectiveAMDHSAKernel()
5845 AMDGPU::MCKernelDescriptor::bits_set( in ParseDirectiveAMDHSAKernel()
5937 if (!getFeatureBits()[AMDGPU::FeatureWavefrontSize32]) in ParseAMDKernelCodeTValue()
5940 if (!getFeatureBits()[AMDGPU::FeatureWavefrontSize64]) in ParseAMDKernelCodeTValue()
5949 if (!getFeatureBits()[AMDGPU::FeatureWavefrontSize32]) in ParseAMDKernelCodeTValue()
5952 if (!getFeatureBits()[AMDGPU::FeatureWavefrontSize64]) in ParseAMDKernelCodeTValue()
6071 if (ParseToEndDirective(AMDGPU::PALMD::AssemblerDirectiveBegin, in ParseDirectivePALMetadataBegin()
6072 AMDGPU::PALMD::AssemblerDirectiveEnd, String)) in ParseDirectivePALMetadataBegin()
6127 unsigned LocalMemorySize = AMDGPU::IsaInfo::getLocalMemorySize(&getSTI()); in ParseDirectiveAMDGPULDS()
6175 if (IDVal == AMDGPU::HSAMD::V3::AssemblerDirectiveBegin) in ParseDirective()
6187 if (IDVal == AMDGPU::HSAMD::AssemblerDirectiveBegin) { in ParseDirective()
6213 if (MRI.regsOverlap(AMDGPU::TTMP12_TTMP13_TTMP14_TTMP15, RegNo)) in subtargetHasRegister()
6217 if (MRI.regsOverlap(AMDGPU::SGPR104_SGPR105, RegNo)) in subtargetHasRegister()
6221 case AMDGPU::SRC_SHARED_BASE_LO: in subtargetHasRegister()
6222 case AMDGPU::SRC_SHARED_BASE: in subtargetHasRegister()
6223 case AMDGPU::SRC_SHARED_LIMIT_LO: in subtargetHasRegister()
6224 case AMDGPU::SRC_SHARED_LIMIT: in subtargetHasRegister()
6225 case AMDGPU::SRC_PRIVATE_BASE_LO: in subtargetHasRegister()
6226 case AMDGPU::SRC_PRIVATE_BASE: in subtargetHasRegister()
6227 case AMDGPU::SRC_PRIVATE_LIMIT_LO: in subtargetHasRegister()
6228 case AMDGPU::SRC_PRIVATE_LIMIT: in subtargetHasRegister()
6230 case AMDGPU::SRC_POPS_EXITING_WAVE_ID: in subtargetHasRegister()
6232 case AMDGPU::TBA: in subtargetHasRegister()
6233 case AMDGPU::TBA_LO: in subtargetHasRegister()
6234 case AMDGPU::TBA_HI: in subtargetHasRegister()
6235 case AMDGPU::TMA: in subtargetHasRegister()
6236 case AMDGPU::TMA_LO: in subtargetHasRegister()
6237 case AMDGPU::TMA_HI: in subtargetHasRegister()
6239 case AMDGPU::XNACK_MASK: in subtargetHasRegister()
6240 case AMDGPU::XNACK_MASK_LO: in subtargetHasRegister()
6241 case AMDGPU::XNACK_MASK_HI: in subtargetHasRegister()
6243 case AMDGPU::SGPR_NULL: in subtargetHasRegister()
6257 case AMDGPU::FLAT_SCR: in subtargetHasRegister()
6258 case AMDGPU::FLAT_SCR_LO: in subtargetHasRegister()
6259 case AMDGPU::FLAT_SCR_HI: in subtargetHasRegister()
6268 if (MRI.regsOverlap(AMDGPU::SGPR102_SGPR103, RegNo)) in subtargetHasRegister()
6515 .Case("nt", AMDGPU::CPol::NT) in getCPolKind()
6516 .Case("sc0", AMDGPU::CPol::SC0) in getCPolKind()
6517 .Case("sc1", AMDGPU::CPol::SC1) in getCPolKind()
6522 .Case("dlc", AMDGPU::CPol::DLC) in getCPolKind()
6523 .Case("glc", AMDGPU::CPol::GLC) in getCPolKind()
6524 .Case("scc", AMDGPU::CPol::SCC) in getCPolKind()
6525 .Case("slc", AMDGPU::CPol::SLC) in getCPolKind()
6583 if (!isGFX10Plus() && CPol == AMDGPU::CPol::DLC) in parseCPol()
6586 if (!isGFX90A() && CPol == AMDGPU::CPol::SCC) in parseCPol()
6608 Scope = AMDGPU::CPol::SCOPE_CU; // default; in parseScope()
6619 .Case("SCOPE_CU", AMDGPU::CPol::SCOPE_CU) in parseScope()
6620 .Case("SCOPE_SE", AMDGPU::CPol::SCOPE_SE) in parseScope()
6621 .Case("SCOPE_DEV", AMDGPU::CPol::SCOPE_DEV) in parseScope()
6622 .Case("SCOPE_SYS", AMDGPU::CPol::SCOPE_SYS) in parseScope()
6632 TH = AMDGPU::CPol::TH_RT; // default in parseTH()
6641 TH = AMDGPU::CPol::TH_RT; in parseTH()
6646 TH = AMDGPU::CPol::TH_TYPE_ATOMIC; in parseTH()
6648 TH = AMDGPU::CPol::TH_TYPE_LOAD; in parseTH()
6650 TH = AMDGPU::CPol::TH_TYPE_STORE; in parseTH()
6656 TH |= AMDGPU::CPol::TH_REAL_BYPASS; in parseTH()
6659 if (TH & AMDGPU::CPol::TH_TYPE_ATOMIC) in parseTH()
6661 .Case("RETURN", AMDGPU::CPol::TH_ATOMIC_RETURN) in parseTH()
6662 .Case("RT", AMDGPU::CPol::TH_RT) in parseTH()
6663 .Case("RT_RETURN", AMDGPU::CPol::TH_ATOMIC_RETURN) in parseTH()
6664 .Case("NT", AMDGPU::CPol::TH_ATOMIC_NT) in parseTH()
6665 .Case("NT_RETURN", AMDGPU::CPol::TH_ATOMIC_NT | in parseTH()
6666 AMDGPU::CPol::TH_ATOMIC_RETURN) in parseTH()
6667 .Case("CASCADE_RT", AMDGPU::CPol::TH_ATOMIC_CASCADE) in parseTH()
6668 .Case("CASCADE_NT", AMDGPU::CPol::TH_ATOMIC_CASCADE | in parseTH()
6669 AMDGPU::CPol::TH_ATOMIC_NT) in parseTH()
6673 .Case("RT", AMDGPU::CPol::TH_RT) in parseTH()
6674 .Case("NT", AMDGPU::CPol::TH_NT) in parseTH()
6675 .Case("HT", AMDGPU::CPol::TH_HT) in parseTH()
6676 .Case("LU", AMDGPU::CPol::TH_LU) in parseTH()
6677 .Case("RT_WB", AMDGPU::CPol::TH_RT_WB) in parseTH()
6678 .Case("NT_RT", AMDGPU::CPol::TH_NT_RT) in parseTH()
6679 .Case("RT_NT", AMDGPU::CPol::TH_RT_NT) in parseTH()
6680 .Case("NT_HT", AMDGPU::CPol::TH_NT_HT) in parseTH()
6681 .Case("NT_WB", AMDGPU::CPol::TH_NT_WB) in parseTH()
6682 .Case("BYPASS", AMDGPU::CPol::TH_BYPASS) in parseTH()
6772 using namespace llvm::AMDGPU::MTBUFFormat; in parseDfmtNfmt()
6804 using namespace llvm::AMDGPU::MTBUFFormat; in parseUfmt()
6822 using namespace llvm::AMDGPU::MTBUFFormat; in matchDfmtNfmt()
6844 using namespace llvm::AMDGPU::MTBUFFormat; in parseSymbolicSplitFormat()
6881 using namespace llvm::AMDGPU::MTBUFFormat; in parseSymbolicUnifiedFormat()
6895 using namespace llvm::AMDGPU::MTBUFFormat; in parseNumericFormat()
6907 using namespace llvm::AMDGPU::MTBUFFormat; in parseSymbolicOrNumericFormat()
6934 using namespace llvm::AMDGPU::MTBUFFormat; in parseFORMAT()
7038 Inst.addOperand(MCOperand::createReg(AMDGPU::NoRegister)); in cvtExp()
7061 Inst.getOperand(OperandIdx[2]).setReg(AMDGPU::NoRegister); in cvtExp()
7062 Inst.getOperand(OperandIdx[3]).setReg(AMDGPU::NoRegister); in cvtExp()
7066 if (Inst.getOperand(OperandIdx[i]).getReg() != AMDGPU::NoRegister) { in cvtExp()
7083 const AMDGPU::IsaVersion ISA, in encodeCnt()
7117 AMDGPU::IsaVersion ISA = AMDGPU::getIsaVersion(getSTI().getCPU()); in parseCnt()
7152 AMDGPU::IsaVersion ISA = AMDGPU::getIsaVersion(getSTI().getCPU()); in parseSWaitCnt()
7283 using namespace llvm::AMDGPU::DepCtr; in parseDepCtr()
7320 using namespace llvm::AMDGPU::DepCtr; in parseDepCtr()
7349 using namespace llvm::AMDGPU::Hwreg; in parseHwregFunc()
7387 using namespace llvm::AMDGPU::Hwreg; in parseHwreg()
7441 using namespace llvm::AMDGPU::SendMsg; in parseSendMsgBody()
7478 using namespace llvm::AMDGPU::SendMsg; in validateSendMsg()
7524 using namespace llvm::AMDGPU::SendMsg; in parseSendMsg()
7622 using namespace llvm::AMDGPU::Exp; in parseExpTgt()
7921 using namespace llvm::AMDGPU::Swizzle; in encodeBitmaskPerm()
7966 using namespace llvm::AMDGPU::Swizzle; in parseSwizzleQuadPerm()
7982 using namespace llvm::AMDGPU::Swizzle; in parseSwizzleBroadcast()
8010 using namespace llvm::AMDGPU::Swizzle; in parseSwizzleReverse()
8032 using namespace llvm::AMDGPU::Swizzle; in parseSwizzleSwap()
8054 using namespace llvm::AMDGPU::Swizzle; in parseSwizzleBitmaskPerm()
8116 using namespace llvm::AMDGPU::Swizzle; in parseSwizzleMacro()
8176 using namespace llvm::AMDGPU::VGPRIndexMode; in parseGPRIdxMacro()
8220 using namespace llvm::AMDGPU::VGPRIndexMode; in parseGPRIdxMode()
8480 int OpSelIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::op_sel); in cvtVOP3DstOpSelOnly()
8485 const int Ops[] = { AMDGPU::OpName::src0, in cvtVOP3DstOpSelOnly()
8486 AMDGPU::OpName::src1, in cvtVOP3DstOpSelOnly()
8487 AMDGPU::OpName::src2 }; in cvtVOP3DstOpSelOnly()
8488 for (SrcNum = 0; SrcNum < 3 && AMDGPU::hasNamedOperand(Opc, Ops[SrcNum]); in cvtVOP3DstOpSelOnly()
8495 int DstIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::vdst); in cvtVOP3DstOpSelOnly()
8500 int ModIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::src0_modifiers); in cvtVOP3DstOpSelOnly()
8503 MRI.getRegClass(AMDGPU::VGPR_16RegClassID).contains(DstOp.getReg())) { in cvtVOP3DstOpSelOnly()
8504 if (AMDGPU::isHi(DstOp.getReg(), MRI)) in cvtVOP3DstOpSelOnly()
8528 Desc.operands()[OpNum].OperandType == AMDGPU::OPERAND_INPUT_MODS in isRegOrImmWithInputMods()
8563 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::high)) in cvtVOP3Interp()
8567 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::clamp)) in cvtVOP3Interp()
8571 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::omod)) in cvtVOP3Interp()
8600 int OpSelIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::op_sel); in cvtVINTERP()
8609 const int Ops[] = { AMDGPU::OpName::src0, in cvtVINTERP()
8610 AMDGPU::OpName::src1, in cvtVINTERP()
8611 AMDGPU::OpName::src2 }; in cvtVINTERP()
8612 const int ModOps[] = { AMDGPU::OpName::src0_modifiers, in cvtVINTERP()
8613 AMDGPU::OpName::src1_modifiers, in cvtVINTERP()
8614 AMDGPU::OpName::src2_modifiers }; in cvtVINTERP()
8619 int OpIdx = AMDGPU::getNamedOperandIdx(Opc, Ops[J]); in cvtVINTERP()
8623 int ModIdx = AMDGPU::getNamedOperandIdx(Opc, ModOps[J]); in cvtVINTERP()
8628 if (ModOps[J] == AMDGPU::OpName::src0_modifiers && in cvtVINTERP()
8659 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::byte_sel)) { in cvtVOP3()
8660 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::vdst_in)) in cvtVOP3()
8666 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::clamp)) in cvtVOP3()
8670 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::omod)) in cvtVOP3()
8680 std::advance(it, AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::src2_modifiers)); in cvtVOP3()
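Nearly every converter in this stretch (cvtVOP3DstOpSelOnly, cvtVOP3Interp, cvtVINTERP,
cvtVOP3) leans on two TableGen-backed queries: AMDGPU::getNamedOperandIdx to find where a
named operand sits in the instruction, and AMDGPU::hasNamedOperand to test whether the
opcode has it at all. A self-contained stand-in for that lookup, using a hand-written
table and illustrative names (OpName, findOperandIdx, hasOperand) in place of the
generated ones:

    // Sketch only: in LLVM the per-opcode operand tables are TableGen-generated.
    #include <array>
    #include <cstdint>

    enum class OpName : uint8_t { vdst, src0, src1, src2, clamp, omod, op_sel };

    struct NamedOperandTable {
      std::array<int8_t, 7> Index; // operand position, or -1 if the opcode lacks it
    };

    int findOperandIdx(const NamedOperandTable &T, OpName Name) {
      return T.Index[static_cast<size_t>(Name)];
    }

    bool hasOperand(const NamedOperandTable &T, OpName Name) {
      return findOperandIdx(T, Name) != -1; // mirrors AMDGPU::hasNamedOperand
    }

The converters use the returned index to patch modifier and op_sel bits into the right
operand slot, and guard optional operands (clamp, omod, byte_sel, vdst_in) behind the
presence check before appending defaults.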
8700 if (Opc == AMDGPU::V_CVT_SR_BF8_F32_vi || in cvtVOP3P()
8701 Opc == AMDGPU::V_CVT_SR_FP8_F32_vi || in cvtVOP3P()
8702 Opc == AMDGPU::V_CVT_SR_BF8_F32_gfx12_e64_gfx12 || in cvtVOP3P()
8703 Opc == AMDGPU::V_CVT_SR_FP8_F32_gfx12_e64_gfx12) { in cvtVOP3P()
8710 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::vdst_in) && in cvtVOP3P()
8711 !(Opc == AMDGPU::V_CVT_PK_BF8_F32_e64_dpp_gfx12 || in cvtVOP3P()
8712 Opc == AMDGPU::V_CVT_PK_FP8_F32_e64_dpp_gfx12 || in cvtVOP3P()
8713 Opc == AMDGPU::V_CVT_PK_BF8_F32_e64_dpp8_gfx12 || in cvtVOP3P()
8714 Opc == AMDGPU::V_CVT_PK_FP8_F32_e64_dpp8_gfx12 || in cvtVOP3P()
8715 Opc == AMDGPU::V_CVT_SR_FP8_F32_gfx12_e64_dpp_gfx12 || in cvtVOP3P()
8716 Opc == AMDGPU::V_CVT_SR_FP8_F32_gfx12_e64_dpp8_gfx12 || in cvtVOP3P()
8717 Opc == AMDGPU::V_CVT_SR_BF8_F32_gfx12_e64_dpp_gfx12 || in cvtVOP3P()
8718 Opc == AMDGPU::V_CVT_SR_BF8_F32_gfx12_e64_dpp8_gfx12)) { in cvtVOP3P()
8726 int OpSelIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::op_sel); in cvtVOP3P()
8731 int OpSelHiIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::op_sel_hi); in cvtVOP3P()
8738 int NegLoIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::neg_lo); in cvtVOP3P()
8742 int NegHiIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::neg_hi); in cvtVOP3P()
8746 const int Ops[] = { AMDGPU::OpName::src0, in cvtVOP3P()
8747 AMDGPU::OpName::src1, in cvtVOP3P()
8748 AMDGPU::OpName::src2 }; in cvtVOP3P()
8749 const int ModOps[] = { AMDGPU::OpName::src0_modifiers, in cvtVOP3P()
8750 AMDGPU::OpName::src1_modifiers, in cvtVOP3P()
8751 AMDGPU::OpName::src2_modifiers }; in cvtVOP3P()
8771 int OpIdx = AMDGPU::getNamedOperandIdx(Opc, Ops[J]); in cvtVOP3P()
8775 int ModIdx = AMDGPU::getNamedOperandIdx(Opc, ModOps[J]); in cvtVOP3P()
8784 ->getRegClass(AMDGPU::VGPR_16RegClassID) in cvtVOP3P()
8786 bool VGPRSuffixIsHi = AMDGPU::isHi(SrcOp.getReg(), *getMRI()); in cvtVOP3P()
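In cvtVOP3P the op_sel, op_sel_hi, neg_lo and neg_hi operands located above are packed
immediates with one bit per source; the converter walks src0..src2 and folds bit J of
each packed value into that source's modifier operand. A sketch of the folding step,
with placeholder SRC_MOD_* flags standing in for the real SISrcMods bits:

    // Sketch only: flag values are placeholders, not the real SISrcMods encoding.
    constexpr unsigned SRC_MOD_OP_SEL_0 = 1u << 2;
    constexpr unsigned SRC_MOD_OP_SEL_1 = 1u << 3;

    unsigned foldPackedOpSel(unsigned ModVal, unsigned OpSel, unsigned OpSelHi,
                             unsigned J) {
      if (OpSel & (1u << J))   // low-half select for source J
        ModVal |= SRC_MOD_OP_SEL_0;
      if (OpSelHi & (1u << J)) // high-half select for source J
        ModVal |= SRC_MOD_OP_SEL_1;
      return ModVal;
    }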
8815 if (AMDGPU::getNamedOperandIdx(Opc, OpName) != -1) in addSrcModifiersAndSrc()
8825 addSrcModifiersAndSrc(Inst, Operands, 2, Opc, AMDGPU::OpName::src0_modifiers); in cvtSWMMAC()
8826 addSrcModifiersAndSrc(Inst, Operands, 3, Opc, AMDGPU::OpName::src1_modifiers); in cvtSWMMAC()
8836 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::index_key_8bit)) in cvtSWMMAC()
8840 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::index_key_16bit)) in cvtSWMMAC()
8844 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::clamp)) in cvtSWMMAC()
8917 using namespace AMDGPU::DPP; in isDPPCtrl()
8981 const AMDGPU::MIMGDimInfo *DimInfo = AMDGPU::getMIMGDimInfoByAsmSuffix(DimId); in parseDimId()
9104 using namespace AMDGPU::DPP; in parseDPPCtrlSel()
9151 using namespace AMDGPU::DPP; in parseDPPCtrl()
9194 int OldIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::old); in cvtVOP3DPP()
9196 AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::src2_modifiers); in cvtVOP3DPP()
9220 int VdstInIdx = AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::vdst_in); in cvtVOP3DPP()
9226 Opc == AMDGPU::V_CVT_SR_BF8_F32_gfx12_e64_dpp8_gfx12 || in cvtVOP3DPP()
9227 Opc == AMDGPU::V_CVT_SR_FP8_F32_gfx12_e64_dpp8_gfx12 || in cvtVOP3DPP()
9228 Opc == AMDGPU::V_CVT_SR_BF8_F32_gfx12_e64_dpp_gfx12 || in cvtVOP3DPP()
9229 Opc == AMDGPU::V_CVT_SR_FP8_F32_gfx12_e64_dpp_gfx12; in cvtVOP3DPP()
9263 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::byte_sel)) in cvtVOP3DPP()
9267 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::clamp)) in cvtVOP3DPP()
9271 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::omod)) in cvtVOP3DPP()
9278 else if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::op_sel)) { in cvtVOP3DPP()
9284 using namespace llvm::AMDGPU::DPP; in cvtVOP3DPP()
9292 if (AMDGPU::hasNamedOperand(Inst.getOpcode(), AMDGPU::OpName::fi)) in cvtVOP3DPP()
9353 using namespace llvm::AMDGPU::DPP; in cvtDPP()
9359 if (AMDGPU::hasNamedOperand(Inst.getOpcode(), AMDGPU::OpName::fi)) { in cvtDPP()
9373 using namespace llvm::AMDGPU::SDWA; in parseSDWASel()
9402 using namespace llvm::AMDGPU::SDWA; in parseSDWADstUnused()
9450 using namespace llvm::AMDGPU::SDWA; in cvtSDWA()
9465 (Op.getReg() == AMDGPU::VCC || Op.getReg() == AMDGPU::VCC_LO)) { in cvtSDWA()
9494 if (Opc != AMDGPU::V_NOP_sdwa_gfx10 && Opc != AMDGPU::V_NOP_sdwa_gfx9 && in cvtSDWA()
9495 Opc != AMDGPU::V_NOP_sdwa_vi) { in cvtSDWA()
9499 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::clamp)) in cvtSDWA()
9503 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::omod)) in cvtSDWA()
9507 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::dst_sel)) in cvtSDWA()
9511 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::dst_unused)) in cvtSDWA()
9523 if (AMDGPU::hasNamedOperand(Inst.getOpcode(), AMDGPU::OpName::omod)) in cvtSDWA()
9533 if (AMDGPU::hasNamedOperand(Inst.getOpcode(), AMDGPU::OpName::clamp)) in cvtSDWA()
9547 if (Inst.getOpcode() == AMDGPU::V_MAC_F32_sdwa_vi || in cvtSDWA()
9548 Inst.getOpcode() == AMDGPU::V_MAC_F16_sdwa_vi) { in cvtSDWA()
9551 it, AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::src2)); in cvtSDWA()
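The closing cvtSDWA references show the V_MAC_F16/F32 special case: these opcodes tie
src2 to the destination, so the converter advances to the src2 position (found via
getNamedOperandIdx) and inserts a copy of operand 0 there. A stand-alone sketch, with a
std::vector of ints standing in for the MCInst operand list:

    // Sketch only: MCInst and MCOperand are replaced by a plain vector here.
    #include <iterator>
    #include <vector>

    void duplicateDstAsSrc2(std::vector<int> &Operands, int Src2Idx) {
      int Dst = Operands.front();   // copy of the destination operand
      auto It = Operands.begin();
      std::advance(It, Src2Idx);    // walk to the src2 slot
      Operands.insert(It, Dst);     // src2 = dst
    }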