Lines Matching refs:AMDGPU
Each entry below is a source line number, the matching line, and the enclosing function; the functions are those of the AMDGPU backend's SIRegisterInfo implementation.
90 Register TmpVGPR = AMDGPU::NoRegister;
96 Register SavedExecReg = AMDGPU::NoRegister;
130 ExecReg = AMDGPU::EXEC_LO; in SGPRSpillBuilder()
131 MovOpc = AMDGPU::S_MOV_B32; in SGPRSpillBuilder()
132 NotOpc = AMDGPU::S_NOT_B32; in SGPRSpillBuilder()
134 ExecReg = AMDGPU::EXEC; in SGPRSpillBuilder()
135 MovOpc = AMDGPU::S_MOV_B64; in SGPRSpillBuilder()
136 NotOpc = AMDGPU::S_NOT_B64; in SGPRSpillBuilder()
139 assert(SuperReg != AMDGPU::M0 && "m0 should never spill"); in SGPRSpillBuilder()
140 assert(SuperReg != AMDGPU::EXEC_LO && SuperReg != AMDGPU::EXEC_HI && in SGPRSpillBuilder()
141 SuperReg != AMDGPU::EXEC && "exec should never spill"); in SGPRSpillBuilder()
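The run above (lines 130-141) comes from the SGPRSpillBuilder constructor: it picks the wave-size-dependent exec register and the matching 32/64-bit mov and not opcodes, then asserts that m0 and exec themselves are never spilled. Reassembled as a sketch from the matched lines, with the surrounding constructor code elided and IsWave32 assumed to be the wave-size flag used throughout the file:

    // Sketch reconstructed from the matches above; not the verbatim source.
    if (IsWave32) {
      ExecReg = AMDGPU::EXEC_LO;
      MovOpc = AMDGPU::S_MOV_B32;
      NotOpc = AMDGPU::S_NOT_B32;
    } else {
      ExecReg = AMDGPU::EXEC;
      MovOpc = AMDGPU::S_MOV_B64;
      NotOpc = AMDGPU::S_NOT_B64;
    }
    assert(SuperReg != AMDGPU::M0 && "m0 should never spill");
    assert(SuperReg != AMDGPU::EXEC_LO && SuperReg != AMDGPU::EXEC_HI &&
           SuperReg != AMDGPU::EXEC && "exec should never spill");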
173 TmpVGPR = RS->scavengeRegisterBackwards(AMDGPU::VGPR_32RegClass, MI, false, in prepare()
184 TmpVGPR = AMDGPU::VGPR0; in prepare()
201 IsWave32 ? AMDGPU::SGPR_32RegClass : AMDGPU::SGPR_64RegClass; in prepare()
221 if (RS->isRegUsed(AMDGPU::SCC)) in prepare()
296 if (RS->isRegUsed(AMDGPU::SCC)) in readWriteTmpVGPR()
321 : AMDGPUGenRegisterInfo(AMDGPU::PC_REG, ST.getAMDGPUDwarfFlavour(), in SIRegisterInfo()
325 assert(getSubRegIndexLaneMask(AMDGPU::sub0).getAsInteger() == 3 && in SIRegisterInfo()
326 getSubRegIndexLaneMask(AMDGPU::sub31).getAsInteger() == (3ULL << 62) && in SIRegisterInfo()
327 (getSubRegIndexLaneMask(AMDGPU::lo16) | in SIRegisterInfo()
328 getSubRegIndexLaneMask(AMDGPU::hi16)).getAsInteger() == in SIRegisterInfo()
329 getSubRegIndexLaneMask(AMDGPU::sub0).getAsInteger() && in SIRegisterInfo()
333 RegPressureIgnoredUnits.set(*regunits(MCRegister::from(AMDGPU::M0)).begin()); in SIRegisterInfo()
334 for (auto Reg : AMDGPU::VGPR_16RegClass) { in SIRegisterInfo()
335 if (AMDGPU::isHi(Reg, *this)) in SIRegisterInfo()
364 Row.fill(AMDGPU::NoSubRegister); in SIRegisterInfo()
407 static const MCPhysReg NoCalleeSavedReg = AMDGPU::NoRegister; in getCalleeSavedRegs()
444 return VGPR >= AMDGPU::VGPR0 && VGPR < AMDGPU::VGPR8; in isChainScratchRegister()
455 if (RC == &AMDGPU::VGPR_32RegClass || RC == &AMDGPU::AGPR_32RegClass) in getLargestLegalSuperClass()
456 return &AMDGPU::AV_32RegClass; in getLargestLegalSuperClass()
457 if (RC == &AMDGPU::VReg_64RegClass || RC == &AMDGPU::AReg_64RegClass) in getLargestLegalSuperClass()
458 return &AMDGPU::AV_64RegClass; in getLargestLegalSuperClass()
459 if (RC == &AMDGPU::VReg_64_Align2RegClass || in getLargestLegalSuperClass()
460 RC == &AMDGPU::AReg_64_Align2RegClass) in getLargestLegalSuperClass()
461 return &AMDGPU::AV_64_Align2RegClass; in getLargestLegalSuperClass()
462 if (RC == &AMDGPU::VReg_96RegClass || RC == &AMDGPU::AReg_96RegClass) in getLargestLegalSuperClass()
463 return &AMDGPU::AV_96RegClass; in getLargestLegalSuperClass()
464 if (RC == &AMDGPU::VReg_96_Align2RegClass || in getLargestLegalSuperClass()
465 RC == &AMDGPU::AReg_96_Align2RegClass) in getLargestLegalSuperClass()
466 return &AMDGPU::AV_96_Align2RegClass; in getLargestLegalSuperClass()
467 if (RC == &AMDGPU::VReg_128RegClass || RC == &AMDGPU::AReg_128RegClass) in getLargestLegalSuperClass()
468 return &AMDGPU::AV_128RegClass; in getLargestLegalSuperClass()
469 if (RC == &AMDGPU::VReg_128_Align2RegClass || in getLargestLegalSuperClass()
470 RC == &AMDGPU::AReg_128_Align2RegClass) in getLargestLegalSuperClass()
471 return &AMDGPU::AV_128_Align2RegClass; in getLargestLegalSuperClass()
472 if (RC == &AMDGPU::VReg_160RegClass || RC == &AMDGPU::AReg_160RegClass) in getLargestLegalSuperClass()
473 return &AMDGPU::AV_160RegClass; in getLargestLegalSuperClass()
474 if (RC == &AMDGPU::VReg_160_Align2RegClass || in getLargestLegalSuperClass()
475 RC == &AMDGPU::AReg_160_Align2RegClass) in getLargestLegalSuperClass()
476 return &AMDGPU::AV_160_Align2RegClass; in getLargestLegalSuperClass()
477 if (RC == &AMDGPU::VReg_192RegClass || RC == &AMDGPU::AReg_192RegClass) in getLargestLegalSuperClass()
478 return &AMDGPU::AV_192RegClass; in getLargestLegalSuperClass()
479 if (RC == &AMDGPU::VReg_192_Align2RegClass || in getLargestLegalSuperClass()
480 RC == &AMDGPU::AReg_192_Align2RegClass) in getLargestLegalSuperClass()
481 return &AMDGPU::AV_192_Align2RegClass; in getLargestLegalSuperClass()
482 if (RC == &AMDGPU::VReg_256RegClass || RC == &AMDGPU::AReg_256RegClass) in getLargestLegalSuperClass()
483 return &AMDGPU::AV_256RegClass; in getLargestLegalSuperClass()
484 if (RC == &AMDGPU::VReg_256_Align2RegClass || in getLargestLegalSuperClass()
485 RC == &AMDGPU::AReg_256_Align2RegClass) in getLargestLegalSuperClass()
486 return &AMDGPU::AV_256_Align2RegClass; in getLargestLegalSuperClass()
487 if (RC == &AMDGPU::VReg_512RegClass || RC == &AMDGPU::AReg_512RegClass) in getLargestLegalSuperClass()
488 return &AMDGPU::AV_512RegClass; in getLargestLegalSuperClass()
489 if (RC == &AMDGPU::VReg_512_Align2RegClass || in getLargestLegalSuperClass()
490 RC == &AMDGPU::AReg_512_Align2RegClass) in getLargestLegalSuperClass()
491 return &AMDGPU::AV_512_Align2RegClass; in getLargestLegalSuperClass()
492 if (RC == &AMDGPU::VReg_1024RegClass || RC == &AMDGPU::AReg_1024RegClass) in getLargestLegalSuperClass()
493 return &AMDGPU::AV_1024RegClass; in getLargestLegalSuperClass()
494 if (RC == &AMDGPU::VReg_1024_Align2RegClass || in getLargestLegalSuperClass()
495 RC == &AMDGPU::AReg_1024_Align2RegClass) in getLargestLegalSuperClass()
496 return &AMDGPU::AV_1024_Align2RegClass; in getLargestLegalSuperClass()
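Every match from getLargestLegalSuperClass() (lines 455-496) follows one pattern: a VGPR tuple class and the AGPR tuple class of the same width and alignment are both widened to the combined AV superclass. One rung of the ladder, pulled out as a sketch:

    // Sketch of the repeated rung; the 64-bit pair is shown, the others
    // (96 through 1024 bits, plus the _Align2 variants) repeat it verbatim.
    if (RC == &AMDGPU::VReg_64RegClass || RC == &AMDGPU::AReg_64RegClass)
      return &AMDGPU::AV_64RegClass;
    if (RC == &AMDGPU::VReg_64_Align2RegClass ||
        RC == &AMDGPU::AReg_64_Align2RegClass)
      return &AMDGPU::AV_64_Align2RegClass;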
523 Register SIRegisterInfo::getBaseRegister() const { return AMDGPU::SGPR34; } in getBaseRegister()
555 MCRegister BaseReg(AMDGPU::SGPR_32RegClass.getRegister(BaseIdx)); in getAlignedHighSGPRForRC()
556 return getMatchingSuperReg(BaseReg, AMDGPU::sub0, RC); in getAlignedHighSGPRForRC()
561 return getAlignedHighSGPRForRC(MF, /*Align=*/4, &AMDGPU::SGPR_128RegClass); in reservedPrivateSegmentBufferReg()
566 Reserved.set(AMDGPU::MODE); in getReservedRegs()
574 reserveRegisterTuples(Reserved, AMDGPU::EXEC); in getReservedRegs()
575 reserveRegisterTuples(Reserved, AMDGPU::FLAT_SCR); in getReservedRegs()
578 reserveRegisterTuples(Reserved, AMDGPU::M0); in getReservedRegs()
581 reserveRegisterTuples(Reserved, AMDGPU::SRC_VCCZ); in getReservedRegs()
582 reserveRegisterTuples(Reserved, AMDGPU::SRC_EXECZ); in getReservedRegs()
583 reserveRegisterTuples(Reserved, AMDGPU::SRC_SCC); in getReservedRegs()
586 reserveRegisterTuples(Reserved, AMDGPU::SRC_SHARED_BASE); in getReservedRegs()
587 reserveRegisterTuples(Reserved, AMDGPU::SRC_SHARED_LIMIT); in getReservedRegs()
588 reserveRegisterTuples(Reserved, AMDGPU::SRC_PRIVATE_BASE); in getReservedRegs()
589 reserveRegisterTuples(Reserved, AMDGPU::SRC_PRIVATE_LIMIT); in getReservedRegs()
592 reserveRegisterTuples(Reserved, AMDGPU::SRC_POPS_EXITING_WAVE_ID); in getReservedRegs()
595 reserveRegisterTuples(Reserved, AMDGPU::XNACK_MASK); in getReservedRegs()
598 reserveRegisterTuples(Reserved, AMDGPU::LDS_DIRECT); in getReservedRegs()
601 reserveRegisterTuples(Reserved, AMDGPU::TBA); in getReservedRegs()
602 reserveRegisterTuples(Reserved, AMDGPU::TMA); in getReservedRegs()
603 reserveRegisterTuples(Reserved, AMDGPU::TTMP0_TTMP1); in getReservedRegs()
604 reserveRegisterTuples(Reserved, AMDGPU::TTMP2_TTMP3); in getReservedRegs()
605 reserveRegisterTuples(Reserved, AMDGPU::TTMP4_TTMP5); in getReservedRegs()
606 reserveRegisterTuples(Reserved, AMDGPU::TTMP6_TTMP7); in getReservedRegs()
607 reserveRegisterTuples(Reserved, AMDGPU::TTMP8_TTMP9); in getReservedRegs()
608 reserveRegisterTuples(Reserved, AMDGPU::TTMP10_TTMP11); in getReservedRegs()
609 reserveRegisterTuples(Reserved, AMDGPU::TTMP12_TTMP13); in getReservedRegs()
610 reserveRegisterTuples(Reserved, AMDGPU::TTMP14_TTMP15); in getReservedRegs()
613 reserveRegisterTuples(Reserved, AMDGPU::SGPR_NULL64); in getReservedRegs()
618 unsigned TotalNumSGPRs = AMDGPU::SGPR_32RegClass.getNumRegs(); in getReservedRegs()
631 if (ScratchRSrcReg != AMDGPU::NoRegister) { in getReservedRegs()
673 unsigned TotalNumVGPRs = AMDGPU::VGPR_32RegClass.getNumRegs(); in getReservedRegs()
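The getReservedRegs() matches (lines 566-673) all funnel through the same helper: reserveRegisterTuples() marks a special register and every tuple that aliases it as reserved, and the total SGPR/VGPR counts are read off the 32-bit register classes. A compressed sketch, with the subtarget guards that wrap several of these groups elided because they do not appear in the listing:

    // Sketch: mark special registers reserved; feature guards elided.
    Reserved.set(AMDGPU::MODE);
    reserveRegisterTuples(Reserved, AMDGPU::EXEC);
    reserveRegisterTuples(Reserved, AMDGPU::FLAT_SCR);
    reserveRegisterTuples(Reserved, AMDGPU::M0);
    reserveRegisterTuples(Reserved, AMDGPU::TTMP0_TTMP1);
    // ...remaining TTMP pairs, SRC_* registers, TBA/TMA, SGPR_NULL64...
    unsigned TotalNumSGPRs = AMDGPU::SGPR_32RegClass.getNumRegs();
    unsigned TotalNumVGPRs = AMDGPU::VGPR_32RegClass.getNumRegs();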
793 int OffIdx = AMDGPU::getNamedOperandIdx(MI->getOpcode(), in getScratchInstrOffset()
794 AMDGPU::OpName::offset); in getScratchInstrOffset()
803 assert((Idx == AMDGPU::getNamedOperandIdx(MI->getOpcode(), in getFrameIndexInstrOffset()
804 AMDGPU::OpName::vaddr) || in getFrameIndexInstrOffset()
805 (Idx == AMDGPU::getNamedOperandIdx(MI->getOpcode(), in getFrameIndexInstrOffset()
806 AMDGPU::OpName::saddr))) && in getFrameIndexInstrOffset()
838 unsigned MovOpc = ST.enableFlatScratch() ? AMDGPU::S_MOV_B32 in materializeFrameBaseRegister()
839 : AMDGPU::V_MOV_B32_e32; in materializeFrameBaseRegister()
842 ST.enableFlatScratch() ? &AMDGPU::SReg_32_XEXEC_HIRegClass in materializeFrameBaseRegister()
843 : &AMDGPU::VGPR_32RegClass); in materializeFrameBaseRegister()
851 Register OffsetReg = MRI.createVirtualRegister(&AMDGPU::SReg_32_XM0RegClass); in materializeFrameBaseRegister()
854 ST.enableFlatScratch() ? &AMDGPU::SReg_32_XM0RegClass in materializeFrameBaseRegister()
855 : &AMDGPU::VGPR_32RegClass); in materializeFrameBaseRegister()
857 BuildMI(*MBB, Ins, DL, TII->get(AMDGPU::S_MOV_B32), OffsetReg) in materializeFrameBaseRegister()
863 BuildMI(*MBB, Ins, DL, TII->get(AMDGPU::S_ADD_I32), BaseReg) in materializeFrameBaseRegister()
896 TII->getNamedOperand(MI, IsFlat ? AMDGPU::OpName::saddr in resolveFrameIndex()
897 : AMDGPU::OpName::vaddr); in resolveFrameIndex()
899 MachineOperand *OffsetOp = TII->getNamedOperand(MI, AMDGPU::OpName::offset); in resolveFrameIndex()
915 MachineOperand *SOffset = TII->getNamedOperand(MI, AMDGPU::OpName::soffset); in resolveFrameIndex()
946 return &AMDGPU::VGPR_32RegClass; in getPointerRegClass()
953 if (RC == &AMDGPU::SCC_CLASSRegClass) in getCrossCopyRegClass()
962 case AMDGPU::SI_SPILL_S1024_SAVE: in getNumSubRegsForSpillOp()
963 case AMDGPU::SI_SPILL_S1024_RESTORE: in getNumSubRegsForSpillOp()
964 case AMDGPU::SI_SPILL_V1024_SAVE: in getNumSubRegsForSpillOp()
965 case AMDGPU::SI_SPILL_V1024_RESTORE: in getNumSubRegsForSpillOp()
966 case AMDGPU::SI_SPILL_A1024_SAVE: in getNumSubRegsForSpillOp()
967 case AMDGPU::SI_SPILL_A1024_RESTORE: in getNumSubRegsForSpillOp()
968 case AMDGPU::SI_SPILL_AV1024_SAVE: in getNumSubRegsForSpillOp()
969 case AMDGPU::SI_SPILL_AV1024_RESTORE: in getNumSubRegsForSpillOp()
971 case AMDGPU::SI_SPILL_S512_SAVE: in getNumSubRegsForSpillOp()
972 case AMDGPU::SI_SPILL_S512_RESTORE: in getNumSubRegsForSpillOp()
973 case AMDGPU::SI_SPILL_V512_SAVE: in getNumSubRegsForSpillOp()
974 case AMDGPU::SI_SPILL_V512_RESTORE: in getNumSubRegsForSpillOp()
975 case AMDGPU::SI_SPILL_A512_SAVE: in getNumSubRegsForSpillOp()
976 case AMDGPU::SI_SPILL_A512_RESTORE: in getNumSubRegsForSpillOp()
977 case AMDGPU::SI_SPILL_AV512_SAVE: in getNumSubRegsForSpillOp()
978 case AMDGPU::SI_SPILL_AV512_RESTORE: in getNumSubRegsForSpillOp()
980 case AMDGPU::SI_SPILL_S384_SAVE: in getNumSubRegsForSpillOp()
981 case AMDGPU::SI_SPILL_S384_RESTORE: in getNumSubRegsForSpillOp()
982 case AMDGPU::SI_SPILL_V384_SAVE: in getNumSubRegsForSpillOp()
983 case AMDGPU::SI_SPILL_V384_RESTORE: in getNumSubRegsForSpillOp()
984 case AMDGPU::SI_SPILL_A384_SAVE: in getNumSubRegsForSpillOp()
985 case AMDGPU::SI_SPILL_A384_RESTORE: in getNumSubRegsForSpillOp()
986 case AMDGPU::SI_SPILL_AV384_SAVE: in getNumSubRegsForSpillOp()
987 case AMDGPU::SI_SPILL_AV384_RESTORE: in getNumSubRegsForSpillOp()
989 case AMDGPU::SI_SPILL_S352_SAVE: in getNumSubRegsForSpillOp()
990 case AMDGPU::SI_SPILL_S352_RESTORE: in getNumSubRegsForSpillOp()
991 case AMDGPU::SI_SPILL_V352_SAVE: in getNumSubRegsForSpillOp()
992 case AMDGPU::SI_SPILL_V352_RESTORE: in getNumSubRegsForSpillOp()
993 case AMDGPU::SI_SPILL_A352_SAVE: in getNumSubRegsForSpillOp()
994 case AMDGPU::SI_SPILL_A352_RESTORE: in getNumSubRegsForSpillOp()
995 case AMDGPU::SI_SPILL_AV352_SAVE: in getNumSubRegsForSpillOp()
996 case AMDGPU::SI_SPILL_AV352_RESTORE: in getNumSubRegsForSpillOp()
998 case AMDGPU::SI_SPILL_S320_SAVE: in getNumSubRegsForSpillOp()
999 case AMDGPU::SI_SPILL_S320_RESTORE: in getNumSubRegsForSpillOp()
1000 case AMDGPU::SI_SPILL_V320_SAVE: in getNumSubRegsForSpillOp()
1001 case AMDGPU::SI_SPILL_V320_RESTORE: in getNumSubRegsForSpillOp()
1002 case AMDGPU::SI_SPILL_A320_SAVE: in getNumSubRegsForSpillOp()
1003 case AMDGPU::SI_SPILL_A320_RESTORE: in getNumSubRegsForSpillOp()
1004 case AMDGPU::SI_SPILL_AV320_SAVE: in getNumSubRegsForSpillOp()
1005 case AMDGPU::SI_SPILL_AV320_RESTORE: in getNumSubRegsForSpillOp()
1007 case AMDGPU::SI_SPILL_S288_SAVE: in getNumSubRegsForSpillOp()
1008 case AMDGPU::SI_SPILL_S288_RESTORE: in getNumSubRegsForSpillOp()
1009 case AMDGPU::SI_SPILL_V288_SAVE: in getNumSubRegsForSpillOp()
1010 case AMDGPU::SI_SPILL_V288_RESTORE: in getNumSubRegsForSpillOp()
1011 case AMDGPU::SI_SPILL_A288_SAVE: in getNumSubRegsForSpillOp()
1012 case AMDGPU::SI_SPILL_A288_RESTORE: in getNumSubRegsForSpillOp()
1013 case AMDGPU::SI_SPILL_AV288_SAVE: in getNumSubRegsForSpillOp()
1014 case AMDGPU::SI_SPILL_AV288_RESTORE: in getNumSubRegsForSpillOp()
1016 case AMDGPU::SI_SPILL_S256_SAVE: in getNumSubRegsForSpillOp()
1017 case AMDGPU::SI_SPILL_S256_RESTORE: in getNumSubRegsForSpillOp()
1018 case AMDGPU::SI_SPILL_V256_SAVE: in getNumSubRegsForSpillOp()
1019 case AMDGPU::SI_SPILL_V256_RESTORE: in getNumSubRegsForSpillOp()
1020 case AMDGPU::SI_SPILL_A256_SAVE: in getNumSubRegsForSpillOp()
1021 case AMDGPU::SI_SPILL_A256_RESTORE: in getNumSubRegsForSpillOp()
1022 case AMDGPU::SI_SPILL_AV256_SAVE: in getNumSubRegsForSpillOp()
1023 case AMDGPU::SI_SPILL_AV256_RESTORE: in getNumSubRegsForSpillOp()
1025 case AMDGPU::SI_SPILL_S224_SAVE: in getNumSubRegsForSpillOp()
1026 case AMDGPU::SI_SPILL_S224_RESTORE: in getNumSubRegsForSpillOp()
1027 case AMDGPU::SI_SPILL_V224_SAVE: in getNumSubRegsForSpillOp()
1028 case AMDGPU::SI_SPILL_V224_RESTORE: in getNumSubRegsForSpillOp()
1029 case AMDGPU::SI_SPILL_A224_SAVE: in getNumSubRegsForSpillOp()
1030 case AMDGPU::SI_SPILL_A224_RESTORE: in getNumSubRegsForSpillOp()
1031 case AMDGPU::SI_SPILL_AV224_SAVE: in getNumSubRegsForSpillOp()
1032 case AMDGPU::SI_SPILL_AV224_RESTORE: in getNumSubRegsForSpillOp()
1034 case AMDGPU::SI_SPILL_S192_SAVE: in getNumSubRegsForSpillOp()
1035 case AMDGPU::SI_SPILL_S192_RESTORE: in getNumSubRegsForSpillOp()
1036 case AMDGPU::SI_SPILL_V192_SAVE: in getNumSubRegsForSpillOp()
1037 case AMDGPU::SI_SPILL_V192_RESTORE: in getNumSubRegsForSpillOp()
1038 case AMDGPU::SI_SPILL_A192_SAVE: in getNumSubRegsForSpillOp()
1039 case AMDGPU::SI_SPILL_A192_RESTORE: in getNumSubRegsForSpillOp()
1040 case AMDGPU::SI_SPILL_AV192_SAVE: in getNumSubRegsForSpillOp()
1041 case AMDGPU::SI_SPILL_AV192_RESTORE: in getNumSubRegsForSpillOp()
1043 case AMDGPU::SI_SPILL_S160_SAVE: in getNumSubRegsForSpillOp()
1044 case AMDGPU::SI_SPILL_S160_RESTORE: in getNumSubRegsForSpillOp()
1045 case AMDGPU::SI_SPILL_V160_SAVE: in getNumSubRegsForSpillOp()
1046 case AMDGPU::SI_SPILL_V160_RESTORE: in getNumSubRegsForSpillOp()
1047 case AMDGPU::SI_SPILL_A160_SAVE: in getNumSubRegsForSpillOp()
1048 case AMDGPU::SI_SPILL_A160_RESTORE: in getNumSubRegsForSpillOp()
1049 case AMDGPU::SI_SPILL_AV160_SAVE: in getNumSubRegsForSpillOp()
1050 case AMDGPU::SI_SPILL_AV160_RESTORE: in getNumSubRegsForSpillOp()
1052 case AMDGPU::SI_SPILL_S128_SAVE: in getNumSubRegsForSpillOp()
1053 case AMDGPU::SI_SPILL_S128_RESTORE: in getNumSubRegsForSpillOp()
1054 case AMDGPU::SI_SPILL_V128_SAVE: in getNumSubRegsForSpillOp()
1055 case AMDGPU::SI_SPILL_V128_RESTORE: in getNumSubRegsForSpillOp()
1056 case AMDGPU::SI_SPILL_A128_SAVE: in getNumSubRegsForSpillOp()
1057 case AMDGPU::SI_SPILL_A128_RESTORE: in getNumSubRegsForSpillOp()
1058 case AMDGPU::SI_SPILL_AV128_SAVE: in getNumSubRegsForSpillOp()
1059 case AMDGPU::SI_SPILL_AV128_RESTORE: in getNumSubRegsForSpillOp()
1061 case AMDGPU::SI_SPILL_S96_SAVE: in getNumSubRegsForSpillOp()
1062 case AMDGPU::SI_SPILL_S96_RESTORE: in getNumSubRegsForSpillOp()
1063 case AMDGPU::SI_SPILL_V96_SAVE: in getNumSubRegsForSpillOp()
1064 case AMDGPU::SI_SPILL_V96_RESTORE: in getNumSubRegsForSpillOp()
1065 case AMDGPU::SI_SPILL_A96_SAVE: in getNumSubRegsForSpillOp()
1066 case AMDGPU::SI_SPILL_A96_RESTORE: in getNumSubRegsForSpillOp()
1067 case AMDGPU::SI_SPILL_AV96_SAVE: in getNumSubRegsForSpillOp()
1068 case AMDGPU::SI_SPILL_AV96_RESTORE: in getNumSubRegsForSpillOp()
1070 case AMDGPU::SI_SPILL_S64_SAVE: in getNumSubRegsForSpillOp()
1071 case AMDGPU::SI_SPILL_S64_RESTORE: in getNumSubRegsForSpillOp()
1072 case AMDGPU::SI_SPILL_V64_SAVE: in getNumSubRegsForSpillOp()
1073 case AMDGPU::SI_SPILL_V64_RESTORE: in getNumSubRegsForSpillOp()
1074 case AMDGPU::SI_SPILL_A64_SAVE: in getNumSubRegsForSpillOp()
1075 case AMDGPU::SI_SPILL_A64_RESTORE: in getNumSubRegsForSpillOp()
1076 case AMDGPU::SI_SPILL_AV64_SAVE: in getNumSubRegsForSpillOp()
1077 case AMDGPU::SI_SPILL_AV64_RESTORE: in getNumSubRegsForSpillOp()
1079 case AMDGPU::SI_SPILL_S32_SAVE: in getNumSubRegsForSpillOp()
1080 case AMDGPU::SI_SPILL_S32_RESTORE: in getNumSubRegsForSpillOp()
1081 case AMDGPU::SI_SPILL_V32_SAVE: in getNumSubRegsForSpillOp()
1082 case AMDGPU::SI_SPILL_V32_RESTORE: in getNumSubRegsForSpillOp()
1083 case AMDGPU::SI_SPILL_A32_SAVE: in getNumSubRegsForSpillOp()
1084 case AMDGPU::SI_SPILL_A32_RESTORE: in getNumSubRegsForSpillOp()
1085 case AMDGPU::SI_SPILL_AV32_SAVE: in getNumSubRegsForSpillOp()
1086 case AMDGPU::SI_SPILL_AV32_RESTORE: in getNumSubRegsForSpillOp()
1087 case AMDGPU::SI_SPILL_WWM_V32_SAVE: in getNumSubRegsForSpillOp()
1088 case AMDGPU::SI_SPILL_WWM_V32_RESTORE: in getNumSubRegsForSpillOp()
1089 case AMDGPU::SI_SPILL_WWM_AV32_SAVE: in getNumSubRegsForSpillOp()
1090 case AMDGPU::SI_SPILL_WWM_AV32_RESTORE: in getNumSubRegsForSpillOp()
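The case labels at lines 962-1090 belong to a single switch in getNumSubRegsForSpillOp(); its return statements contain no AMDGPU token and are therefore missing from this listing. Read together with the opcode names, the presumed shape is that each spill pseudo maps to its width in 32-bit sub-registers. A sketch, with the return values inferred from the opcode widths rather than taken from the source:

    case AMDGPU::SI_SPILL_S1024_SAVE:
    case AMDGPU::SI_SPILL_AV1024_RESTORE:   // ...all 1024-bit spill pseudos
      return 32;
    case AMDGPU::SI_SPILL_S512_SAVE:        // ...all 512-bit spill pseudos
      return 16;
    // ...and so on down to...
    case AMDGPU::SI_SPILL_WWM_AV32_RESTORE: // ...all 32-bit spill pseudos
      return 1;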
1098 case AMDGPU::BUFFER_STORE_DWORD_OFFEN: in getOffsetMUBUFStore()
1099 return AMDGPU::BUFFER_STORE_DWORD_OFFSET; in getOffsetMUBUFStore()
1100 case AMDGPU::BUFFER_STORE_BYTE_OFFEN: in getOffsetMUBUFStore()
1101 return AMDGPU::BUFFER_STORE_BYTE_OFFSET; in getOffsetMUBUFStore()
1102 case AMDGPU::BUFFER_STORE_SHORT_OFFEN: in getOffsetMUBUFStore()
1103 return AMDGPU::BUFFER_STORE_SHORT_OFFSET; in getOffsetMUBUFStore()
1104 case AMDGPU::BUFFER_STORE_DWORDX2_OFFEN: in getOffsetMUBUFStore()
1105 return AMDGPU::BUFFER_STORE_DWORDX2_OFFSET; in getOffsetMUBUFStore()
1106 case AMDGPU::BUFFER_STORE_DWORDX3_OFFEN: in getOffsetMUBUFStore()
1107 return AMDGPU::BUFFER_STORE_DWORDX3_OFFSET; in getOffsetMUBUFStore()
1108 case AMDGPU::BUFFER_STORE_DWORDX4_OFFEN: in getOffsetMUBUFStore()
1109 return AMDGPU::BUFFER_STORE_DWORDX4_OFFSET; in getOffsetMUBUFStore()
1110 case AMDGPU::BUFFER_STORE_SHORT_D16_HI_OFFEN: in getOffsetMUBUFStore()
1111 return AMDGPU::BUFFER_STORE_SHORT_D16_HI_OFFSET; in getOffsetMUBUFStore()
1112 case AMDGPU::BUFFER_STORE_BYTE_D16_HI_OFFEN: in getOffsetMUBUFStore()
1113 return AMDGPU::BUFFER_STORE_BYTE_D16_HI_OFFSET; in getOffsetMUBUFStore()
1121 case AMDGPU::BUFFER_LOAD_DWORD_OFFEN: in getOffsetMUBUFLoad()
1122 return AMDGPU::BUFFER_LOAD_DWORD_OFFSET; in getOffsetMUBUFLoad()
1123 case AMDGPU::BUFFER_LOAD_UBYTE_OFFEN: in getOffsetMUBUFLoad()
1124 return AMDGPU::BUFFER_LOAD_UBYTE_OFFSET; in getOffsetMUBUFLoad()
1125 case AMDGPU::BUFFER_LOAD_SBYTE_OFFEN: in getOffsetMUBUFLoad()
1126 return AMDGPU::BUFFER_LOAD_SBYTE_OFFSET; in getOffsetMUBUFLoad()
1127 case AMDGPU::BUFFER_LOAD_USHORT_OFFEN: in getOffsetMUBUFLoad()
1128 return AMDGPU::BUFFER_LOAD_USHORT_OFFSET; in getOffsetMUBUFLoad()
1129 case AMDGPU::BUFFER_LOAD_SSHORT_OFFEN: in getOffsetMUBUFLoad()
1130 return AMDGPU::BUFFER_LOAD_SSHORT_OFFSET; in getOffsetMUBUFLoad()
1131 case AMDGPU::BUFFER_LOAD_DWORDX2_OFFEN: in getOffsetMUBUFLoad()
1132 return AMDGPU::BUFFER_LOAD_DWORDX2_OFFSET; in getOffsetMUBUFLoad()
1133 case AMDGPU::BUFFER_LOAD_DWORDX3_OFFEN: in getOffsetMUBUFLoad()
1134 return AMDGPU::BUFFER_LOAD_DWORDX3_OFFSET; in getOffsetMUBUFLoad()
1135 case AMDGPU::BUFFER_LOAD_DWORDX4_OFFEN: in getOffsetMUBUFLoad()
1136 return AMDGPU::BUFFER_LOAD_DWORDX4_OFFSET; in getOffsetMUBUFLoad()
1137 case AMDGPU::BUFFER_LOAD_UBYTE_D16_OFFEN: in getOffsetMUBUFLoad()
1138 return AMDGPU::BUFFER_LOAD_UBYTE_D16_OFFSET; in getOffsetMUBUFLoad()
1139 case AMDGPU::BUFFER_LOAD_UBYTE_D16_HI_OFFEN: in getOffsetMUBUFLoad()
1140 return AMDGPU::BUFFER_LOAD_UBYTE_D16_HI_OFFSET; in getOffsetMUBUFLoad()
1141 case AMDGPU::BUFFER_LOAD_SBYTE_D16_OFFEN: in getOffsetMUBUFLoad()
1142 return AMDGPU::BUFFER_LOAD_SBYTE_D16_OFFSET; in getOffsetMUBUFLoad()
1143 case AMDGPU::BUFFER_LOAD_SBYTE_D16_HI_OFFEN: in getOffsetMUBUFLoad()
1144 return AMDGPU::BUFFER_LOAD_SBYTE_D16_HI_OFFSET; in getOffsetMUBUFLoad()
1145 case AMDGPU::BUFFER_LOAD_SHORT_D16_OFFEN: in getOffsetMUBUFLoad()
1146 return AMDGPU::BUFFER_LOAD_SHORT_D16_OFFSET; in getOffsetMUBUFLoad()
1147 case AMDGPU::BUFFER_LOAD_SHORT_D16_HI_OFFEN: in getOffsetMUBUFLoad()
1148 return AMDGPU::BUFFER_LOAD_SHORT_D16_HI_OFFSET; in getOffsetMUBUFLoad()
1156 case AMDGPU::BUFFER_STORE_DWORD_OFFSET: in getOffenMUBUFStore()
1157 return AMDGPU::BUFFER_STORE_DWORD_OFFEN; in getOffenMUBUFStore()
1158 case AMDGPU::BUFFER_STORE_BYTE_OFFSET: in getOffenMUBUFStore()
1159 return AMDGPU::BUFFER_STORE_BYTE_OFFEN; in getOffenMUBUFStore()
1160 case AMDGPU::BUFFER_STORE_SHORT_OFFSET: in getOffenMUBUFStore()
1161 return AMDGPU::BUFFER_STORE_SHORT_OFFEN; in getOffenMUBUFStore()
1162 case AMDGPU::BUFFER_STORE_DWORDX2_OFFSET: in getOffenMUBUFStore()
1163 return AMDGPU::BUFFER_STORE_DWORDX2_OFFEN; in getOffenMUBUFStore()
1164 case AMDGPU::BUFFER_STORE_DWORDX3_OFFSET: in getOffenMUBUFStore()
1165 return AMDGPU::BUFFER_STORE_DWORDX3_OFFEN; in getOffenMUBUFStore()
1166 case AMDGPU::BUFFER_STORE_DWORDX4_OFFSET: in getOffenMUBUFStore()
1167 return AMDGPU::BUFFER_STORE_DWORDX4_OFFEN; in getOffenMUBUFStore()
1168 case AMDGPU::BUFFER_STORE_SHORT_D16_HI_OFFSET: in getOffenMUBUFStore()
1169 return AMDGPU::BUFFER_STORE_SHORT_D16_HI_OFFEN; in getOffenMUBUFStore()
1170 case AMDGPU::BUFFER_STORE_BYTE_D16_HI_OFFSET: in getOffenMUBUFStore()
1171 return AMDGPU::BUFFER_STORE_BYTE_D16_HI_OFFEN; in getOffenMUBUFStore()
1179 case AMDGPU::BUFFER_LOAD_DWORD_OFFSET: in getOffenMUBUFLoad()
1180 return AMDGPU::BUFFER_LOAD_DWORD_OFFEN; in getOffenMUBUFLoad()
1181 case AMDGPU::BUFFER_LOAD_UBYTE_OFFSET: in getOffenMUBUFLoad()
1182 return AMDGPU::BUFFER_LOAD_UBYTE_OFFEN; in getOffenMUBUFLoad()
1183 case AMDGPU::BUFFER_LOAD_SBYTE_OFFSET: in getOffenMUBUFLoad()
1184 return AMDGPU::BUFFER_LOAD_SBYTE_OFFEN; in getOffenMUBUFLoad()
1185 case AMDGPU::BUFFER_LOAD_USHORT_OFFSET: in getOffenMUBUFLoad()
1186 return AMDGPU::BUFFER_LOAD_USHORT_OFFEN; in getOffenMUBUFLoad()
1187 case AMDGPU::BUFFER_LOAD_SSHORT_OFFSET: in getOffenMUBUFLoad()
1188 return AMDGPU::BUFFER_LOAD_SSHORT_OFFEN; in getOffenMUBUFLoad()
1189 case AMDGPU::BUFFER_LOAD_DWORDX2_OFFSET: in getOffenMUBUFLoad()
1190 return AMDGPU::BUFFER_LOAD_DWORDX2_OFFEN; in getOffenMUBUFLoad()
1191 case AMDGPU::BUFFER_LOAD_DWORDX3_OFFSET: in getOffenMUBUFLoad()
1192 return AMDGPU::BUFFER_LOAD_DWORDX3_OFFEN; in getOffenMUBUFLoad()
1193 case AMDGPU::BUFFER_LOAD_DWORDX4_OFFSET: in getOffenMUBUFLoad()
1194 return AMDGPU::BUFFER_LOAD_DWORDX4_OFFEN; in getOffenMUBUFLoad()
1195 case AMDGPU::BUFFER_LOAD_UBYTE_D16_OFFSET: in getOffenMUBUFLoad()
1196 return AMDGPU::BUFFER_LOAD_UBYTE_D16_OFFEN; in getOffenMUBUFLoad()
1197 case AMDGPU::BUFFER_LOAD_UBYTE_D16_HI_OFFSET: in getOffenMUBUFLoad()
1198 return AMDGPU::BUFFER_LOAD_UBYTE_D16_HI_OFFEN; in getOffenMUBUFLoad()
1199 case AMDGPU::BUFFER_LOAD_SBYTE_D16_OFFSET: in getOffenMUBUFLoad()
1200 return AMDGPU::BUFFER_LOAD_SBYTE_D16_OFFEN; in getOffenMUBUFLoad()
1201 case AMDGPU::BUFFER_LOAD_SBYTE_D16_HI_OFFSET: in getOffenMUBUFLoad()
1202 return AMDGPU::BUFFER_LOAD_SBYTE_D16_HI_OFFEN; in getOffenMUBUFLoad()
1203 case AMDGPU::BUFFER_LOAD_SHORT_D16_OFFSET: in getOffenMUBUFLoad()
1204 return AMDGPU::BUFFER_LOAD_SHORT_D16_OFFEN; in getOffenMUBUFLoad()
1205 case AMDGPU::BUFFER_LOAD_SHORT_D16_HI_OFFSET: in getOffenMUBUFLoad()
1206 return AMDGPU::BUFFER_LOAD_SHORT_D16_HI_OFFEN; in getOffenMUBUFLoad()
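getOffsetMUBUFStore(), getOffsetMUBUFLoad(), getOffenMUBUFStore() and getOffenMUBUFLoad() (lines 1098-1206) are pure opcode remaps between the _OFFEN (VGPR-addressed) and _OFFSET (immediate-only) MUBUF forms. A sketch of one of them; the default behaviour is an assumption, since the non-matching fallback lines do not show up here:

    // Sketch: remap an offen MUBUF store to its offset-only twin.
    static int getOffsetMUBUFStore(unsigned Opc) {
      switch (Opc) {
      case AMDGPU::BUFFER_STORE_DWORD_OFFEN:
        return AMDGPU::BUFFER_STORE_DWORD_OFFSET;
      case AMDGPU::BUFFER_STORE_BYTE_OFFEN:
        return AMDGPU::BUFFER_STORE_BYTE_OFFSET;
      // ...SHORT, DWORDX2/X3/X4 and D16_HI variants follow the same pattern...
      default:
        return -1; // assumed: no offset-only twin for this opcode
      }
    }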
1223 if (Reg == AMDGPU::NoRegister) in spillVGPRtoAGPR()
1239 auto CopyMIB = BuildMI(MBB, MI, DL, TII->get(AMDGPU::COPY), Dst) in spillVGPRtoAGPR()
1244 unsigned Opc = (IsStore ^ IsVGPR) ? AMDGPU::V_ACCVGPR_WRITE_B32_e64 in spillVGPRtoAGPR()
1245 : AMDGPU::V_ACCVGPR_READ_B32_e64; in spillVGPRtoAGPR()
1271 const MachineOperand *Reg = TII->getNamedOperand(*MI, AMDGPU::OpName::vdata); in buildMUBUFOffsetLoadStore()
1278 .add(*TII->getNamedOperand(*MI, AMDGPU::OpName::srsrc)) in buildMUBUFOffsetLoadStore()
1279 .add(*TII->getNamedOperand(*MI, AMDGPU::OpName::soffset)) in buildMUBUFOffsetLoadStore()
1286 AMDGPU::OpName::vdata_in); in buildMUBUFOffsetLoadStore()
1296 bool HasVAddr = AMDGPU::hasNamedOperand(LoadStoreOp, AMDGPU::OpName::vaddr); in getFlatScratchSpillOpcode()
1298 !HasVAddr && !AMDGPU::hasNamedOperand(LoadStoreOp, AMDGPU::OpName::saddr); in getFlatScratchSpillOpcode()
1302 LoadStoreOp = IsStore ? AMDGPU::SCRATCH_STORE_DWORD_SADDR in getFlatScratchSpillOpcode()
1303 : AMDGPU::SCRATCH_LOAD_DWORD_SADDR; in getFlatScratchSpillOpcode()
1306 LoadStoreOp = IsStore ? AMDGPU::SCRATCH_STORE_DWORDX2_SADDR in getFlatScratchSpillOpcode()
1307 : AMDGPU::SCRATCH_LOAD_DWORDX2_SADDR; in getFlatScratchSpillOpcode()
1310 LoadStoreOp = IsStore ? AMDGPU::SCRATCH_STORE_DWORDX3_SADDR in getFlatScratchSpillOpcode()
1311 : AMDGPU::SCRATCH_LOAD_DWORDX3_SADDR; in getFlatScratchSpillOpcode()
1314 LoadStoreOp = IsStore ? AMDGPU::SCRATCH_STORE_DWORDX4_SADDR in getFlatScratchSpillOpcode()
1315 : AMDGPU::SCRATCH_LOAD_DWORDX4_SADDR; in getFlatScratchSpillOpcode()
1322 LoadStoreOp = AMDGPU::getFlatScratchInstSVfromSS(LoadStoreOp); in getFlatScratchSpillOpcode()
1324 LoadStoreOp = AMDGPU::getFlatScratchInstSTfromSS(LoadStoreOp); in getFlatScratchSpillOpcode()
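getFlatScratchSpillOpcode() (lines 1296-1324) first picks a SADDR scratch opcode by access size, then rewrites it into the SV (VGPR-addressed) or ST (no-address) form depending on which named operands the original opcode carries. A sketch under the assumption that the switch is keyed on the access size in bytes:

    // Sketch: size-keyed opcode selection, then addressing-mode rewrite.
    bool HasVAddr = AMDGPU::hasNamedOperand(LoadStoreOp, AMDGPU::OpName::vaddr);
    bool UseST =
        !HasVAddr && !AMDGPU::hasNamedOperand(LoadStoreOp, AMDGPU::OpName::saddr);
    switch (EltSize) { // assumed: bytes per access
    case 4:
      LoadStoreOp = IsStore ? AMDGPU::SCRATCH_STORE_DWORD_SADDR
                            : AMDGPU::SCRATCH_LOAD_DWORD_SADDR;
      break;
    // ...8 -> DWORDX2, 12 -> DWORDX3, 16 -> DWORDX4...
    }
    if (HasVAddr)
      LoadStoreOp = AMDGPU::getFlatScratchInstSVfromSS(LoadStoreOp);
    else if (UseST)
      LoadStoreOp = AMDGPU::getFlatScratchInstSTfromSS(LoadStoreOp);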
1352 const unsigned RegWidth = AMDGPU::getRegBitWidth(*RC) / 8; in buildSpillLoadStore()
1395 if (ST.getConstantBusLimit(AMDGPU::V_ADD_U32_e64) >= 2) { in buildSpillLoadStore()
1396 BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_ADD_U32_e64), TmpVGPR) in buildSpillLoadStore()
1401 BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_MOV_B32_e32), TmpVGPR) in buildSpillLoadStore()
1403 BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_ADD_U32_e32), TmpVGPR) in buildSpillLoadStore()
1409 BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_MOV_B32_e32), TmpVGPR) in buildSpillLoadStore()
1426 SOffset = RS->scavengeRegisterBackwards(AMDGPU::SGPR_32RegClass, MI, false, 0, false); in buildSpillLoadStore()
1429 CanClobberSCC = !RS->isRegUsed(AMDGPU::SCC); in buildSpillLoadStore()
1431 CanClobberSCC = LiveUnits->available(AMDGPU::SCC); in buildSpillLoadStore()
1432 for (MCRegister Reg : AMDGPU::SGPR_32RegClass) { in buildSpillLoadStore()
1440 if (ScratchOffsetReg != AMDGPU::NoRegister && !CanClobberSCC) in buildSpillLoadStore()
1447 TmpOffsetVGPR = RS->scavengeRegisterBackwards(AMDGPU::VGPR_32RegClass, MI, false, 0); in buildSpillLoadStore()
1450 for (MCRegister Reg : AMDGPU::VGPR_32RegClass) { in buildSpillLoadStore()
1490 } else if (ScratchOffsetReg == AMDGPU::NoRegister) { in buildSpillLoadStore()
1491 BuildMI(MBB, MI, DL, TII->get(AMDGPU::S_MOV_B32), SOffset).addImm(Offset); in buildSpillLoadStore()
1494 auto Add = BuildMI(MBB, MI, DL, TII->get(AMDGPU::S_ADD_I32), SOffset) in buildSpillLoadStore()
1503 if (IsFlat && SOffset == AMDGPU::NoRegister) { in buildSpillLoadStore()
1504 assert(AMDGPU::getNamedOperandIdx(LoadStoreOp, AMDGPU::OpName::vaddr) < 0 in buildSpillLoadStore()
1508 LoadStoreOp = AMDGPU::getFlatScratchInstSVfromSS(LoadStoreOp); in buildSpillLoadStore()
1511 LoadStoreOp = AMDGPU::getFlatScratchInstSTfromSS(LoadStoreOp); in buildSpillLoadStore()
1622 TII->get(AMDGPU::V_ACCVGPR_READ_B32_e64), in buildSpillLoadStore()
1632 TmpOffsetVGPR = RS->scavengeRegisterBackwards(AMDGPU::VGPR_32RegClass, in buildSpillLoadStore()
1656 if (SOffset == AMDGPU::NoRegister) { in buildSpillLoadStore()
1672 MIB.addImm(LastUse ? AMDGPU::CPol::TH_LU : 0); // cpol in buildSpillLoadStore()
1681 if (!IsStore && IsAGPR && TmpIntermediateVGPR != AMDGPU::NoRegister) { in buildSpillLoadStore()
1682 MIB = BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_ACCVGPR_WRITE_B32_e64), in buildSpillLoadStore()
1721 BuildMI(MBB, MI, DL, TII->get(AMDGPU::S_ADD_I32), SOffset) in buildSpillLoadStore()
1746 unsigned Opc = ST.enableFlatScratch() ? AMDGPU::SCRATCH_LOAD_DWORD_SADDR in buildVGPRSpillLoadStore()
1747 : AMDGPU::BUFFER_LOAD_DWORD_OFFSET; in buildVGPRSpillLoadStore()
1751 unsigned Opc = ST.enableFlatScratch() ? AMDGPU::SCRATCH_STORE_DWORD_SADDR in buildVGPRSpillLoadStore()
1752 : AMDGPU::BUFFER_STORE_DWORD_OFFSET; in buildVGPRSpillLoadStore()
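Lines 1746-1752 in buildVGPRSpillLoadStore(), and again 2183-2184 and 2251-2252 in eliminateFrameIndex() below, repeat the same subtarget dispatch: when flat scratch is enabled the SADDR scratch instructions are used, otherwise the MUBUF offset forms. As a two-line sketch:

    unsigned LoadOpc  = ST.enableFlatScratch() ? AMDGPU::SCRATCH_LOAD_DWORD_SADDR
                                               : AMDGPU::BUFFER_LOAD_DWORD_OFFSET;
    unsigned StoreOpc = ST.enableFlatScratch() ? AMDGPU::SCRATCH_STORE_DWORD_SADDR
                                               : AMDGPU::BUFFER_STORE_DWORD_OFFSET;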
1796 SB.TII.get(AMDGPU::SI_SPILL_S32_TO_VGPR), Spill.VGPR) in spillSGPR()
1843 SB.TII.get(AMDGPU::SI_SPILL_S32_TO_VGPR), SB.TmpVGPR) in spillSGPR()
1905 SB.TII.get(AMDGPU::SI_RESTORE_S32_FROM_VGPR), SubReg) in restoreSGPR()
1938 SB.TII.get(AMDGPU::SI_RESTORE_S32_FROM_VGPR), SubReg) in restoreSGPR()
1984 BuildMI(*SB.MBB, MI, SB.DL, SB.TII.get(AMDGPU::V_WRITELANE_B32), in spillEmergencySGPR()
2018 auto MIB = BuildMI(*SB.MBB, MI, SB.DL, SB.TII.get(AMDGPU::V_READLANE_B32), in spillEmergencySGPR()
2039 case AMDGPU::SI_SPILL_S1024_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2040 case AMDGPU::SI_SPILL_S512_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2041 case AMDGPU::SI_SPILL_S384_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2042 case AMDGPU::SI_SPILL_S352_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2043 case AMDGPU::SI_SPILL_S320_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2044 case AMDGPU::SI_SPILL_S288_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2045 case AMDGPU::SI_SPILL_S256_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2046 case AMDGPU::SI_SPILL_S224_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2047 case AMDGPU::SI_SPILL_S192_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2048 case AMDGPU::SI_SPILL_S160_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2049 case AMDGPU::SI_SPILL_S128_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2050 case AMDGPU::SI_SPILL_S96_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2051 case AMDGPU::SI_SPILL_S64_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2052 case AMDGPU::SI_SPILL_S32_SAVE: in eliminateSGPRToVGPRSpillFrameIndex()
2054 case AMDGPU::SI_SPILL_S1024_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2055 case AMDGPU::SI_SPILL_S512_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2056 case AMDGPU::SI_SPILL_S384_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2057 case AMDGPU::SI_SPILL_S352_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2058 case AMDGPU::SI_SPILL_S320_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2059 case AMDGPU::SI_SPILL_S288_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2060 case AMDGPU::SI_SPILL_S256_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2061 case AMDGPU::SI_SPILL_S224_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2062 case AMDGPU::SI_SPILL_S192_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2063 case AMDGPU::SI_SPILL_S160_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2064 case AMDGPU::SI_SPILL_S128_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2065 case AMDGPU::SI_SPILL_S96_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2066 case AMDGPU::SI_SPILL_S64_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
2067 case AMDGPU::SI_SPILL_S32_RESTORE: in eliminateSGPRToVGPRSpillFrameIndex()
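eliminateSGPRToVGPRSpillFrameIndex() (lines 2039-2067) reads as a dispatcher: the SI_SPILL_S*_SAVE group and the SI_SPILL_S*_RESTORE group presumably route to the SGPR spill and restore paths whose helpers, spillSGPR() and restoreSGPR(), appear elsewhere in this listing. The case bodies are not visible here, so only the shape is sketched and the argument lists are assumptions:

    // Sketch only; real argument lists are not recoverable from the matches.
    case AMDGPU::SI_SPILL_S1024_SAVE:
      // ...all SI_SPILL_S*_SAVE cases...
      return spillSGPR(MI, FI, RS);
    case AMDGPU::SI_SPILL_S1024_RESTORE:
      // ...all SI_SPILL_S*_RESTORE cases...
      return restoreSGPR(MI, FI, RS);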
2098 case AMDGPU::SI_SPILL_S1024_SAVE: in eliminateFrameIndex()
2099 case AMDGPU::SI_SPILL_S512_SAVE: in eliminateFrameIndex()
2100 case AMDGPU::SI_SPILL_S384_SAVE: in eliminateFrameIndex()
2101 case AMDGPU::SI_SPILL_S352_SAVE: in eliminateFrameIndex()
2102 case AMDGPU::SI_SPILL_S320_SAVE: in eliminateFrameIndex()
2103 case AMDGPU::SI_SPILL_S288_SAVE: in eliminateFrameIndex()
2104 case AMDGPU::SI_SPILL_S256_SAVE: in eliminateFrameIndex()
2105 case AMDGPU::SI_SPILL_S224_SAVE: in eliminateFrameIndex()
2106 case AMDGPU::SI_SPILL_S192_SAVE: in eliminateFrameIndex()
2107 case AMDGPU::SI_SPILL_S160_SAVE: in eliminateFrameIndex()
2108 case AMDGPU::SI_SPILL_S128_SAVE: in eliminateFrameIndex()
2109 case AMDGPU::SI_SPILL_S96_SAVE: in eliminateFrameIndex()
2110 case AMDGPU::SI_SPILL_S64_SAVE: in eliminateFrameIndex()
2111 case AMDGPU::SI_SPILL_S32_SAVE: { in eliminateFrameIndex()
2116 case AMDGPU::SI_SPILL_S1024_RESTORE: in eliminateFrameIndex()
2117 case AMDGPU::SI_SPILL_S512_RESTORE: in eliminateFrameIndex()
2118 case AMDGPU::SI_SPILL_S384_RESTORE: in eliminateFrameIndex()
2119 case AMDGPU::SI_SPILL_S352_RESTORE: in eliminateFrameIndex()
2120 case AMDGPU::SI_SPILL_S320_RESTORE: in eliminateFrameIndex()
2121 case AMDGPU::SI_SPILL_S288_RESTORE: in eliminateFrameIndex()
2122 case AMDGPU::SI_SPILL_S256_RESTORE: in eliminateFrameIndex()
2123 case AMDGPU::SI_SPILL_S224_RESTORE: in eliminateFrameIndex()
2124 case AMDGPU::SI_SPILL_S192_RESTORE: in eliminateFrameIndex()
2125 case AMDGPU::SI_SPILL_S160_RESTORE: in eliminateFrameIndex()
2126 case AMDGPU::SI_SPILL_S128_RESTORE: in eliminateFrameIndex()
2127 case AMDGPU::SI_SPILL_S96_RESTORE: in eliminateFrameIndex()
2128 case AMDGPU::SI_SPILL_S64_RESTORE: in eliminateFrameIndex()
2129 case AMDGPU::SI_SPILL_S32_RESTORE: { in eliminateFrameIndex()
2134 case AMDGPU::SI_SPILL_V1024_SAVE: in eliminateFrameIndex()
2135 case AMDGPU::SI_SPILL_V512_SAVE: in eliminateFrameIndex()
2136 case AMDGPU::SI_SPILL_V384_SAVE: in eliminateFrameIndex()
2137 case AMDGPU::SI_SPILL_V352_SAVE: in eliminateFrameIndex()
2138 case AMDGPU::SI_SPILL_V320_SAVE: in eliminateFrameIndex()
2139 case AMDGPU::SI_SPILL_V288_SAVE: in eliminateFrameIndex()
2140 case AMDGPU::SI_SPILL_V256_SAVE: in eliminateFrameIndex()
2141 case AMDGPU::SI_SPILL_V224_SAVE: in eliminateFrameIndex()
2142 case AMDGPU::SI_SPILL_V192_SAVE: in eliminateFrameIndex()
2143 case AMDGPU::SI_SPILL_V160_SAVE: in eliminateFrameIndex()
2144 case AMDGPU::SI_SPILL_V128_SAVE: in eliminateFrameIndex()
2145 case AMDGPU::SI_SPILL_V96_SAVE: in eliminateFrameIndex()
2146 case AMDGPU::SI_SPILL_V64_SAVE: in eliminateFrameIndex()
2147 case AMDGPU::SI_SPILL_V32_SAVE: in eliminateFrameIndex()
2148 case AMDGPU::SI_SPILL_A1024_SAVE: in eliminateFrameIndex()
2149 case AMDGPU::SI_SPILL_A512_SAVE: in eliminateFrameIndex()
2150 case AMDGPU::SI_SPILL_A384_SAVE: in eliminateFrameIndex()
2151 case AMDGPU::SI_SPILL_A352_SAVE: in eliminateFrameIndex()
2152 case AMDGPU::SI_SPILL_A320_SAVE: in eliminateFrameIndex()
2153 case AMDGPU::SI_SPILL_A288_SAVE: in eliminateFrameIndex()
2154 case AMDGPU::SI_SPILL_A256_SAVE: in eliminateFrameIndex()
2155 case AMDGPU::SI_SPILL_A224_SAVE: in eliminateFrameIndex()
2156 case AMDGPU::SI_SPILL_A192_SAVE: in eliminateFrameIndex()
2157 case AMDGPU::SI_SPILL_A160_SAVE: in eliminateFrameIndex()
2158 case AMDGPU::SI_SPILL_A128_SAVE: in eliminateFrameIndex()
2159 case AMDGPU::SI_SPILL_A96_SAVE: in eliminateFrameIndex()
2160 case AMDGPU::SI_SPILL_A64_SAVE: in eliminateFrameIndex()
2161 case AMDGPU::SI_SPILL_A32_SAVE: in eliminateFrameIndex()
2162 case AMDGPU::SI_SPILL_AV1024_SAVE: in eliminateFrameIndex()
2163 case AMDGPU::SI_SPILL_AV512_SAVE: in eliminateFrameIndex()
2164 case AMDGPU::SI_SPILL_AV384_SAVE: in eliminateFrameIndex()
2165 case AMDGPU::SI_SPILL_AV352_SAVE: in eliminateFrameIndex()
2166 case AMDGPU::SI_SPILL_AV320_SAVE: in eliminateFrameIndex()
2167 case AMDGPU::SI_SPILL_AV288_SAVE: in eliminateFrameIndex()
2168 case AMDGPU::SI_SPILL_AV256_SAVE: in eliminateFrameIndex()
2169 case AMDGPU::SI_SPILL_AV224_SAVE: in eliminateFrameIndex()
2170 case AMDGPU::SI_SPILL_AV192_SAVE: in eliminateFrameIndex()
2171 case AMDGPU::SI_SPILL_AV160_SAVE: in eliminateFrameIndex()
2172 case AMDGPU::SI_SPILL_AV128_SAVE: in eliminateFrameIndex()
2173 case AMDGPU::SI_SPILL_AV96_SAVE: in eliminateFrameIndex()
2174 case AMDGPU::SI_SPILL_AV64_SAVE: in eliminateFrameIndex()
2175 case AMDGPU::SI_SPILL_AV32_SAVE: in eliminateFrameIndex()
2176 case AMDGPU::SI_SPILL_WWM_V32_SAVE: in eliminateFrameIndex()
2177 case AMDGPU::SI_SPILL_WWM_AV32_SAVE: { in eliminateFrameIndex()
2179 AMDGPU::OpName::vdata); in eliminateFrameIndex()
2180 assert(TII->getNamedOperand(*MI, AMDGPU::OpName::soffset)->getReg() == in eliminateFrameIndex()
2183 unsigned Opc = ST.enableFlatScratch() ? AMDGPU::SCRATCH_STORE_DWORD_SADDR in eliminateFrameIndex()
2184 : AMDGPU::BUFFER_STORE_DWORD_OFFSET; in eliminateFrameIndex()
2189 RS->isRegUsed(AMDGPU::SCC)); in eliminateFrameIndex()
2193 TII->getNamedOperand(*MI, AMDGPU::OpName::offset)->getImm(), in eliminateFrameIndex()
2202 case AMDGPU::SI_SPILL_V32_RESTORE: in eliminateFrameIndex()
2203 case AMDGPU::SI_SPILL_V64_RESTORE: in eliminateFrameIndex()
2204 case AMDGPU::SI_SPILL_V96_RESTORE: in eliminateFrameIndex()
2205 case AMDGPU::SI_SPILL_V128_RESTORE: in eliminateFrameIndex()
2206 case AMDGPU::SI_SPILL_V160_RESTORE: in eliminateFrameIndex()
2207 case AMDGPU::SI_SPILL_V192_RESTORE: in eliminateFrameIndex()
2208 case AMDGPU::SI_SPILL_V224_RESTORE: in eliminateFrameIndex()
2209 case AMDGPU::SI_SPILL_V256_RESTORE: in eliminateFrameIndex()
2210 case AMDGPU::SI_SPILL_V288_RESTORE: in eliminateFrameIndex()
2211 case AMDGPU::SI_SPILL_V320_RESTORE: in eliminateFrameIndex()
2212 case AMDGPU::SI_SPILL_V352_RESTORE: in eliminateFrameIndex()
2213 case AMDGPU::SI_SPILL_V384_RESTORE: in eliminateFrameIndex()
2214 case AMDGPU::SI_SPILL_V512_RESTORE: in eliminateFrameIndex()
2215 case AMDGPU::SI_SPILL_V1024_RESTORE: in eliminateFrameIndex()
2216 case AMDGPU::SI_SPILL_A32_RESTORE: in eliminateFrameIndex()
2217 case AMDGPU::SI_SPILL_A64_RESTORE: in eliminateFrameIndex()
2218 case AMDGPU::SI_SPILL_A96_RESTORE: in eliminateFrameIndex()
2219 case AMDGPU::SI_SPILL_A128_RESTORE: in eliminateFrameIndex()
2220 case AMDGPU::SI_SPILL_A160_RESTORE: in eliminateFrameIndex()
2221 case AMDGPU::SI_SPILL_A192_RESTORE: in eliminateFrameIndex()
2222 case AMDGPU::SI_SPILL_A224_RESTORE: in eliminateFrameIndex()
2223 case AMDGPU::SI_SPILL_A256_RESTORE: in eliminateFrameIndex()
2224 case AMDGPU::SI_SPILL_A288_RESTORE: in eliminateFrameIndex()
2225 case AMDGPU::SI_SPILL_A320_RESTORE: in eliminateFrameIndex()
2226 case AMDGPU::SI_SPILL_A352_RESTORE: in eliminateFrameIndex()
2227 case AMDGPU::SI_SPILL_A384_RESTORE: in eliminateFrameIndex()
2228 case AMDGPU::SI_SPILL_A512_RESTORE: in eliminateFrameIndex()
2229 case AMDGPU::SI_SPILL_A1024_RESTORE: in eliminateFrameIndex()
2230 case AMDGPU::SI_SPILL_AV32_RESTORE: in eliminateFrameIndex()
2231 case AMDGPU::SI_SPILL_AV64_RESTORE: in eliminateFrameIndex()
2232 case AMDGPU::SI_SPILL_AV96_RESTORE: in eliminateFrameIndex()
2233 case AMDGPU::SI_SPILL_AV128_RESTORE: in eliminateFrameIndex()
2234 case AMDGPU::SI_SPILL_AV160_RESTORE: in eliminateFrameIndex()
2235 case AMDGPU::SI_SPILL_AV192_RESTORE: in eliminateFrameIndex()
2236 case AMDGPU::SI_SPILL_AV224_RESTORE: in eliminateFrameIndex()
2237 case AMDGPU::SI_SPILL_AV256_RESTORE: in eliminateFrameIndex()
2238 case AMDGPU::SI_SPILL_AV288_RESTORE: in eliminateFrameIndex()
2239 case AMDGPU::SI_SPILL_AV320_RESTORE: in eliminateFrameIndex()
2240 case AMDGPU::SI_SPILL_AV352_RESTORE: in eliminateFrameIndex()
2241 case AMDGPU::SI_SPILL_AV384_RESTORE: in eliminateFrameIndex()
2242 case AMDGPU::SI_SPILL_AV512_RESTORE: in eliminateFrameIndex()
2243 case AMDGPU::SI_SPILL_AV1024_RESTORE: in eliminateFrameIndex()
2244 case AMDGPU::SI_SPILL_WWM_V32_RESTORE: in eliminateFrameIndex()
2245 case AMDGPU::SI_SPILL_WWM_AV32_RESTORE: { in eliminateFrameIndex()
2247 AMDGPU::OpName::vdata); in eliminateFrameIndex()
2248 assert(TII->getNamedOperand(*MI, AMDGPU::OpName::soffset)->getReg() == in eliminateFrameIndex()
2251 unsigned Opc = ST.enableFlatScratch() ? AMDGPU::SCRATCH_LOAD_DWORD_SADDR in eliminateFrameIndex()
2252 : AMDGPU::BUFFER_LOAD_DWORD_OFFSET; in eliminateFrameIndex()
2257 RS->isRegUsed(AMDGPU::SCC)); in eliminateFrameIndex()
2262 TII->getNamedOperand(*MI, AMDGPU::OpName::offset)->getImm(), in eliminateFrameIndex()
2280 AMDGPU::getNamedOperandIdx(MI->getOpcode(), in eliminateFrameIndex()
2281 AMDGPU::OpName::saddr)); in eliminateFrameIndex()
2288 TII->getNamedOperand(*MI, AMDGPU::OpName::offset); in eliminateFrameIndex()
2301 if (AMDGPU::hasNamedOperand(Opc, AMDGPU::OpName::vaddr)) { in eliminateFrameIndex()
2302 NewOpc = AMDGPU::getFlatScratchInstSVfromSVS(Opc); in eliminateFrameIndex()
2306 NewOpc = AMDGPU::getFlatScratchInstSTfromSS(Opc); in eliminateFrameIndex()
2312 int VDstIn = AMDGPU::getNamedOperandIdx(Opc, in eliminateFrameIndex()
2313 AMDGPU::OpName::vdst_in); in eliminateFrameIndex()
2321 AMDGPU::getNamedOperandIdx(Opc, AMDGPU::OpName::saddr)); in eliminateFrameIndex()
2325 AMDGPU::getNamedOperandIdx(NewOpc, AMDGPU::OpName::vdst); in eliminateFrameIndex()
2327 AMDGPU::getNamedOperandIdx(NewOpc, AMDGPU::OpName::vdst_in); in eliminateFrameIndex()
2345 FIOp.ChangeToRegister(AMDGPU::M0, false); in eliminateFrameIndex()
2353 const TargetRegisterClass *RC = UseSGPR ? &AMDGPU::SReg_32_XM0RegClass in eliminateFrameIndex()
2354 : &AMDGPU::VGPR_32RegClass; in eliminateFrameIndex()
2362 unsigned Opc = UseSGPR ? AMDGPU::S_MOV_B32 : AMDGPU::V_MOV_B32_e32; in eliminateFrameIndex()
2372 bool NeedSaveSCC = RS->isRegUsed(AMDGPU::SCC) && in eliminateFrameIndex()
2373 !MI->definesRegister(AMDGPU::SCC, /*TRI=*/nullptr); in eliminateFrameIndex()
2377 : RS->scavengeRegisterBackwards(AMDGPU::SReg_32_XM0RegClass, in eliminateFrameIndex()
2394 BuildMI(*MBB, MI, DL, TII->get(AMDGPU::S_ADDC_U32), TmpSReg) in eliminateFrameIndex()
2397 BuildMI(*MBB, MI, DL, TII->get(AMDGPU::S_BITCMP1_B32)) in eliminateFrameIndex()
2400 BuildMI(*MBB, MI, DL, TII->get(AMDGPU::S_BITSET0_B32), TmpSReg) in eliminateFrameIndex()
2404 BuildMI(*MBB, MI, DL, TII->get(AMDGPU::S_ADD_I32), TmpSReg) in eliminateFrameIndex()
2410 BuildMI(*MBB, MI, DL, TII->get(AMDGPU::V_MOV_B32_e32), TmpReg) in eliminateFrameIndex()
2416 !MI->registerDefIsDead(AMDGPU::SCC, /*TRI=*/nullptr)) { in eliminateFrameIndex()
2418 BuildMI(*MBB, std::next(MI), DL, TII->get(AMDGPU::S_ADDC_U32), in eliminateFrameIndex()
2422 I = BuildMI(*MBB, std::next(I), DL, TII->get(AMDGPU::S_BITCMP1_B32)) in eliminateFrameIndex()
2425 BuildMI(*MBB, std::next(I), DL, TII->get(AMDGPU::S_BITSET0_B32), in eliminateFrameIndex()
2430 BuildMI(*MBB, std::next(MI), DL, TII->get(AMDGPU::S_ADD_I32), in eliminateFrameIndex()
2446 bool LiveSCC = RS->isRegUsed(AMDGPU::SCC) && in eliminateFrameIndex()
2447 !MI->definesRegister(AMDGPU::SCC, /*TRI=*/nullptr); in eliminateFrameIndex()
2449 ? &AMDGPU::SReg_32RegClass in eliminateFrameIndex()
2450 : &AMDGPU::VGPR_32RegClass; in eliminateFrameIndex()
2451 bool IsCopy = MI->getOpcode() == AMDGPU::V_MOV_B32_e32 || in eliminateFrameIndex()
2452 MI->getOpcode() == AMDGPU::V_MOV_B32_e64; in eliminateFrameIndex()
2459 unsigned OpCode = IsSALU && !LiveSCC ? AMDGPU::S_LSHR_B32 in eliminateFrameIndex()
2460 : AMDGPU::V_LSHRREV_B32_e64; in eliminateFrameIndex()
2462 if (OpCode == AMDGPU::V_LSHRREV_B32_e64) in eliminateFrameIndex()
2472 AMDGPU::SReg_32RegClass, Shift, false, 0); in eliminateFrameIndex()
2473 BuildMI(*MBB, MI, DL, TII->get(AMDGPU::V_READFIRSTLANE_B32), in eliminateFrameIndex()
2486 BuildMI(*MBB, *MIB, DL, TII->get(AMDGPU::V_LSHRREV_B32_e64), in eliminateFrameIndex()
2491 const bool IsVOP2 = MIB->getOpcode() == AMDGPU::V_ADD_U32_e32; in eliminateFrameIndex()
2494 if (IsVOP2 || AMDGPU::isInlinableLiteral32(Offset, ST.hasInv2PiInlineImm())) { in eliminateFrameIndex()
2501 assert(MIB->getOpcode() == AMDGPU::V_ADD_CO_U32_e64 && in eliminateFrameIndex()
2507 ConstOffsetReg = getSubReg(MIB.getReg(1), AMDGPU::sub0); in eliminateFrameIndex()
2511 BuildMI(*MBB, *MIB, DL, TII->get(AMDGPU::S_MOV_B32), ConstOffsetReg) in eliminateFrameIndex()
2527 AMDGPU::SReg_32_XM0RegClass, MI, false, 0, false); in eliminateFrameIndex()
2530 BuildMI(*MBB, MI, DL, TII->get(AMDGPU::S_LSHR_B32), ScaledReg) in eliminateFrameIndex()
2533 BuildMI(*MBB, MI, DL, TII->get(AMDGPU::S_ADD_I32), ScaledReg) in eliminateFrameIndex()
2537 BuildMI(*MBB, MI, DL, TII->get(AMDGPU::COPY), ResultReg) in eliminateFrameIndex()
2544 BuildMI(*MBB, MI, DL, TII->get(AMDGPU::S_ADD_I32), ScaledReg) in eliminateFrameIndex()
2547 BuildMI(*MBB, MI, DL, TII->get(AMDGPU::S_LSHL_B32), ScaledReg) in eliminateFrameIndex()
2566 AMDGPU::getNamedOperandIdx(MI->getOpcode(), in eliminateFrameIndex()
2567 AMDGPU::OpName::vaddr)); in eliminateFrameIndex()
2569 auto &SOffset = *TII->getNamedOperand(*MI, AMDGPU::OpName::soffset); in eliminateFrameIndex()
2572 if (FrameReg != AMDGPU::NoRegister) in eliminateFrameIndex()
2577 = TII->getNamedOperand(*MI, AMDGPU::OpName::offset)->getImm(); in eliminateFrameIndex()
2592 Register TmpReg = RS->scavengeRegisterBackwards(AMDGPU::VGPR_32RegClass, in eliminateFrameIndex()
2594 BuildMI(*MBB, MI, DL, TII->get(AMDGPU::V_MOV_B32_e32), TmpReg) in eliminateFrameIndex()
2607 unsigned AMDGPU::getRegBitWidth(const TargetRegisterClass &RC) { in getRegBitWidth()
2614 return &AMDGPU::VReg_64RegClass; in getAnyVGPRClassForBitWidth()
2616 return &AMDGPU::VReg_96RegClass; in getAnyVGPRClassForBitWidth()
2618 return &AMDGPU::VReg_128RegClass; in getAnyVGPRClassForBitWidth()
2620 return &AMDGPU::VReg_160RegClass; in getAnyVGPRClassForBitWidth()
2622 return &AMDGPU::VReg_192RegClass; in getAnyVGPRClassForBitWidth()
2624 return &AMDGPU::VReg_224RegClass; in getAnyVGPRClassForBitWidth()
2626 return &AMDGPU::VReg_256RegClass; in getAnyVGPRClassForBitWidth()
2628 return &AMDGPU::VReg_288RegClass; in getAnyVGPRClassForBitWidth()
2630 return &AMDGPU::VReg_320RegClass; in getAnyVGPRClassForBitWidth()
2632 return &AMDGPU::VReg_352RegClass; in getAnyVGPRClassForBitWidth()
2634 return &AMDGPU::VReg_384RegClass; in getAnyVGPRClassForBitWidth()
2636 return &AMDGPU::VReg_512RegClass; in getAnyVGPRClassForBitWidth()
2638 return &AMDGPU::VReg_1024RegClass; in getAnyVGPRClassForBitWidth()
2646 return &AMDGPU::VReg_64_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2648 return &AMDGPU::VReg_96_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2650 return &AMDGPU::VReg_128_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2652 return &AMDGPU::VReg_160_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2654 return &AMDGPU::VReg_192_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2656 return &AMDGPU::VReg_224_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2658 return &AMDGPU::VReg_256_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2660 return &AMDGPU::VReg_288_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2662 return &AMDGPU::VReg_320_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2664 return &AMDGPU::VReg_352_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2666 return &AMDGPU::VReg_384_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2668 return &AMDGPU::VReg_512_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2670 return &AMDGPU::VReg_1024_Align2RegClass; in getAlignedVGPRClassForBitWidth()
2678 return &AMDGPU::VReg_1RegClass; in getVGPRClassForBitWidth()
2680 return &AMDGPU::VGPR_16RegClass; in getVGPRClassForBitWidth()
2682 return &AMDGPU::VGPR_32RegClass; in getVGPRClassForBitWidth()
2690 return &AMDGPU::AReg_64RegClass; in getAnyAGPRClassForBitWidth()
2692 return &AMDGPU::AReg_96RegClass; in getAnyAGPRClassForBitWidth()
2694 return &AMDGPU::AReg_128RegClass; in getAnyAGPRClassForBitWidth()
2696 return &AMDGPU::AReg_160RegClass; in getAnyAGPRClassForBitWidth()
2698 return &AMDGPU::AReg_192RegClass; in getAnyAGPRClassForBitWidth()
2700 return &AMDGPU::AReg_224RegClass; in getAnyAGPRClassForBitWidth()
2702 return &AMDGPU::AReg_256RegClass; in getAnyAGPRClassForBitWidth()
2704 return &AMDGPU::AReg_288RegClass; in getAnyAGPRClassForBitWidth()
2706 return &AMDGPU::AReg_320RegClass; in getAnyAGPRClassForBitWidth()
2708 return &AMDGPU::AReg_352RegClass; in getAnyAGPRClassForBitWidth()
2710 return &AMDGPU::AReg_384RegClass; in getAnyAGPRClassForBitWidth()
2712 return &AMDGPU::AReg_512RegClass; in getAnyAGPRClassForBitWidth()
2714 return &AMDGPU::AReg_1024RegClass; in getAnyAGPRClassForBitWidth()
2722 return &AMDGPU::AReg_64_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2724 return &AMDGPU::AReg_96_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2726 return &AMDGPU::AReg_128_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2728 return &AMDGPU::AReg_160_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2730 return &AMDGPU::AReg_192_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2732 return &AMDGPU::AReg_224_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2734 return &AMDGPU::AReg_256_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2736 return &AMDGPU::AReg_288_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2738 return &AMDGPU::AReg_320_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2740 return &AMDGPU::AReg_352_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2742 return &AMDGPU::AReg_384_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2744 return &AMDGPU::AReg_512_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2746 return &AMDGPU::AReg_1024_Align2RegClass; in getAlignedAGPRClassForBitWidth()
2754 return &AMDGPU::AGPR_LO16RegClass; in getAGPRClassForBitWidth()
2756 return &AMDGPU::AGPR_32RegClass; in getAGPRClassForBitWidth()
2764 return &AMDGPU::AV_64RegClass; in getAnyVectorSuperClassForBitWidth()
2766 return &AMDGPU::AV_96RegClass; in getAnyVectorSuperClassForBitWidth()
2768 return &AMDGPU::AV_128RegClass; in getAnyVectorSuperClassForBitWidth()
2770 return &AMDGPU::AV_160RegClass; in getAnyVectorSuperClassForBitWidth()
2772 return &AMDGPU::AV_192RegClass; in getAnyVectorSuperClassForBitWidth()
2774 return &AMDGPU::AV_224RegClass; in getAnyVectorSuperClassForBitWidth()
2776 return &AMDGPU::AV_256RegClass; in getAnyVectorSuperClassForBitWidth()
2778 return &AMDGPU::AV_288RegClass; in getAnyVectorSuperClassForBitWidth()
2780 return &AMDGPU::AV_320RegClass; in getAnyVectorSuperClassForBitWidth()
2782 return &AMDGPU::AV_352RegClass; in getAnyVectorSuperClassForBitWidth()
2784 return &AMDGPU::AV_384RegClass; in getAnyVectorSuperClassForBitWidth()
2786 return &AMDGPU::AV_512RegClass; in getAnyVectorSuperClassForBitWidth()
2788 return &AMDGPU::AV_1024RegClass; in getAnyVectorSuperClassForBitWidth()
2796 return &AMDGPU::AV_64_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2798 return &AMDGPU::AV_96_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2800 return &AMDGPU::AV_128_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2802 return &AMDGPU::AV_160_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2804 return &AMDGPU::AV_192_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2806 return &AMDGPU::AV_224_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2808 return &AMDGPU::AV_256_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2810 return &AMDGPU::AV_288_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2812 return &AMDGPU::AV_320_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2814 return &AMDGPU::AV_352_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2816 return &AMDGPU::AV_384_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2818 return &AMDGPU::AV_512_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2820 return &AMDGPU::AV_1024_Align2RegClass; in getAlignedVectorSuperClassForBitWidth()
2828 return &AMDGPU::AV_32RegClass; in getVectorSuperClassForBitWidth()
2837 return &AMDGPU::SGPR_LO16RegClass; in getSGPRClassForBitWidth()
2839 return &AMDGPU::SReg_32RegClass; in getSGPRClassForBitWidth()
2841 return &AMDGPU::SReg_64RegClass; in getSGPRClassForBitWidth()
2843 return &AMDGPU::SGPR_96RegClass; in getSGPRClassForBitWidth()
2845 return &AMDGPU::SGPR_128RegClass; in getSGPRClassForBitWidth()
2847 return &AMDGPU::SGPR_160RegClass; in getSGPRClassForBitWidth()
2849 return &AMDGPU::SGPR_192RegClass; in getSGPRClassForBitWidth()
2851 return &AMDGPU::SGPR_224RegClass; in getSGPRClassForBitWidth()
2853 return &AMDGPU::SGPR_256RegClass; in getSGPRClassForBitWidth()
2855 return &AMDGPU::SGPR_288RegClass; in getSGPRClassForBitWidth()
2857 return &AMDGPU::SGPR_320RegClass; in getSGPRClassForBitWidth()
2859 return &AMDGPU::SGPR_352RegClass; in getSGPRClassForBitWidth()
2861 return &AMDGPU::SGPR_384RegClass; in getSGPRClassForBitWidth()
2863 return &AMDGPU::SGPR_512RegClass; in getSGPRClassForBitWidth()
2865 return &AMDGPU::SGPR_1024RegClass; in getSGPRClassForBitWidth()
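Lines 2614-2865 are the *ClassForBitWidth() family; the width guards around each return contain no AMDGPU token, so only the returned classes survive in this listing. The presumed shape, sketched for the VGPR variant (whether the guards test exact widths or upper bounds is not recoverable from the matches):

    // Sketch: map a bit width to the VGPR tuple class of that width.
    static const TargetRegisterClass *getAnyVGPRClassForBitWidth(unsigned BitWidth) {
      if (BitWidth == 64)
        return &AMDGPU::VReg_64RegClass;
      if (BitWidth == 96)
        return &AMDGPU::VReg_96RegClass;
      // ...128 through 512 follow...
      if (BitWidth == 1024)
        return &AMDGPU::VReg_1024RegClass;
      return nullptr; // assumed fallback for unsupported widths
    }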
2900 return &AMDGPU::SGPR_32RegClass; in getEquivalentSGPRClass()
2917 if (OpType >= AMDGPU::OPERAND_REG_INLINE_AC_FIRST && in opCanUseInlineConstant()
2918 OpType <= AMDGPU::OPERAND_REG_INLINE_AC_LAST) in opCanUseInlineConstant()
2921 return OpType >= AMDGPU::OPERAND_SRC_FIRST && in opCanUseInlineConstant()
2922 OpType <= AMDGPU::OPERAND_SRC_LAST; in opCanUseInlineConstant()
2951 return OpType >= AMDGPU::OPERAND_REG_IMM_FIRST && in opCanUseLiteralConstant()
2952 OpType <= AMDGPU::OPERAND_REG_IMM_LAST; in opCanUseLiteralConstant()
2986 const unsigned RegBitWidth = AMDGPU::getRegBitWidth(*RC); in getRegSplitParts()
3057 case AMDGPU::VGPR_32RegClassID: in getRegPressureLimit()
3059 case AMDGPU::SGPR_32RegClassID: in getRegPressureLimit()
3060 case AMDGPU::SGPR_LO16RegClassID: in getRegPressureLimit()
3067 if (Idx == AMDGPU::RegisterPressureSets::VGPR_32 || in getRegPressureSetLimit()
3068 Idx == AMDGPU::RegisterPressureSets::AGPR_32) in getRegPressureSetLimit()
3069 return getRegPressureLimit(&AMDGPU::VGPR_32RegClass, in getRegPressureSetLimit()
3072 if (Idx == AMDGPU::RegisterPressureSets::SReg_32) in getRegPressureSetLimit()
3073 return getRegPressureLimit(&AMDGPU::SGPR_32RegClass, in getRegPressureSetLimit()
3090 return AMDGPU::SGPR30_SGPR31; in getReturnAddressReg()
3097 case AMDGPU::VGPRRegBankID: in getRegClassForSizeOnBank()
3100 case AMDGPU::VCCRegBankID: in getRegClassForSizeOnBank()
3102 return isWave32 ? &AMDGPU::SReg_32_XM0_XEXECRegClass in getRegClassForSizeOnBank()
3103 : &AMDGPU::SReg_64_XEXECRegClass; in getRegClassForSizeOnBank()
3104 case AMDGPU::SGPRRegBankID: in getRegClassForSizeOnBank()
3106 case AMDGPU::AGPRRegBankID: in getRegClassForSizeOnBank()
3127 return isWave32 ? AMDGPU::VCC_LO : AMDGPU::VCC; in getVCC()
3131 return isWave32 ? AMDGPU::EXEC_LO : AMDGPU::EXEC; in getExec()
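getVCC() and getExec() (lines 3127 and 3131) are the same wave-size dispatch seen in the SGPRSpillBuilder constructor at the top of this listing. Wrapped in plausible signatures as a sketch; the exact return types are an assumption:

    MCRegister SIRegisterInfo::getVCC() const {
      return isWave32 ? AMDGPU::VCC_LO : AMDGPU::VCC;
    }
    MCRegister SIRegisterInfo::getExec() const {
      return isWave32 ? AMDGPU::EXEC_LO : AMDGPU::EXEC;
    }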
3136 return ST.needsAlignedVGPRs() ? &AMDGPU::VReg_64_Align2RegClass in getVGPR64Class()
3137 : &AMDGPU::VReg_64RegClass; in getVGPR64Class()
3143 case AMDGPU::SReg_1RegClassID: in getRegClass()
3145 case AMDGPU::SReg_1_XEXECRegClassID: in getRegClass()
3146 return isWave32 ? &AMDGPU::SReg_32_XM0_XEXECRegClass in getRegClass()
3147 : &AMDGPU::SReg_64_XEXECRegClass; in getRegClass()
3212 for (const TargetRegisterClass &RC : { AMDGPU::VGPR_32RegClass, in get32BitRegister()
3213 AMDGPU::SReg_32RegClass, in get32BitRegister()
3214 AMDGPU::AGPR_32RegClass } ) { in get32BitRegister()
3215 if (MCPhysReg Super = getMatchingSuperReg(Reg, AMDGPU::lo16, &RC)) in get32BitRegister()
3218 if (MCPhysReg Super = getMatchingSuperReg(Reg, AMDGPU::hi16, in get32BitRegister()
3219 &AMDGPU::VGPR_32RegClass)) { in get32BitRegister()
3223 return AMDGPU::NoRegister; in get32BitRegister()
3262 return ArrayRef(AMDGPU::SGPR_128RegClass.begin(), ST.getMaxNumSGPRs(MF) / 4); in getAllSGPR128()
3267 return ArrayRef(AMDGPU::SGPR_64RegClass.begin(), ST.getMaxNumSGPRs(MF) / 2); in getAllSGPR64()
3272 return ArrayRef(AMDGPU::SGPR_32RegClass.begin(), ST.getMaxNumSGPRs(MF)); in getAllSGPR32()
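The closing matches (lines 3262-3272) expose the usable SGPRs as ArrayRef slices of the generated register tables, scaled by tuple size: the maximum SGPR count divided by 4 for 128-bit tuples, by 2 for 64-bit tuples, and taken as-is for single registers. The 32-bit variant wrapped in an assumed signature:

    // Sketch: all usable SGPR_32 registers for this function as one slice.
    ArrayRef<MCPhysReg>
    SIRegisterInfo::getAllSGPR32(const MachineFunction &MF) const {
      return ArrayRef(AMDGPU::SGPR_32RegClass.begin(), ST.getMaxNumSGPRs(MF));
    }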