Lines matching refs:TmpVGPR

All hits below fall in the AMDGPU backend's SGPR-spilling support (SIRegisterInfo.cpp in llvm-project), where TmpVGPR names the temporary VGPR used to shuttle SGPR values to and from scratch memory.

90   Register TmpVGPR = AMDGPU::NoRegister;  member
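The first hit is the declaration of the temporary VGPR inside the SGPR spill helper struct (SGPRSpillBuilder in SIRegisterInfo.cpp). As a minimal sketch of the state the later hits keep referring to, assuming the companion fields TmpVGPRIndex and TmpVGPRLive from the same helper and abbreviating everything else:

    // Abbreviated sketch of SGPRSpillBuilder's TmpVGPR-related state; the
    // real struct also carries MBB, MI, DL, TII, RS and spill bookkeeping.
    struct SGPRSpillBuilder {
      // Temporary VGPR used to shuttle SGPR lanes to and from scratch memory.
      // Stays NoRegister until prepare() scavenges one or falls back to v0.
      Register TmpVGPR = AMDGPU::NoRegister;
      // Emergency stack slot that preserves TmpVGPR's old value when needed.
      int TmpVGPRIndex = 0;
      // True when TmpVGPR held a live value, i.e. its contents must be saved
      // by prepare() and put back by restore() around the spill sequence.
      bool TmpVGPRLive = false;
    };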
173 TmpVGPR = RS->scavengeRegisterBackwards(AMDGPU::VGPR_32RegClass, MI, false, in prepare()
178 if (TmpVGPR) { in prepare()
184 TmpVGPR = AMDGPU::VGPR0; in prepare()
191 RS->assignRegToScavengingIndex(TmpVGPRIndex, TmpVGPR); in prepare()
196 RS->setRegUsed(TmpVGPR); in prepare()
214 I.addReg(TmpVGPR, RegState::ImplicitDefine); in prepare()
231 I.addReg(TmpVGPR, RegState::ImplicitDefine); in prepare()
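Read in order, the prepare() hits (source lines 173-231) form a scavenge-or-conscript sequence: try to find a VGPR that is dead at the spill point; failing that, take v0, register the emergency stack slot with the scavenger so it cannot hand back the very register being borrowed, and mark the register used so a recursive scavenge cannot pick it either. The two ImplicitDefine hits (214, 231) tag the exec-manipulating instruction on each of the two code paths, so later passes see TmpVGPR as defined even though exec masks which lanes the spill writes. A condensed sketch of that flow, reduced to the s_not path; the TmpVGPRLive flag and the getScavengeFI call are from the surrounding file, the rest is paraphrased:

    void prepare() {
      // Prefer a VGPR that is dead at MI: then no save/restore of its value
      // is needed, only its inactive lanes must be preserved.
      TmpVGPR = RS->scavengeRegisterBackwards(AMDGPU::VGPR_32RegClass, MI,
                                              /*RestoreAfter=*/false,
                                              /*SPAdj=*/0, /*AllowSpill=*/false);
      // Emergency slot backing TmpVGPR's old contents if we must borrow one.
      TmpVGPRIndex = MFI.getScavengeFI(MF.getFrameInfo(), TRI);

      if (TmpVGPR) {
        TmpVGPRLive = false; // scavenged a dead register, nothing to preserve
      } else {
        TmpVGPR = AMDGPU::VGPR0; // nothing free: conscript v0
        TmpVGPRLive = true;
        // Mark the slot occupied, or the scavenger could assign TmpVGPR to
        // the same index and recurse forever.
        RS->assignRegToScavengingIndex(TmpVGPRIndex, TmpVGPR);
      }
      // The spill sequence may re-enter the scavenger; keep it off TmpVGPR.
      RS->setRegUsed(TmpVGPR);

      // ... save exec / spill TmpVGPR's live lanes as needed ...

      // Flip exec to the inactive lanes (NotOpc is s_not_b32 or s_not_b64).
      // The implicit def records that TmpVGPR is written here even though
      // exec hides the write from the active lanes.
      auto I = BuildMI(*MBB, MI, DL, TII.get(NotOpc), ExecReg).addReg(ExecReg);
      if (!TmpVGPRLive)
        I.addReg(TmpVGPR, RegState::ImplicitDefine);
    }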
258 I.addReg(TmpVGPR, RegState::ImplicitKill); in restore()
266 I.addReg(TmpVGPR, RegState::ImplicitKill); in restore()
277 RS->assignRegToScavengingIndex(TmpVGPRIndex, TmpVGPR, &*RestorePt); in restore()
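restore() is the mirror image. After the lanes are read back, the instruction that restores exec carries an ImplicitKill of TmpVGPR (hits 258 and 266, one per code path) so the reload of its previous contents is not swept away as dead code; and when a live register was conscripted, the final hit (277) hands its emergency slot back to the scavenger at the restore point. A sketch, again paraphrased down to the s_not path:

    void restore() {
      // ... reload TmpVGPR's saved lanes from the emergency slot ...

      // Flip exec back. The implicit kill keeps the reload of TmpVGPR's old
      // value alive in the eyes of later dead-code elimination.
      auto I = BuildMI(*MBB, MI, DL, TII.get(NotOpc), ExecReg).addReg(ExecReg);
      if (!TmpVGPRLive)
        I.addReg(TmpVGPR, RegState::ImplicitKill);

      // If v0 was conscripted, tell the scavenger exactly where its slot
      // becomes free again.
      if (TmpVGPRLive) {
        MachineBasicBlock::iterator RestorePt = std::prev(MI);
        RS->assignRegToScavengingIndex(TmpVGPRIndex, TmpVGPR, &*RestorePt);
      }
    }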
1387 auto MaterializeVOffset = [&](Register SGPRBase, Register TmpVGPR, in buildSpillLoadStore()
1396 BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_ADD_U32_e64), TmpVGPR) in buildSpillLoadStore()
1401 BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_MOV_B32_e32), TmpVGPR) in buildSpillLoadStore()
1403 BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_ADD_U32_e32), TmpVGPR) in buildSpillLoadStore()
1409 BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_MOV_B32_e32), TmpVGPR) in buildSpillLoadStore()
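The four buildSpillLoadStore() hits all sit in a small MaterializeVOffset lambda that produces the VGPR address component for the scratch access. The opcode pattern suggests three shapes: fold an SGPR base plus immediate into one VOP3 V_ADD_U32_e64; or materialize the immediate with V_MOV_B32_e32 and add the base with the VOP2 V_ADD_U32_e32 (whose second source must be a VGPR); or, with no base, a bare move. In this sketch the branch structure is reconstructed from those hits, and CanUseVOP3Add is a hypothetical stand-in for whatever condition the file actually tests:

    auto MaterializeVOffset = [&](Register SGPRBase, Register TmpVGPR,
                                  int64_t VOffset) {
      if (SGPRBase) {
        if (CanUseVOP3Add) {
          // One VOP3 op folds both: TmpVGPR = SGPRBase + VOffset.
          BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_ADD_U32_e64), TmpVGPR)
              .addReg(SGPRBase)
              .addImm(VOffset)
              .addImm(0); // clamp
        } else {
          // The VOP2 add needs a VGPR operand, so move the immediate in first.
          BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_MOV_B32_e32), TmpVGPR)
              .addImm(VOffset);
          BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_ADD_U32_e32), TmpVGPR)
              .addReg(SGPRBase)
              .addReg(TmpVGPR);
        }
      } else {
        // No SGPR base: the immediate offset alone becomes the VGPR address.
        BuildMI(MBB, MI, DL, TII->get(AMDGPU::V_MOV_B32_e32), TmpVGPR)
            .addImm(VOffset);
      }
    };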
1748 buildSpillLoadStore(*SB.MBB, SB.MI, SB.DL, Opc, Index, SB.TmpVGPR, false, in buildVGPRSpillLoadStore()
1753 buildSpillLoadStore(*SB.MBB, SB.MI, SB.DL, Opc, Index, SB.TmpVGPR, IsKill, in buildVGPRSpillLoadStore()
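These two hits are the load and store legs of the helper that moves TmpVGPR itself to and from the emergency slot. Note the asymmetry visible in the hits: the load leg hard-codes false for the kill flag, since a reload never kills the temp, while the store leg forwards the caller's IsKill. A sketch under the assumption that the opcode is chosen between flat-scratch and MUBUF forms and that the trailing arguments (FrameReg, the scaled offset, the memory operand) look as reconstructed here:

    void SIRegisterInfo::buildVGPRSpillLoadStore(SGPRSpillBuilder &SB,
                                                 int Index, int Offset,
                                                 bool IsLoad, bool IsKill) const {
      // FrameReg and MMO setup elided.
      if (IsLoad) {
        unsigned Opc = ST.enableFlatScratch() ? AMDGPU::SCRATCH_LOAD_DWORD_SADDR
                                              : AMDGPU::BUFFER_LOAD_DWORD_OFFSET;
        buildSpillLoadStore(*SB.MBB, SB.MI, SB.DL, Opc, Index, SB.TmpVGPR,
                            /*IsKill=*/false, FrameReg, Offset * SB.EltSize,
                            MMO, SB.RS);
      } else {
        unsigned Opc = ST.enableFlatScratch() ? AMDGPU::SCRATCH_STORE_DWORD_SADDR
                                              : AMDGPU::BUFFER_STORE_DWORD_OFFSET;
        buildSpillLoadStore(*SB.MBB, SB.MI, SB.DL, Opc, Index, SB.TmpVGPR,
                            IsKill, FrameReg, Offset * SB.EltSize, MMO, SB.RS);
      }
    }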
1843 SB.TII.get(AMDGPU::SI_SPILL_S32_TO_VGPR), SB.TmpVGPR) in spillSGPR()
1846 .addReg(SB.TmpVGPR, TmpVGPRFlags); in spillSGPR()
1939 .addReg(SB.TmpVGPR, getKillRegState(LastSubReg)) in restoreSGPR()
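The spillSGPR()/restoreSGPR() hits are the lane traffic itself. Outbound, SI_SPILL_S32_TO_VGPR (a pseudo lowering to V_WRITELANE_B32) deposits one 32-bit sub-register per lane of TmpVGPR; the trailing .addReg of TmpVGPR at 1846 threads the partially filled register through each write, starting as Undef and then switching to a plain read so the intermediate writes are not dead. Inbound, SI_RESTORE_S32_FROM_VGPR (V_READLANE_B32) extracts each lane and kills TmpVGPR only on the last read (1939). A sketch of both loops, simplified to a single VGPR's worth of lanes (the real code chunks by PVD.PerVGPR):

    // Spill: pack SGPR sub-registers into lanes of the temp VGPR.
    unsigned TmpVGPRFlags = RegState::Undef; // first write reads no valid lanes
    for (unsigned i = 0; i < SB.NumSubRegs; ++i) {
      Register SubReg = SB.NumSubRegs == 1
                            ? SB.SuperReg
                            : Register(getSubReg(SB.SuperReg, SB.SplitParts[i]));
      BuildMI(*SB.MBB, MI, SB.DL, SB.TII.get(AMDGPU::SI_SPILL_S32_TO_VGPR),
              SB.TmpVGPR)
          .addReg(SubReg, SubKillState)
          .addImm(i)                         // destination lane
          .addReg(SB.TmpVGPR, TmpVGPRFlags); // tie in previously written lanes
      TmpVGPRFlags = 0; // later writes read the accumulating value
    }

    // Restore: unpack the lanes back into the SGPR sub-registers.
    for (unsigned i = 0; i < SB.NumSubRegs; ++i) {
      Register SubReg = SB.NumSubRegs == 1
                            ? SB.SuperReg
                            : Register(getSubReg(SB.SuperReg, SB.SplitParts[i]));
      bool LastSubReg = (i + 1 == SB.NumSubRegs);
      BuildMI(*SB.MBB, MI, SB.DL, SB.TII.get(AMDGPU::SI_RESTORE_S32_FROM_VGPR),
              SubReg)
          .addReg(SB.TmpVGPR, getKillRegState(LastSubReg)) // kill on last read
          .addImm(i);                                      // source lane
    }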
1985 SB.TmpVGPR) in spillEmergencySGPR()
1988 .addReg(SB.TmpVGPR, TmpVGPRFlags); in spillEmergencySGPR()
2020 .addReg(SB.TmpVGPR, getKillRegState(LastSubReg)) in spillEmergencySGPR()
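spillEmergencySGPR() reuses the same two loops for the case where the register scavenger itself needs an SGPR saved: 1985/1988 are the write-lane side with the identical Undef-then-tie TmpVGPRFlags convention, and 2020 is the read-lane side with the same kill on the last sub-register. Pulling it together, the lifecycle every one of these call sites goes through is sketched below; the constructor arguments are paraphrased, and writeLanes()/readLanes() are hypothetical stand-ins for the inline loops shown above:

    // End-to-end shape of an SGPR spill through the temp VGPR.
    SGPRSpillBuilder SB(*this, *ST.getInstrInfo(), isWave32, MI, SGPR,
                        /*IsKill=*/false, /*Index=*/0, RS);
    SB.prepare();  // pick SB.TmpVGPR: scavenge a dead VGPR or conscript v0
    writeLanes();  // SI_SPILL_S32_TO_VGPR per sub-register (1985/1988)
    // ... store SB.TmpVGPR to scratch; later, reload it ...
    readLanes();   // SI_RESTORE_S32_FROM_VGPR per sub-register (2020)
    SB.restore();  // put back exec and, if conscripted, v0's old value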