//===-- RISCVRegisterInfo.cpp - RISC-V Register Information -----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the RISC-V implementation of the TargetRegisterInfo class.
//
//===----------------------------------------------------------------------===//

#include "RISCVRegisterInfo.h"
#include "RISCV.h"
#include "RISCVMachineFunctionInfo.h"
#include "RISCVSubtarget.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/BinaryFormat/Dwarf.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/RegisterScavenging.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/Support/ErrorHandling.h"

#define GET_REGINFO_TARGET_DESC
#include "RISCVGenRegisterInfo.inc"

using namespace llvm;

static cl::opt<bool>
    DisableRegAllocHints("riscv-disable-regalloc-hints", cl::Hidden,
                         cl::init(false),
                         cl::desc("Disable two address hints for register "
                                  "allocation"));

static_assert(RISCV::X1 == RISCV::X0 + 1, "Register list not consecutive");
static_assert(RISCV::X31 == RISCV::X0 + 31, "Register list not consecutive");
static_assert(RISCV::F1_H == RISCV::F0_H + 1, "Register list not consecutive");
static_assert(RISCV::F31_H == RISCV::F0_H + 31,
              "Register list not consecutive");
static_assert(RISCV::F1_F == RISCV::F0_F + 1, "Register list not consecutive");
static_assert(RISCV::F31_F == RISCV::F0_F + 31,
              "Register list not consecutive");
static_assert(RISCV::F1_D == RISCV::F0_D + 1, "Register list not consecutive");
static_assert(RISCV::F31_D == RISCV::F0_D + 31,
              "Register list not consecutive");
static_assert(RISCV::V1 == RISCV::V0 + 1, "Register list not consecutive");
static_assert(RISCV::V31 == RISCV::V0 + 31, "Register list not consecutive");

RISCVRegisterInfo::RISCVRegisterInfo(unsigned HwMode)
    : RISCVGenRegisterInfo(RISCV::X1, /*DwarfFlavour*/0, /*EHFlavor*/0,
                           /*PC*/0, HwMode) {}

const MCPhysReg *
RISCVRegisterInfo::getCalleeSavedRegs(const MachineFunction *MF) const {
  auto &Subtarget = MF->getSubtarget<RISCVSubtarget>();
  if (MF->getFunction().getCallingConv() == CallingConv::GHC)
    return CSR_NoRegs_SaveList;
  if (MF->getFunction().hasFnAttribute("interrupt")) {
    if (Subtarget.hasStdExtD())
      return CSR_XLEN_F64_Interrupt_SaveList;
    if (Subtarget.hasStdExtF())
      return Subtarget.isRVE() ? CSR_XLEN_F32_Interrupt_RVE_SaveList
                               : CSR_XLEN_F32_Interrupt_SaveList;
    return Subtarget.isRVE() ? CSR_Interrupt_RVE_SaveList
                             : CSR_Interrupt_SaveList;
  }

  switch (Subtarget.getTargetABI()) {
  default:
    llvm_unreachable("Unrecognized ABI");
  case RISCVABI::ABI_ILP32E:
  case RISCVABI::ABI_LP64E:
    return CSR_ILP32E_LP64E_SaveList;
  case RISCVABI::ABI_ILP32:
  case RISCVABI::ABI_LP64:
    return CSR_ILP32_LP64_SaveList;
  case RISCVABI::ABI_ILP32F:
  case RISCVABI::ABI_LP64F:
    return CSR_ILP32F_LP64F_SaveList;
  case RISCVABI::ABI_ILP32D:
  case RISCVABI::ABI_LP64D:
    return CSR_ILP32D_LP64D_SaveList;
  }
}

BitVector RISCVRegisterInfo::getReservedRegs(const MachineFunction &MF) const {
  const RISCVFrameLowering *TFI = getFrameLowering(MF);
  BitVector Reserved(getNumRegs());
  auto &Subtarget = MF.getSubtarget<RISCVSubtarget>();

  // Mark any registers requested to be reserved as such
  for (size_t Reg = 0; Reg < getNumRegs(); Reg++) {
    if (Subtarget.isRegisterReservedByUser(Reg))
      markSuperRegs(Reserved, Reg);
  }

  // Use markSuperRegs to ensure any register aliases are also reserved
  markSuperRegs(Reserved, RISCV::X0); // zero
  markSuperRegs(Reserved, RISCV::X2); // sp
  markSuperRegs(Reserved, RISCV::X3); // gp
  markSuperRegs(Reserved, RISCV::X4); // tp
  if (TFI->hasFP(MF))
    markSuperRegs(Reserved, RISCV::X8); // fp
  // Reserve the base register if we need to realign the stack and allocate
  // variable-sized objects at runtime.
  if (TFI->hasBP(MF))
    markSuperRegs(Reserved, RISCVABI::getBPReg()); // bp

  // Additionally reserve dummy register used to form the register pair
  // beginning with 'x0' for instructions that take register pairs.
  markSuperRegs(Reserved, RISCV::DUMMY_REG_PAIR_WITH_X0);

  // There are only 16 GPRs for RVE.
  if (Subtarget.isRVE())
    for (MCPhysReg Reg = RISCV::X16; Reg <= RISCV::X31; Reg++)
      markSuperRegs(Reserved, Reg);

  // V registers for code generation. We handle them manually.
  markSuperRegs(Reserved, RISCV::VL);
  markSuperRegs(Reserved, RISCV::VTYPE);
  markSuperRegs(Reserved, RISCV::VXSAT);
  markSuperRegs(Reserved, RISCV::VXRM);
  markSuperRegs(Reserved, RISCV::VLENB); // vlenb (constant)

  // Floating point environment registers.
  markSuperRegs(Reserved, RISCV::FRM);
  markSuperRegs(Reserved, RISCV::FFLAGS);

  if (MF.getFunction().getCallingConv() == CallingConv::GRAAL) {
    if (Subtarget.isRVE())
      report_fatal_error("Graal reserved registers do not exist in RVE");
    markSuperRegs(Reserved, RISCV::X23);
    markSuperRegs(Reserved, RISCV::X27);
  }

  // Shadow stack pointer.
  markSuperRegs(Reserved, RISCV::SSP);

  assert(checkAllSuperRegsMarked(Reserved));
  return Reserved;
}

bool RISCVRegisterInfo::isAsmClobberable(const MachineFunction &MF,
                                         MCRegister PhysReg) const {
  return !MF.getSubtarget<RISCVSubtarget>().isRegisterReservedByUser(PhysReg);
}

const uint32_t *RISCVRegisterInfo::getNoPreservedMask() const {
  return CSR_NoRegs_RegMask;
}

// Frame indexes representing locations of CSRs which are given a fixed location
// by save/restore libcalls or Zcmp Push/Pop.
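// The negative numbers are fixed frame indexes within that save area: ra is
// always assigned frame index -1 and s11 frame index -13, so
// hasReservedSpillSlot() below can hand these slots out directly instead of
// letting prologue/epilogue insertion allocate ordinary spill slots for them.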
static const std::pair<unsigned, int> FixedCSRFIMap[] = {
    {/*ra*/ RISCV::X1, -1},
    {/*s0*/ RISCV::X8, -2},
    {/*s1*/ RISCV::X9, -3},
    {/*s2*/ RISCV::X18, -4},
    {/*s3*/ RISCV::X19, -5},
    {/*s4*/ RISCV::X20, -6},
    {/*s5*/ RISCV::X21, -7},
    {/*s6*/ RISCV::X22, -8},
    {/*s7*/ RISCV::X23, -9},
    {/*s8*/ RISCV::X24, -10},
    {/*s9*/ RISCV::X25, -11},
    {/*s10*/ RISCV::X26, -12},
    {/*s11*/ RISCV::X27, -13}
};

bool RISCVRegisterInfo::hasReservedSpillSlot(const MachineFunction &MF,
                                             Register Reg,
                                             int &FrameIdx) const {
  const auto *RVFI = MF.getInfo<RISCVMachineFunctionInfo>();
  if (!RVFI->useSaveRestoreLibCalls(MF) && !RVFI->isPushable(MF))
    return false;

  const auto *FII =
      llvm::find_if(FixedCSRFIMap, [&](auto P) { return P.first == Reg; });
  if (FII == std::end(FixedCSRFIMap))
    return false;

  FrameIdx = FII->second;
  return true;
}

void RISCVRegisterInfo::adjustReg(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator II,
                                  const DebugLoc &DL, Register DestReg,
                                  Register SrcReg, StackOffset Offset,
                                  MachineInstr::MIFlag Flag,
                                  MaybeAlign RequiredAlign) const {

  if (DestReg == SrcReg && !Offset.getFixed() && !Offset.getScalable())
    return;

  MachineFunction &MF = *MBB.getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const RISCVSubtarget &ST = MF.getSubtarget<RISCVSubtarget>();
  const RISCVInstrInfo *TII = ST.getInstrInfo();

  bool KillSrcReg = false;

  if (Offset.getScalable()) {
    unsigned ScalableAdjOpc = RISCV::ADD;
    int64_t ScalableValue = Offset.getScalable();
    if (ScalableValue < 0) {
      ScalableValue = -ScalableValue;
      ScalableAdjOpc = RISCV::SUB;
    }
    // Get vlenb and multiply it by the number of vector registers.
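    // getVLENFactoredAmount() below leaves that product in ScratchReg; e.g. a
    // scalable offset of +16 is two whole vector registers, so ScratchReg ends
    // up holding 2*vlenb and DestReg becomes SrcReg +/- ScratchReg.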
    Register ScratchReg = DestReg;
    if (DestReg == SrcReg)
      ScratchReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
    TII->getVLENFactoredAmount(MF, MBB, II, DL, ScratchReg, ScalableValue, Flag);
    BuildMI(MBB, II, DL, TII->get(ScalableAdjOpc), DestReg)
        .addReg(SrcReg).addReg(ScratchReg, RegState::Kill)
        .setMIFlag(Flag);
    SrcReg = DestReg;
    KillSrcReg = true;
  }

  int64_t Val = Offset.getFixed();
  if (DestReg == SrcReg && Val == 0)
    return;

  const uint64_t Align = RequiredAlign.valueOrOne().value();

  if (isInt<12>(Val)) {
    BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrcReg))
        .addImm(Val)
        .setMIFlag(Flag);
    return;
  }

  // Try to split the offset across two ADDIs. We need to keep the intermediate
  // result aligned after each ADDI. We need to determine the maximum value we
  // can put in each ADDI. In the negative direction, we can use -2048 which is
  // always sufficiently aligned. In the positive direction, we need to find the
  // largest 12-bit immediate that is aligned. Exclude -4096 since it can be
  // created with LUI.
  assert(Align < 2048 && "Required alignment too large");
  int64_t MaxPosAdjStep = 2048 - Align;
  if (Val > -4096 && Val <= (2 * MaxPosAdjStep)) {
    int64_t FirstAdj = Val < 0 ? -2048 : MaxPosAdjStep;
    Val -= FirstAdj;
    BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrcReg))
        .addImm(FirstAdj)
        .setMIFlag(Flag);
    BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)
        .addReg(DestReg, RegState::Kill)
        .addImm(Val)
        .setMIFlag(Flag);
    return;
  }

  unsigned Opc = RISCV::ADD;
  if (Val < 0) {
    Val = -Val;
    Opc = RISCV::SUB;
  }

  Register ScratchReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  TII->movImm(MBB, II, DL, ScratchReg, Val, Flag);
  BuildMI(MBB, II, DL, TII->get(Opc), DestReg)
      .addReg(SrcReg, getKillRegState(KillSrcReg))
      .addReg(ScratchReg, RegState::Kill)
      .setMIFlag(Flag);
}

// Split a VSPILLx_Mx pseudo into multiple whole register stores separated by
// LMUL*VLENB bytes.
void RISCVRegisterInfo::lowerVSPILL(MachineBasicBlock::iterator II) const {
  DebugLoc DL = II->getDebugLoc();
  MachineBasicBlock &MBB = *II->getParent();
  MachineFunction &MF = *MBB.getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const RISCVSubtarget &STI = MF.getSubtarget<RISCVSubtarget>();
  const TargetInstrInfo *TII = STI.getInstrInfo();
  const TargetRegisterInfo *TRI = STI.getRegisterInfo();

  auto ZvlssegInfo = RISCV::isRVVSpillForZvlsseg(II->getOpcode());
  unsigned NF = ZvlssegInfo->first;
  unsigned LMUL = ZvlssegInfo->second;
  assert(NF * LMUL <= 8 && "Invalid NF/LMUL combinations.");
  unsigned Opcode, SubRegIdx;
  switch (LMUL) {
  default:
    llvm_unreachable("LMUL must be 1, 2, or 4.");
  case 1:
    Opcode = RISCV::VS1R_V;
    SubRegIdx = RISCV::sub_vrm1_0;
    break;
  case 2:
    Opcode = RISCV::VS2R_V;
    SubRegIdx = RISCV::sub_vrm2_0;
    break;
  case 4:
    Opcode = RISCV::VS4R_V;
    SubRegIdx = RISCV::sub_vrm4_0;
    break;
  }
  static_assert(RISCV::sub_vrm1_7 == RISCV::sub_vrm1_0 + 7,
                "Unexpected subreg numbering");
  static_assert(RISCV::sub_vrm2_3 == RISCV::sub_vrm2_0 + 3,
                "Unexpected subreg numbering");
  static_assert(RISCV::sub_vrm4_1 == RISCV::sub_vrm4_0 + 1,
                "Unexpected subreg numbering");

  Register VL = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  // Optimize for constant VLEN.
  if (STI.getRealMinVLen() == STI.getRealMaxVLen()) {
    const int64_t VLENB = STI.getRealMinVLen() / 8;
    int64_t Offset = VLENB * LMUL;
    STI.getInstrInfo()->movImm(MBB, II, DL, VL, Offset);
  } else {
    BuildMI(MBB, II, DL, TII->get(RISCV::PseudoReadVLENB), VL);
    uint32_t ShiftAmount = Log2_32(LMUL);
    if (ShiftAmount != 0)
      BuildMI(MBB, II, DL, TII->get(RISCV::SLLI), VL)
          .addReg(VL)
          .addImm(ShiftAmount);
  }

  Register SrcReg = II->getOperand(0).getReg();
  Register Base = II->getOperand(1).getReg();
  bool IsBaseKill = II->getOperand(1).isKill();
  Register NewBase = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  for (unsigned I = 0; I < NF; ++I) {
    // Add an implicit use of the super register to describe that we are using
    // part of it; this prevents the machine verifier from complaining when
    // part of a subreg is undef. See the comment in
    // MachineVerifier::checkLiveness for more detail.
    BuildMI(MBB, II, DL, TII->get(Opcode))
        .addReg(TRI->getSubReg(SrcReg, SubRegIdx + I))
        .addReg(Base, getKillRegState(I == NF - 1))
        .addMemOperand(*(II->memoperands_begin()))
        .addReg(SrcReg, RegState::Implicit);
    if (I != NF - 1)
      BuildMI(MBB, II, DL, TII->get(RISCV::ADD), NewBase)
          .addReg(Base, getKillRegState(I != 0 || IsBaseKill))
          .addReg(VL, getKillRegState(I == NF - 2));
    Base = NewBase;
  }
  II->eraseFromParent();
}

// Split a VRELOADx_Mx pseudo into multiple whole register loads separated by
// LMUL*VLENB bytes.
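// For example, PseudoVRELOAD3_M2 (NF=3, LMUL=2) becomes three VL2RE8_V loads
// whose base addresses are 0, 2*VLENB and 4*VLENB bytes past the original base
// register, mirroring the store expansion in lowerVSPILL above.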
void RISCVRegisterInfo::lowerVRELOAD(MachineBasicBlock::iterator II) const {
  DebugLoc DL = II->getDebugLoc();
  MachineBasicBlock &MBB = *II->getParent();
  MachineFunction &MF = *MBB.getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const RISCVSubtarget &STI = MF.getSubtarget<RISCVSubtarget>();
  const TargetInstrInfo *TII = STI.getInstrInfo();
  const TargetRegisterInfo *TRI = STI.getRegisterInfo();

  auto ZvlssegInfo = RISCV::isRVVSpillForZvlsseg(II->getOpcode());
  unsigned NF = ZvlssegInfo->first;
  unsigned LMUL = ZvlssegInfo->second;
  assert(NF * LMUL <= 8 && "Invalid NF/LMUL combinations.");
  unsigned Opcode, SubRegIdx;
  switch (LMUL) {
  default:
    llvm_unreachable("LMUL must be 1, 2, or 4.");
  case 1:
    Opcode = RISCV::VL1RE8_V;
    SubRegIdx = RISCV::sub_vrm1_0;
    break;
  case 2:
    Opcode = RISCV::VL2RE8_V;
    SubRegIdx = RISCV::sub_vrm2_0;
    break;
  case 4:
    Opcode = RISCV::VL4RE8_V;
    SubRegIdx = RISCV::sub_vrm4_0;
    break;
  }
  static_assert(RISCV::sub_vrm1_7 == RISCV::sub_vrm1_0 + 7,
                "Unexpected subreg numbering");
  static_assert(RISCV::sub_vrm2_3 == RISCV::sub_vrm2_0 + 3,
                "Unexpected subreg numbering");
  static_assert(RISCV::sub_vrm4_1 == RISCV::sub_vrm4_0 + 1,
                "Unexpected subreg numbering");

  Register VL = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  // Optimize for constant VLEN.
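  // When the exact VLEN is known at compile time, the stride between register
  // groups is a constant, so it can be materialized with movImm instead of
  // reading vlenb at run time.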
  if (STI.getRealMinVLen() == STI.getRealMaxVLen()) {
    const int64_t VLENB = STI.getRealMinVLen() / 8;
    int64_t Offset = VLENB * LMUL;
    STI.getInstrInfo()->movImm(MBB, II, DL, VL, Offset);
  } else {
    BuildMI(MBB, II, DL, TII->get(RISCV::PseudoReadVLENB), VL);
    uint32_t ShiftAmount = Log2_32(LMUL);
    if (ShiftAmount != 0)
      BuildMI(MBB, II, DL, TII->get(RISCV::SLLI), VL)
          .addReg(VL)
          .addImm(ShiftAmount);
  }

  Register DestReg = II->getOperand(0).getReg();
  Register Base = II->getOperand(1).getReg();
  bool IsBaseKill = II->getOperand(1).isKill();
  Register NewBase = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  for (unsigned I = 0; I < NF; ++I) {
    BuildMI(MBB, II, DL, TII->get(Opcode),
            TRI->getSubReg(DestReg, SubRegIdx + I))
        .addReg(Base, getKillRegState(I == NF - 1))
        .addMemOperand(*(II->memoperands_begin()));
    if (I != NF - 1)
      BuildMI(MBB, II, DL, TII->get(RISCV::ADD), NewBase)
          .addReg(Base, getKillRegState(I != 0 || IsBaseKill))
          .addReg(VL, getKillRegState(I == NF - 2));
    Base = NewBase;
  }
  II->eraseFromParent();
}

bool RISCVRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator II,
                                            int SPAdj, unsigned FIOperandNum,
                                            RegScavenger *RS) const {
  assert(SPAdj == 0 && "Unexpected non-zero SPAdj value");

  MachineInstr &MI = *II;
  MachineFunction &MF = *MI.getParent()->getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const RISCVSubtarget &ST = MF.getSubtarget<RISCVSubtarget>();
  DebugLoc DL = MI.getDebugLoc();

  int FrameIndex = MI.getOperand(FIOperandNum).getIndex();
  Register FrameReg;
  StackOffset Offset =
      getFrameLowering(MF)->getFrameIndexReference(MF, FrameIndex, FrameReg);
  bool IsRVVSpill = RISCV::isRVVSpill(MI);
  if (!IsRVVSpill)
    Offset += StackOffset::getFixed(MI.getOperand(FIOperandNum + 1).getImm());

  if (Offset.getScalable() &&
      ST.getRealMinVLen() == ST.getRealMaxVLen()) {
    // For an exact VLEN value, scalable offsets become constant and thus
    // can be converted entirely into fixed offsets.
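    // For example, with a known VLEN of 128 bits (VLENB = 16 bytes), a
    // scalable offset of 16 (two vector registers) folds into a fixed offset
    // of 32 bytes.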
    int64_t FixedValue = Offset.getFixed();
    int64_t ScalableValue = Offset.getScalable();
    assert(ScalableValue % 8 == 0 &&
           "Scalable offset is not a multiple of a single vector size.");
    int64_t NumOfVReg = ScalableValue / 8;
    int64_t VLENB = ST.getRealMinVLen() / 8;
    Offset = StackOffset::getFixed(FixedValue + NumOfVReg * VLENB);
  }

  if (!isInt<32>(Offset.getFixed())) {
    report_fatal_error(
        "Frame offsets outside of the signed 32-bit range not supported");
  }

  if (!IsRVVSpill) {
    if (MI.getOpcode() == RISCV::ADDI && !isInt<12>(Offset.getFixed())) {
      // We chose to emit the canonical immediate sequence rather than folding
      // the offset into the using add under the theory that doing so doesn't
      // save dynamic instruction count and some target may fuse the canonical
      // 32 bit immediate sequence. We still need to clear the portion of the
      // offset encoded in the immediate.
      MI.getOperand(FIOperandNum + 1).ChangeToImmediate(0);
    } else {
      // We can encode an add with 12 bit signed immediate in the immediate
      // operand of our user instruction. As a result, the remaining offset
      // can, by construction, be materialized with at worst a LUI and an ADD.
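      // For example, a fixed offset of 0x12345 leaves Lo12 = 0x345 in the
      // instruction's immediate and hands the remaining 0x12000 to adjustReg(),
      // which typically materializes it with LUI into a scratch register and
      // adds it to FrameReg.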
      int64_t Val = Offset.getFixed();
      int64_t Lo12 = SignExtend64<12>(Val);
      if ((MI.getOpcode() == RISCV::PREFETCH_I ||
           MI.getOpcode() == RISCV::PREFETCH_R ||
           MI.getOpcode() == RISCV::PREFETCH_W) &&
          (Lo12 & 0b11111) != 0)
        MI.getOperand(FIOperandNum + 1).ChangeToImmediate(0);
      else {
        MI.getOperand(FIOperandNum + 1).ChangeToImmediate(Lo12);
        Offset = StackOffset::get((uint64_t)Val - (uint64_t)Lo12,
                                  Offset.getScalable());
      }
    }
  }

  if (Offset.getScalable() || Offset.getFixed()) {
    Register DestReg;
    if (MI.getOpcode() == RISCV::ADDI)
      DestReg = MI.getOperand(0).getReg();
    else
      DestReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
    adjustReg(*II->getParent(), II, DL, DestReg, FrameReg, Offset,
              MachineInstr::NoFlags, std::nullopt);
    MI.getOperand(FIOperandNum).ChangeToRegister(DestReg, /*IsDef*/false,
                                                 /*IsImp*/false,
                                                 /*IsKill*/true);
  } else {
    MI.getOperand(FIOperandNum).ChangeToRegister(FrameReg, /*IsDef*/false,
                                                 /*IsImp*/false,
                                                 /*IsKill*/false);
  }

  // If after materializing the adjustment, we have a pointless ADDI, remove it
  if (MI.getOpcode() == RISCV::ADDI &&
      MI.getOperand(0).getReg() == MI.getOperand(1).getReg() &&
      MI.getOperand(2).getImm() == 0) {
    MI.eraseFromParent();
    return true;
  }

  // Handle spill/fill of synthetic register classes for segment operations to
  // ensure correctness in the edge case one gets spilled. There are many
  // possible optimizations here, but given the extreme rarity of such spills,
  // we prefer simplicity of implementation for now.
  switch (MI.getOpcode()) {
  case RISCV::PseudoVSPILL2_M1:
  case RISCV::PseudoVSPILL2_M2:
  case RISCV::PseudoVSPILL2_M4:
  case RISCV::PseudoVSPILL3_M1:
  case RISCV::PseudoVSPILL3_M2:
  case RISCV::PseudoVSPILL4_M1:
  case RISCV::PseudoVSPILL4_M2:
  case RISCV::PseudoVSPILL5_M1:
  case RISCV::PseudoVSPILL6_M1:
  case RISCV::PseudoVSPILL7_M1:
  case RISCV::PseudoVSPILL8_M1:
    lowerVSPILL(II);
    return true;
  case RISCV::PseudoVRELOAD2_M1:
  case RISCV::PseudoVRELOAD2_M2:
  case RISCV::PseudoVRELOAD2_M4:
  case RISCV::PseudoVRELOAD3_M1:
  case RISCV::PseudoVRELOAD3_M2:
  case RISCV::PseudoVRELOAD4_M1:
  case RISCV::PseudoVRELOAD4_M2:
  case RISCV::PseudoVRELOAD5_M1:
  case RISCV::PseudoVRELOAD6_M1:
  case RISCV::PseudoVRELOAD7_M1:
  case RISCV::PseudoVRELOAD8_M1:
    lowerVRELOAD(II);
    return true;
  }

  return false;
}

bool RISCVRegisterInfo::requiresVirtualBaseRegisters(
    const MachineFunction &MF) const {
  return true;
}

// Returns true if the instruction's frame index reference would be better
// served by a base register other than FP or SP.
// Used by LocalStackSlotAllocation pass to determine which frame index
// references it should create new base registers for.
bool RISCVRegisterInfo::needsFrameBaseReg(MachineInstr *MI,
                                          int64_t Offset) const {
  unsigned FIOperandNum = 0;
  for (; !MI->getOperand(FIOperandNum).isFI(); FIOperandNum++)
    assert(FIOperandNum < MI->getNumOperands() &&
           "Instr doesn't have FrameIndex operand");

  // For RISC-V, the machine instructions that include a FrameIndex operand
  // are loads, stores, and ADDI.
  unsigned MIFrm = RISCVII::getFormat(MI->getDesc().TSFlags);
  if (MIFrm != RISCVII::InstFormatI && MIFrm != RISCVII::InstFormatS)
    return false;
  // We only generate virtual base registers for loads and stores, so
  // return false for everything else.
  if (!MI->mayLoad() && !MI->mayStore())
    return false;

  const MachineFunction &MF = *MI->getMF();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  const RISCVFrameLowering *TFI = getFrameLowering(MF);
  const MachineRegisterInfo &MRI = MF.getRegInfo();
  unsigned CalleeSavedSize = 0;
  Offset += getFrameIndexInstrOffset(MI, FIOperandNum);

  // Estimate the stack size used to store callee-saved registers
  // (excludes reserved registers).
  BitVector ReservedRegs = getReservedRegs(MF);
  for (const MCPhysReg *R = MRI.getCalleeSavedRegs(); MCPhysReg Reg = *R; ++R) {
    if (!ReservedRegs.test(Reg))
      CalleeSavedSize += getSpillSize(*getMinimalPhysRegClass(Reg));
  }

  int64_t MaxFPOffset = Offset - CalleeSavedSize;
  if (TFI->hasFP(MF) && !shouldRealignStack(MF))
    return !isFrameOffsetLegal(MI, RISCV::X8, MaxFPOffset);

  // Assume a spill slot size of 128 bytes to estimate the maximum possible
  // offset relative to the stack pointer.
  // FIXME: The 128 is copied from ARM. We should run some statistics and pick a
  // real one for RISC-V.
  int64_t MaxSPOffset = Offset + 128;
  MaxSPOffset += MFI.getLocalFrameSize();
  return !isFrameOffsetLegal(MI, RISCV::X2, MaxSPOffset);
}

// Determine whether a given base register plus offset immediate is
// encodable to resolve a frame index.
bool RISCVRegisterInfo::isFrameOffsetLegal(const MachineInstr *MI,
                                           Register BaseReg,
                                           int64_t Offset) const {
  unsigned FIOperandNum = 0;
  while (!MI->getOperand(FIOperandNum).isFI()) {
    FIOperandNum++;
    assert(FIOperandNum < MI->getNumOperands() &&
           "Instr does not have a FrameIndex operand!");
  }

  Offset += getFrameIndexInstrOffset(MI, FIOperandNum);
  return isInt<12>(Offset);
}

// Insert defining instruction(s) for a pointer to FrameIdx before
// insertion point I.
// Return materialized frame pointer.
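// The returned base register is a virtual GPR defined by an ADDI from the
// frame index; the LocalStackSlotAllocation pass then rewrites nearby frame
// index references to use it via resolveFrameIndex() below.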
Register RISCVRegisterInfo::materializeFrameBaseRegister(MachineBasicBlock *MBB,
                                                         int FrameIdx,
                                                         int64_t Offset) const {
  MachineBasicBlock::iterator MBBI = MBB->begin();
  DebugLoc DL;
  if (MBBI != MBB->end())
    DL = MBBI->getDebugLoc();
  MachineFunction *MF = MBB->getParent();
  MachineRegisterInfo &MFI = MF->getRegInfo();
  const TargetInstrInfo *TII = MF->getSubtarget().getInstrInfo();

  Register BaseReg = MFI.createVirtualRegister(&RISCV::GPRRegClass);
  BuildMI(*MBB, MBBI, DL, TII->get(RISCV::ADDI), BaseReg)
      .addFrameIndex(FrameIdx)
      .addImm(Offset);
  return BaseReg;
}

// Resolve a frame index operand of an instruction to reference the
// indicated base register plus offset instead.
void RISCVRegisterInfo::resolveFrameIndex(MachineInstr &MI, Register BaseReg,
                                          int64_t Offset) const {
  unsigned FIOperandNum = 0;
  while (!MI.getOperand(FIOperandNum).isFI()) {
    FIOperandNum++;
    assert(FIOperandNum < MI.getNumOperands() &&
           "Instr does not have a FrameIndex operand!");
  }

  Offset += getFrameIndexInstrOffset(&MI, FIOperandNum);
  // FrameIndex Operands are always represented as a
  // register followed by an immediate.
  MI.getOperand(FIOperandNum).ChangeToRegister(BaseReg, false);
  MI.getOperand(FIOperandNum + 1).ChangeToImmediate(Offset);
}

// Get the offset from the referenced frame index in the instruction,
// if there is one.
int64_t RISCVRegisterInfo::getFrameIndexInstrOffset(const MachineInstr *MI,
                                                    int Idx) const {
  assert((RISCVII::getFormat(MI->getDesc().TSFlags) == RISCVII::InstFormatI ||
          RISCVII::getFormat(MI->getDesc().TSFlags) == RISCVII::InstFormatS) &&
         "The MI must be I or S format.");
  assert(MI->getOperand(Idx).isFI() && "The Idx'th operand of MI is not a "
                                       "FrameIndex operand");
  return MI->getOperand(Idx + 1).getImm();
}

Register RISCVRegisterInfo::getFrameRegister(const MachineFunction &MF) const {
  const TargetFrameLowering *TFI = getFrameLowering(MF);
  return TFI->hasFP(MF) ? RISCV::X8 : RISCV::X2;
}

const uint32_t *
RISCVRegisterInfo::getCallPreservedMask(const MachineFunction & MF,
                                        CallingConv::ID CC) const {
  auto &Subtarget = MF.getSubtarget<RISCVSubtarget>();

  if (CC == CallingConv::GHC)
    return CSR_NoRegs_RegMask;
  switch (Subtarget.getTargetABI()) {
  default:
    llvm_unreachable("Unrecognized ABI");
  case RISCVABI::ABI_ILP32E:
  case RISCVABI::ABI_LP64E:
    return CSR_ILP32E_LP64E_RegMask;
  case RISCVABI::ABI_ILP32:
  case RISCVABI::ABI_LP64:
    return CSR_ILP32_LP64_RegMask;
  case RISCVABI::ABI_ILP32F:
  case RISCVABI::ABI_LP64F:
    return CSR_ILP32F_LP64F_RegMask;
  case RISCVABI::ABI_ILP32D:
  case RISCVABI::ABI_LP64D:
    return CSR_ILP32D_LP64D_RegMask;
  }
}

const TargetRegisterClass *
RISCVRegisterInfo::getLargestLegalSuperClass(const TargetRegisterClass *RC,
                                             const MachineFunction &) const {
  if (RC == &RISCV::VMV0RegClass)
    return &RISCV::VRRegClass;
  if (RC == &RISCV::VRNoV0RegClass)
    return &RISCV::VRRegClass;
  if (RC == &RISCV::VRM2NoV0RegClass)
    return &RISCV::VRM2RegClass;
  if (RC == &RISCV::VRM4NoV0RegClass)
    return &RISCV::VRM4RegClass;
  if (RC == &RISCV::VRM8NoV0RegClass)
    return &RISCV::VRM8RegClass;
  return RC;
}

void RISCVRegisterInfo::getOffsetOpcodes(const StackOffset &Offset,
                                         SmallVectorImpl<uint64_t> &Ops) const {
  // VLENB is the length of a vector register in bytes. We use <vscale x 8 x i8>
  // to represent one vector register. The dwarf offset is
  // VLENB * scalable_offset / 8.
  assert(Offset.getScalable() % 8 == 0 && "Invalid frame offset");

  // Add fixed-sized offset using existing DIExpression interface.
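  // The scalable part is handled below: e.g. a scalable offset of +16 (two
  // vector registers) appends DW_OP_constu 2, DW_OP_bregx <vlenb> 0,
  // DW_OP_mul, DW_OP_plus after the fixed-offset ops.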
  DIExpression::appendOffset(Ops, Offset.getFixed());

  unsigned VLENB = getDwarfRegNum(RISCV::VLENB, true);
  int64_t VLENBSized = Offset.getScalable() / 8;
  if (VLENBSized > 0) {
    Ops.push_back(dwarf::DW_OP_constu);
    Ops.push_back(VLENBSized);
    Ops.append({dwarf::DW_OP_bregx, VLENB, 0ULL});
    Ops.push_back(dwarf::DW_OP_mul);
    Ops.push_back(dwarf::DW_OP_plus);
  } else if (VLENBSized < 0) {
    Ops.push_back(dwarf::DW_OP_constu);
    Ops.push_back(-VLENBSized);
    Ops.append({dwarf::DW_OP_bregx, VLENB, 0ULL});
    Ops.push_back(dwarf::DW_OP_mul);
    Ops.push_back(dwarf::DW_OP_minus);
  }
}

unsigned
RISCVRegisterInfo::getRegisterCostTableIndex(const MachineFunction &MF) const {
  return MF.getSubtarget<RISCVSubtarget>().hasStdExtCOrZca() ? 1 : 0;
}

// Add two address hints to improve chances of being able to use a compressed
// instruction.
bool RISCVRegisterInfo::getRegAllocationHints(
    Register VirtReg, ArrayRef<MCPhysReg> Order,
    SmallVectorImpl<MCPhysReg> &Hints, const MachineFunction &MF,
    const VirtRegMap *VRM, const LiveRegMatrix *Matrix) const {
  const MachineRegisterInfo *MRI = &MF.getRegInfo();
  auto &Subtarget = MF.getSubtarget<RISCVSubtarget>();

  bool BaseImplRetVal = TargetRegisterInfo::getRegAllocationHints(
      VirtReg, Order, Hints, MF, VRM, Matrix);

  if (!VRM || DisableRegAllocHints)
    return BaseImplRetVal;

  // Add any two address hints after any copy hints.
  SmallSet<Register, 4> TwoAddrHints;

  auto tryAddHint = [&](const MachineOperand &VRRegMO, const MachineOperand &MO,
                        bool NeedGPRC) -> void {
    Register Reg = MO.getReg();
    Register PhysReg = Reg.isPhysical() ? Reg : Register(VRM->getPhys(Reg));
    if (PhysReg && (!NeedGPRC || RISCV::GPRCRegClass.contains(PhysReg))) {
      assert(!MO.getSubReg() && !VRRegMO.getSubReg() && "Unexpected subreg!");
      if (!MRI->isReserved(PhysReg) && !is_contained(Hints, PhysReg))
        TwoAddrHints.insert(PhysReg);
    }
  };

  // This is all of the compressible binary instructions. If an instruction
  // needs GPRC register class operands \p NeedGPRC will be set to true.
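  // Compressed arithmetic such as c.and/c.sub is two-address (rd must equal
  // rs1) and restricted to x8-x15 (GPRC), so hinting a GPRC register that
  // already holds one of the sources makes the compressed encoding possible.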
  auto isCompressible = [&Subtarget](const MachineInstr &MI, bool &NeedGPRC) {
    NeedGPRC = false;
    switch (MI.getOpcode()) {
    default:
      return false;
    case RISCV::AND:
    case RISCV::OR:
    case RISCV::XOR:
    case RISCV::SUB:
    case RISCV::ADDW:
    case RISCV::SUBW:
      NeedGPRC = true;
      return true;
    case RISCV::ANDI: {
      NeedGPRC = true;
      if (!MI.getOperand(2).isImm())
        return false;
      int64_t Imm = MI.getOperand(2).getImm();
      if (isInt<6>(Imm))
        return true;
      // c.zext.b
      return Subtarget.hasStdExtZcb() && Imm == 255;
    }
    case RISCV::SRAI:
    case RISCV::SRLI:
      NeedGPRC = true;
      return true;
    case RISCV::ADD:
    case RISCV::SLLI:
      return true;
    case RISCV::ADDI:
    case RISCV::ADDIW:
      return MI.getOperand(2).isImm() && isInt<6>(MI.getOperand(2).getImm());
    case RISCV::MUL:
    case RISCV::SEXT_B:
    case RISCV::SEXT_H:
    case RISCV::ZEXT_H_RV32:
    case RISCV::ZEXT_H_RV64:
      // c.mul, c.sext.b, c.sext.h, c.zext.h
      NeedGPRC = true;
      return Subtarget.hasStdExtZcb();
    case RISCV::ADD_UW:
      // c.zext.w
      NeedGPRC = true;
      return Subtarget.hasStdExtZcb() && MI.getOperand(2).isReg() &&
             MI.getOperand(2).getReg() == RISCV::X0;
    case RISCV::XORI:
      // c.not
      NeedGPRC = true;
      return Subtarget.hasStdExtZcb() && MI.getOperand(2).isImm() &&
             MI.getOperand(2).getImm() == -1;
    }
  };

  // Returns true if this operand is compressible. For non-registers it always
  // returns true. Immediate range was already checked in isCompressible.
  // For registers, it checks if the register is a GPRC register. reg-reg
  // instructions that require GPRC need all register operands to be GPRC.
  auto isCompressibleOpnd = [&](const MachineOperand &MO) {
    if (!MO.isReg())
      return true;
    Register Reg = MO.getReg();
    Register PhysReg = Reg.isPhysical() ? Reg : Register(VRM->getPhys(Reg));
    return PhysReg && RISCV::GPRCRegClass.contains(PhysReg);
  };

  for (auto &MO : MRI->reg_nodbg_operands(VirtReg)) {
    const MachineInstr &MI = *MO.getParent();
    unsigned OpIdx = MO.getOperandNo();
    bool NeedGPRC;
    if (isCompressible(MI, NeedGPRC)) {
      if (OpIdx == 0 && MI.getOperand(1).isReg()) {
        if (!NeedGPRC || MI.getNumExplicitOperands() < 3 ||
            MI.getOpcode() == RISCV::ADD_UW ||
            isCompressibleOpnd(MI.getOperand(2)))
          tryAddHint(MO, MI.getOperand(1), NeedGPRC);
        if (MI.isCommutable() && MI.getOperand(2).isReg() &&
            (!NeedGPRC || isCompressibleOpnd(MI.getOperand(1))))
          tryAddHint(MO, MI.getOperand(2), NeedGPRC);
      } else if (OpIdx == 1 && (!NeedGPRC || MI.getNumExplicitOperands() < 3 ||
                                isCompressibleOpnd(MI.getOperand(2)))) {
        tryAddHint(MO, MI.getOperand(0), NeedGPRC);
      } else if (MI.isCommutable() && OpIdx == 2 &&
                 (!NeedGPRC || isCompressibleOpnd(MI.getOperand(1)))) {
        tryAddHint(MO, MI.getOperand(0), NeedGPRC);
      }
    }
  }

  for (MCPhysReg OrderReg : Order)
    if (TwoAddrHints.count(OrderReg))
      Hints.push_back(OrderReg);

  return BaseImplRetVal;
}