//===-- RISCVRegisterInfo.cpp - RISC-V Register Information -----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the RISC-V implementation of the TargetRegisterInfo class.
//
//===----------------------------------------------------------------------===//

#include "RISCVRegisterInfo.h"
#include "RISCV.h"
#include "RISCVMachineFunctionInfo.h"
#include "RISCVSubtarget.h"
#include "llvm/BinaryFormat/Dwarf.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/RegisterScavenging.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/Support/ErrorHandling.h"

#define GET_REGINFO_TARGET_DESC
#include "RISCVGenRegisterInfo.inc"

using namespace llvm;

static cl::opt<bool>
    DisableRegAllocHints("riscv-disable-regalloc-hints", cl::Hidden,
                         cl::init(false),
                         cl::desc("Disable two address hints for register "
                                  "allocation"));

static_assert(RISCV::X1 == RISCV::X0 + 1, "Register list not consecutive");
static_assert(RISCV::X31 == RISCV::X0 + 31, "Register list not consecutive");
static_assert(RISCV::F1_H == RISCV::F0_H + 1, "Register list not consecutive");
static_assert(RISCV::F31_H == RISCV::F0_H + 31,
              "Register list not consecutive");
static_assert(RISCV::F1_F == RISCV::F0_F + 1, "Register list not consecutive");
static_assert(RISCV::F31_F == RISCV::F0_F + 31,
              "Register list not consecutive");
static_assert(RISCV::F1_D == RISCV::F0_D + 1, "Register list not consecutive");
static_assert(RISCV::F31_D == RISCV::F0_D + 31,
              "Register list not consecutive");
static_assert(RISCV::V1 == RISCV::V0 + 1, "Register list not consecutive");
static_assert(RISCV::V31 == RISCV::V0 + 31, "Register list not consecutive");
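
// Note (illustrative, not from the original source): the TableGen'erated base
// class below is constructed with RISCV::X1 (ra) as the return-address
// register; the remaining arguments select the DWARF/EH register-numbering
// flavour, the PC register placeholder, and the HwMode used to distinguish
// the RV32 and RV64 register descriptions.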
RISCVRegisterInfo::RISCVRegisterInfo(unsigned HwMode)
    : RISCVGenRegisterInfo(RISCV::X1, /*DwarfFlavour*/0, /*EHFlavor*/0,
                           /*PC*/0, HwMode) {}

const MCPhysReg *
RISCVRegisterInfo::getCalleeSavedRegs(const MachineFunction *MF) const {
  auto &Subtarget = MF->getSubtarget<RISCVSubtarget>();
  if (MF->getFunction().getCallingConv() == CallingConv::GHC)
    return CSR_NoRegs_SaveList;
  if (MF->getFunction().hasFnAttribute("interrupt")) {
    if (Subtarget.hasStdExtD())
      return CSR_XLEN_F64_Interrupt_SaveList;
    if (Subtarget.hasStdExtF())
      return CSR_XLEN_F32_Interrupt_SaveList;
    return CSR_Interrupt_SaveList;
  }

  switch (Subtarget.getTargetABI()) {
  default:
    llvm_unreachable("Unrecognized ABI");
  case RISCVABI::ABI_ILP32:
  case RISCVABI::ABI_LP64:
    return CSR_ILP32_LP64_SaveList;
  case RISCVABI::ABI_ILP32F:
  case RISCVABI::ABI_LP64F:
    return CSR_ILP32F_LP64F_SaveList;
  case RISCVABI::ABI_ILP32D:
  case RISCVABI::ABI_LP64D:
    return CSR_ILP32D_LP64D_SaveList;
  }
}

BitVector RISCVRegisterInfo::getReservedRegs(const MachineFunction &MF) const {
  const RISCVFrameLowering *TFI = getFrameLowering(MF);
  BitVector Reserved(getNumRegs());

  // Mark any registers requested to be reserved as such
  for (size_t Reg = 0; Reg < getNumRegs(); Reg++) {
    if (MF.getSubtarget<RISCVSubtarget>().isRegisterReservedByUser(Reg))
      markSuperRegs(Reserved, Reg);
  }

  // Use markSuperRegs to ensure any register aliases are also reserved
  markSuperRegs(Reserved, RISCV::X0); // zero
  markSuperRegs(Reserved, RISCV::X2); // sp
  markSuperRegs(Reserved, RISCV::X3); // gp
  markSuperRegs(Reserved, RISCV::X4); // tp
  if (TFI->hasFP(MF))
    markSuperRegs(Reserved, RISCV::X8); // fp
  // Reserve the base register if we need to realign the stack and allocate
  // variable-sized objects at runtime.
  if (TFI->hasBP(MF))
    markSuperRegs(Reserved, RISCVABI::getBPReg()); // bp

  // Additionally reserve dummy register used to form the register pair
  // beginning with 'x0' for instructions that take register pairs.
  markSuperRegs(Reserved, RISCV::DUMMY_REG_PAIR_WITH_X0);

  // V registers for code generation. We handle them manually.
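  // Note: VL, VTYPE, VXSAT, VXRM and VLENB below are vector-unit state/CSR
  // registers rather than allocatable vector data registers, so they are
  // reserved here and only accessed through dedicated instructions or pseudos
  // during code generation.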
  markSuperRegs(Reserved, RISCV::VL);
  markSuperRegs(Reserved, RISCV::VTYPE);
  markSuperRegs(Reserved, RISCV::VXSAT);
  markSuperRegs(Reserved, RISCV::VXRM);
  markSuperRegs(Reserved, RISCV::VLENB); // vlenb (constant)

  // Floating point environment registers.
  markSuperRegs(Reserved, RISCV::FRM);
  markSuperRegs(Reserved, RISCV::FFLAGS);

  assert(checkAllSuperRegsMarked(Reserved));
  return Reserved;
}

bool RISCVRegisterInfo::isAsmClobberable(const MachineFunction &MF,
                                         MCRegister PhysReg) const {
  return !MF.getSubtarget<RISCVSubtarget>().isRegisterReservedByUser(PhysReg);
}

const uint32_t *RISCVRegisterInfo::getNoPreservedMask() const {
  return CSR_NoRegs_RegMask;
}

// Frame indexes representing the locations of CSRs which are given a fixed
// location by the save/restore libcalls or by Zcmp push/pop.
static const std::pair<unsigned, int> FixedCSRFIMap[] = {
  {/*ra*/  RISCV::X1,  -1},
  {/*s0*/  RISCV::X8,  -2},
  {/*s1*/  RISCV::X9,  -3},
  {/*s2*/  RISCV::X18, -4},
  {/*s3*/  RISCV::X19, -5},
  {/*s4*/  RISCV::X20, -6},
  {/*s5*/  RISCV::X21, -7},
  {/*s6*/  RISCV::X22, -8},
  {/*s7*/  RISCV::X23, -9},
  {/*s8*/  RISCV::X24, -10},
  {/*s9*/  RISCV::X25, -11},
  {/*s10*/ RISCV::X26, -12},
  {/*s11*/ RISCV::X27, -13}
};

bool RISCVRegisterInfo::hasReservedSpillSlot(const MachineFunction &MF,
                                             Register Reg,
                                             int &FrameIdx) const {
  const auto *RVFI = MF.getInfo<RISCVMachineFunctionInfo>();
  if (!RVFI->useSaveRestoreLibCalls(MF) && !RVFI->isPushable(MF))
    return false;

  const auto *FII =
      llvm::find_if(FixedCSRFIMap, [&](auto P) { return P.first == Reg; });
  if (FII == std::end(FixedCSRFIMap))
    return false;

  FrameIdx = FII->second;
  return true;
}
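
// Note (summary of the logic below): adjustReg materializes
// DestReg = SrcReg + Offset, where Offset may combine a fixed part and a
// scalable (VLENB-multiple) part. The scalable part is expanded first via
// getVLENFactoredAmount and an ADD/SUB; the fixed part is then folded into a
// single ADDI, split across two ADDIs while preserving RequiredAlign, or
// materialized with movImm and added/subtracted.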
void RISCVRegisterInfo::adjustReg(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator II,
                                  const DebugLoc &DL, Register DestReg,
                                  Register SrcReg, StackOffset Offset,
                                  MachineInstr::MIFlag Flag,
                                  MaybeAlign RequiredAlign) const {

  if (DestReg == SrcReg && !Offset.getFixed() && !Offset.getScalable())
    return;

  MachineFunction &MF = *MBB.getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const RISCVSubtarget &ST = MF.getSubtarget<RISCVSubtarget>();
  const RISCVInstrInfo *TII = ST.getInstrInfo();

  bool KillSrcReg = false;

  if (Offset.getScalable()) {
    unsigned ScalableAdjOpc = RISCV::ADD;
    int64_t ScalableValue = Offset.getScalable();
    if (ScalableValue < 0) {
      ScalableValue = -ScalableValue;
      ScalableAdjOpc = RISCV::SUB;
    }
    // Get vlenb and multiply it by the number of vector registers.
    Register ScratchReg = DestReg;
    if (DestReg == SrcReg)
      ScratchReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
    TII->getVLENFactoredAmount(MF, MBB, II, DL, ScratchReg, ScalableValue, Flag);
    BuildMI(MBB, II, DL, TII->get(ScalableAdjOpc), DestReg)
        .addReg(SrcReg).addReg(ScratchReg, RegState::Kill)
        .setMIFlag(Flag);
    SrcReg = DestReg;
    KillSrcReg = true;
  }

  int64_t Val = Offset.getFixed();
  if (DestReg == SrcReg && Val == 0)
    return;

  const uint64_t Align = RequiredAlign.valueOrOne().value();

  if (isInt<12>(Val)) {
    BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrcReg))
        .addImm(Val)
        .setMIFlag(Flag);
    return;
  }

  // Try to split the offset across two ADDIs. We need to keep the intermediate
  // result aligned after each ADDI. We need to determine the maximum value we
  // can put in each ADDI. In the negative direction, we can use -2048 which is
  // always sufficiently aligned. In the positive direction, we need to find the
  // largest 12-bit immediate that is aligned. Exclude -4096 since it can be
  // created with LUI.
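  // Illustrative example (not in the original comment): with RequiredAlign =
  // 16 and Val = 3000, MaxPosAdjStep is 2048 - 16 = 2032, so this path emits
  // ADDI +2032 (the intermediate result stays 16-byte aligned) followed by
  // ADDI +968, avoiding a separate constant materialization plus ADD.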
  assert(Align < 2048 && "Required alignment too large");
  int64_t MaxPosAdjStep = 2048 - Align;
  if (Val > -4096 && Val <= (2 * MaxPosAdjStep)) {
    int64_t FirstAdj = Val < 0 ? -2048 : MaxPosAdjStep;
    Val -= FirstAdj;
    BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrcReg))
        .addImm(FirstAdj)
        .setMIFlag(Flag);
    BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)
        .addReg(DestReg, RegState::Kill)
        .addImm(Val)
        .setMIFlag(Flag);
    return;
  }

  unsigned Opc = RISCV::ADD;
  if (Val < 0) {
    Val = -Val;
    Opc = RISCV::SUB;
  }

  Register ScratchReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  TII->movImm(MBB, II, DL, ScratchReg, Val, Flag);
  BuildMI(MBB, II, DL, TII->get(Opc), DestReg)
      .addReg(SrcReg, getKillRegState(KillSrcReg))
      .addReg(ScratchReg, RegState::Kill)
      .setMIFlag(Flag);
}

// Split a VSPILLx_Mx pseudo into multiple whole register stores separated by
// LMUL*VLENB bytes.
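// Illustrative expansion (derived from the code below): PseudoVSPILL3_M2
// (NF = 3, LMUL = 2) becomes roughly
//   vs2r.v  <subreg 0>, (base); add base, base, 2*vlenb
//   vs2r.v  <subreg 1>, (base); add base, base, 2*vlenb
//   vs2r.v  <subreg 2>, (base)
// where 2*vlenb is computed once with PseudoReadVLENB and a shift.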
"Unexpected subreg numbering"); 291bdd1243dSDimitry Andric 292bdd1243dSDimitry Andric Register VL = MRI.createVirtualRegister(&RISCV::GPRRegClass); 293bdd1243dSDimitry Andric BuildMI(MBB, II, DL, TII->get(RISCV::PseudoReadVLENB), VL); 294bdd1243dSDimitry Andric uint32_t ShiftAmount = Log2_32(LMUL); 295bdd1243dSDimitry Andric if (ShiftAmount != 0) 296bdd1243dSDimitry Andric BuildMI(MBB, II, DL, TII->get(RISCV::SLLI), VL) 297bdd1243dSDimitry Andric .addReg(VL) 298bdd1243dSDimitry Andric .addImm(ShiftAmount); 299bdd1243dSDimitry Andric 300bdd1243dSDimitry Andric Register SrcReg = II->getOperand(0).getReg(); 301bdd1243dSDimitry Andric Register Base = II->getOperand(1).getReg(); 302bdd1243dSDimitry Andric bool IsBaseKill = II->getOperand(1).isKill(); 303bdd1243dSDimitry Andric Register NewBase = MRI.createVirtualRegister(&RISCV::GPRRegClass); 304bdd1243dSDimitry Andric for (unsigned I = 0; I < NF; ++I) { 305bdd1243dSDimitry Andric // Adding implicit-use of super register to describe we are using part of 306bdd1243dSDimitry Andric // super register, that prevents machine verifier complaining when part of 307bdd1243dSDimitry Andric // subreg is undef, see comment in MachineVerifier::checkLiveness for more 308bdd1243dSDimitry Andric // detail. 309bdd1243dSDimitry Andric BuildMI(MBB, II, DL, TII->get(Opcode)) 310bdd1243dSDimitry Andric .addReg(TRI->getSubReg(SrcReg, SubRegIdx + I)) 311bdd1243dSDimitry Andric .addReg(Base, getKillRegState(I == NF - 1)) 312bdd1243dSDimitry Andric .addMemOperand(*(II->memoperands_begin())) 313bdd1243dSDimitry Andric .addReg(SrcReg, RegState::Implicit); 314bdd1243dSDimitry Andric if (I != NF - 1) 315bdd1243dSDimitry Andric BuildMI(MBB, II, DL, TII->get(RISCV::ADD), NewBase) 316bdd1243dSDimitry Andric .addReg(Base, getKillRegState(I != 0 || IsBaseKill)) 317bdd1243dSDimitry Andric .addReg(VL, getKillRegState(I == NF - 2)); 318bdd1243dSDimitry Andric Base = NewBase; 319bdd1243dSDimitry Andric } 320bdd1243dSDimitry Andric II->eraseFromParent(); 321bdd1243dSDimitry Andric } 322bdd1243dSDimitry Andric 323bdd1243dSDimitry Andric // Split a VSPILLx_Mx pseudo into multiple whole register loads separated by 324bdd1243dSDimitry Andric // LMUL*VLENB bytes. 
void RISCVRegisterInfo::lowerVRELOAD(MachineBasicBlock::iterator II) const {
  DebugLoc DL = II->getDebugLoc();
  MachineBasicBlock &MBB = *II->getParent();
  MachineFunction &MF = *MBB.getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const TargetInstrInfo *TII = MF.getSubtarget().getInstrInfo();
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();

  auto ZvlssegInfo = RISCV::isRVVSpillForZvlsseg(II->getOpcode());
  unsigned NF = ZvlssegInfo->first;
  unsigned LMUL = ZvlssegInfo->second;
  assert(NF * LMUL <= 8 && "Invalid NF/LMUL combinations.");
  unsigned Opcode, SubRegIdx;
  switch (LMUL) {
  default:
    llvm_unreachable("LMUL must be 1, 2, or 4.");
  case 1:
    Opcode = RISCV::VL1RE8_V;
    SubRegIdx = RISCV::sub_vrm1_0;
    break;
  case 2:
    Opcode = RISCV::VL2RE8_V;
    SubRegIdx = RISCV::sub_vrm2_0;
    break;
  case 4:
    Opcode = RISCV::VL4RE8_V;
    SubRegIdx = RISCV::sub_vrm4_0;
    break;
  }
  static_assert(RISCV::sub_vrm1_7 == RISCV::sub_vrm1_0 + 7,
                "Unexpected subreg numbering");
  static_assert(RISCV::sub_vrm2_3 == RISCV::sub_vrm2_0 + 3,
                "Unexpected subreg numbering");
  static_assert(RISCV::sub_vrm4_1 == RISCV::sub_vrm4_0 + 1,
                "Unexpected subreg numbering");

  Register VL = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  BuildMI(MBB, II, DL, TII->get(RISCV::PseudoReadVLENB), VL);
  uint32_t ShiftAmount = Log2_32(LMUL);
  if (ShiftAmount != 0)
    BuildMI(MBB, II, DL, TII->get(RISCV::SLLI), VL)
        .addReg(VL)
        .addImm(ShiftAmount);

  Register DestReg = II->getOperand(0).getReg();
  Register Base = II->getOperand(1).getReg();
  bool IsBaseKill = II->getOperand(1).isKill();
  Register NewBase = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  for (unsigned I = 0; I < NF; ++I) {
    BuildMI(MBB, II, DL, TII->get(Opcode),
            TRI->getSubReg(DestReg, SubRegIdx + I))
        .addReg(Base, getKillRegState(I == NF - 1))
        .addMemOperand(*(II->memoperands_begin()));
    if (I != NF - 1)
      BuildMI(MBB, II, DL, TII->get(RISCV::ADD), NewBase)
          .addReg(Base, getKillRegState(I != 0 || IsBaseKill))
          .addReg(VL, getKillRegState(I == NF - 2));
    Base = NewBase;
  }
  II->eraseFromParent();
}

bool RISCVRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator II,
                                            int SPAdj, unsigned FIOperandNum,
                                            RegScavenger *RS) const {
  assert(SPAdj == 0 && "Unexpected non-zero SPAdj value");

  MachineInstr &MI = *II;
  MachineFunction &MF = *MI.getParent()->getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const RISCVSubtarget &ST = MF.getSubtarget<RISCVSubtarget>();
  DebugLoc DL = MI.getDebugLoc();

  int FrameIndex = MI.getOperand(FIOperandNum).getIndex();
  Register FrameReg;
  StackOffset Offset =
      getFrameLowering(MF)->getFrameIndexReference(MF, FrameIndex, FrameReg);
  bool IsRVVSpill = RISCV::isRVVSpill(MI);
  if (!IsRVVSpill)
    Offset += StackOffset::getFixed(MI.getOperand(FIOperandNum + 1).getImm());

  if (Offset.getScalable() &&
      ST.getRealMinVLen() == ST.getRealMaxVLen()) {
    // For an exact VLEN value, scalable offsets become constant and thus
    // can be converted entirely into fixed offsets.
    int64_t FixedValue = Offset.getFixed();
    int64_t ScalableValue = Offset.getScalable();
    assert(ScalableValue % 8 == 0 &&
           "Scalable offset is not a multiple of a single vector size.");
    int64_t NumOfVReg = ScalableValue / 8;
    int64_t VLENB = ST.getRealMinVLen() / 8;
    Offset = StackOffset::getFixed(FixedValue + NumOfVReg * VLENB);
  }

  if (!isInt<32>(Offset.getFixed())) {
    report_fatal_error(
        "Frame offsets outside of the signed 32-bit range not supported");
  }

  if (!IsRVVSpill) {
    if (MI.getOpcode() == RISCV::ADDI && !isInt<12>(Offset.getFixed())) {
      // We chose to emit the canonical immediate sequence rather than folding
      // the offset into the using add under the theory that doing so doesn't
      // save dynamic instruction count and some target may fuse the canonical
      // 32 bit immediate sequence. We still need to clear the portion of the
      // offset encoded in the immediate.
      MI.getOperand(FIOperandNum + 1).ChangeToImmediate(0);
    } else {
      // We can encode an add with a 12-bit signed immediate in the immediate
      // operand of our user instruction. As a result, the remaining offset
      // can, by construction, be lowered with at worst an LUI and an ADD.
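      // Illustrative example (not in the original comment): for a fixed
      // offset of 0x12FFF, Lo12 = SignExtend64<12>(0xFFF) = -1 is folded into
      // the user instruction's immediate, leaving 0x13000, which the
      // adjustReg call below can materialize with a single LUI plus an ADD of
      // the frame register.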
      int64_t Val = Offset.getFixed();
      int64_t Lo12 = SignExtend64<12>(Val);
      MI.getOperand(FIOperandNum + 1).ChangeToImmediate(Lo12);
      Offset = StackOffset::get((uint64_t)Val - (uint64_t)Lo12,
                                Offset.getScalable());
    }
  }

  if (Offset.getScalable() || Offset.getFixed()) {
    Register DestReg;
    if (MI.getOpcode() == RISCV::ADDI)
      DestReg = MI.getOperand(0).getReg();
    else
      DestReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
    adjustReg(*II->getParent(), II, DL, DestReg, FrameReg, Offset,
              MachineInstr::NoFlags, std::nullopt);
    MI.getOperand(FIOperandNum).ChangeToRegister(DestReg, /*IsDef*/false,
                                                 /*IsImp*/false,
                                                 /*IsKill*/true);
  } else {
    MI.getOperand(FIOperandNum).ChangeToRegister(FrameReg, /*IsDef*/false,
                                                 /*IsImp*/false,
                                                 /*IsKill*/false);
  }

  // If, after materializing the adjustment, we have a pointless ADDI, remove it.
  if (MI.getOpcode() == RISCV::ADDI &&
      MI.getOperand(0).getReg() == MI.getOperand(1).getReg() &&
      MI.getOperand(2).getImm() == 0) {
    MI.eraseFromParent();
    return true;
  }

  // Handle spill/fill of synthetic register classes for segment operations to
  // ensure correctness in the edge case one gets spilled. There are many
  // possible optimizations here, but given the extreme rarity of such spills,
  // we prefer simplicity of implementation for now.
  switch (MI.getOpcode()) {
  case RISCV::PseudoVSPILL2_M1:
  case RISCV::PseudoVSPILL2_M2:
  case RISCV::PseudoVSPILL2_M4:
  case RISCV::PseudoVSPILL3_M1:
  case RISCV::PseudoVSPILL3_M2:
  case RISCV::PseudoVSPILL4_M1:
  case RISCV::PseudoVSPILL4_M2:
  case RISCV::PseudoVSPILL5_M1:
  case RISCV::PseudoVSPILL6_M1:
  case RISCV::PseudoVSPILL7_M1:
  case RISCV::PseudoVSPILL8_M1:
    lowerVSPILL(II);
    return true;
  case RISCV::PseudoVRELOAD2_M1:
  case RISCV::PseudoVRELOAD2_M2:
  case RISCV::PseudoVRELOAD2_M4:
  case RISCV::PseudoVRELOAD3_M1:
  case RISCV::PseudoVRELOAD3_M2:
  case RISCV::PseudoVRELOAD4_M1:
  case RISCV::PseudoVRELOAD4_M2:
  case RISCV::PseudoVRELOAD5_M1:
  case RISCV::PseudoVRELOAD6_M1:
  case RISCV::PseudoVRELOAD7_M1:
  case RISCV::PseudoVRELOAD8_M1:
    lowerVRELOAD(II);
    return true;
  }

  return false;
}

bool RISCVRegisterInfo::requiresVirtualBaseRegisters(
    const MachineFunction &MF) const {
  return true;
}

// Returns true if the instruction's frame index reference would be better
// served by a base register other than FP or SP.
// Used by LocalStackSlotAllocation pass to determine which frame index
// references it should create new base registers for.
bool RISCVRegisterInfo::needsFrameBaseReg(MachineInstr *MI,
                                          int64_t Offset) const {
  unsigned FIOperandNum = 0;
  for (; !MI->getOperand(FIOperandNum).isFI(); FIOperandNum++)
    assert(FIOperandNum < MI->getNumOperands() &&
           "Instr doesn't have FrameIndex operand");

  // For RISC-V, the machine instructions that include a FrameIndex operand
  // are load/store and ADDI instructions.
  unsigned MIFrm = RISCVII::getFormat(MI->getDesc().TSFlags);
  if (MIFrm != RISCVII::InstFormatI && MIFrm != RISCVII::InstFormatS)
    return false;
  // We only generate virtual base registers for loads and stores, so
  // return false for everything else.
  if (!MI->mayLoad() && !MI->mayStore())
    return false;

  const MachineFunction &MF = *MI->getMF();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  const RISCVFrameLowering *TFI = getFrameLowering(MF);
  const MachineRegisterInfo &MRI = MF.getRegInfo();
  unsigned CalleeSavedSize = 0;
  Offset += getFrameIndexInstrOffset(MI, FIOperandNum);

  // Estimate the stack size used to store callee saved registers
  // (excludes reserved registers).
  BitVector ReservedRegs = getReservedRegs(MF);
  for (const MCPhysReg *R = MRI.getCalleeSavedRegs(); MCPhysReg Reg = *R; ++R) {
    if (!ReservedRegs.test(Reg))
      CalleeSavedSize += getSpillSize(*getMinimalPhysRegClass(Reg));
  }

  int64_t MaxFPOffset = Offset - CalleeSavedSize;
  if (TFI->hasFP(MF) && !shouldRealignStack(MF))
    return !isFrameOffsetLegal(MI, RISCV::X8, MaxFPOffset);

  // Assume 128 bytes spill slots size to estimate the maximum possible
  // offset relative to the stack pointer.
  // FIXME: The 128 is copied from ARM. We should run some statistics and pick a
  // real one for RISC-V.
  int64_t MaxSPOffset = Offset + 128;
  MaxSPOffset += MFI.getLocalFrameSize();
  return !isFrameOffsetLegal(MI, RISCV::X2, MaxSPOffset);
}

// Determine whether a given base register plus offset immediate is
// encodable to resolve a frame index.
bool RISCVRegisterInfo::isFrameOffsetLegal(const MachineInstr *MI,
                                           Register BaseReg,
                                           int64_t Offset) const {
  unsigned FIOperandNum = 0;
  while (!MI->getOperand(FIOperandNum).isFI()) {
    FIOperandNum++;
    assert(FIOperandNum < MI->getNumOperands() &&
           "Instr does not have a FrameIndex operand!");
  }

  Offset += getFrameIndexInstrOffset(MI, FIOperandNum);
  return isInt<12>(Offset);
}

// Insert defining instruction(s) for a pointer to FrameIdx before
// insertion point I.
// Return the materialized base register.
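// Note (summary): the base register created below is a virtual GPR defined by
// an ADDI whose source operand is still a frame index; that ADDI is itself
// resolved later by eliminateFrameIndex, while individual users are rewritten
// against the new base register via resolveFrameIndex.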
Register RISCVRegisterInfo::materializeFrameBaseRegister(MachineBasicBlock *MBB,
                                                         int FrameIdx,
                                                         int64_t Offset) const {
  MachineBasicBlock::iterator MBBI = MBB->begin();
  DebugLoc DL;
  if (MBBI != MBB->end())
    DL = MBBI->getDebugLoc();
  MachineFunction *MF = MBB->getParent();
  MachineRegisterInfo &MFI = MF->getRegInfo();
  const TargetInstrInfo *TII = MF->getSubtarget().getInstrInfo();

  Register BaseReg = MFI.createVirtualRegister(&RISCV::GPRRegClass);
  BuildMI(*MBB, MBBI, DL, TII->get(RISCV::ADDI), BaseReg)
      .addFrameIndex(FrameIdx)
      .addImm(Offset);
  return BaseReg;
}

// Resolve a frame index operand of an instruction to reference the
// indicated base register plus offset instead.
void RISCVRegisterInfo::resolveFrameIndex(MachineInstr &MI, Register BaseReg,
                                          int64_t Offset) const {
  unsigned FIOperandNum = 0;
  while (!MI.getOperand(FIOperandNum).isFI()) {
    FIOperandNum++;
    assert(FIOperandNum < MI.getNumOperands() &&
           "Instr does not have a FrameIndex operand!");
  }

  Offset += getFrameIndexInstrOffset(&MI, FIOperandNum);
  // FrameIndex Operands are always represented as a
  // register followed by an immediate.
  MI.getOperand(FIOperandNum).ChangeToRegister(BaseReg, false);
  MI.getOperand(FIOperandNum + 1).ChangeToImmediate(Offset);
}

// Get the offset from the referenced frame index in the instruction,
// if there is one.
int64_t RISCVRegisterInfo::getFrameIndexInstrOffset(const MachineInstr *MI,
                                                    int Idx) const {
  assert((RISCVII::getFormat(MI->getDesc().TSFlags) == RISCVII::InstFormatI ||
          RISCVII::getFormat(MI->getDesc().TSFlags) == RISCVII::InstFormatS) &&
         "The MI must be I or S format.");
  assert(MI->getOperand(Idx).isFI() && "The Idx'th operand of MI is not a "
                                       "FrameIndex operand");
  return MI->getOperand(Idx + 1).getImm();
}

Register RISCVRegisterInfo::getFrameRegister(const MachineFunction &MF) const {
  const TargetFrameLowering *TFI = getFrameLowering(MF);
  return TFI->hasFP(MF) ? RISCV::X8 : RISCV::X2;
}

const uint32_t *
RISCVRegisterInfo::getCallPreservedMask(const MachineFunction &MF,
                                        CallingConv::ID CC) const {
  auto &Subtarget = MF.getSubtarget<RISCVSubtarget>();

  if (CC == CallingConv::GHC)
    return CSR_NoRegs_RegMask;
  switch (Subtarget.getTargetABI()) {
  default:
    llvm_unreachable("Unrecognized ABI");
  case RISCVABI::ABI_ILP32:
  case RISCVABI::ABI_LP64:
    return CSR_ILP32_LP64_RegMask;
  case RISCVABI::ABI_ILP32F:
  case RISCVABI::ABI_LP64F:
    return CSR_ILP32F_LP64F_RegMask;
  case RISCVABI::ABI_ILP32D:
  case RISCVABI::ABI_LP64D:
    return CSR_ILP32D_LP64D_RegMask;
  }
}

const TargetRegisterClass *
RISCVRegisterInfo::getLargestLegalSuperClass(const TargetRegisterClass *RC,
                                             const MachineFunction &) const {
  if (RC == &RISCV::VMV0RegClass)
    return &RISCV::VRRegClass;
  return RC;
}

void RISCVRegisterInfo::getOffsetOpcodes(const StackOffset &Offset,
                                         SmallVectorImpl<uint64_t> &Ops) const {
  // VLENB is the length of a vector register in bytes. We use <vscale x 8 x i8>
  // to represent one vector register. The dwarf offset is
  // VLENB * scalable_offset / 8.
  assert(Offset.getScalable() % 8 == 0 && "Invalid frame offset");
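  // Illustrative example (derived from the code below): a scalable offset of
  // 16, i.e. two whole vector registers, appends
  //   DW_OP_constu 2, DW_OP_bregx vlenb 0, DW_OP_mul, DW_OP_plus
  // to the expression, after the fixed part of the offset has been appended.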
  // Add fixed-sized offset using existing DIExpression interface.
  DIExpression::appendOffset(Ops, Offset.getFixed());

  unsigned VLENB = getDwarfRegNum(RISCV::VLENB, true);
  int64_t VLENBSized = Offset.getScalable() / 8;
  if (VLENBSized > 0) {
    Ops.push_back(dwarf::DW_OP_constu);
    Ops.push_back(VLENBSized);
    Ops.append({dwarf::DW_OP_bregx, VLENB, 0ULL});
    Ops.push_back(dwarf::DW_OP_mul);
    Ops.push_back(dwarf::DW_OP_plus);
  } else if (VLENBSized < 0) {
    Ops.push_back(dwarf::DW_OP_constu);
    Ops.push_back(-VLENBSized);
    Ops.append({dwarf::DW_OP_bregx, VLENB, 0ULL});
    Ops.push_back(dwarf::DW_OP_mul);
    Ops.push_back(dwarf::DW_OP_minus);
  }
}

unsigned
RISCVRegisterInfo::getRegisterCostTableIndex(const MachineFunction &MF) const {
  return MF.getSubtarget<RISCVSubtarget>().hasStdExtCOrZca() ? 1 : 0;
}

// Add two address hints to improve chances of being able to use a compressed
// instruction.
bool RISCVRegisterInfo::getRegAllocationHints(
    Register VirtReg, ArrayRef<MCPhysReg> Order,
    SmallVectorImpl<MCPhysReg> &Hints, const MachineFunction &MF,
    const VirtRegMap *VRM, const LiveRegMatrix *Matrix) const {
  const MachineRegisterInfo *MRI = &MF.getRegInfo();

  bool BaseImplRetVal = TargetRegisterInfo::getRegAllocationHints(
      VirtReg, Order, Hints, MF, VRM, Matrix);

  if (!VRM || DisableRegAllocHints)
    return BaseImplRetVal;

  // Add any two address hints after any copy hints.
  SmallSet<Register, 4> TwoAddrHints;

  auto tryAddHint = [&](const MachineOperand &VRRegMO, const MachineOperand &MO,
                        bool NeedGPRC) -> void {
    Register Reg = MO.getReg();
    Register PhysReg = Reg.isPhysical() ? Reg : Register(VRM->getPhys(Reg));
    if (PhysReg && (!NeedGPRC || RISCV::GPRCRegClass.contains(PhysReg))) {
      assert(!MO.getSubReg() && !VRRegMO.getSubReg() && "Unexpected subreg!");
      if (!MRI->isReserved(PhysReg) && !is_contained(Hints, PhysReg))
        TwoAddrHints.insert(PhysReg);
    }
  };

  // This is all of the compressible binary instructions. If an instruction
  // needs GPRC register class operands, \p NeedGPRC will be set to true.
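  // Note (background on the RVC encoding): the two-address forms
  // C.AND/C.OR/C.XOR/C.SUB (and C.ANDI/C.SRLI/C.SRAI) can only encode
  // registers x8-x15, i.e. the GPRC class, whereas C.ADD, C.SLLI, C.ADDI and
  // C.ADDIW accept any GPR; immediates in these forms are limited to 6 bits.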
  auto isCompressible = [](const MachineInstr &MI, bool &NeedGPRC) {
    NeedGPRC = false;
    switch (MI.getOpcode()) {
    default:
      return false;
    case RISCV::AND:
    case RISCV::OR:
    case RISCV::XOR:
    case RISCV::SUB:
    case RISCV::ADDW:
    case RISCV::SUBW:
      NeedGPRC = true;
      return true;
    case RISCV::ANDI:
      NeedGPRC = true;
      return MI.getOperand(2).isImm() && isInt<6>(MI.getOperand(2).getImm());
    case RISCV::SRAI:
    case RISCV::SRLI:
      NeedGPRC = true;
      return true;
    case RISCV::ADD:
    case RISCV::SLLI:
      return true;
    case RISCV::ADDI:
    case RISCV::ADDIW:
      return MI.getOperand(2).isImm() && isInt<6>(MI.getOperand(2).getImm());
    }
  };

  // Returns true if this operand is compressible. For non-registers it always
  // returns true. Immediate range was already checked in isCompressible.
  // For registers, it checks if the register is a GPRC register. Reg-reg
  // instructions that require GPRC need all register operands to be GPRC.
  auto isCompressibleOpnd = [&](const MachineOperand &MO) {
    if (!MO.isReg())
      return true;
    Register Reg = MO.getReg();
    Register PhysReg = Reg.isPhysical() ? Reg : Register(VRM->getPhys(Reg));
    return PhysReg && RISCV::GPRCRegClass.contains(PhysReg);
  };

  for (auto &MO : MRI->reg_nodbg_operands(VirtReg)) {
    const MachineInstr &MI = *MO.getParent();
    unsigned OpIdx = MO.getOperandNo();
    bool NeedGPRC;
    if (isCompressible(MI, NeedGPRC)) {
      if (OpIdx == 0 && MI.getOperand(1).isReg()) {
        if (!NeedGPRC || isCompressibleOpnd(MI.getOperand(2)))
          tryAddHint(MO, MI.getOperand(1), NeedGPRC);
        if (MI.isCommutable() && MI.getOperand(2).isReg() &&
            (!NeedGPRC || isCompressibleOpnd(MI.getOperand(1))))
          tryAddHint(MO, MI.getOperand(2), NeedGPRC);
      } else if (OpIdx == 1 &&
                 (!NeedGPRC || isCompressibleOpnd(MI.getOperand(2)))) {
        tryAddHint(MO, MI.getOperand(0), NeedGPRC);
      } else if (MI.isCommutable() && OpIdx == 2 &&
                 (!NeedGPRC || isCompressibleOpnd(MI.getOperand(1)))) {
        tryAddHint(MO, MI.getOperand(0), NeedGPRC);
      }
    }
  }

  for (MCPhysReg OrderReg : Order)
    if (TwoAddrHints.count(OrderReg))
      Hints.push_back(OrderReg);

  return BaseImplRetVal;
}