xref: /freebsd/contrib/llvm-project/llvm/lib/Target/RISCV/RISCVRegisterInfo.cpp (revision bdd1243df58e60e85101c09001d9812a789b6bc4)
10b57cec5SDimitry Andric //===-- RISCVRegisterInfo.cpp - RISCV Register Information ------*- C++ -*-===//
20b57cec5SDimitry Andric //
30b57cec5SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
40b57cec5SDimitry Andric // See https://llvm.org/LICENSE.txt for license information.
50b57cec5SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
60b57cec5SDimitry Andric //
70b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
80b57cec5SDimitry Andric //
90b57cec5SDimitry Andric // This file contains the RISCV implementation of the TargetRegisterInfo class.
100b57cec5SDimitry Andric //
110b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
120b57cec5SDimitry Andric 
130b57cec5SDimitry Andric #include "RISCVRegisterInfo.h"
140b57cec5SDimitry Andric #include "RISCV.h"
155ffd83dbSDimitry Andric #include "RISCVMachineFunctionInfo.h"
160b57cec5SDimitry Andric #include "RISCVSubtarget.h"
1781ad6265SDimitry Andric #include "llvm/BinaryFormat/Dwarf.h"
180b57cec5SDimitry Andric #include "llvm/CodeGen/MachineFrameInfo.h"
190b57cec5SDimitry Andric #include "llvm/CodeGen/MachineFunction.h"
200b57cec5SDimitry Andric #include "llvm/CodeGen/MachineInstrBuilder.h"
210b57cec5SDimitry Andric #include "llvm/CodeGen/RegisterScavenging.h"
220b57cec5SDimitry Andric #include "llvm/CodeGen/TargetFrameLowering.h"
230b57cec5SDimitry Andric #include "llvm/CodeGen/TargetInstrInfo.h"
244824e7fdSDimitry Andric #include "llvm/IR/DebugInfoMetadata.h"
250b57cec5SDimitry Andric #include "llvm/Support/ErrorHandling.h"
260b57cec5SDimitry Andric 
270b57cec5SDimitry Andric #define GET_REGINFO_TARGET_DESC
280b57cec5SDimitry Andric #include "RISCVGenRegisterInfo.inc"
290b57cec5SDimitry Andric 
300b57cec5SDimitry Andric using namespace llvm;
310b57cec5SDimitry Andric 
// Hidden escape hatch for debugging/tuning: suppress the RISC-V-specific
// two-address register-allocation hints. (The consuming hook is not visible
// in this chunk — presumably getRegAllocationHints; verify at the use site.)
static cl::opt<bool>
    DisableRegAllocHints("riscv-disable-regalloc-hints", cl::Hidden,
                         cl::init(false),
                         cl::desc("Disable two address hints for register "
                                  "allocation"));
37*bdd1243dSDimitry Andric 
// Code in this file forms registers and sub-register indices by integer
// offset arithmetic (e.g. SubRegIdx + I in lowerVSPILL/lowerVRELOAD), which
// is only valid if the TableGen-generated enums are numbered consecutively.
// Pin that assumption down at compile time for each register file.
static_assert(RISCV::X1 == RISCV::X0 + 1, "Register list not consecutive");
static_assert(RISCV::X31 == RISCV::X0 + 31, "Register list not consecutive");
static_assert(RISCV::F1_H == RISCV::F0_H + 1, "Register list not consecutive");
static_assert(RISCV::F31_H == RISCV::F0_H + 31,
              "Register list not consecutive");
static_assert(RISCV::F1_F == RISCV::F0_F + 1, "Register list not consecutive");
static_assert(RISCV::F31_F == RISCV::F0_F + 31,
              "Register list not consecutive");
static_assert(RISCV::F1_D == RISCV::F0_D + 1, "Register list not consecutive");
static_assert(RISCV::F31_D == RISCV::F0_D + 31,
              "Register list not consecutive");
static_assert(RISCV::V1 == RISCV::V0 + 1, "Register list not consecutive");
static_assert(RISCV::V31 == RISCV::V0 + 31, "Register list not consecutive");
518bcb0991SDimitry Andric 
// X1 (ra) is the return-address register; both DWARF flavours and the PC
// "register" use number 0 here.
RISCVRegisterInfo::RISCVRegisterInfo(unsigned HwMode)
    : RISCVGenRegisterInfo(RISCV::X1, /*DwarfFlavour*/0, /*EHFlavor*/0,
                           /*PC*/0, HwMode) {}
550b57cec5SDimitry Andric 
560b57cec5SDimitry Andric const MCPhysReg *
570b57cec5SDimitry Andric RISCVRegisterInfo::getCalleeSavedRegs(const MachineFunction *MF) const {
580b57cec5SDimitry Andric   auto &Subtarget = MF->getSubtarget<RISCVSubtarget>();
59e8d8bef9SDimitry Andric   if (MF->getFunction().getCallingConv() == CallingConv::GHC)
60e8d8bef9SDimitry Andric     return CSR_NoRegs_SaveList;
610b57cec5SDimitry Andric   if (MF->getFunction().hasFnAttribute("interrupt")) {
620b57cec5SDimitry Andric     if (Subtarget.hasStdExtD())
630b57cec5SDimitry Andric       return CSR_XLEN_F64_Interrupt_SaveList;
640b57cec5SDimitry Andric     if (Subtarget.hasStdExtF())
650b57cec5SDimitry Andric       return CSR_XLEN_F32_Interrupt_SaveList;
660b57cec5SDimitry Andric     return CSR_Interrupt_SaveList;
670b57cec5SDimitry Andric   }
680b57cec5SDimitry Andric 
690b57cec5SDimitry Andric   switch (Subtarget.getTargetABI()) {
700b57cec5SDimitry Andric   default:
710b57cec5SDimitry Andric     llvm_unreachable("Unrecognized ABI");
720b57cec5SDimitry Andric   case RISCVABI::ABI_ILP32:
730b57cec5SDimitry Andric   case RISCVABI::ABI_LP64:
740b57cec5SDimitry Andric     return CSR_ILP32_LP64_SaveList;
750b57cec5SDimitry Andric   case RISCVABI::ABI_ILP32F:
760b57cec5SDimitry Andric   case RISCVABI::ABI_LP64F:
770b57cec5SDimitry Andric     return CSR_ILP32F_LP64F_SaveList;
780b57cec5SDimitry Andric   case RISCVABI::ABI_ILP32D:
790b57cec5SDimitry Andric   case RISCVABI::ABI_LP64D:
800b57cec5SDimitry Andric     return CSR_ILP32D_LP64D_SaveList;
810b57cec5SDimitry Andric   }
820b57cec5SDimitry Andric }
830b57cec5SDimitry Andric 
840b57cec5SDimitry Andric BitVector RISCVRegisterInfo::getReservedRegs(const MachineFunction &MF) const {
85480093f4SDimitry Andric   const RISCVFrameLowering *TFI = getFrameLowering(MF);
860b57cec5SDimitry Andric   BitVector Reserved(getNumRegs());
870b57cec5SDimitry Andric 
88480093f4SDimitry Andric   // Mark any registers requested to be reserved as such
89480093f4SDimitry Andric   for (size_t Reg = 0; Reg < getNumRegs(); Reg++) {
90480093f4SDimitry Andric     if (MF.getSubtarget<RISCVSubtarget>().isRegisterReservedByUser(Reg))
91480093f4SDimitry Andric       markSuperRegs(Reserved, Reg);
92480093f4SDimitry Andric   }
93480093f4SDimitry Andric 
940b57cec5SDimitry Andric   // Use markSuperRegs to ensure any register aliases are also reserved
950b57cec5SDimitry Andric   markSuperRegs(Reserved, RISCV::X0); // zero
960b57cec5SDimitry Andric   markSuperRegs(Reserved, RISCV::X2); // sp
970b57cec5SDimitry Andric   markSuperRegs(Reserved, RISCV::X3); // gp
980b57cec5SDimitry Andric   markSuperRegs(Reserved, RISCV::X4); // tp
990b57cec5SDimitry Andric   if (TFI->hasFP(MF))
1000b57cec5SDimitry Andric     markSuperRegs(Reserved, RISCV::X8); // fp
101480093f4SDimitry Andric   // Reserve the base register if we need to realign the stack and allocate
102480093f4SDimitry Andric   // variable-sized objects at runtime.
103480093f4SDimitry Andric   if (TFI->hasBP(MF))
104480093f4SDimitry Andric     markSuperRegs(Reserved, RISCVABI::getBPReg()); // bp
105e8d8bef9SDimitry Andric 
106e8d8bef9SDimitry Andric   // V registers for code generation. We handle them manually.
107e8d8bef9SDimitry Andric   markSuperRegs(Reserved, RISCV::VL);
108e8d8bef9SDimitry Andric   markSuperRegs(Reserved, RISCV::VTYPE);
109e8d8bef9SDimitry Andric   markSuperRegs(Reserved, RISCV::VXSAT);
110e8d8bef9SDimitry Andric   markSuperRegs(Reserved, RISCV::VXRM);
11181ad6265SDimitry Andric   markSuperRegs(Reserved, RISCV::VLENB); // vlenb (constant)
112e8d8bef9SDimitry Andric 
113fe6060f1SDimitry Andric   // Floating point environment registers.
114fe6060f1SDimitry Andric   markSuperRegs(Reserved, RISCV::FRM);
115fe6060f1SDimitry Andric   markSuperRegs(Reserved, RISCV::FFLAGS);
116fe6060f1SDimitry Andric 
1170b57cec5SDimitry Andric   assert(checkAllSuperRegsMarked(Reserved));
1180b57cec5SDimitry Andric   return Reserved;
1190b57cec5SDimitry Andric }
1200b57cec5SDimitry Andric 
121480093f4SDimitry Andric bool RISCVRegisterInfo::isAsmClobberable(const MachineFunction &MF,
1225ffd83dbSDimitry Andric                                          MCRegister PhysReg) const {
123480093f4SDimitry Andric   return !MF.getSubtarget<RISCVSubtarget>().isRegisterReservedByUser(PhysReg);
124480093f4SDimitry Andric }
125480093f4SDimitry Andric 
// Register mask under which no registers are preserved across the call.
const uint32_t *RISCVRegisterInfo::getNoPreservedMask() const {
  return CSR_NoRegs_RegMask;
}
1290b57cec5SDimitry Andric 
// Frame indexes representing locations of CSRs which are given a fixed location
// by save/restore libcalls.
//
// Each entry pairs a callee-saved GPR with the fixed (negative) frame index
// the libcalls assign to it: ra at -1, then s0..s11 at -2..-13.
static const std::pair<unsigned, int> FixedCSRFIMap[] = {
  {/*ra*/  RISCV::X1,   -1},
  {/*s0*/  RISCV::X8,   -2},
  {/*s1*/  RISCV::X9,   -3},
  {/*s2*/  RISCV::X18,  -4},
  {/*s3*/  RISCV::X19,  -5},
  {/*s4*/  RISCV::X20,  -6},
  {/*s5*/  RISCV::X21,  -7},
  {/*s6*/  RISCV::X22,  -8},
  {/*s7*/  RISCV::X23,  -9},
  {/*s8*/  RISCV::X24,  -10},
  {/*s9*/  RISCV::X25,  -11},
  {/*s10*/ RISCV::X26,  -12},
  {/*s11*/ RISCV::X27,  -13}
};
1475ffd83dbSDimitry Andric 
1485ffd83dbSDimitry Andric bool RISCVRegisterInfo::hasReservedSpillSlot(const MachineFunction &MF,
1495ffd83dbSDimitry Andric                                              Register Reg,
1505ffd83dbSDimitry Andric                                              int &FrameIdx) const {
1515ffd83dbSDimitry Andric   const auto *RVFI = MF.getInfo<RISCVMachineFunctionInfo>();
1525ffd83dbSDimitry Andric   if (!RVFI->useSaveRestoreLibCalls(MF))
1535ffd83dbSDimitry Andric     return false;
1545ffd83dbSDimitry Andric 
15581ad6265SDimitry Andric   const auto *FII =
15681ad6265SDimitry Andric       llvm::find_if(FixedCSRFIMap, [&](auto P) { return P.first == Reg; });
15781ad6265SDimitry Andric   if (FII == std::end(FixedCSRFIMap))
1585ffd83dbSDimitry Andric     return false;
1595ffd83dbSDimitry Andric 
1605ffd83dbSDimitry Andric   FrameIdx = FII->second;
1615ffd83dbSDimitry Andric   return true;
1625ffd83dbSDimitry Andric }
1635ffd83dbSDimitry Andric 
// Emit instructions computing DestReg = SrcReg + Offset, where Offset may
// carry both a fixed byte component and a scalable component (a multiple of
// vlenb). RequiredAlign, when given, constrains the intermediate value so a
// large fixed offset split across two ADDIs keeps each partial result
// sufficiently aligned. All emitted instructions are tagged with Flag.
void RISCVRegisterInfo::adjustReg(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator II,
                                  const DebugLoc &DL, Register DestReg,
                                  Register SrcReg, StackOffset Offset,
                                  MachineInstr::MIFlag Flag,
                                  MaybeAlign RequiredAlign) const {

  // Nothing to do for a zero adjustment onto itself.
  if (DestReg == SrcReg && !Offset.getFixed() && !Offset.getScalable())
    return;

  MachineFunction &MF = *MBB.getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const RISCVSubtarget &ST = MF.getSubtarget<RISCVSubtarget>();
  const RISCVInstrInfo *TII = ST.getInstrInfo();

  // Becomes true once DestReg holds the partially-adjusted value, so later
  // ADD/ADDIs may kill it as their source.
  bool KillSrcReg = false;

  if (Offset.getScalable()) {
    // Apply the scalable part first: materialize |scalable| * vlenb / 8 in a
    // scratch register, then ADD/SUB it onto SrcReg depending on sign.
    unsigned ScalableAdjOpc = RISCV::ADD;
    int64_t ScalableValue = Offset.getScalable();
    if (ScalableValue < 0) {
      ScalableValue = -ScalableValue;
      ScalableAdjOpc = RISCV::SUB;
    }
    // Get vlenb and multiply vlen with the number of vector registers.
    // Use DestReg as the scratch unless it would clobber SrcReg.
    Register ScratchReg = DestReg;
    if (DestReg == SrcReg)
      ScratchReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
    TII->getVLENFactoredAmount(MF, MBB, II, DL, ScratchReg, ScalableValue, Flag);
    BuildMI(MBB, II, DL, TII->get(ScalableAdjOpc), DestReg)
      .addReg(SrcReg).addReg(ScratchReg, RegState::Kill)
      .setMIFlag(Flag);
    // The fixed part below continues from DestReg.
    SrcReg = DestReg;
    KillSrcReg = true;
  }

  int64_t Val = Offset.getFixed();
  if (DestReg == SrcReg && Val == 0)
    return;

  const uint64_t Align = RequiredAlign.valueOrOne().value();

  // Small fixed offsets fit in a single ADDI immediate.
  if (isInt<12>(Val)) {
    BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrcReg))
        .addImm(Val)
        .setMIFlag(Flag);
    return;
  }

  // Try to split the offset across two ADDIs. We need to keep the intermediate
  // result aligned after each ADDI.  We need to determine the maximum value we
  // can put in each ADDI. In the negative direction, we can use -2048 which is
  // always sufficiently aligned. In the positive direction, we need to find the
  // largest 12-bit immediate that is aligned.  Exclude -4096 since it can be
  // created with LUI.
  assert(Align < 2048 && "Required alignment too large");
  int64_t MaxPosAdjStep = 2048 - Align;
  if (Val > -4096 && Val <= (2 * MaxPosAdjStep)) {
    int64_t FirstAdj = Val < 0 ? -2048 : MaxPosAdjStep;
    Val -= FirstAdj;
    BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrcReg))
        .addImm(FirstAdj)
        .setMIFlag(Flag);
    BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)
        .addReg(DestReg, RegState::Kill)
        .addImm(Val)
        .setMIFlag(Flag);
    return;
  }

  // General case: materialize |Val| into a scratch register and ADD/SUB it.
  unsigned Opc = RISCV::ADD;
  if (Val < 0) {
    Val = -Val;
    Opc = RISCV::SUB;
  }

  Register ScratchReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  TII->movImm(MBB, II, DL, ScratchReg, Val, Flag);
  BuildMI(MBB, II, DL, TII->get(Opc), DestReg)
      .addReg(SrcReg, getKillRegState(KillSrcReg))
      .addReg(ScratchReg, RegState::Kill)
      .setMIFlag(Flag);
}
249*bdd1243dSDimitry Andric 
// Split a VSPILLx_Mx pseudo into multiple whole register stores separated by
// LMUL*VLENB bytes.
void RISCVRegisterInfo::lowerVSPILL(MachineBasicBlock::iterator II) const {
  DebugLoc DL = II->getDebugLoc();
  MachineBasicBlock &MBB = *II->getParent();
  MachineFunction &MF = *MBB.getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const TargetInstrInfo *TII = MF.getSubtarget().getInstrInfo();
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();

  // NF = number of segment fields, LMUL = register-group size of each field.
  auto ZvlssegInfo = RISCV::isRVVSpillForZvlsseg(II->getOpcode());
  unsigned NF = ZvlssegInfo->first;
  unsigned LMUL = ZvlssegInfo->second;
  assert(NF * LMUL <= 8 && "Invalid NF/LMUL combinations.");
  // Select the whole-register store matching the group size, and the first
  // sub-register index of that width; field I is at SubRegIdx + I.
  unsigned Opcode, SubRegIdx;
  switch (LMUL) {
  default:
    llvm_unreachable("LMUL must be 1, 2, or 4.");
  case 1:
    Opcode = RISCV::VS1R_V;
    SubRegIdx = RISCV::sub_vrm1_0;
    break;
  case 2:
    Opcode = RISCV::VS2R_V;
    SubRegIdx = RISCV::sub_vrm2_0;
    break;
  case 4:
    Opcode = RISCV::VS4R_V;
    SubRegIdx = RISCV::sub_vrm4_0;
    break;
  }
  // SubRegIdx + I arithmetic above requires consecutive subreg numbering.
  static_assert(RISCV::sub_vrm1_7 == RISCV::sub_vrm1_0 + 7,
                "Unexpected subreg numbering");
  static_assert(RISCV::sub_vrm2_3 == RISCV::sub_vrm2_0 + 3,
                "Unexpected subreg numbering");
  static_assert(RISCV::sub_vrm4_1 == RISCV::sub_vrm4_0 + 1,
                "Unexpected subreg numbering");

  // VL := vlenb << log2(LMUL), i.e. the byte stride between fields.
  Register VL = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  BuildMI(MBB, II, DL, TII->get(RISCV::PseudoReadVLENB), VL);
  uint32_t ShiftAmount = Log2_32(LMUL);
  if (ShiftAmount != 0)
    BuildMI(MBB, II, DL, TII->get(RISCV::SLLI), VL)
        .addReg(VL)
        .addImm(ShiftAmount);

  Register SrcReg = II->getOperand(0).getReg();
  Register Base = II->getOperand(1).getReg();
  bool IsBaseKill = II->getOperand(1).isKill();
  Register NewBase = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  // Store each field, bumping the base pointer by VL between fields.
  for (unsigned I = 0; I < NF; ++I) {
    // Adding implicit-use of super register to describe we are using part of
    // super register, that prevents machine verifier complaining when part of
    // subreg is undef, see comment in MachineVerifier::checkLiveness for more
    // detail.
    BuildMI(MBB, II, DL, TII->get(Opcode))
        .addReg(TRI->getSubReg(SrcReg, SubRegIdx + I))
        .addReg(Base, getKillRegState(I == NF - 1))
        .addMemOperand(*(II->memoperands_begin()))
        .addReg(SrcReg, RegState::Implicit);
    // Advance the base for every field except the last.
    if (I != NF - 1)
      BuildMI(MBB, II, DL, TII->get(RISCV::ADD), NewBase)
          .addReg(Base, getKillRegState(I != 0 || IsBaseKill))
          .addReg(VL, getKillRegState(I == NF - 2));
    Base = NewBase;
  }
  II->eraseFromParent();
}
318*bdd1243dSDimitry Andric 
// Split a VRELOADx_Mx pseudo into multiple whole register loads separated by
// LMUL*VLENB bytes.
void RISCVRegisterInfo::lowerVRELOAD(MachineBasicBlock::iterator II) const {
  DebugLoc DL = II->getDebugLoc();
  MachineBasicBlock &MBB = *II->getParent();
  MachineFunction &MF = *MBB.getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const TargetInstrInfo *TII = MF.getSubtarget().getInstrInfo();
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();

  // NF = number of segment fields, LMUL = register-group size of each field.
  auto ZvlssegInfo = RISCV::isRVVSpillForZvlsseg(II->getOpcode());
  unsigned NF = ZvlssegInfo->first;
  unsigned LMUL = ZvlssegInfo->second;
  assert(NF * LMUL <= 8 && "Invalid NF/LMUL combinations.");
  // Select the whole-register load matching the group size, and the first
  // sub-register index of that width; field I is at SubRegIdx + I.
  unsigned Opcode, SubRegIdx;
  switch (LMUL) {
  default:
    llvm_unreachable("LMUL must be 1, 2, or 4.");
  case 1:
    Opcode = RISCV::VL1RE8_V;
    SubRegIdx = RISCV::sub_vrm1_0;
    break;
  case 2:
    Opcode = RISCV::VL2RE8_V;
    SubRegIdx = RISCV::sub_vrm2_0;
    break;
  case 4:
    Opcode = RISCV::VL4RE8_V;
    SubRegIdx = RISCV::sub_vrm4_0;
    break;
  }
  // SubRegIdx + I arithmetic above requires consecutive subreg numbering.
  static_assert(RISCV::sub_vrm1_7 == RISCV::sub_vrm1_0 + 7,
                "Unexpected subreg numbering");
  static_assert(RISCV::sub_vrm2_3 == RISCV::sub_vrm2_0 + 3,
                "Unexpected subreg numbering");
  static_assert(RISCV::sub_vrm4_1 == RISCV::sub_vrm4_0 + 1,
                "Unexpected subreg numbering");

  // VL := vlenb << log2(LMUL), i.e. the byte stride between fields.
  Register VL = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  BuildMI(MBB, II, DL, TII->get(RISCV::PseudoReadVLENB), VL);
  uint32_t ShiftAmount = Log2_32(LMUL);
  if (ShiftAmount != 0)
    BuildMI(MBB, II, DL, TII->get(RISCV::SLLI), VL)
        .addReg(VL)
        .addImm(ShiftAmount);

  Register DestReg = II->getOperand(0).getReg();
  Register Base = II->getOperand(1).getReg();
  bool IsBaseKill = II->getOperand(1).isKill();
  Register NewBase = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  // Load each field, bumping the base pointer by VL between fields.
  for (unsigned I = 0; I < NF; ++I) {
    BuildMI(MBB, II, DL, TII->get(Opcode),
            TRI->getSubReg(DestReg, SubRegIdx + I))
        .addReg(Base, getKillRegState(I == NF - 1))
        .addMemOperand(*(II->memoperands_begin()));
    // Advance the base for every field except the last.
    if (I != NF - 1)
      BuildMI(MBB, II, DL, TII->get(RISCV::ADD), NewBase)
          .addReg(Base, getKillRegState(I != 0 || IsBaseKill))
          .addReg(VL, getKillRegState(I == NF - 2));
    Base = NewBase;
  }
  II->eraseFromParent();
}
382*bdd1243dSDimitry Andric 
// Replace the frame-index operand of MI with a concrete base register plus
// offset. The fixed part of the offset is folded into the instruction's
// immediate where it fits; any remainder (including scalable, vlenb-relative
// parts) is materialized into a register via adjustReg. Segment spill/reload
// pseudos are expanded here as well. Returns true if MI was erased.
bool RISCVRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator II,
                                            int SPAdj, unsigned FIOperandNum,
                                            RegScavenger *RS) const {
  assert(SPAdj == 0 && "Unexpected non-zero SPAdj value");

  MachineInstr &MI = *II;
  MachineFunction &MF = *MI.getParent()->getParent();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const RISCVSubtarget &ST = MF.getSubtarget<RISCVSubtarget>();
  DebugLoc DL = MI.getDebugLoc();

  int FrameIndex = MI.getOperand(FIOperandNum).getIndex();
  Register FrameReg;
  StackOffset Offset =
      getFrameLowering(MF)->getFrameIndexReference(MF, FrameIndex, FrameReg);
  bool IsRVVSpill = RISCV::isRVVSpill(MI);
  // RVV spill pseudos carry no immediate operand; for everything else the
  // existing immediate is part of the total offset.
  if (!IsRVVSpill)
    Offset += StackOffset::getFixed(MI.getOperand(FIOperandNum + 1).getImm());

  if (Offset.getScalable() &&
      ST.getRealMinVLen() == ST.getRealMaxVLen()) {
    // For an exact VLEN value, scalable offsets become constant and thus
    // can be converted entirely into fixed offsets.
    int64_t FixedValue = Offset.getFixed();
    int64_t ScalableValue = Offset.getScalable();
    assert(ScalableValue % 8 == 0 &&
           "Scalable offset is not a multiple of a single vector size.");
    int64_t NumOfVReg = ScalableValue / 8;
    int64_t VLENB = ST.getRealMinVLen() / 8;
    Offset = StackOffset::getFixed(FixedValue + NumOfVReg * VLENB);
  }

  if (!isInt<32>(Offset.getFixed())) {
    report_fatal_error(
        "Frame offsets outside of the signed 32-bit range not supported");
  }

  if (!IsRVVSpill) {
    if (MI.getOpcode() == RISCV::ADDI && !isInt<12>(Offset.getFixed())) {
      // We chose to emit the canonical immediate sequence rather than folding
      // the offset into the using add under the theory that doing so doesn't
      // save dynamic instruction count and some target may fuse the canonical
      // 32 bit immediate sequence.  We still need to clear the portion of the
      // offset encoded in the immediate.
      MI.getOperand(FIOperandNum + 1).ChangeToImmediate(0);
    } else {
      // We can encode an add with 12 bit signed immediate in the immediate
      // operand of our user instruction.  As a result, the remaining
      // offset can by construction, at worst, a LUI and a ADD.
      int64_t Val = Offset.getFixed();
      int64_t Lo12 = SignExtend64<12>(Val);
      MI.getOperand(FIOperandNum + 1).ChangeToImmediate(Lo12);
      Offset = StackOffset::get((uint64_t)Val - (uint64_t)Lo12,
                                Offset.getScalable());
    }
  }

  if (Offset.getScalable() || Offset.getFixed()) {
    // Materialize the leftover adjustment. An ADDI can compute directly into
    // its own destination; other users need a fresh virtual register.
    Register DestReg;
    if (MI.getOpcode() == RISCV::ADDI)
      DestReg = MI.getOperand(0).getReg();
    else
      DestReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
    adjustReg(*II->getParent(), II, DL, DestReg, FrameReg, Offset,
              MachineInstr::NoFlags, std::nullopt);
    MI.getOperand(FIOperandNum).ChangeToRegister(DestReg, /*IsDef*/false,
                                                 /*IsImp*/false,
                                                 /*IsKill*/true);
  } else {
    MI.getOperand(FIOperandNum).ChangeToRegister(FrameReg, /*IsDef*/false,
                                                 /*IsImp*/false,
                                                 /*IsKill*/false);
  }

  // If after materializing the adjustment, we have a pointless ADDI, remove it
  if (MI.getOpcode() == RISCV::ADDI &&
      MI.getOperand(0).getReg() == MI.getOperand(1).getReg() &&
      MI.getOperand(2).getImm() == 0) {
    MI.eraseFromParent();
    return true;
  }

  // Handle spill/fill of synthetic register classes for segment operations to
  // ensure correctness in the edge case one gets spilled. There are many
  // possible optimizations here, but given the extreme rarity of such spills,
  // we prefer simplicity of implementation for now.
  switch (MI.getOpcode()) {
  case RISCV::PseudoVSPILL2_M1:
  case RISCV::PseudoVSPILL2_M2:
  case RISCV::PseudoVSPILL2_M4:
  case RISCV::PseudoVSPILL3_M1:
  case RISCV::PseudoVSPILL3_M2:
  case RISCV::PseudoVSPILL4_M1:
  case RISCV::PseudoVSPILL4_M2:
  case RISCV::PseudoVSPILL5_M1:
  case RISCV::PseudoVSPILL6_M1:
  case RISCV::PseudoVSPILL7_M1:
  case RISCV::PseudoVSPILL8_M1:
    lowerVSPILL(II);
    return true;
  case RISCV::PseudoVRELOAD2_M1:
  case RISCV::PseudoVRELOAD2_M2:
  case RISCV::PseudoVRELOAD2_M4:
  case RISCV::PseudoVRELOAD3_M1:
  case RISCV::PseudoVRELOAD3_M2:
  case RISCV::PseudoVRELOAD4_M1:
  case RISCV::PseudoVRELOAD4_M2:
  case RISCV::PseudoVRELOAD5_M1:
  case RISCV::PseudoVRELOAD6_M1:
  case RISCV::PseudoVRELOAD7_M1:
  case RISCV::PseudoVRELOAD8_M1:
    lowerVRELOAD(II);
    return true;
  }

  return false;
}
500*bdd1243dSDimitry Andric 
501*bdd1243dSDimitry Andric bool RISCVRegisterInfo::requiresVirtualBaseRegisters(
502*bdd1243dSDimitry Andric     const MachineFunction &MF) const {
503*bdd1243dSDimitry Andric   return true;
504*bdd1243dSDimitry Andric }
505*bdd1243dSDimitry Andric 
506*bdd1243dSDimitry Andric // Returns true if the instruction's frame index reference would be better
507*bdd1243dSDimitry Andric // served by a base register other than FP or SP.
508*bdd1243dSDimitry Andric // Used by LocalStackSlotAllocation pass to determine which frame index
509*bdd1243dSDimitry Andric // references it should create new base registers for.
510*bdd1243dSDimitry Andric bool RISCVRegisterInfo::needsFrameBaseReg(MachineInstr *MI,
511*bdd1243dSDimitry Andric                                           int64_t Offset) const {
512*bdd1243dSDimitry Andric   unsigned FIOperandNum = 0;
513*bdd1243dSDimitry Andric   for (; !MI->getOperand(FIOperandNum).isFI(); FIOperandNum++)
514*bdd1243dSDimitry Andric     assert(FIOperandNum < MI->getNumOperands() &&
515*bdd1243dSDimitry Andric            "Instr doesn't have FrameIndex operand");
516*bdd1243dSDimitry Andric 
517*bdd1243dSDimitry Andric   // For RISC-V, The machine instructions that include a FrameIndex operand
518*bdd1243dSDimitry Andric   // are load/store, ADDI instructions.
519*bdd1243dSDimitry Andric   unsigned MIFrm = RISCVII::getFormat(MI->getDesc().TSFlags);
520*bdd1243dSDimitry Andric   if (MIFrm != RISCVII::InstFormatI && MIFrm != RISCVII::InstFormatS)
521*bdd1243dSDimitry Andric     return false;
522*bdd1243dSDimitry Andric   // We only generate virtual base registers for loads and stores, so
523*bdd1243dSDimitry Andric   // return false for everything else.
524*bdd1243dSDimitry Andric   if (!MI->mayLoad() && !MI->mayStore())
525*bdd1243dSDimitry Andric     return false;
526*bdd1243dSDimitry Andric 
527*bdd1243dSDimitry Andric   const MachineFunction &MF = *MI->getMF();
528*bdd1243dSDimitry Andric   const MachineFrameInfo &MFI = MF.getFrameInfo();
529*bdd1243dSDimitry Andric   const RISCVFrameLowering *TFI = getFrameLowering(MF);
530*bdd1243dSDimitry Andric   const MachineRegisterInfo &MRI = MF.getRegInfo();
531*bdd1243dSDimitry Andric   unsigned CalleeSavedSize = 0;
532*bdd1243dSDimitry Andric   Offset += getFrameIndexInstrOffset(MI, FIOperandNum);
533*bdd1243dSDimitry Andric 
534*bdd1243dSDimitry Andric   // Estimate the stack size used to store callee saved registers(
535*bdd1243dSDimitry Andric   // excludes reserved registers).
536*bdd1243dSDimitry Andric   BitVector ReservedRegs = getReservedRegs(MF);
537*bdd1243dSDimitry Andric   for (const MCPhysReg *R = MRI.getCalleeSavedRegs(); MCPhysReg Reg = *R; ++R) {
538*bdd1243dSDimitry Andric     if (!ReservedRegs.test(Reg))
539*bdd1243dSDimitry Andric       CalleeSavedSize += getSpillSize(*getMinimalPhysRegClass(Reg));
540*bdd1243dSDimitry Andric   }
541*bdd1243dSDimitry Andric 
542*bdd1243dSDimitry Andric   int64_t MaxFPOffset = Offset - CalleeSavedSize;
543*bdd1243dSDimitry Andric   if (TFI->hasFP(MF) && !shouldRealignStack(MF))
544*bdd1243dSDimitry Andric     return !isFrameOffsetLegal(MI, RISCV::X8, MaxFPOffset);
545*bdd1243dSDimitry Andric 
546*bdd1243dSDimitry Andric   // Assume 128 bytes spill slots size to estimate the maximum possible
547*bdd1243dSDimitry Andric   // offset relative to the stack pointer.
548*bdd1243dSDimitry Andric   // FIXME: The 128 is copied from ARM. We should run some statistics and pick a
549*bdd1243dSDimitry Andric   // real one for RISC-V.
550*bdd1243dSDimitry Andric   int64_t MaxSPOffset = Offset + 128;
551*bdd1243dSDimitry Andric   MaxSPOffset += MFI.getLocalFrameSize();
552*bdd1243dSDimitry Andric   return !isFrameOffsetLegal(MI, RISCV::X2, MaxSPOffset);
553*bdd1243dSDimitry Andric }
554*bdd1243dSDimitry Andric 
555*bdd1243dSDimitry Andric // Determine whether a given base register plus offset immediate is
556*bdd1243dSDimitry Andric // encodable to resolve a frame index.
557*bdd1243dSDimitry Andric bool RISCVRegisterInfo::isFrameOffsetLegal(const MachineInstr *MI,
558*bdd1243dSDimitry Andric                                            Register BaseReg,
559*bdd1243dSDimitry Andric                                            int64_t Offset) const {
560*bdd1243dSDimitry Andric   unsigned FIOperandNum = 0;
561*bdd1243dSDimitry Andric   while (!MI->getOperand(FIOperandNum).isFI()) {
562*bdd1243dSDimitry Andric     FIOperandNum++;
563*bdd1243dSDimitry Andric     assert(FIOperandNum < MI->getNumOperands() &&
564*bdd1243dSDimitry Andric            "Instr does not have a FrameIndex operand!");
565*bdd1243dSDimitry Andric   }
566*bdd1243dSDimitry Andric 
567*bdd1243dSDimitry Andric   Offset += getFrameIndexInstrOffset(MI, FIOperandNum);
568*bdd1243dSDimitry Andric   return isInt<12>(Offset);
569*bdd1243dSDimitry Andric }
570*bdd1243dSDimitry Andric 
571*bdd1243dSDimitry Andric // Insert defining instruction(s) for a pointer to FrameIdx before
572*bdd1243dSDimitry Andric // insertion point I.
573*bdd1243dSDimitry Andric // Return materialized frame pointer.
574*bdd1243dSDimitry Andric Register RISCVRegisterInfo::materializeFrameBaseRegister(MachineBasicBlock *MBB,
575*bdd1243dSDimitry Andric                                                          int FrameIdx,
576*bdd1243dSDimitry Andric                                                          int64_t Offset) const {
577*bdd1243dSDimitry Andric   MachineBasicBlock::iterator MBBI = MBB->begin();
578*bdd1243dSDimitry Andric   DebugLoc DL;
579*bdd1243dSDimitry Andric   if (MBBI != MBB->end())
580*bdd1243dSDimitry Andric     DL = MBBI->getDebugLoc();
581*bdd1243dSDimitry Andric   MachineFunction *MF = MBB->getParent();
582*bdd1243dSDimitry Andric   MachineRegisterInfo &MFI = MF->getRegInfo();
583*bdd1243dSDimitry Andric   const TargetInstrInfo *TII = MF->getSubtarget().getInstrInfo();
584*bdd1243dSDimitry Andric 
585*bdd1243dSDimitry Andric   Register BaseReg = MFI.createVirtualRegister(&RISCV::GPRRegClass);
586*bdd1243dSDimitry Andric   BuildMI(*MBB, MBBI, DL, TII->get(RISCV::ADDI), BaseReg)
587*bdd1243dSDimitry Andric       .addFrameIndex(FrameIdx)
588*bdd1243dSDimitry Andric       .addImm(Offset);
589*bdd1243dSDimitry Andric   return BaseReg;
590*bdd1243dSDimitry Andric }
591*bdd1243dSDimitry Andric 
592*bdd1243dSDimitry Andric // Resolve a frame index operand of an instruction to reference the
593*bdd1243dSDimitry Andric // indicated base register plus offset instead.
594*bdd1243dSDimitry Andric void RISCVRegisterInfo::resolveFrameIndex(MachineInstr &MI, Register BaseReg,
595*bdd1243dSDimitry Andric                                           int64_t Offset) const {
596*bdd1243dSDimitry Andric   unsigned FIOperandNum = 0;
597*bdd1243dSDimitry Andric   while (!MI.getOperand(FIOperandNum).isFI()) {
598*bdd1243dSDimitry Andric     FIOperandNum++;
599*bdd1243dSDimitry Andric     assert(FIOperandNum < MI.getNumOperands() &&
600*bdd1243dSDimitry Andric            "Instr does not have a FrameIndex operand!");
601*bdd1243dSDimitry Andric   }
602*bdd1243dSDimitry Andric 
603*bdd1243dSDimitry Andric   Offset += getFrameIndexInstrOffset(&MI, FIOperandNum);
604*bdd1243dSDimitry Andric   // FrameIndex Operands are always represented as a
605*bdd1243dSDimitry Andric   // register followed by an immediate.
606*bdd1243dSDimitry Andric   MI.getOperand(FIOperandNum).ChangeToRegister(BaseReg, false);
607*bdd1243dSDimitry Andric   MI.getOperand(FIOperandNum + 1).ChangeToImmediate(Offset);
608*bdd1243dSDimitry Andric }
609*bdd1243dSDimitry Andric 
610*bdd1243dSDimitry Andric // Get the offset from the referenced frame index in the instruction,
611*bdd1243dSDimitry Andric // if there is one.
612*bdd1243dSDimitry Andric int64_t RISCVRegisterInfo::getFrameIndexInstrOffset(const MachineInstr *MI,
613*bdd1243dSDimitry Andric                                                     int Idx) const {
614*bdd1243dSDimitry Andric   assert((RISCVII::getFormat(MI->getDesc().TSFlags) == RISCVII::InstFormatI ||
615*bdd1243dSDimitry Andric           RISCVII::getFormat(MI->getDesc().TSFlags) == RISCVII::InstFormatS) &&
616*bdd1243dSDimitry Andric          "The MI must be I or S format.");
617*bdd1243dSDimitry Andric   assert(MI->getOperand(Idx).isFI() && "The Idx'th operand of MI is not a "
618*bdd1243dSDimitry Andric                                        "FrameIndex operand");
619*bdd1243dSDimitry Andric   return MI->getOperand(Idx + 1).getImm();
6200b57cec5SDimitry Andric }
6210b57cec5SDimitry Andric 
6220b57cec5SDimitry Andric Register RISCVRegisterInfo::getFrameRegister(const MachineFunction &MF) const {
6230b57cec5SDimitry Andric   const TargetFrameLowering *TFI = getFrameLowering(MF);
6240b57cec5SDimitry Andric   return TFI->hasFP(MF) ? RISCV::X8 : RISCV::X2;
6250b57cec5SDimitry Andric }
6260b57cec5SDimitry Andric 
6270b57cec5SDimitry Andric const uint32_t *
6280b57cec5SDimitry Andric RISCVRegisterInfo::getCallPreservedMask(const MachineFunction & MF,
629e8d8bef9SDimitry Andric                                         CallingConv::ID CC) const {
6300b57cec5SDimitry Andric   auto &Subtarget = MF.getSubtarget<RISCVSubtarget>();
6310b57cec5SDimitry Andric 
632e8d8bef9SDimitry Andric   if (CC == CallingConv::GHC)
633e8d8bef9SDimitry Andric     return CSR_NoRegs_RegMask;
6340b57cec5SDimitry Andric   switch (Subtarget.getTargetABI()) {
6350b57cec5SDimitry Andric   default:
6360b57cec5SDimitry Andric     llvm_unreachable("Unrecognized ABI");
6370b57cec5SDimitry Andric   case RISCVABI::ABI_ILP32:
6380b57cec5SDimitry Andric   case RISCVABI::ABI_LP64:
6390b57cec5SDimitry Andric     return CSR_ILP32_LP64_RegMask;
6400b57cec5SDimitry Andric   case RISCVABI::ABI_ILP32F:
6410b57cec5SDimitry Andric   case RISCVABI::ABI_LP64F:
6420b57cec5SDimitry Andric     return CSR_ILP32F_LP64F_RegMask;
6430b57cec5SDimitry Andric   case RISCVABI::ABI_ILP32D:
6440b57cec5SDimitry Andric   case RISCVABI::ABI_LP64D:
6450b57cec5SDimitry Andric     return CSR_ILP32D_LP64D_RegMask;
6460b57cec5SDimitry Andric   }
6470b57cec5SDimitry Andric }
648fe6060f1SDimitry Andric 
649fe6060f1SDimitry Andric const TargetRegisterClass *
650fe6060f1SDimitry Andric RISCVRegisterInfo::getLargestLegalSuperClass(const TargetRegisterClass *RC,
651fe6060f1SDimitry Andric                                              const MachineFunction &) const {
652fe6060f1SDimitry Andric   if (RC == &RISCV::VMV0RegClass)
653fe6060f1SDimitry Andric     return &RISCV::VRRegClass;
654fe6060f1SDimitry Andric   return RC;
655fe6060f1SDimitry Andric }
6564824e7fdSDimitry Andric 
6574824e7fdSDimitry Andric void RISCVRegisterInfo::getOffsetOpcodes(const StackOffset &Offset,
6584824e7fdSDimitry Andric                                          SmallVectorImpl<uint64_t> &Ops) const {
6594824e7fdSDimitry Andric   // VLENB is the length of a vector register in bytes. We use <vscale x 8 x i8>
6604824e7fdSDimitry Andric   // to represent one vector register. The dwarf offset is
6614824e7fdSDimitry Andric   // VLENB * scalable_offset / 8.
6624824e7fdSDimitry Andric   assert(Offset.getScalable() % 8 == 0 && "Invalid frame offset");
6634824e7fdSDimitry Andric 
6644824e7fdSDimitry Andric   // Add fixed-sized offset using existing DIExpression interface.
6654824e7fdSDimitry Andric   DIExpression::appendOffset(Ops, Offset.getFixed());
6664824e7fdSDimitry Andric 
6674824e7fdSDimitry Andric   unsigned VLENB = getDwarfRegNum(RISCV::VLENB, true);
6684824e7fdSDimitry Andric   int64_t VLENBSized = Offset.getScalable() / 8;
6694824e7fdSDimitry Andric   if (VLENBSized > 0) {
6704824e7fdSDimitry Andric     Ops.push_back(dwarf::DW_OP_constu);
6714824e7fdSDimitry Andric     Ops.push_back(VLENBSized);
6724824e7fdSDimitry Andric     Ops.append({dwarf::DW_OP_bregx, VLENB, 0ULL});
6734824e7fdSDimitry Andric     Ops.push_back(dwarf::DW_OP_mul);
6744824e7fdSDimitry Andric     Ops.push_back(dwarf::DW_OP_plus);
6754824e7fdSDimitry Andric   } else if (VLENBSized < 0) {
6764824e7fdSDimitry Andric     Ops.push_back(dwarf::DW_OP_constu);
6774824e7fdSDimitry Andric     Ops.push_back(-VLENBSized);
6784824e7fdSDimitry Andric     Ops.append({dwarf::DW_OP_bregx, VLENB, 0ULL});
6794824e7fdSDimitry Andric     Ops.push_back(dwarf::DW_OP_mul);
6804824e7fdSDimitry Andric     Ops.push_back(dwarf::DW_OP_minus);
6814824e7fdSDimitry Andric   }
6824824e7fdSDimitry Andric }
68304eeddc0SDimitry Andric 
68404eeddc0SDimitry Andric unsigned
68504eeddc0SDimitry Andric RISCVRegisterInfo::getRegisterCostTableIndex(const MachineFunction &MF) const {
686*bdd1243dSDimitry Andric   return MF.getSubtarget<RISCVSubtarget>().hasStdExtCOrZca() ? 1 : 0;
687*bdd1243dSDimitry Andric }
688*bdd1243dSDimitry Andric 
689*bdd1243dSDimitry Andric // Add two address hints to improve chances of being able to use a compressed
690*bdd1243dSDimitry Andric // instruction.
691*bdd1243dSDimitry Andric bool RISCVRegisterInfo::getRegAllocationHints(
692*bdd1243dSDimitry Andric     Register VirtReg, ArrayRef<MCPhysReg> Order,
693*bdd1243dSDimitry Andric     SmallVectorImpl<MCPhysReg> &Hints, const MachineFunction &MF,
694*bdd1243dSDimitry Andric     const VirtRegMap *VRM, const LiveRegMatrix *Matrix) const {
695*bdd1243dSDimitry Andric   const MachineRegisterInfo *MRI = &MF.getRegInfo();
696*bdd1243dSDimitry Andric 
697*bdd1243dSDimitry Andric   bool BaseImplRetVal = TargetRegisterInfo::getRegAllocationHints(
698*bdd1243dSDimitry Andric       VirtReg, Order, Hints, MF, VRM, Matrix);
699*bdd1243dSDimitry Andric 
700*bdd1243dSDimitry Andric   if (!VRM || DisableRegAllocHints)
701*bdd1243dSDimitry Andric     return BaseImplRetVal;
702*bdd1243dSDimitry Andric 
703*bdd1243dSDimitry Andric   // Add any two address hints after any copy hints.
704*bdd1243dSDimitry Andric   SmallSet<Register, 4> TwoAddrHints;
705*bdd1243dSDimitry Andric 
706*bdd1243dSDimitry Andric   auto tryAddHint = [&](const MachineOperand &VRRegMO, const MachineOperand &MO,
707*bdd1243dSDimitry Andric                         bool NeedGPRC) -> void {
708*bdd1243dSDimitry Andric     Register Reg = MO.getReg();
709*bdd1243dSDimitry Andric     Register PhysReg = Reg.isPhysical() ? Reg : Register(VRM->getPhys(Reg));
710*bdd1243dSDimitry Andric     if (PhysReg && (!NeedGPRC || RISCV::GPRCRegClass.contains(PhysReg))) {
711*bdd1243dSDimitry Andric       assert(!MO.getSubReg() && !VRRegMO.getSubReg() && "Unexpected subreg!");
712*bdd1243dSDimitry Andric       if (!MRI->isReserved(PhysReg) && !is_contained(Hints, PhysReg))
713*bdd1243dSDimitry Andric         TwoAddrHints.insert(PhysReg);
714*bdd1243dSDimitry Andric     }
715*bdd1243dSDimitry Andric   };
716*bdd1243dSDimitry Andric 
717*bdd1243dSDimitry Andric   // This is all of the compressible binary instructions. If an instruction
718*bdd1243dSDimitry Andric   // needs GPRC register class operands \p NeedGPRC will be set to true.
719*bdd1243dSDimitry Andric   auto isCompressible = [](const MachineInstr &MI, bool &NeedGPRC) {
720*bdd1243dSDimitry Andric     NeedGPRC = false;
721*bdd1243dSDimitry Andric     switch (MI.getOpcode()) {
722*bdd1243dSDimitry Andric     default:
723*bdd1243dSDimitry Andric       return false;
724*bdd1243dSDimitry Andric     case RISCV::AND:
725*bdd1243dSDimitry Andric     case RISCV::OR:
726*bdd1243dSDimitry Andric     case RISCV::XOR:
727*bdd1243dSDimitry Andric     case RISCV::SUB:
728*bdd1243dSDimitry Andric     case RISCV::ADDW:
729*bdd1243dSDimitry Andric     case RISCV::SUBW:
730*bdd1243dSDimitry Andric       NeedGPRC = true;
731*bdd1243dSDimitry Andric       return true;
732*bdd1243dSDimitry Andric     case RISCV::ANDI:
733*bdd1243dSDimitry Andric       NeedGPRC = true;
734*bdd1243dSDimitry Andric       return MI.getOperand(2).isImm() && isInt<6>(MI.getOperand(2).getImm());
735*bdd1243dSDimitry Andric     case RISCV::SRAI:
736*bdd1243dSDimitry Andric     case RISCV::SRLI:
737*bdd1243dSDimitry Andric       NeedGPRC = true;
738*bdd1243dSDimitry Andric       return true;
739*bdd1243dSDimitry Andric     case RISCV::ADD:
740*bdd1243dSDimitry Andric     case RISCV::SLLI:
741*bdd1243dSDimitry Andric       return true;
742*bdd1243dSDimitry Andric     case RISCV::ADDI:
743*bdd1243dSDimitry Andric     case RISCV::ADDIW:
744*bdd1243dSDimitry Andric       return MI.getOperand(2).isImm() && isInt<6>(MI.getOperand(2).getImm());
745*bdd1243dSDimitry Andric     }
746*bdd1243dSDimitry Andric   };
747*bdd1243dSDimitry Andric 
748*bdd1243dSDimitry Andric   // Returns true if this operand is compressible. For non-registers it always
749*bdd1243dSDimitry Andric   // returns true. Immediate range was already checked in isCompressible.
750*bdd1243dSDimitry Andric   // For registers, it checks if the register is a GPRC register. reg-reg
751*bdd1243dSDimitry Andric   // instructions that require GPRC need all register operands to be GPRC.
752*bdd1243dSDimitry Andric   auto isCompressibleOpnd = [&](const MachineOperand &MO) {
753*bdd1243dSDimitry Andric     if (!MO.isReg())
754*bdd1243dSDimitry Andric       return true;
755*bdd1243dSDimitry Andric     Register Reg = MO.getReg();
756*bdd1243dSDimitry Andric     Register PhysReg = Reg.isPhysical() ? Reg : Register(VRM->getPhys(Reg));
757*bdd1243dSDimitry Andric     return PhysReg && RISCV::GPRCRegClass.contains(PhysReg);
758*bdd1243dSDimitry Andric   };
759*bdd1243dSDimitry Andric 
760*bdd1243dSDimitry Andric   for (auto &MO : MRI->reg_nodbg_operands(VirtReg)) {
761*bdd1243dSDimitry Andric     const MachineInstr &MI = *MO.getParent();
762*bdd1243dSDimitry Andric     unsigned OpIdx = MI.getOperandNo(&MO);
763*bdd1243dSDimitry Andric     bool NeedGPRC;
764*bdd1243dSDimitry Andric     if (isCompressible(MI, NeedGPRC)) {
765*bdd1243dSDimitry Andric       if (OpIdx == 0 && MI.getOperand(1).isReg()) {
766*bdd1243dSDimitry Andric         if (!NeedGPRC || isCompressibleOpnd(MI.getOperand(2)))
767*bdd1243dSDimitry Andric           tryAddHint(MO, MI.getOperand(1), NeedGPRC);
768*bdd1243dSDimitry Andric         if (MI.isCommutable() && MI.getOperand(2).isReg() &&
769*bdd1243dSDimitry Andric             (!NeedGPRC || isCompressibleOpnd(MI.getOperand(1))))
770*bdd1243dSDimitry Andric           tryAddHint(MO, MI.getOperand(2), NeedGPRC);
771*bdd1243dSDimitry Andric       } else if (OpIdx == 1 &&
772*bdd1243dSDimitry Andric                  (!NeedGPRC || isCompressibleOpnd(MI.getOperand(2)))) {
773*bdd1243dSDimitry Andric         tryAddHint(MO, MI.getOperand(0), NeedGPRC);
774*bdd1243dSDimitry Andric       } else if (MI.isCommutable() && OpIdx == 2 &&
775*bdd1243dSDimitry Andric                  (!NeedGPRC || isCompressibleOpnd(MI.getOperand(1)))) {
776*bdd1243dSDimitry Andric         tryAddHint(MO, MI.getOperand(0), NeedGPRC);
777*bdd1243dSDimitry Andric       }
778*bdd1243dSDimitry Andric     }
779*bdd1243dSDimitry Andric   }
780*bdd1243dSDimitry Andric 
781*bdd1243dSDimitry Andric   for (MCPhysReg OrderReg : Order)
782*bdd1243dSDimitry Andric     if (TwoAddrHints.count(OrderReg))
783*bdd1243dSDimitry Andric       Hints.push_back(OrderReg);
784*bdd1243dSDimitry Andric 
785*bdd1243dSDimitry Andric   return BaseImplRetVal;
78604eeddc0SDimitry Andric }
787