10b57cec5SDimitry Andric //===- AArch64RegisterInfo.cpp - AArch64 Register Information -------------===//
20b57cec5SDimitry Andric //
30b57cec5SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
40b57cec5SDimitry Andric // See https://llvm.org/LICENSE.txt for license information.
50b57cec5SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
60b57cec5SDimitry Andric //
70b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
80b57cec5SDimitry Andric //
90b57cec5SDimitry Andric // This file contains the AArch64 implementation of the TargetRegisterInfo
100b57cec5SDimitry Andric // class.
110b57cec5SDimitry Andric //
120b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
130b57cec5SDimitry Andric
140b57cec5SDimitry Andric #include "AArch64RegisterInfo.h"
150b57cec5SDimitry Andric #include "AArch64FrameLowering.h"
160b57cec5SDimitry Andric #include "AArch64InstrInfo.h"
170b57cec5SDimitry Andric #include "AArch64MachineFunctionInfo.h"
180b57cec5SDimitry Andric #include "AArch64Subtarget.h"
190b57cec5SDimitry Andric #include "MCTargetDesc/AArch64AddressingModes.h"
20bdd1243dSDimitry Andric #include "MCTargetDesc/AArch64InstPrinter.h"
210b57cec5SDimitry Andric #include "llvm/ADT/BitVector.h"
2281ad6265SDimitry Andric #include "llvm/BinaryFormat/Dwarf.h"
230b57cec5SDimitry Andric #include "llvm/CodeGen/MachineFrameInfo.h"
240b57cec5SDimitry Andric #include "llvm/CodeGen/MachineInstrBuilder.h"
250b57cec5SDimitry Andric #include "llvm/CodeGen/MachineRegisterInfo.h"
260b57cec5SDimitry Andric #include "llvm/CodeGen/RegisterScavenging.h"
270b57cec5SDimitry Andric #include "llvm/CodeGen/TargetFrameLowering.h"
28e8d8bef9SDimitry Andric #include "llvm/IR/DebugInfoMetadata.h"
298bcb0991SDimitry Andric #include "llvm/IR/DiagnosticInfo.h"
308bcb0991SDimitry Andric #include "llvm/IR/Function.h"
318bcb0991SDimitry Andric #include "llvm/Support/raw_ostream.h"
320b57cec5SDimitry Andric #include "llvm/Target/TargetOptions.h"
3306c3fb27SDimitry Andric #include "llvm/TargetParser/Triple.h"
340b57cec5SDimitry Andric
350b57cec5SDimitry Andric using namespace llvm;
360b57cec5SDimitry Andric
3781ad6265SDimitry Andric #define GET_CC_REGISTER_LISTS
3881ad6265SDimitry Andric #include "AArch64GenCallingConv.inc"
390b57cec5SDimitry Andric #define GET_REGINFO_TARGET_DESC
400b57cec5SDimitry Andric #include "AArch64GenRegisterInfo.inc"
410b57cec5SDimitry Andric
AArch64RegisterInfo::AArch64RegisterInfo(const Triple &TT)
    : AArch64GenRegisterInfo(AArch64::LR), TT(TT) {
  // LR is passed as the return-address register for the generated base class;
  // also seed the LLVM-to-CodeView register number mapping.
  AArch64_MC::initLLVMToCVRegMapping(this);
}
460b57cec5SDimitry Andric
/// Return whether the register needs a CFI entry. Not all unwinders may know
/// about SVE registers, so we assume the lowest common denominator, i.e. the
/// callee-saves required by the base ABI. For the SVE registers z8-z15 only the
/// lower 64-bits (d8-d15) need to be saved. The lower 64-bits subreg is
/// returned in \p RegToUseForCFI.
bool AArch64RegisterInfo::regNeedsCFI(unsigned Reg,
                                      unsigned &RegToUseForCFI) const {
  // SVE predicate registers have no base-ABI callee-save obligation, so they
  // never get a CFI entry.
  if (AArch64::PPRRegClass.contains(Reg))
    return false;

  if (AArch64::ZPRRegClass.contains(Reg)) {
    // Describe a Z register through its D subregister, but only when that D
    // register is itself in the base AAPCS callee-save list (d8-d15). The
    // generated save list is zero-terminated, hence the sentinel loop.
    RegToUseForCFI = getSubReg(Reg, AArch64::dsub);
    for (int I = 0; CSR_AArch64_AAPCS_SaveList[I]; ++I) {
      if (CSR_AArch64_AAPCS_SaveList[I] == RegToUseForCFI)
        return true;
    }
    return false;
  }

  // Any other register is described directly.
  RegToUseForCFI = Reg;
  return true;
}
6975b4d546SDimitry Andric
700b57cec5SDimitry Andric const MCPhysReg *
getCalleeSavedRegs(const MachineFunction * MF) const710b57cec5SDimitry Andric AArch64RegisterInfo::getCalleeSavedRegs(const MachineFunction *MF) const {
720b57cec5SDimitry Andric assert(MF && "Invalid MachineFunction pointer.");
735ffd83dbSDimitry Andric
740b57cec5SDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::GHC)
750b57cec5SDimitry Andric // GHC set of callee saved regs is empty as all those regs are
760b57cec5SDimitry Andric // used for passing STG regs around
770b57cec5SDimitry Andric return CSR_AArch64_NoRegs_SaveList;
78*0fca6ea1SDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::PreserveNone)
79*0fca6ea1SDimitry Andric return CSR_AArch64_NoneRegs_SaveList;
800b57cec5SDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::AnyReg)
810b57cec5SDimitry Andric return CSR_AArch64_AllRegs_SaveList;
825ffd83dbSDimitry Andric
837a6dacacSDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::ARM64EC_Thunk_X64)
847a6dacacSDimitry Andric return CSR_Win_AArch64_Arm64EC_Thunk_SaveList;
857a6dacacSDimitry Andric
865ffd83dbSDimitry Andric // Darwin has its own CSR_AArch64_AAPCS_SaveList, which means most CSR save
875ffd83dbSDimitry Andric // lists depending on that will need to have their Darwin variant as well.
885ffd83dbSDimitry Andric if (MF->getSubtarget<AArch64Subtarget>().isTargetDarwin())
895ffd83dbSDimitry Andric return getDarwinCalleeSavedRegs(MF);
905ffd83dbSDimitry Andric
915ffd83dbSDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::CFGuard_Check)
925ffd83dbSDimitry Andric return CSR_Win_AArch64_CFGuard_Check_SaveList;
9306c3fb27SDimitry Andric if (MF->getSubtarget<AArch64Subtarget>().isTargetWindows()) {
9406c3fb27SDimitry Andric if (MF->getSubtarget<AArch64Subtarget>().getTargetLowering()
9506c3fb27SDimitry Andric ->supportSwiftError() &&
9606c3fb27SDimitry Andric MF->getFunction().getAttributes().hasAttrSomewhere(
9706c3fb27SDimitry Andric Attribute::SwiftError))
9806c3fb27SDimitry Andric return CSR_Win_AArch64_AAPCS_SwiftError_SaveList;
9906c3fb27SDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::SwiftTail)
10006c3fb27SDimitry Andric return CSR_Win_AArch64_AAPCS_SwiftTail_SaveList;
1015ffd83dbSDimitry Andric return CSR_Win_AArch64_AAPCS_SaveList;
10206c3fb27SDimitry Andric }
1030b57cec5SDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::AArch64_VectorCall)
1040b57cec5SDimitry Andric return CSR_AArch64_AAVPCS_SaveList;
105480093f4SDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::AArch64_SVE_VectorCall)
106480093f4SDimitry Andric return CSR_AArch64_SVE_AAPCS_SaveList;
107bdd1243dSDimitry Andric if (MF->getFunction().getCallingConv() ==
108bdd1243dSDimitry Andric CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X0)
109bdd1243dSDimitry Andric report_fatal_error(
110*0fca6ea1SDimitry Andric "Calling convention "
111*0fca6ea1SDimitry Andric "AArch64_SME_ABI_Support_Routines_PreserveMost_From_X0 is only "
112*0fca6ea1SDimitry Andric "supported to improve calls to SME ACLE save/restore/disable-za "
113bdd1243dSDimitry Andric "functions, and is not intended to be used beyond that scope.");
114bdd1243dSDimitry Andric if (MF->getFunction().getCallingConv() ==
115*0fca6ea1SDimitry Andric CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X1)
116*0fca6ea1SDimitry Andric report_fatal_error(
117*0fca6ea1SDimitry Andric "Calling convention "
118*0fca6ea1SDimitry Andric "AArch64_SME_ABI_Support_Routines_PreserveMost_From_X1 is "
119*0fca6ea1SDimitry Andric "only supported to improve calls to SME ACLE __arm_get_current_vg "
120*0fca6ea1SDimitry Andric "function, and is not intended to be used beyond that scope.");
121*0fca6ea1SDimitry Andric if (MF->getFunction().getCallingConv() ==
122bdd1243dSDimitry Andric CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X2)
123bdd1243dSDimitry Andric report_fatal_error(
124*0fca6ea1SDimitry Andric "Calling convention "
125*0fca6ea1SDimitry Andric "AArch64_SME_ABI_Support_Routines_PreserveMost_From_X2 is "
126bdd1243dSDimitry Andric "only supported to improve calls to SME ACLE __arm_sme_state "
127bdd1243dSDimitry Andric "and is not intended to be used beyond that scope.");
1280b57cec5SDimitry Andric if (MF->getSubtarget<AArch64Subtarget>().getTargetLowering()
1290b57cec5SDimitry Andric ->supportSwiftError() &&
1300b57cec5SDimitry Andric MF->getFunction().getAttributes().hasAttrSomewhere(
1310b57cec5SDimitry Andric Attribute::SwiftError))
1320b57cec5SDimitry Andric return CSR_AArch64_AAPCS_SwiftError_SaveList;
133fe6060f1SDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::SwiftTail)
134fe6060f1SDimitry Andric return CSR_AArch64_AAPCS_SwiftTail_SaveList;
1350b57cec5SDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::PreserveMost)
1360b57cec5SDimitry Andric return CSR_AArch64_RT_MostRegs_SaveList;
13706c3fb27SDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::PreserveAll)
13806c3fb27SDimitry Andric return CSR_AArch64_RT_AllRegs_SaveList;
1395ffd83dbSDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::Win64)
1405ffd83dbSDimitry Andric // This is for OSes other than Windows; Windows is a separate case further
1415ffd83dbSDimitry Andric // above.
1425ffd83dbSDimitry Andric return CSR_AArch64_AAPCS_X18_SaveList;
14381ad6265SDimitry Andric if (MF->getInfo<AArch64FunctionInfo>()->isSVECC())
144979e22ffSDimitry Andric return CSR_AArch64_SVE_AAPCS_SaveList;
1450b57cec5SDimitry Andric return CSR_AArch64_AAPCS_SaveList;
1460b57cec5SDimitry Andric }
1470b57cec5SDimitry Andric
1485ffd83dbSDimitry Andric const MCPhysReg *
getDarwinCalleeSavedRegs(const MachineFunction * MF) const1495ffd83dbSDimitry Andric AArch64RegisterInfo::getDarwinCalleeSavedRegs(const MachineFunction *MF) const {
1505ffd83dbSDimitry Andric assert(MF && "Invalid MachineFunction pointer.");
1515ffd83dbSDimitry Andric assert(MF->getSubtarget<AArch64Subtarget>().isTargetDarwin() &&
1525ffd83dbSDimitry Andric "Invalid subtarget for getDarwinCalleeSavedRegs");
1535ffd83dbSDimitry Andric
1545ffd83dbSDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::CFGuard_Check)
1555ffd83dbSDimitry Andric report_fatal_error(
1565ffd83dbSDimitry Andric "Calling convention CFGuard_Check is unsupported on Darwin.");
1575ffd83dbSDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::AArch64_VectorCall)
1585ffd83dbSDimitry Andric return CSR_Darwin_AArch64_AAVPCS_SaveList;
1595ffd83dbSDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::AArch64_SVE_VectorCall)
1605ffd83dbSDimitry Andric report_fatal_error(
1615ffd83dbSDimitry Andric "Calling convention SVE_VectorCall is unsupported on Darwin.");
162bdd1243dSDimitry Andric if (MF->getFunction().getCallingConv() ==
163bdd1243dSDimitry Andric CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X0)
164bdd1243dSDimitry Andric report_fatal_error(
165*0fca6ea1SDimitry Andric "Calling convention "
166*0fca6ea1SDimitry Andric "AArch64_SME_ABI_Support_Routines_PreserveMost_From_X0 is "
167bdd1243dSDimitry Andric "only supported to improve calls to SME ACLE save/restore/disable-za "
168bdd1243dSDimitry Andric "functions, and is not intended to be used beyond that scope.");
169bdd1243dSDimitry Andric if (MF->getFunction().getCallingConv() ==
170*0fca6ea1SDimitry Andric CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X1)
171*0fca6ea1SDimitry Andric report_fatal_error(
172*0fca6ea1SDimitry Andric "Calling convention "
173*0fca6ea1SDimitry Andric "AArch64_SME_ABI_Support_Routines_PreserveMost_From_X1 is "
174*0fca6ea1SDimitry Andric "only supported to improve calls to SME ACLE __arm_get_current_vg "
175*0fca6ea1SDimitry Andric "function, and is not intended to be used beyond that scope.");
176*0fca6ea1SDimitry Andric if (MF->getFunction().getCallingConv() ==
177bdd1243dSDimitry Andric CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X2)
178bdd1243dSDimitry Andric report_fatal_error(
179*0fca6ea1SDimitry Andric "Calling convention "
180*0fca6ea1SDimitry Andric "AArch64_SME_ABI_Support_Routines_PreserveMost_From_X2 is "
181bdd1243dSDimitry Andric "only supported to improve calls to SME ACLE __arm_sme_state "
182bdd1243dSDimitry Andric "and is not intended to be used beyond that scope.");
1835ffd83dbSDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::CXX_FAST_TLS)
1845ffd83dbSDimitry Andric return MF->getInfo<AArch64FunctionInfo>()->isSplitCSR()
1855ffd83dbSDimitry Andric ? CSR_Darwin_AArch64_CXX_TLS_PE_SaveList
1865ffd83dbSDimitry Andric : CSR_Darwin_AArch64_CXX_TLS_SaveList;
1875ffd83dbSDimitry Andric if (MF->getSubtarget<AArch64Subtarget>().getTargetLowering()
1885ffd83dbSDimitry Andric ->supportSwiftError() &&
1895ffd83dbSDimitry Andric MF->getFunction().getAttributes().hasAttrSomewhere(
1905ffd83dbSDimitry Andric Attribute::SwiftError))
1915ffd83dbSDimitry Andric return CSR_Darwin_AArch64_AAPCS_SwiftError_SaveList;
192fe6060f1SDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::SwiftTail)
193fe6060f1SDimitry Andric return CSR_Darwin_AArch64_AAPCS_SwiftTail_SaveList;
1945ffd83dbSDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::PreserveMost)
1955ffd83dbSDimitry Andric return CSR_Darwin_AArch64_RT_MostRegs_SaveList;
19606c3fb27SDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::PreserveAll)
19706c3fb27SDimitry Andric return CSR_Darwin_AArch64_RT_AllRegs_SaveList;
198bdd1243dSDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::Win64)
199bdd1243dSDimitry Andric return CSR_Darwin_AArch64_AAPCS_Win64_SaveList;
2005ffd83dbSDimitry Andric return CSR_Darwin_AArch64_AAPCS_SaveList;
2015ffd83dbSDimitry Andric }
2025ffd83dbSDimitry Andric
getCalleeSavedRegsViaCopy(const MachineFunction * MF) const2030b57cec5SDimitry Andric const MCPhysReg *AArch64RegisterInfo::getCalleeSavedRegsViaCopy(
2040b57cec5SDimitry Andric const MachineFunction *MF) const {
2050b57cec5SDimitry Andric assert(MF && "Invalid MachineFunction pointer.");
2060b57cec5SDimitry Andric if (MF->getFunction().getCallingConv() == CallingConv::CXX_FAST_TLS &&
2070b57cec5SDimitry Andric MF->getInfo<AArch64FunctionInfo>()->isSplitCSR())
2085ffd83dbSDimitry Andric return CSR_Darwin_AArch64_CXX_TLS_ViaCopy_SaveList;
2090b57cec5SDimitry Andric return nullptr;
2100b57cec5SDimitry Andric }
2110b57cec5SDimitry Andric
UpdateCustomCalleeSavedRegs(MachineFunction & MF) const2120b57cec5SDimitry Andric void AArch64RegisterInfo::UpdateCustomCalleeSavedRegs(
2130b57cec5SDimitry Andric MachineFunction &MF) const {
2140b57cec5SDimitry Andric const MCPhysReg *CSRs = getCalleeSavedRegs(&MF);
2150b57cec5SDimitry Andric SmallVector<MCPhysReg, 32> UpdatedCSRs;
2160b57cec5SDimitry Andric for (const MCPhysReg *I = CSRs; *I; ++I)
2170b57cec5SDimitry Andric UpdatedCSRs.push_back(*I);
2180b57cec5SDimitry Andric
2190b57cec5SDimitry Andric for (size_t i = 0; i < AArch64::GPR64commonRegClass.getNumRegs(); ++i) {
2200b57cec5SDimitry Andric if (MF.getSubtarget<AArch64Subtarget>().isXRegCustomCalleeSaved(i)) {
2210b57cec5SDimitry Andric UpdatedCSRs.push_back(AArch64::GPR64commonRegClass.getRegister(i));
2220b57cec5SDimitry Andric }
2230b57cec5SDimitry Andric }
2240b57cec5SDimitry Andric // Register lists are zero-terminated.
2250b57cec5SDimitry Andric UpdatedCSRs.push_back(0);
2260b57cec5SDimitry Andric MF.getRegInfo().setCalleeSavedRegs(UpdatedCSRs);
2270b57cec5SDimitry Andric }
2280b57cec5SDimitry Andric
2290b57cec5SDimitry Andric const TargetRegisterClass *
getSubClassWithSubReg(const TargetRegisterClass * RC,unsigned Idx) const2300b57cec5SDimitry Andric AArch64RegisterInfo::getSubClassWithSubReg(const TargetRegisterClass *RC,
2310b57cec5SDimitry Andric unsigned Idx) const {
2320b57cec5SDimitry Andric // edge case for GPR/FPR register classes
2330b57cec5SDimitry Andric if (RC == &AArch64::GPR32allRegClass && Idx == AArch64::hsub)
2340b57cec5SDimitry Andric return &AArch64::FPR32RegClass;
2350b57cec5SDimitry Andric else if (RC == &AArch64::GPR64allRegClass && Idx == AArch64::hsub)
2360b57cec5SDimitry Andric return &AArch64::FPR64RegClass;
2370b57cec5SDimitry Andric
2380b57cec5SDimitry Andric // Forward to TableGen's default version.
2390b57cec5SDimitry Andric return AArch64GenRegisterInfo::getSubClassWithSubReg(RC, Idx);
2400b57cec5SDimitry Andric }
2410b57cec5SDimitry Andric
/// Darwin variant of getCallPreservedMask: return the regmask preserved
/// across a call with convention \p CC. Conventions Darwin cannot support
/// are rejected with a fatal error. Check order mirrors
/// getDarwinCalleeSavedRegs.
const uint32_t *
AArch64RegisterInfo::getDarwinCallPreservedMask(const MachineFunction &MF,
                                                CallingConv::ID CC) const {
  assert(MF.getSubtarget<AArch64Subtarget>().isTargetDarwin() &&
         "Invalid subtarget for getDarwinCallPreservedMask");

  if (CC == CallingConv::CXX_FAST_TLS)
    return CSR_Darwin_AArch64_CXX_TLS_RegMask;
  if (CC == CallingConv::AArch64_VectorCall)
    return CSR_Darwin_AArch64_AAVPCS_RegMask;
  if (CC == CallingConv::AArch64_SVE_VectorCall)
    report_fatal_error(
        "Calling convention SVE_VectorCall is unsupported on Darwin.");
  // The SME ABI support-routine masks have no Darwin-specific variant; the
  // generic masks are used as-is.
  if (CC == CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X0)
    return CSR_AArch64_SME_ABI_Support_Routines_PreserveMost_From_X0_RegMask;
  if (CC == CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X1)
    return CSR_AArch64_SME_ABI_Support_Routines_PreserveMost_From_X1_RegMask;
  if (CC == CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X2)
    return CSR_AArch64_SME_ABI_Support_Routines_PreserveMost_From_X2_RegMask;
  if (CC == CallingConv::CFGuard_Check)
    report_fatal_error(
        "Calling convention CFGuard_Check is unsupported on Darwin.");
  // Note: the SwiftError check keys off the *caller's* function attributes,
  // not the callee's convention.
  if (MF.getSubtarget<AArch64Subtarget>()
          .getTargetLowering()
          ->supportSwiftError() &&
      MF.getFunction().getAttributes().hasAttrSomewhere(Attribute::SwiftError))
    return CSR_Darwin_AArch64_AAPCS_SwiftError_RegMask;
  if (CC == CallingConv::SwiftTail)
    return CSR_Darwin_AArch64_AAPCS_SwiftTail_RegMask;
  if (CC == CallingConv::PreserveMost)
    return CSR_Darwin_AArch64_RT_MostRegs_RegMask;
  if (CC == CallingConv::PreserveAll)
    return CSR_Darwin_AArch64_RT_AllRegs_RegMask;
  return CSR_Darwin_AArch64_AAPCS_RegMask;
}
2775ffd83dbSDimitry Andric
/// Return the regmask preserved across a call with convention \p CC made
/// from \p MF. When the caller uses ShadowCallStack, the _SCS variants
/// (which additionally preserve x18) are returned where they exist.
const uint32_t *
AArch64RegisterInfo::getCallPreservedMask(const MachineFunction &MF,
                                          CallingConv::ID CC) const {
  bool SCS = MF.getFunction().hasFnAttribute(Attribute::ShadowCallStack);
  if (CC == CallingConv::GHC)
    // This is academic because all GHC calls are (supposed to be) tail calls
    return SCS ? CSR_AArch64_NoRegs_SCS_RegMask : CSR_AArch64_NoRegs_RegMask;
  if (CC == CallingConv::PreserveNone)
    return SCS ? CSR_AArch64_NoneRegs_SCS_RegMask
               : CSR_AArch64_NoneRegs_RegMask;
  if (CC == CallingConv::AnyReg)
    return SCS ? CSR_AArch64_AllRegs_SCS_RegMask : CSR_AArch64_AllRegs_RegMask;

  // All the following calling conventions are handled differently on Darwin.
  if (MF.getSubtarget<AArch64Subtarget>().isTargetDarwin()) {
    if (SCS)
      report_fatal_error("ShadowCallStack attribute not supported on Darwin.");
    return getDarwinCallPreservedMask(MF, CC);
  }

  if (CC == CallingConv::AArch64_VectorCall)
    return SCS ? CSR_AArch64_AAVPCS_SCS_RegMask : CSR_AArch64_AAVPCS_RegMask;
  if (CC == CallingConv::AArch64_SVE_VectorCall)
    return SCS ? CSR_AArch64_SVE_AAPCS_SCS_RegMask
               : CSR_AArch64_SVE_AAPCS_RegMask;
  // The SME ABI support-routine masks have no SCS variants.
  if (CC == CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X0)
    return CSR_AArch64_SME_ABI_Support_Routines_PreserveMost_From_X0_RegMask;
  if (CC == CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X1)
    return CSR_AArch64_SME_ABI_Support_Routines_PreserveMost_From_X1_RegMask;
  if (CC == CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X2)
    return CSR_AArch64_SME_ABI_Support_Routines_PreserveMost_From_X2_RegMask;
  if (CC == CallingConv::CFGuard_Check)
    return CSR_Win_AArch64_CFGuard_Check_RegMask;
  // Note: keyed off the caller's SwiftError attribute, not the callee's CC.
  if (MF.getSubtarget<AArch64Subtarget>().getTargetLowering()
          ->supportSwiftError() &&
      MF.getFunction().getAttributes().hasAttrSomewhere(Attribute::SwiftError))
    return SCS ? CSR_AArch64_AAPCS_SwiftError_SCS_RegMask
               : CSR_AArch64_AAPCS_SwiftError_RegMask;
  if (CC == CallingConv::SwiftTail) {
    if (SCS)
      report_fatal_error("ShadowCallStack attribute not supported with swifttail");
    return CSR_AArch64_AAPCS_SwiftTail_RegMask;
  }
  if (CC == CallingConv::PreserveMost)
    return SCS ? CSR_AArch64_RT_MostRegs_SCS_RegMask
               : CSR_AArch64_RT_MostRegs_RegMask;
  if (CC == CallingConv::PreserveAll)
    return SCS ? CSR_AArch64_RT_AllRegs_SCS_RegMask
               : CSR_AArch64_RT_AllRegs_RegMask;

  return SCS ? CSR_AArch64_AAPCS_SCS_RegMask : CSR_AArch64_AAPCS_RegMask;
}
3300b57cec5SDimitry Andric
getCustomEHPadPreservedMask(const MachineFunction & MF) const331e8d8bef9SDimitry Andric const uint32_t *AArch64RegisterInfo::getCustomEHPadPreservedMask(
332e8d8bef9SDimitry Andric const MachineFunction &MF) const {
333e8d8bef9SDimitry Andric if (MF.getSubtarget<AArch64Subtarget>().isTargetLinux())
334e8d8bef9SDimitry Andric return CSR_AArch64_AAPCS_RegMask;
335e8d8bef9SDimitry Andric
336e8d8bef9SDimitry Andric return nullptr;
337e8d8bef9SDimitry Andric }
338e8d8bef9SDimitry Andric
getTLSCallPreservedMask() const3390b57cec5SDimitry Andric const uint32_t *AArch64RegisterInfo::getTLSCallPreservedMask() const {
3400b57cec5SDimitry Andric if (TT.isOSDarwin())
3415ffd83dbSDimitry Andric return CSR_Darwin_AArch64_TLS_RegMask;
3420b57cec5SDimitry Andric
3430b57cec5SDimitry Andric assert(TT.isOSBinFormatELF() && "Invalid target");
3440b57cec5SDimitry Andric return CSR_AArch64_TLS_ELF_RegMask;
3450b57cec5SDimitry Andric }
3460b57cec5SDimitry Andric
UpdateCustomCallPreservedMask(MachineFunction & MF,const uint32_t ** Mask) const3470b57cec5SDimitry Andric void AArch64RegisterInfo::UpdateCustomCallPreservedMask(MachineFunction &MF,
3480b57cec5SDimitry Andric const uint32_t **Mask) const {
3490b57cec5SDimitry Andric uint32_t *UpdatedMask = MF.allocateRegMask();
3500b57cec5SDimitry Andric unsigned RegMaskSize = MachineOperand::getRegMaskSize(getNumRegs());
3510b57cec5SDimitry Andric memcpy(UpdatedMask, *Mask, sizeof(UpdatedMask[0]) * RegMaskSize);
3520b57cec5SDimitry Andric
3530b57cec5SDimitry Andric for (size_t i = 0; i < AArch64::GPR64commonRegClass.getNumRegs(); ++i) {
3540b57cec5SDimitry Andric if (MF.getSubtarget<AArch64Subtarget>().isXRegCustomCalleeSaved(i)) {
35506c3fb27SDimitry Andric for (MCPhysReg SubReg :
35606c3fb27SDimitry Andric subregs_inclusive(AArch64::GPR64commonRegClass.getRegister(i))) {
3570b57cec5SDimitry Andric // See TargetRegisterInfo::getCallPreservedMask for how to interpret the
3580b57cec5SDimitry Andric // register mask.
35906c3fb27SDimitry Andric UpdatedMask[SubReg / 32] |= 1u << (SubReg % 32);
3600b57cec5SDimitry Andric }
3610b57cec5SDimitry Andric }
3620b57cec5SDimitry Andric }
3630b57cec5SDimitry Andric *Mask = UpdatedMask;
3640b57cec5SDimitry Andric }
3650b57cec5SDimitry Andric
/// Return the regmask preserved across SMSTART/SMSTOP streaming-mode
/// transition calls.
const uint32_t *AArch64RegisterInfo::getSMStartStopCallPreservedMask() const {
  return CSR_AArch64_SMStartStop_RegMask;
}
369bdd1243dSDimitry Andric
370bdd1243dSDimitry Andric const uint32_t *
SMEABISupportRoutinesCallPreservedMaskFromX0() const371bdd1243dSDimitry Andric AArch64RegisterInfo::SMEABISupportRoutinesCallPreservedMaskFromX0() const {
372bdd1243dSDimitry Andric return CSR_AArch64_SME_ABI_Support_Routines_PreserveMost_From_X0_RegMask;
373bdd1243dSDimitry Andric }
374bdd1243dSDimitry Andric
/// Return a regmask that preserves no registers at all.
const uint32_t *AArch64RegisterInfo::getNoPreservedMask() const {
  return CSR_AArch64_NoRegs_RegMask;
}
3780b57cec5SDimitry Andric
3790b57cec5SDimitry Andric const uint32_t *
getThisReturnPreservedMask(const MachineFunction & MF,CallingConv::ID CC) const3800b57cec5SDimitry Andric AArch64RegisterInfo::getThisReturnPreservedMask(const MachineFunction &MF,
3810b57cec5SDimitry Andric CallingConv::ID CC) const {
3820b57cec5SDimitry Andric // This should return a register mask that is the same as that returned by
3830b57cec5SDimitry Andric // getCallPreservedMask but that additionally preserves the register used for
3840b57cec5SDimitry Andric // the first i64 argument (which must also be the register used to return a
3850b57cec5SDimitry Andric // single i64 return value)
3860b57cec5SDimitry Andric //
3870b57cec5SDimitry Andric // In case that the calling convention does not use the same register for
3880b57cec5SDimitry Andric // both, the function should return NULL (does not currently apply)
3890b57cec5SDimitry Andric assert(CC != CallingConv::GHC && "should not be GHC calling convention.");
3905ffd83dbSDimitry Andric if (MF.getSubtarget<AArch64Subtarget>().isTargetDarwin())
3915ffd83dbSDimitry Andric return CSR_Darwin_AArch64_AAPCS_ThisReturn_RegMask;
3920b57cec5SDimitry Andric return CSR_AArch64_AAPCS_ThisReturn_RegMask;
3930b57cec5SDimitry Andric }
3940b57cec5SDimitry Andric
/// Return the regmask preserved across a Windows stack-probe (__chkstk) call.
const uint32_t *AArch64RegisterInfo::getWindowsStackProbePreservedMask() const {
  return CSR_AArch64_StackProbe_Windows_RegMask;
}
3980b57cec5SDimitry Andric
399bdd1243dSDimitry Andric std::optional<std::string>
explainReservedReg(const MachineFunction & MF,MCRegister PhysReg) const400bdd1243dSDimitry Andric AArch64RegisterInfo::explainReservedReg(const MachineFunction &MF,
401bdd1243dSDimitry Andric MCRegister PhysReg) const {
402bdd1243dSDimitry Andric if (hasBasePointer(MF) && MCRegisterInfo::regsOverlap(PhysReg, AArch64::X19))
403bdd1243dSDimitry Andric return std::string("X19 is used as the frame base pointer register.");
404bdd1243dSDimitry Andric
405bdd1243dSDimitry Andric if (MF.getSubtarget<AArch64Subtarget>().isWindowsArm64EC()) {
406bdd1243dSDimitry Andric bool warn = false;
407bdd1243dSDimitry Andric if (MCRegisterInfo::regsOverlap(PhysReg, AArch64::X13) ||
408bdd1243dSDimitry Andric MCRegisterInfo::regsOverlap(PhysReg, AArch64::X14) ||
409bdd1243dSDimitry Andric MCRegisterInfo::regsOverlap(PhysReg, AArch64::X23) ||
410bdd1243dSDimitry Andric MCRegisterInfo::regsOverlap(PhysReg, AArch64::X24) ||
411bdd1243dSDimitry Andric MCRegisterInfo::regsOverlap(PhysReg, AArch64::X28))
412bdd1243dSDimitry Andric warn = true;
413bdd1243dSDimitry Andric
414bdd1243dSDimitry Andric for (unsigned i = AArch64::B16; i <= AArch64::B31; ++i)
415bdd1243dSDimitry Andric if (MCRegisterInfo::regsOverlap(PhysReg, i))
416bdd1243dSDimitry Andric warn = true;
417bdd1243dSDimitry Andric
418bdd1243dSDimitry Andric if (warn)
419bdd1243dSDimitry Andric return std::string(AArch64InstPrinter::getRegisterName(PhysReg)) +
420bdd1243dSDimitry Andric " is clobbered by asynchronous signals when using Arm64EC.";
421bdd1243dSDimitry Andric }
422bdd1243dSDimitry Andric
423bdd1243dSDimitry Andric return {};
424bdd1243dSDimitry Andric }
425bdd1243dSDimitry Andric
/// Compute the set of registers that may never be allocated, regardless of
/// options that merely discourage use. Super-registers of each reserved
/// register are marked too (markSuperRegs), so reserving W13 also reserves
/// X13, etc.
BitVector
AArch64RegisterInfo::getStrictlyReservedRegs(const MachineFunction &MF) const {
  const AArch64FrameLowering *TFI = getFrameLowering(MF);

  // FIXME: avoid re-calculating this every time.
  BitVector Reserved(getNumRegs());
  // Stack pointer and zero register are never allocatable.
  markSuperRegs(Reserved, AArch64::WSP);
  markSuperRegs(Reserved, AArch64::WZR);

  // The frame pointer (W29/FP) is reserved whenever a frame pointer is in
  // use; Darwin always reserves it.
  if (TFI->hasFP(MF) || TT.isOSDarwin())
    markSuperRegs(Reserved, AArch64::W29);

  if (MF.getSubtarget<AArch64Subtarget>().isWindowsArm64EC()) {
    // x13, x14, x23, x24, x28, and v16-v31 are clobbered by asynchronous
    // signals, so we can't ever use them.
    markSuperRegs(Reserved, AArch64::W13);
    markSuperRegs(Reserved, AArch64::W14);
    markSuperRegs(Reserved, AArch64::W23);
    markSuperRegs(Reserved, AArch64::W24);
    markSuperRegs(Reserved, AArch64::W28);
    for (unsigned i = AArch64::B16; i <= AArch64::B31; ++i)
      markSuperRegs(Reserved, i);
  }

  // Registers reserved via subtarget options (e.g. -ffixed-xN).
  for (size_t i = 0; i < AArch64::GPR32commonRegClass.getNumRegs(); ++i) {
    if (MF.getSubtarget<AArch64Subtarget>().isXRegisterReserved(i))
      markSuperRegs(Reserved, AArch64::GPR32commonRegClass.getRegister(i));
  }

  // The base pointer, when needed, is always X19.
  if (hasBasePointer(MF))
    markSuperRegs(Reserved, AArch64::W19);

  // SLH uses register W16/X16 as the taint register.
  if (MF.getFunction().hasFnAttribute(Attribute::SpeculativeLoadHardening))
    markSuperRegs(Reserved, AArch64::W16);

  // FFR is modelled as global state that cannot be allocated.
  if (MF.getSubtarget<AArch64Subtarget>().hasSVE())
    Reserved.set(AArch64::FFR);

  // SME tiles are not allocatable.
  if (MF.getSubtarget<AArch64Subtarget>().hasSME()) {
    for (MCPhysReg SubReg : subregs_inclusive(AArch64::ZA))
      Reserved.set(SubReg);
  }

  // VG cannot be allocated
  Reserved.set(AArch64::VG);

  // The SME2 ZT0 register (and any subregisters) is not allocatable either.
  if (MF.getSubtarget<AArch64Subtarget>().hasSME2()) {
    for (MCSubRegIterator SubReg(AArch64::ZT0, this, /*self=*/true);
         SubReg.isValid(); ++SubReg)
      Reserved.set(*SubReg);
  }

  // Floating-point control and status registers are system state, not
  // allocatable values.
  markSuperRegs(Reserved, AArch64::FPCR);
  markSuperRegs(Reserved, AArch64::FPSR);

  // The GRAAL convention pins X27/X28 for its own use.
  if (MF.getFunction().getCallingConv() == CallingConv::GRAAL) {
    markSuperRegs(Reserved, AArch64::X27);
    markSuperRegs(Reserved, AArch64::X28);
    markSuperRegs(Reserved, AArch64::W27);
    markSuperRegs(Reserved, AArch64::W28);
  }

  assert(checkAllSuperRegsMarked(Reserved));
  return Reserved;
}
494bdd1243dSDimitry Andric
495bdd1243dSDimitry Andric BitVector
getReservedRegs(const MachineFunction & MF) const496bdd1243dSDimitry Andric AArch64RegisterInfo::getReservedRegs(const MachineFunction &MF) const {
497bdd1243dSDimitry Andric BitVector Reserved = getStrictlyReservedRegs(MF);
498bdd1243dSDimitry Andric
499bdd1243dSDimitry Andric for (size_t i = 0; i < AArch64::GPR32commonRegClass.getNumRegs(); ++i) {
500bdd1243dSDimitry Andric if (MF.getSubtarget<AArch64Subtarget>().isXRegisterReservedForRA(i))
501bdd1243dSDimitry Andric markSuperRegs(Reserved, AArch64::GPR32commonRegClass.getRegister(i));
502bdd1243dSDimitry Andric }
503bdd1243dSDimitry Andric
504*0fca6ea1SDimitry Andric if (MF.getSubtarget<AArch64Subtarget>().isLRReservedForRA()) {
505*0fca6ea1SDimitry Andric // In order to prevent the register allocator from using LR, we need to
506*0fca6ea1SDimitry Andric // mark it as reserved. However we don't want to keep it reserved throughout
507*0fca6ea1SDimitry Andric // the pipeline since it prevents other infrastructure from reasoning about
508*0fca6ea1SDimitry Andric // it's liveness. We use the NoVRegs property instead of IsSSA because
509*0fca6ea1SDimitry Andric // IsSSA is removed before VirtRegRewriter runs.
510*0fca6ea1SDimitry Andric if (!MF.getProperties().hasProperty(
511*0fca6ea1SDimitry Andric MachineFunctionProperties::Property::NoVRegs))
512*0fca6ea1SDimitry Andric markSuperRegs(Reserved, AArch64::LR);
513*0fca6ea1SDimitry Andric }
514*0fca6ea1SDimitry Andric
5150b57cec5SDimitry Andric assert(checkAllSuperRegsMarked(Reserved));
5160b57cec5SDimitry Andric return Reserved;
5170b57cec5SDimitry Andric }
5180b57cec5SDimitry Andric
isReservedReg(const MachineFunction & MF,MCRegister Reg) const5190b57cec5SDimitry Andric bool AArch64RegisterInfo::isReservedReg(const MachineFunction &MF,
5205ffd83dbSDimitry Andric MCRegister Reg) const {
5210b57cec5SDimitry Andric return getReservedRegs(MF)[Reg];
5220b57cec5SDimitry Andric }
5230b57cec5SDimitry Andric
isStrictlyReservedReg(const MachineFunction & MF,MCRegister Reg) const524bdd1243dSDimitry Andric bool AArch64RegisterInfo::isStrictlyReservedReg(const MachineFunction &MF,
525bdd1243dSDimitry Andric MCRegister Reg) const {
526bdd1243dSDimitry Andric return getStrictlyReservedRegs(MF)[Reg];
527bdd1243dSDimitry Andric }
528bdd1243dSDimitry Andric
isAnyArgRegReserved(const MachineFunction & MF) const5290b57cec5SDimitry Andric bool AArch64RegisterInfo::isAnyArgRegReserved(const MachineFunction &MF) const {
530e8d8bef9SDimitry Andric return llvm::any_of(*AArch64::GPR64argRegClass.MC, [this, &MF](MCPhysReg r) {
531bdd1243dSDimitry Andric return isStrictlyReservedReg(MF, r);
532e8d8bef9SDimitry Andric });
5330b57cec5SDimitry Andric }
5340b57cec5SDimitry Andric
emitReservedArgRegCallError(const MachineFunction & MF) const5350b57cec5SDimitry Andric void AArch64RegisterInfo::emitReservedArgRegCallError(
5360b57cec5SDimitry Andric const MachineFunction &MF) const {
5370b57cec5SDimitry Andric const Function &F = MF.getFunction();
538e8d8bef9SDimitry Andric F.getContext().diagnose(DiagnosticInfoUnsupported{F, ("AArch64 doesn't support"
539e8d8bef9SDimitry Andric " function calls if any of the argument registers is reserved.")});
5400b57cec5SDimitry Andric }
5410b57cec5SDimitry Andric
isAsmClobberable(const MachineFunction & MF,MCRegister PhysReg) const5420b57cec5SDimitry Andric bool AArch64RegisterInfo::isAsmClobberable(const MachineFunction &MF,
5435ffd83dbSDimitry Andric MCRegister PhysReg) const {
544bdd1243dSDimitry Andric // SLH uses register X16 as the taint register but it will fallback to a different
545bdd1243dSDimitry Andric // method if the user clobbers it. So X16 is not reserved for inline asm but is
546bdd1243dSDimitry Andric // for normal codegen.
547bdd1243dSDimitry Andric if (MF.getFunction().hasFnAttribute(Attribute::SpeculativeLoadHardening) &&
548bdd1243dSDimitry Andric MCRegisterInfo::regsOverlap(PhysReg, AArch64::X16))
549bdd1243dSDimitry Andric return true;
5500b57cec5SDimitry Andric
55174626c16SDimitry Andric // ZA/ZT0 registers are reserved but may be permitted in the clobber list.
55274626c16SDimitry Andric if (PhysReg == AArch64::ZA || PhysReg == AArch64::ZT0)
55374626c16SDimitry Andric return true;
55474626c16SDimitry Andric
555bdd1243dSDimitry Andric return !isReservedReg(MF, PhysReg);
5560b57cec5SDimitry Andric }
5570b57cec5SDimitry Andric
// Pointers live in 64-bit GPRs; the GPR64sp class also admits SP so that
// stack-pointer-based addressing stays representable. Kind is ignored — all
// pointer kinds use the same class on AArch64.
const TargetRegisterClass *
AArch64RegisterInfo::getPointerRegClass(const MachineFunction &MF,
                                        unsigned Kind) const {
  return &AArch64::GPR64spRegClass;
}
5630b57cec5SDimitry Andric
5640b57cec5SDimitry Andric const TargetRegisterClass *
getCrossCopyRegClass(const TargetRegisterClass * RC) const5650b57cec5SDimitry Andric AArch64RegisterInfo::getCrossCopyRegClass(const TargetRegisterClass *RC) const {
5660b57cec5SDimitry Andric if (RC == &AArch64::CCRRegClass)
5670b57cec5SDimitry Andric return &AArch64::GPR64RegClass; // Only MSR & MRS copy NZCV.
5680b57cec5SDimitry Andric return RC;
5690b57cec5SDimitry Andric }
5700b57cec5SDimitry Andric
getBaseRegister() const5710b57cec5SDimitry Andric unsigned AArch64RegisterInfo::getBaseRegister() const { return AArch64::X19; }
5720b57cec5SDimitry Andric
hasBasePointer(const MachineFunction & MF) const5730b57cec5SDimitry Andric bool AArch64RegisterInfo::hasBasePointer(const MachineFunction &MF) const {
5740b57cec5SDimitry Andric const MachineFrameInfo &MFI = MF.getFrameInfo();
5750b57cec5SDimitry Andric
5760b57cec5SDimitry Andric // In the presence of variable sized objects or funclets, if the fixed stack
5770b57cec5SDimitry Andric // size is large enough that referencing from the FP won't result in things
5780b57cec5SDimitry Andric // being in range relatively often, we can use a base pointer to allow access
5790b57cec5SDimitry Andric // from the other direction like the SP normally works.
5800b57cec5SDimitry Andric //
5810b57cec5SDimitry Andric // Furthermore, if both variable sized objects are present, and the
5820b57cec5SDimitry Andric // stack needs to be dynamically re-aligned, the base pointer is the only
5830b57cec5SDimitry Andric // reliable way to reference the locals.
5840b57cec5SDimitry Andric if (MFI.hasVarSizedObjects() || MF.hasEHFunclets()) {
585fe6060f1SDimitry Andric if (hasStackRealignment(MF))
5860b57cec5SDimitry Andric return true;
587979e22ffSDimitry Andric
588*0fca6ea1SDimitry Andric auto &ST = MF.getSubtarget<AArch64Subtarget>();
589*0fca6ea1SDimitry Andric if (ST.hasSVE() || ST.isStreaming()) {
590979e22ffSDimitry Andric const AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>();
591979e22ffSDimitry Andric // Frames that have variable sized objects and scalable SVE objects,
592979e22ffSDimitry Andric // should always use a basepointer.
593979e22ffSDimitry Andric if (!AFI->hasCalculatedStackSizeSVE() || AFI->getStackSizeSVE())
594979e22ffSDimitry Andric return true;
595979e22ffSDimitry Andric }
596979e22ffSDimitry Andric
5970b57cec5SDimitry Andric // Conservatively estimate whether the negative offset from the frame
5980b57cec5SDimitry Andric // pointer will be sufficient to reach. If a function has a smallish
5990b57cec5SDimitry Andric // frame, it's less likely to have lots of spills and callee saved
6000b57cec5SDimitry Andric // space, so it's all more likely to be within range of the frame pointer.
6010b57cec5SDimitry Andric // If it's wrong, we'll materialize the constant and still get to the
6020b57cec5SDimitry Andric // object; it's just suboptimal. Negative offsets use the unscaled
6030b57cec5SDimitry Andric // load/store instructions, which have a 9-bit signed immediate.
6040b57cec5SDimitry Andric return MFI.getLocalFrameSize() >= 256;
6050b57cec5SDimitry Andric }
6060b57cec5SDimitry Andric
6070b57cec5SDimitry Andric return false;
6080b57cec5SDimitry Andric }
6090b57cec5SDimitry Andric
isArgumentRegister(const MachineFunction & MF,MCRegister Reg) const61081ad6265SDimitry Andric bool AArch64RegisterInfo::isArgumentRegister(const MachineFunction &MF,
61181ad6265SDimitry Andric MCRegister Reg) const {
61281ad6265SDimitry Andric CallingConv::ID CC = MF.getFunction().getCallingConv();
61381ad6265SDimitry Andric const AArch64Subtarget &STI = MF.getSubtarget<AArch64Subtarget>();
614*0fca6ea1SDimitry Andric bool IsVarArg = STI.isCallingConvWin64(MF.getFunction().getCallingConv(),
615*0fca6ea1SDimitry Andric MF.getFunction().isVarArg());
61681ad6265SDimitry Andric
61781ad6265SDimitry Andric auto HasReg = [](ArrayRef<MCRegister> RegList, MCRegister Reg) {
618bdd1243dSDimitry Andric return llvm::is_contained(RegList, Reg);
61981ad6265SDimitry Andric };
62081ad6265SDimitry Andric
62181ad6265SDimitry Andric switch (CC) {
62281ad6265SDimitry Andric default:
62381ad6265SDimitry Andric report_fatal_error("Unsupported calling convention.");
62481ad6265SDimitry Andric case CallingConv::GHC:
62581ad6265SDimitry Andric return HasReg(CC_AArch64_GHC_ArgRegs, Reg);
626*0fca6ea1SDimitry Andric case CallingConv::PreserveNone:
627*0fca6ea1SDimitry Andric if (!MF.getFunction().isVarArg())
628*0fca6ea1SDimitry Andric return HasReg(CC_AArch64_Preserve_None_ArgRegs, Reg);
629*0fca6ea1SDimitry Andric [[fallthrough]];
63081ad6265SDimitry Andric case CallingConv::C:
63181ad6265SDimitry Andric case CallingConv::Fast:
63281ad6265SDimitry Andric case CallingConv::PreserveMost:
63306c3fb27SDimitry Andric case CallingConv::PreserveAll:
63481ad6265SDimitry Andric case CallingConv::CXX_FAST_TLS:
63581ad6265SDimitry Andric case CallingConv::Swift:
63681ad6265SDimitry Andric case CallingConv::SwiftTail:
63781ad6265SDimitry Andric case CallingConv::Tail:
6385f757f3fSDimitry Andric if (STI.isTargetWindows()) {
6395f757f3fSDimitry Andric if (IsVarArg)
64081ad6265SDimitry Andric return HasReg(CC_AArch64_Win64_VarArg_ArgRegs, Reg);
6415f757f3fSDimitry Andric switch (CC) {
6425f757f3fSDimitry Andric default:
6435f757f3fSDimitry Andric return HasReg(CC_AArch64_Win64PCS_ArgRegs, Reg);
6445f757f3fSDimitry Andric case CallingConv::Swift:
6455f757f3fSDimitry Andric case CallingConv::SwiftTail:
6465f757f3fSDimitry Andric return HasReg(CC_AArch64_Win64PCS_Swift_ArgRegs, Reg) ||
6475f757f3fSDimitry Andric HasReg(CC_AArch64_Win64PCS_ArgRegs, Reg);
6485f757f3fSDimitry Andric }
6495f757f3fSDimitry Andric }
65081ad6265SDimitry Andric if (!STI.isTargetDarwin()) {
65181ad6265SDimitry Andric switch (CC) {
65281ad6265SDimitry Andric default:
65381ad6265SDimitry Andric return HasReg(CC_AArch64_AAPCS_ArgRegs, Reg);
65481ad6265SDimitry Andric case CallingConv::Swift:
65581ad6265SDimitry Andric case CallingConv::SwiftTail:
65681ad6265SDimitry Andric return HasReg(CC_AArch64_AAPCS_ArgRegs, Reg) ||
65781ad6265SDimitry Andric HasReg(CC_AArch64_AAPCS_Swift_ArgRegs, Reg);
65881ad6265SDimitry Andric }
65981ad6265SDimitry Andric }
66081ad6265SDimitry Andric if (!IsVarArg) {
66181ad6265SDimitry Andric switch (CC) {
66281ad6265SDimitry Andric default:
66381ad6265SDimitry Andric return HasReg(CC_AArch64_DarwinPCS_ArgRegs, Reg);
66481ad6265SDimitry Andric case CallingConv::Swift:
66581ad6265SDimitry Andric case CallingConv::SwiftTail:
66681ad6265SDimitry Andric return HasReg(CC_AArch64_DarwinPCS_ArgRegs, Reg) ||
66781ad6265SDimitry Andric HasReg(CC_AArch64_DarwinPCS_Swift_ArgRegs, Reg);
66881ad6265SDimitry Andric }
66981ad6265SDimitry Andric }
67081ad6265SDimitry Andric if (STI.isTargetILP32())
67181ad6265SDimitry Andric return HasReg(CC_AArch64_DarwinPCS_ILP32_VarArg_ArgRegs, Reg);
67281ad6265SDimitry Andric return HasReg(CC_AArch64_DarwinPCS_VarArg_ArgRegs, Reg);
67381ad6265SDimitry Andric case CallingConv::Win64:
67481ad6265SDimitry Andric if (IsVarArg)
67581ad6265SDimitry Andric HasReg(CC_AArch64_Win64_VarArg_ArgRegs, Reg);
6765f757f3fSDimitry Andric return HasReg(CC_AArch64_Win64PCS_ArgRegs, Reg);
67781ad6265SDimitry Andric case CallingConv::CFGuard_Check:
67881ad6265SDimitry Andric return HasReg(CC_AArch64_Win64_CFGuard_Check_ArgRegs, Reg);
67981ad6265SDimitry Andric case CallingConv::AArch64_VectorCall:
68081ad6265SDimitry Andric case CallingConv::AArch64_SVE_VectorCall:
681bdd1243dSDimitry Andric case CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X0:
682*0fca6ea1SDimitry Andric case CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X1:
683bdd1243dSDimitry Andric case CallingConv::AArch64_SME_ABI_Support_Routines_PreserveMost_From_X2:
6845f757f3fSDimitry Andric if (STI.isTargetWindows())
6855f757f3fSDimitry Andric return HasReg(CC_AArch64_Win64PCS_ArgRegs, Reg);
68681ad6265SDimitry Andric return HasReg(CC_AArch64_AAPCS_ArgRegs, Reg);
68781ad6265SDimitry Andric }
68881ad6265SDimitry Andric }
68981ad6265SDimitry Andric
6900b57cec5SDimitry Andric Register
getFrameRegister(const MachineFunction & MF) const6910b57cec5SDimitry Andric AArch64RegisterInfo::getFrameRegister(const MachineFunction &MF) const {
6920b57cec5SDimitry Andric const AArch64FrameLowering *TFI = getFrameLowering(MF);
6930b57cec5SDimitry Andric return TFI->hasFP(MF) ? AArch64::FP : AArch64::SP;
6940b57cec5SDimitry Andric }
6950b57cec5SDimitry Andric
// Always true: frame index elimination on AArch64 may need a scratch register
// to materialize large offsets (see eliminateFrameIndex), so the scavenger
// must be available.
bool AArch64RegisterInfo::requiresRegisterScavenging(
    const MachineFunction &MF) const {
  return true;
}
7000b57cec5SDimitry Andric
// Always true: allow LocalStackSlotAllocation to create virtual base
// registers for frame-index references it deems out of range (see
// needsFrameBaseReg / materializeFrameBaseRegister).
bool AArch64RegisterInfo::requiresVirtualBaseRegisters(
    const MachineFunction &MF) const {
  return true;
}
7050b57cec5SDimitry Andric
7060b57cec5SDimitry Andric bool
useFPForScavengingIndex(const MachineFunction & MF) const7070b57cec5SDimitry Andric AArch64RegisterInfo::useFPForScavengingIndex(const MachineFunction &MF) const {
7080b57cec5SDimitry Andric // This function indicates whether the emergency spillslot should be placed
7090b57cec5SDimitry Andric // close to the beginning of the stackframe (closer to FP) or the end
7100b57cec5SDimitry Andric // (closer to SP).
7110b57cec5SDimitry Andric //
7120b57cec5SDimitry Andric // The beginning works most reliably if we have a frame pointer.
713979e22ffSDimitry Andric // In the presence of any non-constant space between FP and locals,
714979e22ffSDimitry Andric // (e.g. in case of stack realignment or a scalable SVE area), it is
715979e22ffSDimitry Andric // better to use SP or BP.
7160b57cec5SDimitry Andric const AArch64FrameLowering &TFI = *getFrameLowering(MF);
717979e22ffSDimitry Andric const AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>();
718979e22ffSDimitry Andric assert((!MF.getSubtarget<AArch64Subtarget>().hasSVE() ||
719979e22ffSDimitry Andric AFI->hasCalculatedStackSizeSVE()) &&
720979e22ffSDimitry Andric "Expected SVE area to be calculated by this point");
721fe6060f1SDimitry Andric return TFI.hasFP(MF) && !hasStackRealignment(MF) && !AFI->getStackSizeSVE();
7220b57cec5SDimitry Andric }
7230b57cec5SDimitry Andric
// Always true: eliminateFrameIndex may have to materialize an offset into a
// scratch register, which relies on frame-index scavenging support.
bool AArch64RegisterInfo::requiresFrameIndexScavenging(
    const MachineFunction &MF) const {
  return true;
}
7280b57cec5SDimitry Andric
7290b57cec5SDimitry Andric bool
cannotEliminateFrame(const MachineFunction & MF) const7300b57cec5SDimitry Andric AArch64RegisterInfo::cannotEliminateFrame(const MachineFunction &MF) const {
7310b57cec5SDimitry Andric const MachineFrameInfo &MFI = MF.getFrameInfo();
7320b57cec5SDimitry Andric if (MF.getTarget().Options.DisableFramePointerElim(MF) && MFI.adjustsStack())
7330b57cec5SDimitry Andric return true;
7340b57cec5SDimitry Andric return MFI.hasVarSizedObjects() || MFI.isFrameAddressTaken();
7350b57cec5SDimitry Andric }
7360b57cec5SDimitry Andric
7370b57cec5SDimitry Andric /// needsFrameBaseReg - Returns true if the instruction's frame index
7380b57cec5SDimitry Andric /// reference would be better served by a base register other than FP
7390b57cec5SDimitry Andric /// or SP. Used by LocalStackFrameAllocation to determine which frame index
7400b57cec5SDimitry Andric /// references it should create new base registers for.
needsFrameBaseReg(MachineInstr * MI,int64_t Offset) const7410b57cec5SDimitry Andric bool AArch64RegisterInfo::needsFrameBaseReg(MachineInstr *MI,
7420b57cec5SDimitry Andric int64_t Offset) const {
7430b57cec5SDimitry Andric for (unsigned i = 0; !MI->getOperand(i).isFI(); ++i)
7440b57cec5SDimitry Andric assert(i < MI->getNumOperands() &&
7450b57cec5SDimitry Andric "Instr doesn't have FrameIndex operand!");
7460b57cec5SDimitry Andric
7470b57cec5SDimitry Andric // It's the load/store FI references that cause issues, as it can be difficult
7480b57cec5SDimitry Andric // to materialize the offset if it won't fit in the literal field. Estimate
7490b57cec5SDimitry Andric // based on the size of the local frame and some conservative assumptions
7500b57cec5SDimitry Andric // about the rest of the stack frame (note, this is pre-regalloc, so
7510b57cec5SDimitry Andric // we don't know everything for certain yet) whether this offset is likely
7520b57cec5SDimitry Andric // to be out of range of the immediate. Return true if so.
7530b57cec5SDimitry Andric
7540b57cec5SDimitry Andric // We only generate virtual base registers for loads and stores, so
7550b57cec5SDimitry Andric // return false for everything else.
7560b57cec5SDimitry Andric if (!MI->mayLoad() && !MI->mayStore())
7570b57cec5SDimitry Andric return false;
7580b57cec5SDimitry Andric
7590b57cec5SDimitry Andric // Without a virtual base register, if the function has variable sized
7600b57cec5SDimitry Andric // objects, all fixed-size local references will be via the frame pointer,
7610b57cec5SDimitry Andric // Approximate the offset and see if it's legal for the instruction.
7620b57cec5SDimitry Andric // Note that the incoming offset is based on the SP value at function entry,
7630b57cec5SDimitry Andric // so it'll be negative.
7640b57cec5SDimitry Andric MachineFunction &MF = *MI->getParent()->getParent();
7650b57cec5SDimitry Andric const AArch64FrameLowering *TFI = getFrameLowering(MF);
7660b57cec5SDimitry Andric MachineFrameInfo &MFI = MF.getFrameInfo();
7670b57cec5SDimitry Andric
7680b57cec5SDimitry Andric // Estimate an offset from the frame pointer.
7690b57cec5SDimitry Andric // Conservatively assume all GPR callee-saved registers get pushed.
7700b57cec5SDimitry Andric // FP, LR, X19-X28, D8-D15. 64-bits each.
7710b57cec5SDimitry Andric int64_t FPOffset = Offset - 16 * 20;
7720b57cec5SDimitry Andric // Estimate an offset from the stack pointer.
7730b57cec5SDimitry Andric // The incoming offset is relating to the SP at the start of the function,
7740b57cec5SDimitry Andric // but when we access the local it'll be relative to the SP after local
7750b57cec5SDimitry Andric // allocation, so adjust our SP-relative offset by that allocation size.
7760b57cec5SDimitry Andric Offset += MFI.getLocalFrameSize();
7770b57cec5SDimitry Andric // Assume that we'll have at least some spill slots allocated.
7780b57cec5SDimitry Andric // FIXME: This is a total SWAG number. We should run some statistics
7790b57cec5SDimitry Andric // and pick a real one.
7800b57cec5SDimitry Andric Offset += 128; // 128 bytes of spill slots
7810b57cec5SDimitry Andric
7820b57cec5SDimitry Andric // If there is a frame pointer, try using it.
7830b57cec5SDimitry Andric // The FP is only available if there is no dynamic realignment. We
7840b57cec5SDimitry Andric // don't know for sure yet whether we'll need that, so we guess based
7850b57cec5SDimitry Andric // on whether there are any local variables that would trigger it.
7860b57cec5SDimitry Andric if (TFI->hasFP(MF) && isFrameOffsetLegal(MI, AArch64::FP, FPOffset))
7870b57cec5SDimitry Andric return false;
7880b57cec5SDimitry Andric
7890b57cec5SDimitry Andric // If we can reference via the stack pointer or base pointer, try that.
7900b57cec5SDimitry Andric // FIXME: This (and the code that resolves the references) can be improved
7910b57cec5SDimitry Andric // to only disallow SP relative references in the live range of
7920b57cec5SDimitry Andric // the VLA(s). In practice, it's unclear how much difference that
7930b57cec5SDimitry Andric // would make, but it may be worth doing.
7940b57cec5SDimitry Andric if (isFrameOffsetLegal(MI, AArch64::SP, Offset))
7950b57cec5SDimitry Andric return false;
7960b57cec5SDimitry Andric
7975ffd83dbSDimitry Andric // If even offset 0 is illegal, we don't want a virtual base register.
7985ffd83dbSDimitry Andric if (!isFrameOffsetLegal(MI, AArch64::SP, 0))
7995ffd83dbSDimitry Andric return false;
8005ffd83dbSDimitry Andric
8010b57cec5SDimitry Andric // The offset likely isn't legal; we want to allocate a virtual base register.
8020b57cec5SDimitry Andric return true;
8030b57cec5SDimitry Andric }
8040b57cec5SDimitry Andric
isFrameOffsetLegal(const MachineInstr * MI,Register BaseReg,int64_t Offset) const8050b57cec5SDimitry Andric bool AArch64RegisterInfo::isFrameOffsetLegal(const MachineInstr *MI,
8065ffd83dbSDimitry Andric Register BaseReg,
8070b57cec5SDimitry Andric int64_t Offset) const {
8080b57cec5SDimitry Andric assert(MI && "Unable to get the legal offset for nil instruction.");
809e8d8bef9SDimitry Andric StackOffset SaveOffset = StackOffset::getFixed(Offset);
8100b57cec5SDimitry Andric return isAArch64FrameOffsetLegal(*MI, SaveOffset) & AArch64FrameOffsetIsLegal;
8110b57cec5SDimitry Andric }
8120b57cec5SDimitry Andric
8130b57cec5SDimitry Andric /// Insert defining instruction(s) for BaseReg to be a pointer to FrameIdx
8140b57cec5SDimitry Andric /// at the beginning of the basic block.
815e8d8bef9SDimitry Andric Register
materializeFrameBaseRegister(MachineBasicBlock * MBB,int FrameIdx,int64_t Offset) const816e8d8bef9SDimitry Andric AArch64RegisterInfo::materializeFrameBaseRegister(MachineBasicBlock *MBB,
8170b57cec5SDimitry Andric int FrameIdx,
8180b57cec5SDimitry Andric int64_t Offset) const {
8190b57cec5SDimitry Andric MachineBasicBlock::iterator Ins = MBB->begin();
8200b57cec5SDimitry Andric DebugLoc DL; // Defaults to "unknown"
8210b57cec5SDimitry Andric if (Ins != MBB->end())
8220b57cec5SDimitry Andric DL = Ins->getDebugLoc();
8230b57cec5SDimitry Andric const MachineFunction &MF = *MBB->getParent();
8240b57cec5SDimitry Andric const AArch64InstrInfo *TII =
8250b57cec5SDimitry Andric MF.getSubtarget<AArch64Subtarget>().getInstrInfo();
8260b57cec5SDimitry Andric const MCInstrDesc &MCID = TII->get(AArch64::ADDXri);
8270b57cec5SDimitry Andric MachineRegisterInfo &MRI = MBB->getParent()->getRegInfo();
828e8d8bef9SDimitry Andric Register BaseReg = MRI.createVirtualRegister(&AArch64::GPR64spRegClass);
8290b57cec5SDimitry Andric MRI.constrainRegClass(BaseReg, TII->getRegClass(MCID, 0, this, MF));
8300b57cec5SDimitry Andric unsigned Shifter = AArch64_AM::getShifterImm(AArch64_AM::LSL, 0);
8310b57cec5SDimitry Andric
8320b57cec5SDimitry Andric BuildMI(*MBB, Ins, DL, MCID, BaseReg)
8330b57cec5SDimitry Andric .addFrameIndex(FrameIdx)
8340b57cec5SDimitry Andric .addImm(Offset)
8350b57cec5SDimitry Andric .addImm(Shifter);
836e8d8bef9SDimitry Andric
837e8d8bef9SDimitry Andric return BaseReg;
8380b57cec5SDimitry Andric }
8390b57cec5SDimitry Andric
resolveFrameIndex(MachineInstr & MI,Register BaseReg,int64_t Offset) const8405ffd83dbSDimitry Andric void AArch64RegisterInfo::resolveFrameIndex(MachineInstr &MI, Register BaseReg,
8410b57cec5SDimitry Andric int64_t Offset) const {
8428bcb0991SDimitry Andric // ARM doesn't need the general 64-bit offsets
843e8d8bef9SDimitry Andric StackOffset Off = StackOffset::getFixed(Offset);
8448bcb0991SDimitry Andric
8450b57cec5SDimitry Andric unsigned i = 0;
8460b57cec5SDimitry Andric while (!MI.getOperand(i).isFI()) {
8470b57cec5SDimitry Andric ++i;
8480b57cec5SDimitry Andric assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
8490b57cec5SDimitry Andric }
850e8d8bef9SDimitry Andric
8510b57cec5SDimitry Andric const MachineFunction *MF = MI.getParent()->getParent();
8520b57cec5SDimitry Andric const AArch64InstrInfo *TII =
8530b57cec5SDimitry Andric MF->getSubtarget<AArch64Subtarget>().getInstrInfo();
8540b57cec5SDimitry Andric bool Done = rewriteAArch64FrameIndex(MI, i, BaseReg, Off, TII);
8550b57cec5SDimitry Andric assert(Done && "Unable to resolve frame index!");
8560b57cec5SDimitry Andric (void)Done;
8570b57cec5SDimitry Andric }
8580b57cec5SDimitry Andric
8595ffd83dbSDimitry Andric // Create a scratch register for the frame index elimination in an instruction.
8605ffd83dbSDimitry Andric // This function has special handling of stack tagging loop pseudos, in which
86181ad6265SDimitry Andric // case it can also change the instruction opcode.
8625ffd83dbSDimitry Andric static Register
createScratchRegisterForInstruction(MachineInstr & MI,unsigned FIOperandNum,const AArch64InstrInfo * TII)86381ad6265SDimitry Andric createScratchRegisterForInstruction(MachineInstr &MI, unsigned FIOperandNum,
8645ffd83dbSDimitry Andric const AArch64InstrInfo *TII) {
8655ffd83dbSDimitry Andric // ST*Gloop have a reserved scratch register in operand 1. Use it, and also
8665ffd83dbSDimitry Andric // replace the instruction with the writeback variant because it will now
8675ffd83dbSDimitry Andric // satisfy the operand constraints for it.
86881ad6265SDimitry Andric Register ScratchReg;
86981ad6265SDimitry Andric if (MI.getOpcode() == AArch64::STGloop ||
87081ad6265SDimitry Andric MI.getOpcode() == AArch64::STZGloop) {
87181ad6265SDimitry Andric assert(FIOperandNum == 3 &&
87281ad6265SDimitry Andric "Wrong frame index operand for STGloop/STZGloop");
87381ad6265SDimitry Andric unsigned Op = MI.getOpcode() == AArch64::STGloop ? AArch64::STGloop_wback
87481ad6265SDimitry Andric : AArch64::STZGloop_wback;
87581ad6265SDimitry Andric ScratchReg = MI.getOperand(1).getReg();
87681ad6265SDimitry Andric MI.getOperand(3).ChangeToRegister(ScratchReg, false, false, true);
87781ad6265SDimitry Andric MI.setDesc(TII->get(Op));
87881ad6265SDimitry Andric MI.tieOperands(1, 3);
8795ffd83dbSDimitry Andric } else {
88081ad6265SDimitry Andric ScratchReg =
88181ad6265SDimitry Andric MI.getMF()->getRegInfo().createVirtualRegister(&AArch64::GPR64RegClass);
88281ad6265SDimitry Andric MI.getOperand(FIOperandNum)
88381ad6265SDimitry Andric .ChangeToRegister(ScratchReg, false, false, true);
8845ffd83dbSDimitry Andric }
88581ad6265SDimitry Andric return ScratchReg;
8865ffd83dbSDimitry Andric }
8875ffd83dbSDimitry Andric
getOffsetOpcodes(const StackOffset & Offset,SmallVectorImpl<uint64_t> & Ops) const888e8d8bef9SDimitry Andric void AArch64RegisterInfo::getOffsetOpcodes(
889e8d8bef9SDimitry Andric const StackOffset &Offset, SmallVectorImpl<uint64_t> &Ops) const {
890e8d8bef9SDimitry Andric // The smallest scalable element supported by scaled SVE addressing
891e8d8bef9SDimitry Andric // modes are predicates, which are 2 scalable bytes in size. So the scalable
892e8d8bef9SDimitry Andric // byte offset must always be a multiple of 2.
893e8d8bef9SDimitry Andric assert(Offset.getScalable() % 2 == 0 && "Invalid frame offset");
894e8d8bef9SDimitry Andric
895e8d8bef9SDimitry Andric // Add fixed-sized offset using existing DIExpression interface.
896e8d8bef9SDimitry Andric DIExpression::appendOffset(Ops, Offset.getFixed());
897e8d8bef9SDimitry Andric
898e8d8bef9SDimitry Andric unsigned VG = getDwarfRegNum(AArch64::VG, true);
899e8d8bef9SDimitry Andric int64_t VGSized = Offset.getScalable() / 2;
900e8d8bef9SDimitry Andric if (VGSized > 0) {
901e8d8bef9SDimitry Andric Ops.push_back(dwarf::DW_OP_constu);
902e8d8bef9SDimitry Andric Ops.push_back(VGSized);
903e8d8bef9SDimitry Andric Ops.append({dwarf::DW_OP_bregx, VG, 0ULL});
904e8d8bef9SDimitry Andric Ops.push_back(dwarf::DW_OP_mul);
905e8d8bef9SDimitry Andric Ops.push_back(dwarf::DW_OP_plus);
906e8d8bef9SDimitry Andric } else if (VGSized < 0) {
907e8d8bef9SDimitry Andric Ops.push_back(dwarf::DW_OP_constu);
908e8d8bef9SDimitry Andric Ops.push_back(-VGSized);
909e8d8bef9SDimitry Andric Ops.append({dwarf::DW_OP_bregx, VG, 0ULL});
910e8d8bef9SDimitry Andric Ops.push_back(dwarf::DW_OP_mul);
911e8d8bef9SDimitry Andric Ops.push_back(dwarf::DW_OP_minus);
912e8d8bef9SDimitry Andric }
913e8d8bef9SDimitry Andric }
914e8d8bef9SDimitry Andric
// Rewrite the abstract frame-index operand of MI (operand FIOperandNum) into a
// concrete base register plus immediate offset. The return value follows the
// TargetRegisterInfo::eliminateFrameIndex contract: true is propagated from
// rewriteAArch64FrameIndex when it handled the frame index completely
// (presumably meaning the caller need not re-visit MI — confirm against the
// base-class documentation); false otherwise.
bool AArch64RegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator II,
                                              int SPAdj, unsigned FIOperandNum,
                                              RegScavenger *RS) const {
  // This target never asks for a nonzero SP adjustment at this point.
  assert(SPAdj == 0 && "Unexpected");

  MachineInstr &MI = *II;
  MachineBasicBlock &MBB = *MI.getParent();
  MachineFunction &MF = *MBB.getParent();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  const AArch64InstrInfo *TII =
      MF.getSubtarget<AArch64Subtarget>().getInstrInfo();
  const AArch64FrameLowering *TFI = getFrameLowering(MF);
  int FrameIndex = MI.getOperand(FIOperandNum).getIndex();
  // MO_TAGGED on the frame-index operand marks a reference to a stack slot
  // whose address must carry a memory tag (MTE); handled specially below.
  bool Tagged =
      MI.getOperand(FIOperandNum).getTargetFlags() & AArch64II::MO_TAGGED;
  Register FrameReg;

  // Special handling of stackmap, patchpoint and statepoint instructions:
  // they keep an explicit <reg, imm> pair instead of folding the offset into
  // an addressing mode, so just resolve the index and add the existing
  // immediate to the resolved offset.
  if (MI.getOpcode() == TargetOpcode::STACKMAP ||
      MI.getOpcode() == TargetOpcode::PATCHPOINT ||
      MI.getOpcode() == TargetOpcode::STATEPOINT) {
    StackOffset Offset =
        TFI->resolveFrameIndexReference(MF, FrameIndex, FrameReg,
                                        /*PreferFP=*/true,
                                        /*ForSimm=*/false);
    Offset += StackOffset::getFixed(MI.getOperand(FIOperandNum + 1).getImm());
    MI.getOperand(FIOperandNum).ChangeToRegister(FrameReg, false /*isDef*/);
    MI.getOperand(FIOperandNum + 1).ChangeToImmediate(Offset.getFixed());
    return false;
  }

  if (MI.getOpcode() == TargetOpcode::LOCAL_ESCAPE) {
    // LOCAL_ESCAPE records the slot's offset as a plain immediate rather
    // than a register reference; scalable (SVE) components cannot be
    // represented here.
    MachineOperand &FI = MI.getOperand(FIOperandNum);
    StackOffset Offset = TFI->getNonLocalFrameIndexReference(MF, FrameIndex);
    assert(!Offset.getScalable() &&
           "Frame offsets with a scalable component are not supported");
    FI.ChangeToImmediate(Offset.getFixed());
    return false;
  }

  StackOffset Offset;
  if (MI.getOpcode() == AArch64::TAGPstack) {
    // TAGPstack must use the virtual frame register in its 3rd operand.
    const AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>();
    FrameReg = MI.getOperand(3).getReg();
    Offset = StackOffset::getFixed(MFI.getObjectOffset(FrameIndex) +
                                   AFI->getTaggedBasePointerOffset());
  } else if (Tagged) {
    // Tagged slot: the reference is made SP-relative (see FrameReg below),
    // presumably so the address matches the tagged-base-pointer scheme —
    // confirm against the MTE stack-tagging design notes.
    StackOffset SPOffset = StackOffset::getFixed(
        MFI.getObjectOffset(FrameIndex) + (int64_t)MFI.getStackSize());
    if (MFI.hasVarSizedObjects() ||
        isAArch64FrameOffsetLegal(MI, SPOffset, nullptr, nullptr, nullptr) !=
            (AArch64FrameOffsetCanUpdate | AArch64FrameOffsetIsLegal)) {
      // Can't update to SP + offset in place. Precalculate the tagged pointer
      // in a scratch register.
      Offset = TFI->resolveFrameIndexReference(
          MF, FrameIndex, FrameReg, /*PreferFP=*/false, /*ForSimm=*/true);
      Register ScratchReg =
          MF.getRegInfo().createVirtualRegister(&AArch64::GPR64RegClass);
      // Materialize FrameReg + Offset into ScratchReg, then use LDG to load
      // the allocation tag for that address back into ScratchReg, yielding a
      // correctly tagged pointer.
      emitFrameOffset(MBB, II, MI.getDebugLoc(), ScratchReg, FrameReg, Offset,
                      TII);
      BuildMI(MBB, MI, MI.getDebugLoc(), TII->get(AArch64::LDG), ScratchReg)
          .addReg(ScratchReg)
          .addReg(ScratchReg)
          .addImm(0);
      // Replace the frame-index operand with the scratch register; the final
      // flag presumably marks it killed — confirm against
      // MachineOperand::ChangeToRegister's parameter list.
      MI.getOperand(FIOperandNum)
          .ChangeToRegister(ScratchReg, false, false, true);
      return false;
    }
    FrameReg = AArch64::SP;
    Offset = StackOffset::getFixed(MFI.getObjectOffset(FrameIndex) +
                                   (int64_t)MFI.getStackSize());
  } else {
    Offset = TFI->resolveFrameIndexReference(
        MF, FrameIndex, FrameReg, /*PreferFP=*/false, /*ForSimm=*/true);
  }

  // Modify MI as necessary to handle as much of 'Offset' as possible
  if (rewriteAArch64FrameIndex(MI, FIOperandNum, FrameReg, Offset, TII))
    return true;

  assert((!RS || !RS->isScavengingFrameIndex(FrameIndex)) &&
         "Emergency spill slot is out of reach");

  // If we get here, the immediate doesn't fit into the instruction. We folded
  // as much as possible above. Handle the rest, providing a register that is
  // SP+LargeImm.
  Register ScratchReg =
      createScratchRegisterForInstruction(MI, FIOperandNum, TII);
  emitFrameOffset(MBB, II, MI.getDebugLoc(), ScratchReg, FrameReg, Offset, TII);
  return false;
}
10070b57cec5SDimitry Andric
getRegPressureLimit(const TargetRegisterClass * RC,MachineFunction & MF) const10080b57cec5SDimitry Andric unsigned AArch64RegisterInfo::getRegPressureLimit(const TargetRegisterClass *RC,
10090b57cec5SDimitry Andric MachineFunction &MF) const {
10100b57cec5SDimitry Andric const AArch64FrameLowering *TFI = getFrameLowering(MF);
10110b57cec5SDimitry Andric
10120b57cec5SDimitry Andric switch (RC->getID()) {
10130b57cec5SDimitry Andric default:
10140b57cec5SDimitry Andric return 0;
10150b57cec5SDimitry Andric case AArch64::GPR32RegClassID:
10160b57cec5SDimitry Andric case AArch64::GPR32spRegClassID:
10170b57cec5SDimitry Andric case AArch64::GPR32allRegClassID:
10180b57cec5SDimitry Andric case AArch64::GPR64spRegClassID:
10190b57cec5SDimitry Andric case AArch64::GPR64allRegClassID:
10200b57cec5SDimitry Andric case AArch64::GPR64RegClassID:
10210b57cec5SDimitry Andric case AArch64::GPR32commonRegClassID:
10220b57cec5SDimitry Andric case AArch64::GPR64commonRegClassID:
10230b57cec5SDimitry Andric return 32 - 1 // XZR/SP
10240b57cec5SDimitry Andric - (TFI->hasFP(MF) || TT.isOSDarwin()) // FP
10250b57cec5SDimitry Andric - MF.getSubtarget<AArch64Subtarget>().getNumXRegisterReserved()
10260b57cec5SDimitry Andric - hasBasePointer(MF); // X19
10270b57cec5SDimitry Andric case AArch64::FPR8RegClassID:
10280b57cec5SDimitry Andric case AArch64::FPR16RegClassID:
10290b57cec5SDimitry Andric case AArch64::FPR32RegClassID:
10300b57cec5SDimitry Andric case AArch64::FPR64RegClassID:
10310b57cec5SDimitry Andric case AArch64::FPR128RegClassID:
10320b57cec5SDimitry Andric return 32;
10330b57cec5SDimitry Andric
1034bdd1243dSDimitry Andric case AArch64::MatrixIndexGPR32_8_11RegClassID:
1035fe6060f1SDimitry Andric case AArch64::MatrixIndexGPR32_12_15RegClassID:
1036fe6060f1SDimitry Andric return 4;
1037fe6060f1SDimitry Andric
10380b57cec5SDimitry Andric case AArch64::DDRegClassID:
10390b57cec5SDimitry Andric case AArch64::DDDRegClassID:
10400b57cec5SDimitry Andric case AArch64::DDDDRegClassID:
10410b57cec5SDimitry Andric case AArch64::QQRegClassID:
10420b57cec5SDimitry Andric case AArch64::QQQRegClassID:
10430b57cec5SDimitry Andric case AArch64::QQQQRegClassID:
10440b57cec5SDimitry Andric return 32;
10450b57cec5SDimitry Andric
10460b57cec5SDimitry Andric case AArch64::FPR128_loRegClassID:
10475ffd83dbSDimitry Andric case AArch64::FPR64_loRegClassID:
10485ffd83dbSDimitry Andric case AArch64::FPR16_loRegClassID:
10490b57cec5SDimitry Andric return 16;
10505f757f3fSDimitry Andric case AArch64::FPR128_0to7RegClassID:
10515f757f3fSDimitry Andric return 8;
10520b57cec5SDimitry Andric }
10530b57cec5SDimitry Andric }
10540b57cec5SDimitry Andric
getLocalAddressRegister(const MachineFunction & MF) const10550b57cec5SDimitry Andric unsigned AArch64RegisterInfo::getLocalAddressRegister(
10560b57cec5SDimitry Andric const MachineFunction &MF) const {
10570b57cec5SDimitry Andric const auto &MFI = MF.getFrameInfo();
10580b57cec5SDimitry Andric if (!MF.hasEHFunclets() && !MFI.hasVarSizedObjects())
10590b57cec5SDimitry Andric return AArch64::SP;
1060fe6060f1SDimitry Andric else if (hasStackRealignment(MF))
10610b57cec5SDimitry Andric return getBaseRegister();
10620b57cec5SDimitry Andric return getFrameRegister(MF);
10630b57cec5SDimitry Andric }
1064e8d8bef9SDimitry Andric
1065e8d8bef9SDimitry Andric /// SrcRC and DstRC will be morphed into NewRC if this returns true
shouldCoalesce(MachineInstr * MI,const TargetRegisterClass * SrcRC,unsigned SubReg,const TargetRegisterClass * DstRC,unsigned DstSubReg,const TargetRegisterClass * NewRC,LiveIntervals & LIS) const1066e8d8bef9SDimitry Andric bool AArch64RegisterInfo::shouldCoalesce(
1067e8d8bef9SDimitry Andric MachineInstr *MI, const TargetRegisterClass *SrcRC, unsigned SubReg,
1068e8d8bef9SDimitry Andric const TargetRegisterClass *DstRC, unsigned DstSubReg,
1069e8d8bef9SDimitry Andric const TargetRegisterClass *NewRC, LiveIntervals &LIS) const {
1070b3edf446SDimitry Andric MachineRegisterInfo &MRI = MI->getMF()->getRegInfo();
1071b3edf446SDimitry Andric
1072e8d8bef9SDimitry Andric if (MI->isCopy() &&
1073e8d8bef9SDimitry Andric ((DstRC->getID() == AArch64::GPR64RegClassID) ||
1074e8d8bef9SDimitry Andric (DstRC->getID() == AArch64::GPR64commonRegClassID)) &&
1075e8d8bef9SDimitry Andric MI->getOperand(0).getSubReg() && MI->getOperand(1).getSubReg())
1076e8d8bef9SDimitry Andric // Do not coalesce in the case of a 32-bit subregister copy
1077e8d8bef9SDimitry Andric // which implements a 32 to 64 bit zero extension
1078e8d8bef9SDimitry Andric // which relies on the upper 32 bits being zeroed.
1079e8d8bef9SDimitry Andric return false;
1080b3edf446SDimitry Andric
1081b3edf446SDimitry Andric auto IsCoalescerBarrier = [](const MachineInstr &MI) {
1082b3edf446SDimitry Andric switch (MI.getOpcode()) {
1083b3edf446SDimitry Andric case AArch64::COALESCER_BARRIER_FPR16:
1084b3edf446SDimitry Andric case AArch64::COALESCER_BARRIER_FPR32:
1085b3edf446SDimitry Andric case AArch64::COALESCER_BARRIER_FPR64:
1086b3edf446SDimitry Andric case AArch64::COALESCER_BARRIER_FPR128:
1087b3edf446SDimitry Andric return true;
1088b3edf446SDimitry Andric default:
1089b3edf446SDimitry Andric return false;
1090b3edf446SDimitry Andric }
1091b3edf446SDimitry Andric };
1092b3edf446SDimitry Andric
1093b3edf446SDimitry Andric // For calls that temporarily have to toggle streaming mode as part of the
1094b3edf446SDimitry Andric // call-sequence, we need to be more careful when coalescing copy instructions
1095b3edf446SDimitry Andric // so that we don't end up coalescing the NEON/FP result or argument register
1096b3edf446SDimitry Andric // with a whole Z-register, such that after coalescing the register allocator
1097b3edf446SDimitry Andric // will try to spill/reload the entire Z register.
1098b3edf446SDimitry Andric //
1099b3edf446SDimitry Andric // We do this by checking if the node has any defs/uses that are
1100b3edf446SDimitry Andric // COALESCER_BARRIER pseudos. These are 'nops' in practice, but they exist to
1101b3edf446SDimitry Andric // instruct the coalescer to avoid coalescing the copy.
1102b3edf446SDimitry Andric if (MI->isCopy() && SubReg != DstSubReg &&
1103b3edf446SDimitry Andric (AArch64::ZPRRegClass.hasSubClassEq(DstRC) ||
1104b3edf446SDimitry Andric AArch64::ZPRRegClass.hasSubClassEq(SrcRC))) {
1105b3edf446SDimitry Andric unsigned SrcReg = MI->getOperand(1).getReg();
1106b3edf446SDimitry Andric if (any_of(MRI.def_instructions(SrcReg), IsCoalescerBarrier))
1107b3edf446SDimitry Andric return false;
1108b3edf446SDimitry Andric unsigned DstReg = MI->getOperand(0).getReg();
1109b3edf446SDimitry Andric if (any_of(MRI.use_nodbg_instructions(DstReg), IsCoalescerBarrier))
1110b3edf446SDimitry Andric return false;
1111b3edf446SDimitry Andric }
1112b3edf446SDimitry Andric
1113e8d8bef9SDimitry Andric return true;
1114e8d8bef9SDimitry Andric }
1115*0fca6ea1SDimitry Andric
shouldAnalyzePhysregInMachineLoopInfo(MCRegister R) const1116*0fca6ea1SDimitry Andric bool AArch64RegisterInfo::shouldAnalyzePhysregInMachineLoopInfo(
1117*0fca6ea1SDimitry Andric MCRegister R) const {
1118*0fca6ea1SDimitry Andric return R == AArch64::VG;
1119*0fca6ea1SDimitry Andric }
1120