//===-- Thumb1InstrInfo.cpp - Thumb-1 Instruction Information -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the Thumb-1 implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#include "Thumb1InstrInfo.h"
#include "ARMSubtarget.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/CodeGen/LiveRegUnits.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/IR/Module.h"
#include "llvm/MC/MCInst.h"
#include "llvm/MC/MCInstBuilder.h"

using namespace llvm;

Thumb1InstrInfo::Thumb1InstrInfo(const ARMSubtarget &STI)
    : ARMBaseInstrInfo(STI) {}

/// Return the noop instruction to use for a noop.
/// Emitted as "mov r8, r8" (an always-predicated tMOVr of R8 to itself),
/// which has no architectural effect.
MCInst Thumb1InstrInfo::getNop() const {
  return MCInstBuilder(ARM::tMOVr)
      .addReg(ARM::R8)
      .addReg(ARM::R8)
      .addImm(ARMCC::AL)
      .addReg(0);
}

// Thumb-1 has no unindexed load/store forms; returning 0 tells callers no
// such opcode exists for any input opcode.
unsigned Thumb1InstrInfo::getUnindexedOpcode(unsigned Opc) const {
  return 0;
}

// Copy SrcReg to DestReg, working around the Thumb-1 restriction that
// "mov lo, lo" is unpredictable before ARMv6. Strategy, in order of
// preference:
//   1. Plain tMOVr when legal (v6+, or either register is high).
//   2. tMOVSr when CPSR is dead at the insertion point (MOVS of low regs is
//      valid on all Thumb-1 cores but clobbers the flags).
//   3. Bounce through a free allocatable high register (preferring R12).
//   4. As a last resort, push the source and pop it into the destination.
void Thumb1InstrInfo::copyPhysReg(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator I,
                                  const DebugLoc &DL, MCRegister DestReg,
                                  MCRegister SrcReg, bool KillSrc) const {
  // Need to check the arch.
  MachineFunction &MF = *MBB.getParent();
  const ARMSubtarget &st = MF.getSubtarget<ARMSubtarget>();

  assert(ARM::GPRRegClass.contains(DestReg, SrcReg) &&
         "Thumb1 can only copy GPR registers");

  // tMOVr is fine if we have v6 ops, or if either end of the copy is a high
  // register (high-register MOVs were always defined).
  if (st.hasV6Ops() || ARM::hGPRRegClass.contains(SrcReg) ||
      !ARM::tGPRRegClass.contains(DestReg))
    BuildMI(MBB, I, DL, get(ARM::tMOVr), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrc))
        .add(predOps(ARMCC::AL));
  else {
    // Pre-v6 low-to-low copy: compute liveness at the insertion point so we
    // know whether CPSR (and any candidate scratch register) is free.
    const TargetRegisterInfo *RegInfo = st.getRegisterInfo();
    LiveRegUnits UsedRegs(*RegInfo);
    UsedRegs.addLiveOuts(MBB);

    auto InstUpToI = MBB.end();
    while (InstUpToI != I)
      // The pre-decrement is on purpose here.
      // We want to have the liveness right before I.
      UsedRegs.stepBackward(*--InstUpToI);

    // If the flags are dead here, a single MOVS does the copy; mark CPSR as
    // a dead def so later passes know the flag clobber is harmless.
    if (UsedRegs.available(ARM::CPSR)) {
      BuildMI(MBB, I, DL, get(ARM::tMOVSr), DestReg)
          .addReg(SrcReg, getKillRegState(KillSrc))
          ->addRegisterDead(ARM::CPSR, RegInfo);
      return;
    }

    // Use high register to move source to destination
    // if movs is not an option.
    BitVector Allocatable = RegInfo->getAllocatableSet(
        MF, RegInfo->getRegClass(ARM::hGPRRegClassID));

    Register TmpReg = ARM::NoRegister;
    // Prefer R12 as it is known to not be preserved anyway
    if (UsedRegs.available(ARM::R12) && Allocatable.test(ARM::R12)) {
      TmpReg = ARM::R12;
    } else {
      for (Register Reg : Allocatable.set_bits()) {
        if (UsedRegs.available(Reg)) {
          TmpReg = Reg;
          break;
        }
      }
    }

    // Two legal high-register MOVs: src -> tmp (high), tmp -> dest.
    if (TmpReg) {
      BuildMI(MBB, I, DL, get(ARM::tMOVr), TmpReg)
          .addReg(SrcReg, getKillRegState(KillSrc))
          .add(predOps(ARMCC::AL));
      BuildMI(MBB, I, DL, get(ARM::tMOVr), DestReg)
          .addReg(TmpReg, getKillRegState(true))
          .add(predOps(ARMCC::AL));
      return;
    }

    // 'MOV lo, lo' is unpredictable on < v6, so use the stack to do it
    BuildMI(MBB, I, DL, get(ARM::tPUSH))
        .add(predOps(ARMCC::AL))
        .addReg(SrcReg, getKillRegState(KillSrc));
    BuildMI(MBB, I, DL, get(ARM::tPOP))
        .add(predOps(ARMCC::AL))
        .addReg(DestReg, getDefRegState(true));
  }
}

// Spill SrcReg to the stack slot FI using an SP-relative store (tSTRspi).
// Only low GPRs (or the tGPR class) can be stored this way in Thumb-1,
// which the assert enforces; the MMO records the fixed-stack access for
// the scheduler and alias analysis.
void Thumb1InstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                                          MachineBasicBlock::iterator I,
                                          Register SrcReg, bool isKill, int FI,
                                          const TargetRegisterClass *RC,
                                          const TargetRegisterInfo *TRI,
                                          Register VReg) const {
  assert((RC == &ARM::tGPRRegClass ||
          (SrcReg.isPhysical() && isARMLowRegister(SrcReg))) &&
         "Unknown regclass!");

  if (RC == &ARM::tGPRRegClass ||
      (SrcReg.isPhysical() && isARMLowRegister(SrcReg))) {
    DebugLoc DL;
    if (I != MBB.end()) DL = I->getDebugLoc();

    MachineFunction &MF = *MBB.getParent();
    MachineFrameInfo &MFI = MF.getFrameInfo();
    MachineMemOperand *MMO = MF.getMachineMemOperand(
        MachinePointerInfo::getFixedStack(MF, FI), MachineMemOperand::MOStore,
        MFI.getObjectSize(FI), MFI.getObjectAlign(FI));
    BuildMI(MBB, I, DL, get(ARM::tSTRspi))
        .addReg(SrcReg, getKillRegState(isKill))
        .addFrameIndex(FI)
        .addImm(0)
        .addMemOperand(MMO)
        .add(predOps(ARMCC::AL));
  }
}

// Reload DestReg from the stack slot FI using an SP-relative load (tLDRspi).
// Mirrors storeRegToStackSlot: restricted to low registers / tGPR, with a
// fixed-stack MMO describing the load.
void Thumb1InstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
                                           MachineBasicBlock::iterator I,
                                           Register DestReg, int FI,
                                           const TargetRegisterClass *RC,
                                           const TargetRegisterInfo *TRI,
                                           Register VReg) const {
  assert((RC->hasSuperClassEq(&ARM::tGPRRegClass) ||
          (DestReg.isPhysical() && isARMLowRegister(DestReg))) &&
         "Unknown regclass!");

  if (RC->hasSuperClassEq(&ARM::tGPRRegClass) ||
      (DestReg.isPhysical() && isARMLowRegister(DestReg))) {
    DebugLoc DL;
    if (I != MBB.end()) DL = I->getDebugLoc();

    MachineFunction &MF = *MBB.getParent();
    MachineFrameInfo &MFI = MF.getFrameInfo();
    MachineMemOperand *MMO = MF.getMachineMemOperand(
        MachinePointerInfo::getFixedStack(MF, FI), MachineMemOperand::MOLoad,
        MFI.getObjectSize(FI), MFI.getObjectAlign(FI));
    BuildMI(MBB, I, DL, get(ARM::tLDRspi), DestReg)
        .addFrameIndex(FI)
        .addImm(0)
        .addMemOperand(MMO)
        .add(predOps(ARMCC::AL));
  }
}

// Expand the LOAD_STACK_GUARD pseudo. Picks how the guard's address is
// materialized:
//   - non-DSO-local globals: PC-relative literal (tLDRLIT_ga_pcrel);
//   - execute-only code: build the address with a mov-immediate sequence
//     (t2MOVi32imm on v8-M Baseline, otherwise tMOVi32imm), since literal
//     pools are not readable;
//   - otherwise: absolute literal (tLDRLIT_ga_abs).
// The actual guard value is then loaded with tLDRi. TLS-based guards are
// rejected up front (unsupported on Thumb-1).
void Thumb1InstrInfo::expandLoadStackGuard(
    MachineBasicBlock::iterator MI) const {
  MachineFunction &MF = *MI->getParent()->getParent();
  const ARMSubtarget &ST = MF.getSubtarget<ARMSubtarget>();
  const auto *GV = cast<GlobalValue>((*MI->memoperands_begin())->getValue());

  assert(MF.getFunction().getParent()->getStackProtectorGuard() != "tls" &&
         "TLS stack protector not supported for Thumb1 targets");

  unsigned Instr;
  if (!GV->isDSOLocal())
    Instr = ARM::tLDRLIT_ga_pcrel;
  else if (ST.genExecuteOnly() && ST.hasV8MBaselineOps())
    Instr = ARM::t2MOVi32imm;
  else if (ST.genExecuteOnly())
    Instr = ARM::tMOVi32imm;
  else
    Instr = ARM::tLDRLIT_ga_abs;
  expandLoadStackGuardBase(MI, Instr, ARM::tLDRi);
}

// In Thumb1 the scheduler may need to schedule a cross-copy between GPRS and
// CPSR but this is not always possible there, so allow the Scheduler to clone
// tADCS and tSBCS even if they have glue.
// FIXME. Actually implement the cross-copy where it is possible (post v6)
// because these copies entail more spilling.
bool Thumb1InstrInfo::canCopyGluedNodeDuringSchedule(SDNode *N) const {
  unsigned Opcode = N->getMachineOpcode();
  if (Opcode == ARM::tADCS || Opcode == ARM::tSBCS)
    return true;

  return false;
}