1*0b57cec5SDimitry Andric //===-- ARMBaseInstrInfo.cpp - ARM Instruction Information ----------------===// 2*0b57cec5SDimitry Andric // 3*0b57cec5SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4*0b57cec5SDimitry Andric // See https://llvm.org/LICENSE.txt for license information. 5*0b57cec5SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6*0b57cec5SDimitry Andric // 7*0b57cec5SDimitry Andric //===----------------------------------------------------------------------===// 8*0b57cec5SDimitry Andric // 9*0b57cec5SDimitry Andric // This file contains the Base ARM implementation of the TargetInstrInfo class. 10*0b57cec5SDimitry Andric // 11*0b57cec5SDimitry Andric //===----------------------------------------------------------------------===// 12*0b57cec5SDimitry Andric 13*0b57cec5SDimitry Andric #include "ARMBaseInstrInfo.h" 14*0b57cec5SDimitry Andric #include "ARMBaseRegisterInfo.h" 15*0b57cec5SDimitry Andric #include "ARMConstantPoolValue.h" 16*0b57cec5SDimitry Andric #include "ARMFeatures.h" 17*0b57cec5SDimitry Andric #include "ARMHazardRecognizer.h" 18*0b57cec5SDimitry Andric #include "ARMMachineFunctionInfo.h" 19*0b57cec5SDimitry Andric #include "ARMSubtarget.h" 20*0b57cec5SDimitry Andric #include "MCTargetDesc/ARMAddressingModes.h" 21*0b57cec5SDimitry Andric #include "MCTargetDesc/ARMBaseInfo.h" 22*0b57cec5SDimitry Andric #include "llvm/ADT/DenseMap.h" 23*0b57cec5SDimitry Andric #include "llvm/ADT/STLExtras.h" 24*0b57cec5SDimitry Andric #include "llvm/ADT/SmallSet.h" 25*0b57cec5SDimitry Andric #include "llvm/ADT/SmallVector.h" 26*0b57cec5SDimitry Andric #include "llvm/ADT/Triple.h" 27*0b57cec5SDimitry Andric #include "llvm/CodeGen/LiveVariables.h" 28*0b57cec5SDimitry Andric #include "llvm/CodeGen/MachineBasicBlock.h" 29*0b57cec5SDimitry Andric #include "llvm/CodeGen/MachineConstantPool.h" 30*0b57cec5SDimitry Andric #include "llvm/CodeGen/MachineFrameInfo.h" 31*0b57cec5SDimitry Andric 
#include "llvm/CodeGen/MachineFunction.h" 32*0b57cec5SDimitry Andric #include "llvm/CodeGen/MachineInstr.h" 33*0b57cec5SDimitry Andric #include "llvm/CodeGen/MachineInstrBuilder.h" 34*0b57cec5SDimitry Andric #include "llvm/CodeGen/MachineMemOperand.h" 35*0b57cec5SDimitry Andric #include "llvm/CodeGen/MachineOperand.h" 36*0b57cec5SDimitry Andric #include "llvm/CodeGen/MachineRegisterInfo.h" 37*0b57cec5SDimitry Andric #include "llvm/CodeGen/ScoreboardHazardRecognizer.h" 38*0b57cec5SDimitry Andric #include "llvm/CodeGen/SelectionDAGNodes.h" 39*0b57cec5SDimitry Andric #include "llvm/CodeGen/TargetInstrInfo.h" 40*0b57cec5SDimitry Andric #include "llvm/CodeGen/TargetRegisterInfo.h" 41*0b57cec5SDimitry Andric #include "llvm/CodeGen/TargetSchedule.h" 42*0b57cec5SDimitry Andric #include "llvm/IR/Attributes.h" 43*0b57cec5SDimitry Andric #include "llvm/IR/Constants.h" 44*0b57cec5SDimitry Andric #include "llvm/IR/DebugLoc.h" 45*0b57cec5SDimitry Andric #include "llvm/IR/Function.h" 46*0b57cec5SDimitry Andric #include "llvm/IR/GlobalValue.h" 47*0b57cec5SDimitry Andric #include "llvm/MC/MCAsmInfo.h" 48*0b57cec5SDimitry Andric #include "llvm/MC/MCInstrDesc.h" 49*0b57cec5SDimitry Andric #include "llvm/MC/MCInstrItineraries.h" 50*0b57cec5SDimitry Andric #include "llvm/Support/BranchProbability.h" 51*0b57cec5SDimitry Andric #include "llvm/Support/Casting.h" 52*0b57cec5SDimitry Andric #include "llvm/Support/CommandLine.h" 53*0b57cec5SDimitry Andric #include "llvm/Support/Compiler.h" 54*0b57cec5SDimitry Andric #include "llvm/Support/Debug.h" 55*0b57cec5SDimitry Andric #include "llvm/Support/ErrorHandling.h" 56*0b57cec5SDimitry Andric #include "llvm/Support/raw_ostream.h" 57*0b57cec5SDimitry Andric #include "llvm/Target/TargetMachine.h" 58*0b57cec5SDimitry Andric #include <algorithm> 59*0b57cec5SDimitry Andric #include <cassert> 60*0b57cec5SDimitry Andric #include <cstdint> 61*0b57cec5SDimitry Andric #include <iterator> 62*0b57cec5SDimitry Andric #include <new> 63*0b57cec5SDimitry 
Andric #include <utility> 64*0b57cec5SDimitry Andric #include <vector> 65*0b57cec5SDimitry Andric 66*0b57cec5SDimitry Andric using namespace llvm; 67*0b57cec5SDimitry Andric 68*0b57cec5SDimitry Andric #define DEBUG_TYPE "arm-instrinfo" 69*0b57cec5SDimitry Andric 70*0b57cec5SDimitry Andric #define GET_INSTRINFO_CTOR_DTOR 71*0b57cec5SDimitry Andric #include "ARMGenInstrInfo.inc" 72*0b57cec5SDimitry Andric 73*0b57cec5SDimitry Andric static cl::opt<bool> 74*0b57cec5SDimitry Andric EnableARM3Addr("enable-arm-3-addr-conv", cl::Hidden, 75*0b57cec5SDimitry Andric cl::desc("Enable ARM 2-addr to 3-addr conv")); 76*0b57cec5SDimitry Andric 77*0b57cec5SDimitry Andric /// ARM_MLxEntry - Record information about MLA / MLS instructions. 78*0b57cec5SDimitry Andric struct ARM_MLxEntry { 79*0b57cec5SDimitry Andric uint16_t MLxOpc; // MLA / MLS opcode 80*0b57cec5SDimitry Andric uint16_t MulOpc; // Expanded multiplication opcode 81*0b57cec5SDimitry Andric uint16_t AddSubOpc; // Expanded add / sub opcode 82*0b57cec5SDimitry Andric bool NegAcc; // True if the acc is negated before the add / sub. 83*0b57cec5SDimitry Andric bool HasLane; // True if instruction has an extra "lane" operand. 
84*0b57cec5SDimitry Andric }; 85*0b57cec5SDimitry Andric 86*0b57cec5SDimitry Andric static const ARM_MLxEntry ARM_MLxTable[] = { 87*0b57cec5SDimitry Andric // MLxOpc, MulOpc, AddSubOpc, NegAcc, HasLane 88*0b57cec5SDimitry Andric // fp scalar ops 89*0b57cec5SDimitry Andric { ARM::VMLAS, ARM::VMULS, ARM::VADDS, false, false }, 90*0b57cec5SDimitry Andric { ARM::VMLSS, ARM::VMULS, ARM::VSUBS, false, false }, 91*0b57cec5SDimitry Andric { ARM::VMLAD, ARM::VMULD, ARM::VADDD, false, false }, 92*0b57cec5SDimitry Andric { ARM::VMLSD, ARM::VMULD, ARM::VSUBD, false, false }, 93*0b57cec5SDimitry Andric { ARM::VNMLAS, ARM::VNMULS, ARM::VSUBS, true, false }, 94*0b57cec5SDimitry Andric { ARM::VNMLSS, ARM::VMULS, ARM::VSUBS, true, false }, 95*0b57cec5SDimitry Andric { ARM::VNMLAD, ARM::VNMULD, ARM::VSUBD, true, false }, 96*0b57cec5SDimitry Andric { ARM::VNMLSD, ARM::VMULD, ARM::VSUBD, true, false }, 97*0b57cec5SDimitry Andric 98*0b57cec5SDimitry Andric // fp SIMD ops 99*0b57cec5SDimitry Andric { ARM::VMLAfd, ARM::VMULfd, ARM::VADDfd, false, false }, 100*0b57cec5SDimitry Andric { ARM::VMLSfd, ARM::VMULfd, ARM::VSUBfd, false, false }, 101*0b57cec5SDimitry Andric { ARM::VMLAfq, ARM::VMULfq, ARM::VADDfq, false, false }, 102*0b57cec5SDimitry Andric { ARM::VMLSfq, ARM::VMULfq, ARM::VSUBfq, false, false }, 103*0b57cec5SDimitry Andric { ARM::VMLAslfd, ARM::VMULslfd, ARM::VADDfd, false, true }, 104*0b57cec5SDimitry Andric { ARM::VMLSslfd, ARM::VMULslfd, ARM::VSUBfd, false, true }, 105*0b57cec5SDimitry Andric { ARM::VMLAslfq, ARM::VMULslfq, ARM::VADDfq, false, true }, 106*0b57cec5SDimitry Andric { ARM::VMLSslfq, ARM::VMULslfq, ARM::VSUBfq, false, true }, 107*0b57cec5SDimitry Andric }; 108*0b57cec5SDimitry Andric 109*0b57cec5SDimitry Andric ARMBaseInstrInfo::ARMBaseInstrInfo(const ARMSubtarget& STI) 110*0b57cec5SDimitry Andric : ARMGenInstrInfo(ARM::ADJCALLSTACKDOWN, ARM::ADJCALLSTACKUP), 111*0b57cec5SDimitry Andric Subtarget(STI) { 112*0b57cec5SDimitry Andric for (unsigned i = 0, e = 
array_lengthof(ARM_MLxTable); i != e; ++i) { 113*0b57cec5SDimitry Andric if (!MLxEntryMap.insert(std::make_pair(ARM_MLxTable[i].MLxOpc, i)).second) 114*0b57cec5SDimitry Andric llvm_unreachable("Duplicated entries?"); 115*0b57cec5SDimitry Andric MLxHazardOpcodes.insert(ARM_MLxTable[i].AddSubOpc); 116*0b57cec5SDimitry Andric MLxHazardOpcodes.insert(ARM_MLxTable[i].MulOpc); 117*0b57cec5SDimitry Andric } 118*0b57cec5SDimitry Andric } 119*0b57cec5SDimitry Andric 120*0b57cec5SDimitry Andric // Use a ScoreboardHazardRecognizer for prepass ARM scheduling. TargetInstrImpl 121*0b57cec5SDimitry Andric // currently defaults to no prepass hazard recognizer. 122*0b57cec5SDimitry Andric ScheduleHazardRecognizer * 123*0b57cec5SDimitry Andric ARMBaseInstrInfo::CreateTargetHazardRecognizer(const TargetSubtargetInfo *STI, 124*0b57cec5SDimitry Andric const ScheduleDAG *DAG) const { 125*0b57cec5SDimitry Andric if (usePreRAHazardRecognizer()) { 126*0b57cec5SDimitry Andric const InstrItineraryData *II = 127*0b57cec5SDimitry Andric static_cast<const ARMSubtarget *>(STI)->getInstrItineraryData(); 128*0b57cec5SDimitry Andric return new ScoreboardHazardRecognizer(II, DAG, "pre-RA-sched"); 129*0b57cec5SDimitry Andric } 130*0b57cec5SDimitry Andric return TargetInstrInfo::CreateTargetHazardRecognizer(STI, DAG); 131*0b57cec5SDimitry Andric } 132*0b57cec5SDimitry Andric 133*0b57cec5SDimitry Andric ScheduleHazardRecognizer *ARMBaseInstrInfo:: 134*0b57cec5SDimitry Andric CreateTargetPostRAHazardRecognizer(const InstrItineraryData *II, 135*0b57cec5SDimitry Andric const ScheduleDAG *DAG) const { 136*0b57cec5SDimitry Andric if (Subtarget.isThumb2() || Subtarget.hasVFP2Base()) 137*0b57cec5SDimitry Andric return (ScheduleHazardRecognizer *)new ARMHazardRecognizer(II, DAG); 138*0b57cec5SDimitry Andric return TargetInstrInfo::CreateTargetPostRAHazardRecognizer(II, DAG); 139*0b57cec5SDimitry Andric } 140*0b57cec5SDimitry Andric 141*0b57cec5SDimitry Andric MachineInstr 
*ARMBaseInstrInfo::convertToThreeAddress( 142*0b57cec5SDimitry Andric MachineFunction::iterator &MFI, MachineInstr &MI, LiveVariables *LV) const { 143*0b57cec5SDimitry Andric // FIXME: Thumb2 support. 144*0b57cec5SDimitry Andric 145*0b57cec5SDimitry Andric if (!EnableARM3Addr) 146*0b57cec5SDimitry Andric return nullptr; 147*0b57cec5SDimitry Andric 148*0b57cec5SDimitry Andric MachineFunction &MF = *MI.getParent()->getParent(); 149*0b57cec5SDimitry Andric uint64_t TSFlags = MI.getDesc().TSFlags; 150*0b57cec5SDimitry Andric bool isPre = false; 151*0b57cec5SDimitry Andric switch ((TSFlags & ARMII::IndexModeMask) >> ARMII::IndexModeShift) { 152*0b57cec5SDimitry Andric default: return nullptr; 153*0b57cec5SDimitry Andric case ARMII::IndexModePre: 154*0b57cec5SDimitry Andric isPre = true; 155*0b57cec5SDimitry Andric break; 156*0b57cec5SDimitry Andric case ARMII::IndexModePost: 157*0b57cec5SDimitry Andric break; 158*0b57cec5SDimitry Andric } 159*0b57cec5SDimitry Andric 160*0b57cec5SDimitry Andric // Try splitting an indexed load/store to an un-indexed one plus an add/sub 161*0b57cec5SDimitry Andric // operation. 162*0b57cec5SDimitry Andric unsigned MemOpc = getUnindexedOpcode(MI.getOpcode()); 163*0b57cec5SDimitry Andric if (MemOpc == 0) 164*0b57cec5SDimitry Andric return nullptr; 165*0b57cec5SDimitry Andric 166*0b57cec5SDimitry Andric MachineInstr *UpdateMI = nullptr; 167*0b57cec5SDimitry Andric MachineInstr *MemMI = nullptr; 168*0b57cec5SDimitry Andric unsigned AddrMode = (TSFlags & ARMII::AddrModeMask); 169*0b57cec5SDimitry Andric const MCInstrDesc &MCID = MI.getDesc(); 170*0b57cec5SDimitry Andric unsigned NumOps = MCID.getNumOperands(); 171*0b57cec5SDimitry Andric bool isLoad = !MI.mayStore(); 172*0b57cec5SDimitry Andric const MachineOperand &WB = isLoad ? 
MI.getOperand(1) : MI.getOperand(0); 173*0b57cec5SDimitry Andric const MachineOperand &Base = MI.getOperand(2); 174*0b57cec5SDimitry Andric const MachineOperand &Offset = MI.getOperand(NumOps - 3); 175*0b57cec5SDimitry Andric unsigned WBReg = WB.getReg(); 176*0b57cec5SDimitry Andric unsigned BaseReg = Base.getReg(); 177*0b57cec5SDimitry Andric unsigned OffReg = Offset.getReg(); 178*0b57cec5SDimitry Andric unsigned OffImm = MI.getOperand(NumOps - 2).getImm(); 179*0b57cec5SDimitry Andric ARMCC::CondCodes Pred = (ARMCC::CondCodes)MI.getOperand(NumOps - 1).getImm(); 180*0b57cec5SDimitry Andric switch (AddrMode) { 181*0b57cec5SDimitry Andric default: llvm_unreachable("Unknown indexed op!"); 182*0b57cec5SDimitry Andric case ARMII::AddrMode2: { 183*0b57cec5SDimitry Andric bool isSub = ARM_AM::getAM2Op(OffImm) == ARM_AM::sub; 184*0b57cec5SDimitry Andric unsigned Amt = ARM_AM::getAM2Offset(OffImm); 185*0b57cec5SDimitry Andric if (OffReg == 0) { 186*0b57cec5SDimitry Andric if (ARM_AM::getSOImmVal(Amt) == -1) 187*0b57cec5SDimitry Andric // Can't encode it in a so_imm operand. This transformation will 188*0b57cec5SDimitry Andric // add more than 1 instruction. Abandon! 189*0b57cec5SDimitry Andric return nullptr; 190*0b57cec5SDimitry Andric UpdateMI = BuildMI(MF, MI.getDebugLoc(), 191*0b57cec5SDimitry Andric get(isSub ? ARM::SUBri : ARM::ADDri), WBReg) 192*0b57cec5SDimitry Andric .addReg(BaseReg) 193*0b57cec5SDimitry Andric .addImm(Amt) 194*0b57cec5SDimitry Andric .add(predOps(Pred)) 195*0b57cec5SDimitry Andric .add(condCodeOp()); 196*0b57cec5SDimitry Andric } else if (Amt != 0) { 197*0b57cec5SDimitry Andric ARM_AM::ShiftOpc ShOpc = ARM_AM::getAM2ShiftOpc(OffImm); 198*0b57cec5SDimitry Andric unsigned SOOpc = ARM_AM::getSORegOpc(ShOpc, Amt); 199*0b57cec5SDimitry Andric UpdateMI = BuildMI(MF, MI.getDebugLoc(), 200*0b57cec5SDimitry Andric get(isSub ? 
ARM::SUBrsi : ARM::ADDrsi), WBReg) 201*0b57cec5SDimitry Andric .addReg(BaseReg) 202*0b57cec5SDimitry Andric .addReg(OffReg) 203*0b57cec5SDimitry Andric .addReg(0) 204*0b57cec5SDimitry Andric .addImm(SOOpc) 205*0b57cec5SDimitry Andric .add(predOps(Pred)) 206*0b57cec5SDimitry Andric .add(condCodeOp()); 207*0b57cec5SDimitry Andric } else 208*0b57cec5SDimitry Andric UpdateMI = BuildMI(MF, MI.getDebugLoc(), 209*0b57cec5SDimitry Andric get(isSub ? ARM::SUBrr : ARM::ADDrr), WBReg) 210*0b57cec5SDimitry Andric .addReg(BaseReg) 211*0b57cec5SDimitry Andric .addReg(OffReg) 212*0b57cec5SDimitry Andric .add(predOps(Pred)) 213*0b57cec5SDimitry Andric .add(condCodeOp()); 214*0b57cec5SDimitry Andric break; 215*0b57cec5SDimitry Andric } 216*0b57cec5SDimitry Andric case ARMII::AddrMode3 : { 217*0b57cec5SDimitry Andric bool isSub = ARM_AM::getAM3Op(OffImm) == ARM_AM::sub; 218*0b57cec5SDimitry Andric unsigned Amt = ARM_AM::getAM3Offset(OffImm); 219*0b57cec5SDimitry Andric if (OffReg == 0) 220*0b57cec5SDimitry Andric // Immediate is 8-bits. It's guaranteed to fit in a so_imm operand. 221*0b57cec5SDimitry Andric UpdateMI = BuildMI(MF, MI.getDebugLoc(), 222*0b57cec5SDimitry Andric get(isSub ? ARM::SUBri : ARM::ADDri), WBReg) 223*0b57cec5SDimitry Andric .addReg(BaseReg) 224*0b57cec5SDimitry Andric .addImm(Amt) 225*0b57cec5SDimitry Andric .add(predOps(Pred)) 226*0b57cec5SDimitry Andric .add(condCodeOp()); 227*0b57cec5SDimitry Andric else 228*0b57cec5SDimitry Andric UpdateMI = BuildMI(MF, MI.getDebugLoc(), 229*0b57cec5SDimitry Andric get(isSub ? 
ARM::SUBrr : ARM::ADDrr), WBReg) 230*0b57cec5SDimitry Andric .addReg(BaseReg) 231*0b57cec5SDimitry Andric .addReg(OffReg) 232*0b57cec5SDimitry Andric .add(predOps(Pred)) 233*0b57cec5SDimitry Andric .add(condCodeOp()); 234*0b57cec5SDimitry Andric break; 235*0b57cec5SDimitry Andric } 236*0b57cec5SDimitry Andric } 237*0b57cec5SDimitry Andric 238*0b57cec5SDimitry Andric std::vector<MachineInstr*> NewMIs; 239*0b57cec5SDimitry Andric if (isPre) { 240*0b57cec5SDimitry Andric if (isLoad) 241*0b57cec5SDimitry Andric MemMI = 242*0b57cec5SDimitry Andric BuildMI(MF, MI.getDebugLoc(), get(MemOpc), MI.getOperand(0).getReg()) 243*0b57cec5SDimitry Andric .addReg(WBReg) 244*0b57cec5SDimitry Andric .addImm(0) 245*0b57cec5SDimitry Andric .addImm(Pred); 246*0b57cec5SDimitry Andric else 247*0b57cec5SDimitry Andric MemMI = BuildMI(MF, MI.getDebugLoc(), get(MemOpc)) 248*0b57cec5SDimitry Andric .addReg(MI.getOperand(1).getReg()) 249*0b57cec5SDimitry Andric .addReg(WBReg) 250*0b57cec5SDimitry Andric .addReg(0) 251*0b57cec5SDimitry Andric .addImm(0) 252*0b57cec5SDimitry Andric .addImm(Pred); 253*0b57cec5SDimitry Andric NewMIs.push_back(MemMI); 254*0b57cec5SDimitry Andric NewMIs.push_back(UpdateMI); 255*0b57cec5SDimitry Andric } else { 256*0b57cec5SDimitry Andric if (isLoad) 257*0b57cec5SDimitry Andric MemMI = 258*0b57cec5SDimitry Andric BuildMI(MF, MI.getDebugLoc(), get(MemOpc), MI.getOperand(0).getReg()) 259*0b57cec5SDimitry Andric .addReg(BaseReg) 260*0b57cec5SDimitry Andric .addImm(0) 261*0b57cec5SDimitry Andric .addImm(Pred); 262*0b57cec5SDimitry Andric else 263*0b57cec5SDimitry Andric MemMI = BuildMI(MF, MI.getDebugLoc(), get(MemOpc)) 264*0b57cec5SDimitry Andric .addReg(MI.getOperand(1).getReg()) 265*0b57cec5SDimitry Andric .addReg(BaseReg) 266*0b57cec5SDimitry Andric .addReg(0) 267*0b57cec5SDimitry Andric .addImm(0) 268*0b57cec5SDimitry Andric .addImm(Pred); 269*0b57cec5SDimitry Andric if (WB.isDead()) 270*0b57cec5SDimitry Andric UpdateMI->getOperand(0).setIsDead(); 
271*0b57cec5SDimitry Andric NewMIs.push_back(UpdateMI); 272*0b57cec5SDimitry Andric NewMIs.push_back(MemMI); 273*0b57cec5SDimitry Andric } 274*0b57cec5SDimitry Andric 275*0b57cec5SDimitry Andric // Transfer LiveVariables states, kill / dead info. 276*0b57cec5SDimitry Andric if (LV) { 277*0b57cec5SDimitry Andric for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) { 278*0b57cec5SDimitry Andric MachineOperand &MO = MI.getOperand(i); 279*0b57cec5SDimitry Andric if (MO.isReg() && TargetRegisterInfo::isVirtualRegister(MO.getReg())) { 280*0b57cec5SDimitry Andric unsigned Reg = MO.getReg(); 281*0b57cec5SDimitry Andric 282*0b57cec5SDimitry Andric LiveVariables::VarInfo &VI = LV->getVarInfo(Reg); 283*0b57cec5SDimitry Andric if (MO.isDef()) { 284*0b57cec5SDimitry Andric MachineInstr *NewMI = (Reg == WBReg) ? UpdateMI : MemMI; 285*0b57cec5SDimitry Andric if (MO.isDead()) 286*0b57cec5SDimitry Andric LV->addVirtualRegisterDead(Reg, *NewMI); 287*0b57cec5SDimitry Andric } 288*0b57cec5SDimitry Andric if (MO.isUse() && MO.isKill()) { 289*0b57cec5SDimitry Andric for (unsigned j = 0; j < 2; ++j) { 290*0b57cec5SDimitry Andric // Look at the two new MI's in reverse order. 
291*0b57cec5SDimitry Andric MachineInstr *NewMI = NewMIs[j]; 292*0b57cec5SDimitry Andric if (!NewMI->readsRegister(Reg)) 293*0b57cec5SDimitry Andric continue; 294*0b57cec5SDimitry Andric LV->addVirtualRegisterKilled(Reg, *NewMI); 295*0b57cec5SDimitry Andric if (VI.removeKill(MI)) 296*0b57cec5SDimitry Andric VI.Kills.push_back(NewMI); 297*0b57cec5SDimitry Andric break; 298*0b57cec5SDimitry Andric } 299*0b57cec5SDimitry Andric } 300*0b57cec5SDimitry Andric } 301*0b57cec5SDimitry Andric } 302*0b57cec5SDimitry Andric } 303*0b57cec5SDimitry Andric 304*0b57cec5SDimitry Andric MachineBasicBlock::iterator MBBI = MI.getIterator(); 305*0b57cec5SDimitry Andric MFI->insert(MBBI, NewMIs[1]); 306*0b57cec5SDimitry Andric MFI->insert(MBBI, NewMIs[0]); 307*0b57cec5SDimitry Andric return NewMIs[0]; 308*0b57cec5SDimitry Andric } 309*0b57cec5SDimitry Andric 310*0b57cec5SDimitry Andric // Branch analysis. 311*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::analyzeBranch(MachineBasicBlock &MBB, 312*0b57cec5SDimitry Andric MachineBasicBlock *&TBB, 313*0b57cec5SDimitry Andric MachineBasicBlock *&FBB, 314*0b57cec5SDimitry Andric SmallVectorImpl<MachineOperand> &Cond, 315*0b57cec5SDimitry Andric bool AllowModify) const { 316*0b57cec5SDimitry Andric TBB = nullptr; 317*0b57cec5SDimitry Andric FBB = nullptr; 318*0b57cec5SDimitry Andric 319*0b57cec5SDimitry Andric MachineBasicBlock::iterator I = MBB.end(); 320*0b57cec5SDimitry Andric if (I == MBB.begin()) 321*0b57cec5SDimitry Andric return false; // Empty blocks are easy. 322*0b57cec5SDimitry Andric --I; 323*0b57cec5SDimitry Andric 324*0b57cec5SDimitry Andric // Walk backwards from the end of the basic block until the branch is 325*0b57cec5SDimitry Andric // analyzed or we give up. 326*0b57cec5SDimitry Andric while (isPredicated(*I) || I->isTerminator() || I->isDebugValue()) { 327*0b57cec5SDimitry Andric // Flag to be raised on unanalyzeable instructions. 
This is useful in cases 328*0b57cec5SDimitry Andric // where we want to clean up on the end of the basic block before we bail 329*0b57cec5SDimitry Andric // out. 330*0b57cec5SDimitry Andric bool CantAnalyze = false; 331*0b57cec5SDimitry Andric 332*0b57cec5SDimitry Andric // Skip over DEBUG values and predicated nonterminators. 333*0b57cec5SDimitry Andric while (I->isDebugInstr() || !I->isTerminator()) { 334*0b57cec5SDimitry Andric if (I == MBB.begin()) 335*0b57cec5SDimitry Andric return false; 336*0b57cec5SDimitry Andric --I; 337*0b57cec5SDimitry Andric } 338*0b57cec5SDimitry Andric 339*0b57cec5SDimitry Andric if (isIndirectBranchOpcode(I->getOpcode()) || 340*0b57cec5SDimitry Andric isJumpTableBranchOpcode(I->getOpcode())) { 341*0b57cec5SDimitry Andric // Indirect branches and jump tables can't be analyzed, but we still want 342*0b57cec5SDimitry Andric // to clean up any instructions at the tail of the basic block. 343*0b57cec5SDimitry Andric CantAnalyze = true; 344*0b57cec5SDimitry Andric } else if (isUncondBranchOpcode(I->getOpcode())) { 345*0b57cec5SDimitry Andric TBB = I->getOperand(0).getMBB(); 346*0b57cec5SDimitry Andric } else if (isCondBranchOpcode(I->getOpcode())) { 347*0b57cec5SDimitry Andric // Bail out if we encounter multiple conditional branches. 348*0b57cec5SDimitry Andric if (!Cond.empty()) 349*0b57cec5SDimitry Andric return true; 350*0b57cec5SDimitry Andric 351*0b57cec5SDimitry Andric assert(!FBB && "FBB should have been null."); 352*0b57cec5SDimitry Andric FBB = TBB; 353*0b57cec5SDimitry Andric TBB = I->getOperand(0).getMBB(); 354*0b57cec5SDimitry Andric Cond.push_back(I->getOperand(1)); 355*0b57cec5SDimitry Andric Cond.push_back(I->getOperand(2)); 356*0b57cec5SDimitry Andric } else if (I->isReturn()) { 357*0b57cec5SDimitry Andric // Returns can't be analyzed, but we should run cleanup. 
358*0b57cec5SDimitry Andric CantAnalyze = !isPredicated(*I); 359*0b57cec5SDimitry Andric } else { 360*0b57cec5SDimitry Andric // We encountered other unrecognized terminator. Bail out immediately. 361*0b57cec5SDimitry Andric return true; 362*0b57cec5SDimitry Andric } 363*0b57cec5SDimitry Andric 364*0b57cec5SDimitry Andric // Cleanup code - to be run for unpredicated unconditional branches and 365*0b57cec5SDimitry Andric // returns. 366*0b57cec5SDimitry Andric if (!isPredicated(*I) && 367*0b57cec5SDimitry Andric (isUncondBranchOpcode(I->getOpcode()) || 368*0b57cec5SDimitry Andric isIndirectBranchOpcode(I->getOpcode()) || 369*0b57cec5SDimitry Andric isJumpTableBranchOpcode(I->getOpcode()) || 370*0b57cec5SDimitry Andric I->isReturn())) { 371*0b57cec5SDimitry Andric // Forget any previous condition branch information - it no longer applies. 372*0b57cec5SDimitry Andric Cond.clear(); 373*0b57cec5SDimitry Andric FBB = nullptr; 374*0b57cec5SDimitry Andric 375*0b57cec5SDimitry Andric // If we can modify the function, delete everything below this 376*0b57cec5SDimitry Andric // unconditional branch. 
377*0b57cec5SDimitry Andric if (AllowModify) { 378*0b57cec5SDimitry Andric MachineBasicBlock::iterator DI = std::next(I); 379*0b57cec5SDimitry Andric while (DI != MBB.end()) { 380*0b57cec5SDimitry Andric MachineInstr &InstToDelete = *DI; 381*0b57cec5SDimitry Andric ++DI; 382*0b57cec5SDimitry Andric InstToDelete.eraseFromParent(); 383*0b57cec5SDimitry Andric } 384*0b57cec5SDimitry Andric } 385*0b57cec5SDimitry Andric } 386*0b57cec5SDimitry Andric 387*0b57cec5SDimitry Andric if (CantAnalyze) 388*0b57cec5SDimitry Andric return true; 389*0b57cec5SDimitry Andric 390*0b57cec5SDimitry Andric if (I == MBB.begin()) 391*0b57cec5SDimitry Andric return false; 392*0b57cec5SDimitry Andric 393*0b57cec5SDimitry Andric --I; 394*0b57cec5SDimitry Andric } 395*0b57cec5SDimitry Andric 396*0b57cec5SDimitry Andric // We made it past the terminators without bailing out - we must have 397*0b57cec5SDimitry Andric // analyzed this branch successfully. 398*0b57cec5SDimitry Andric return false; 399*0b57cec5SDimitry Andric } 400*0b57cec5SDimitry Andric 401*0b57cec5SDimitry Andric unsigned ARMBaseInstrInfo::removeBranch(MachineBasicBlock &MBB, 402*0b57cec5SDimitry Andric int *BytesRemoved) const { 403*0b57cec5SDimitry Andric assert(!BytesRemoved && "code size not handled"); 404*0b57cec5SDimitry Andric 405*0b57cec5SDimitry Andric MachineBasicBlock::iterator I = MBB.getLastNonDebugInstr(); 406*0b57cec5SDimitry Andric if (I == MBB.end()) 407*0b57cec5SDimitry Andric return 0; 408*0b57cec5SDimitry Andric 409*0b57cec5SDimitry Andric if (!isUncondBranchOpcode(I->getOpcode()) && 410*0b57cec5SDimitry Andric !isCondBranchOpcode(I->getOpcode())) 411*0b57cec5SDimitry Andric return 0; 412*0b57cec5SDimitry Andric 413*0b57cec5SDimitry Andric // Remove the branch. 
414*0b57cec5SDimitry Andric I->eraseFromParent(); 415*0b57cec5SDimitry Andric 416*0b57cec5SDimitry Andric I = MBB.end(); 417*0b57cec5SDimitry Andric 418*0b57cec5SDimitry Andric if (I == MBB.begin()) return 1; 419*0b57cec5SDimitry Andric --I; 420*0b57cec5SDimitry Andric if (!isCondBranchOpcode(I->getOpcode())) 421*0b57cec5SDimitry Andric return 1; 422*0b57cec5SDimitry Andric 423*0b57cec5SDimitry Andric // Remove the branch. 424*0b57cec5SDimitry Andric I->eraseFromParent(); 425*0b57cec5SDimitry Andric return 2; 426*0b57cec5SDimitry Andric } 427*0b57cec5SDimitry Andric 428*0b57cec5SDimitry Andric unsigned ARMBaseInstrInfo::insertBranch(MachineBasicBlock &MBB, 429*0b57cec5SDimitry Andric MachineBasicBlock *TBB, 430*0b57cec5SDimitry Andric MachineBasicBlock *FBB, 431*0b57cec5SDimitry Andric ArrayRef<MachineOperand> Cond, 432*0b57cec5SDimitry Andric const DebugLoc &DL, 433*0b57cec5SDimitry Andric int *BytesAdded) const { 434*0b57cec5SDimitry Andric assert(!BytesAdded && "code size not handled"); 435*0b57cec5SDimitry Andric ARMFunctionInfo *AFI = MBB.getParent()->getInfo<ARMFunctionInfo>(); 436*0b57cec5SDimitry Andric int BOpc = !AFI->isThumbFunction() 437*0b57cec5SDimitry Andric ? ARM::B : (AFI->isThumb2Function() ? ARM::t2B : ARM::tB); 438*0b57cec5SDimitry Andric int BccOpc = !AFI->isThumbFunction() 439*0b57cec5SDimitry Andric ? ARM::Bcc : (AFI->isThumb2Function() ? ARM::t2Bcc : ARM::tBcc); 440*0b57cec5SDimitry Andric bool isThumb = AFI->isThumbFunction() || AFI->isThumb2Function(); 441*0b57cec5SDimitry Andric 442*0b57cec5SDimitry Andric // Shouldn't be a fall through. 443*0b57cec5SDimitry Andric assert(TBB && "insertBranch must not be told to insert a fallthrough"); 444*0b57cec5SDimitry Andric assert((Cond.size() == 2 || Cond.size() == 0) && 445*0b57cec5SDimitry Andric "ARM branch conditions have two components!"); 446*0b57cec5SDimitry Andric 447*0b57cec5SDimitry Andric // For conditional branches, we use addOperand to preserve CPSR flags. 
448*0b57cec5SDimitry Andric 449*0b57cec5SDimitry Andric if (!FBB) { 450*0b57cec5SDimitry Andric if (Cond.empty()) { // Unconditional branch? 451*0b57cec5SDimitry Andric if (isThumb) 452*0b57cec5SDimitry Andric BuildMI(&MBB, DL, get(BOpc)).addMBB(TBB).add(predOps(ARMCC::AL)); 453*0b57cec5SDimitry Andric else 454*0b57cec5SDimitry Andric BuildMI(&MBB, DL, get(BOpc)).addMBB(TBB); 455*0b57cec5SDimitry Andric } else 456*0b57cec5SDimitry Andric BuildMI(&MBB, DL, get(BccOpc)) 457*0b57cec5SDimitry Andric .addMBB(TBB) 458*0b57cec5SDimitry Andric .addImm(Cond[0].getImm()) 459*0b57cec5SDimitry Andric .add(Cond[1]); 460*0b57cec5SDimitry Andric return 1; 461*0b57cec5SDimitry Andric } 462*0b57cec5SDimitry Andric 463*0b57cec5SDimitry Andric // Two-way conditional branch. 464*0b57cec5SDimitry Andric BuildMI(&MBB, DL, get(BccOpc)) 465*0b57cec5SDimitry Andric .addMBB(TBB) 466*0b57cec5SDimitry Andric .addImm(Cond[0].getImm()) 467*0b57cec5SDimitry Andric .add(Cond[1]); 468*0b57cec5SDimitry Andric if (isThumb) 469*0b57cec5SDimitry Andric BuildMI(&MBB, DL, get(BOpc)).addMBB(FBB).add(predOps(ARMCC::AL)); 470*0b57cec5SDimitry Andric else 471*0b57cec5SDimitry Andric BuildMI(&MBB, DL, get(BOpc)).addMBB(FBB); 472*0b57cec5SDimitry Andric return 2; 473*0b57cec5SDimitry Andric } 474*0b57cec5SDimitry Andric 475*0b57cec5SDimitry Andric bool ARMBaseInstrInfo:: 476*0b57cec5SDimitry Andric reverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const { 477*0b57cec5SDimitry Andric ARMCC::CondCodes CC = (ARMCC::CondCodes)(int)Cond[0].getImm(); 478*0b57cec5SDimitry Andric Cond[0].setImm(ARMCC::getOppositeCondition(CC)); 479*0b57cec5SDimitry Andric return false; 480*0b57cec5SDimitry Andric } 481*0b57cec5SDimitry Andric 482*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::isPredicated(const MachineInstr &MI) const { 483*0b57cec5SDimitry Andric if (MI.isBundle()) { 484*0b57cec5SDimitry Andric MachineBasicBlock::const_instr_iterator I = MI.getIterator(); 485*0b57cec5SDimitry Andric 
MachineBasicBlock::const_instr_iterator E = MI.getParent()->instr_end(); 486*0b57cec5SDimitry Andric while (++I != E && I->isInsideBundle()) { 487*0b57cec5SDimitry Andric int PIdx = I->findFirstPredOperandIdx(); 488*0b57cec5SDimitry Andric if (PIdx != -1 && I->getOperand(PIdx).getImm() != ARMCC::AL) 489*0b57cec5SDimitry Andric return true; 490*0b57cec5SDimitry Andric } 491*0b57cec5SDimitry Andric return false; 492*0b57cec5SDimitry Andric } 493*0b57cec5SDimitry Andric 494*0b57cec5SDimitry Andric int PIdx = MI.findFirstPredOperandIdx(); 495*0b57cec5SDimitry Andric return PIdx != -1 && MI.getOperand(PIdx).getImm() != ARMCC::AL; 496*0b57cec5SDimitry Andric } 497*0b57cec5SDimitry Andric 498*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::PredicateInstruction( 499*0b57cec5SDimitry Andric MachineInstr &MI, ArrayRef<MachineOperand> Pred) const { 500*0b57cec5SDimitry Andric unsigned Opc = MI.getOpcode(); 501*0b57cec5SDimitry Andric if (isUncondBranchOpcode(Opc)) { 502*0b57cec5SDimitry Andric MI.setDesc(get(getMatchingCondBranchOpcode(Opc))); 503*0b57cec5SDimitry Andric MachineInstrBuilder(*MI.getParent()->getParent(), MI) 504*0b57cec5SDimitry Andric .addImm(Pred[0].getImm()) 505*0b57cec5SDimitry Andric .addReg(Pred[1].getReg()); 506*0b57cec5SDimitry Andric return true; 507*0b57cec5SDimitry Andric } 508*0b57cec5SDimitry Andric 509*0b57cec5SDimitry Andric int PIdx = MI.findFirstPredOperandIdx(); 510*0b57cec5SDimitry Andric if (PIdx != -1) { 511*0b57cec5SDimitry Andric MachineOperand &PMO = MI.getOperand(PIdx); 512*0b57cec5SDimitry Andric PMO.setImm(Pred[0].getImm()); 513*0b57cec5SDimitry Andric MI.getOperand(PIdx+1).setReg(Pred[1].getReg()); 514*0b57cec5SDimitry Andric return true; 515*0b57cec5SDimitry Andric } 516*0b57cec5SDimitry Andric return false; 517*0b57cec5SDimitry Andric } 518*0b57cec5SDimitry Andric 519*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::SubsumesPredicate(ArrayRef<MachineOperand> Pred1, 520*0b57cec5SDimitry Andric ArrayRef<MachineOperand> Pred2) const { 
521*0b57cec5SDimitry Andric if (Pred1.size() > 2 || Pred2.size() > 2) 522*0b57cec5SDimitry Andric return false; 523*0b57cec5SDimitry Andric 524*0b57cec5SDimitry Andric ARMCC::CondCodes CC1 = (ARMCC::CondCodes)Pred1[0].getImm(); 525*0b57cec5SDimitry Andric ARMCC::CondCodes CC2 = (ARMCC::CondCodes)Pred2[0].getImm(); 526*0b57cec5SDimitry Andric if (CC1 == CC2) 527*0b57cec5SDimitry Andric return true; 528*0b57cec5SDimitry Andric 529*0b57cec5SDimitry Andric switch (CC1) { 530*0b57cec5SDimitry Andric default: 531*0b57cec5SDimitry Andric return false; 532*0b57cec5SDimitry Andric case ARMCC::AL: 533*0b57cec5SDimitry Andric return true; 534*0b57cec5SDimitry Andric case ARMCC::HS: 535*0b57cec5SDimitry Andric return CC2 == ARMCC::HI; 536*0b57cec5SDimitry Andric case ARMCC::LS: 537*0b57cec5SDimitry Andric return CC2 == ARMCC::LO || CC2 == ARMCC::EQ; 538*0b57cec5SDimitry Andric case ARMCC::GE: 539*0b57cec5SDimitry Andric return CC2 == ARMCC::GT; 540*0b57cec5SDimitry Andric case ARMCC::LE: 541*0b57cec5SDimitry Andric return CC2 == ARMCC::LT; 542*0b57cec5SDimitry Andric } 543*0b57cec5SDimitry Andric } 544*0b57cec5SDimitry Andric 545*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::DefinesPredicate( 546*0b57cec5SDimitry Andric MachineInstr &MI, std::vector<MachineOperand> &Pred) const { 547*0b57cec5SDimitry Andric bool Found = false; 548*0b57cec5SDimitry Andric for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) { 549*0b57cec5SDimitry Andric const MachineOperand &MO = MI.getOperand(i); 550*0b57cec5SDimitry Andric if ((MO.isRegMask() && MO.clobbersPhysReg(ARM::CPSR)) || 551*0b57cec5SDimitry Andric (MO.isReg() && MO.isDef() && MO.getReg() == ARM::CPSR)) { 552*0b57cec5SDimitry Andric Pred.push_back(MO); 553*0b57cec5SDimitry Andric Found = true; 554*0b57cec5SDimitry Andric } 555*0b57cec5SDimitry Andric } 556*0b57cec5SDimitry Andric 557*0b57cec5SDimitry Andric return Found; 558*0b57cec5SDimitry Andric } 559*0b57cec5SDimitry Andric 560*0b57cec5SDimitry Andric bool 
ARMBaseInstrInfo::isCPSRDefined(const MachineInstr &MI) { 561*0b57cec5SDimitry Andric for (const auto &MO : MI.operands()) 562*0b57cec5SDimitry Andric if (MO.isReg() && MO.getReg() == ARM::CPSR && MO.isDef() && !MO.isDead()) 563*0b57cec5SDimitry Andric return true; 564*0b57cec5SDimitry Andric return false; 565*0b57cec5SDimitry Andric } 566*0b57cec5SDimitry Andric 567*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::isAddrMode3OpImm(const MachineInstr &MI, 568*0b57cec5SDimitry Andric unsigned Op) const { 569*0b57cec5SDimitry Andric const MachineOperand &Offset = MI.getOperand(Op + 1); 570*0b57cec5SDimitry Andric return Offset.getReg() != 0; 571*0b57cec5SDimitry Andric } 572*0b57cec5SDimitry Andric 573*0b57cec5SDimitry Andric // Load with negative register offset requires additional 1cyc and +I unit 574*0b57cec5SDimitry Andric // for Cortex A57 575*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::isAddrMode3OpMinusReg(const MachineInstr &MI, 576*0b57cec5SDimitry Andric unsigned Op) const { 577*0b57cec5SDimitry Andric const MachineOperand &Offset = MI.getOperand(Op + 1); 578*0b57cec5SDimitry Andric const MachineOperand &Opc = MI.getOperand(Op + 2); 579*0b57cec5SDimitry Andric assert(Opc.isImm()); 580*0b57cec5SDimitry Andric assert(Offset.isReg()); 581*0b57cec5SDimitry Andric int64_t OpcImm = Opc.getImm(); 582*0b57cec5SDimitry Andric 583*0b57cec5SDimitry Andric bool isSub = ARM_AM::getAM3Op(OpcImm) == ARM_AM::sub; 584*0b57cec5SDimitry Andric return (isSub && Offset.getReg() != 0); 585*0b57cec5SDimitry Andric } 586*0b57cec5SDimitry Andric 587*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::isLdstScaledReg(const MachineInstr &MI, 588*0b57cec5SDimitry Andric unsigned Op) const { 589*0b57cec5SDimitry Andric const MachineOperand &Opc = MI.getOperand(Op + 2); 590*0b57cec5SDimitry Andric unsigned OffImm = Opc.getImm(); 591*0b57cec5SDimitry Andric return ARM_AM::getAM2ShiftOpc(OffImm) != ARM_AM::no_shift; 592*0b57cec5SDimitry Andric } 593*0b57cec5SDimitry Andric 
594*0b57cec5SDimitry Andric // Load, scaled register offset, not plus LSL2 595*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::isLdstScaledRegNotPlusLsl2(const MachineInstr &MI, 596*0b57cec5SDimitry Andric unsigned Op) const { 597*0b57cec5SDimitry Andric const MachineOperand &Opc = MI.getOperand(Op + 2); 598*0b57cec5SDimitry Andric unsigned OffImm = Opc.getImm(); 599*0b57cec5SDimitry Andric 600*0b57cec5SDimitry Andric bool isAdd = ARM_AM::getAM2Op(OffImm) == ARM_AM::add; 601*0b57cec5SDimitry Andric unsigned Amt = ARM_AM::getAM2Offset(OffImm); 602*0b57cec5SDimitry Andric ARM_AM::ShiftOpc ShiftOpc = ARM_AM::getAM2ShiftOpc(OffImm); 603*0b57cec5SDimitry Andric if (ShiftOpc == ARM_AM::no_shift) return false; // not scaled 604*0b57cec5SDimitry Andric bool SimpleScaled = (isAdd && ShiftOpc == ARM_AM::lsl && Amt == 2); 605*0b57cec5SDimitry Andric return !SimpleScaled; 606*0b57cec5SDimitry Andric } 607*0b57cec5SDimitry Andric 608*0b57cec5SDimitry Andric // Minus reg for ldstso addr mode 609*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::isLdstSoMinusReg(const MachineInstr &MI, 610*0b57cec5SDimitry Andric unsigned Op) const { 611*0b57cec5SDimitry Andric unsigned OffImm = MI.getOperand(Op + 2).getImm(); 612*0b57cec5SDimitry Andric return ARM_AM::getAM2Op(OffImm) == ARM_AM::sub; 613*0b57cec5SDimitry Andric } 614*0b57cec5SDimitry Andric 615*0b57cec5SDimitry Andric // Load, scaled register offset 616*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::isAm2ScaledReg(const MachineInstr &MI, 617*0b57cec5SDimitry Andric unsigned Op) const { 618*0b57cec5SDimitry Andric unsigned OffImm = MI.getOperand(Op + 2).getImm(); 619*0b57cec5SDimitry Andric return ARM_AM::getAM2ShiftOpc(OffImm) != ARM_AM::no_shift; 620*0b57cec5SDimitry Andric } 621*0b57cec5SDimitry Andric 622*0b57cec5SDimitry Andric static bool isEligibleForITBlock(const MachineInstr *MI) { 623*0b57cec5SDimitry Andric switch (MI->getOpcode()) { 624*0b57cec5SDimitry Andric default: return true; 625*0b57cec5SDimitry Andric case 
ARM::tADC: // ADC (register) T1 626*0b57cec5SDimitry Andric case ARM::tADDi3: // ADD (immediate) T1 627*0b57cec5SDimitry Andric case ARM::tADDi8: // ADD (immediate) T2 628*0b57cec5SDimitry Andric case ARM::tADDrr: // ADD (register) T1 629*0b57cec5SDimitry Andric case ARM::tAND: // AND (register) T1 630*0b57cec5SDimitry Andric case ARM::tASRri: // ASR (immediate) T1 631*0b57cec5SDimitry Andric case ARM::tASRrr: // ASR (register) T1 632*0b57cec5SDimitry Andric case ARM::tBIC: // BIC (register) T1 633*0b57cec5SDimitry Andric case ARM::tEOR: // EOR (register) T1 634*0b57cec5SDimitry Andric case ARM::tLSLri: // LSL (immediate) T1 635*0b57cec5SDimitry Andric case ARM::tLSLrr: // LSL (register) T1 636*0b57cec5SDimitry Andric case ARM::tLSRri: // LSR (immediate) T1 637*0b57cec5SDimitry Andric case ARM::tLSRrr: // LSR (register) T1 638*0b57cec5SDimitry Andric case ARM::tMUL: // MUL T1 639*0b57cec5SDimitry Andric case ARM::tMVN: // MVN (register) T1 640*0b57cec5SDimitry Andric case ARM::tORR: // ORR (register) T1 641*0b57cec5SDimitry Andric case ARM::tROR: // ROR (register) T1 642*0b57cec5SDimitry Andric case ARM::tRSB: // RSB (immediate) T1 643*0b57cec5SDimitry Andric case ARM::tSBC: // SBC (register) T1 644*0b57cec5SDimitry Andric case ARM::tSUBi3: // SUB (immediate) T1 645*0b57cec5SDimitry Andric case ARM::tSUBi8: // SUB (immediate) T2 646*0b57cec5SDimitry Andric case ARM::tSUBrr: // SUB (register) T1 647*0b57cec5SDimitry Andric return !ARMBaseInstrInfo::isCPSRDefined(*MI); 648*0b57cec5SDimitry Andric } 649*0b57cec5SDimitry Andric } 650*0b57cec5SDimitry Andric 651*0b57cec5SDimitry Andric /// isPredicable - Return true if the specified instruction can be predicated. 652*0b57cec5SDimitry Andric /// By default, this returns true for every instruction with a 653*0b57cec5SDimitry Andric /// PredicateOperand. 
654*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::isPredicable(const MachineInstr &MI) const { 655*0b57cec5SDimitry Andric if (!MI.isPredicable()) 656*0b57cec5SDimitry Andric return false; 657*0b57cec5SDimitry Andric 658*0b57cec5SDimitry Andric if (MI.isBundle()) 659*0b57cec5SDimitry Andric return false; 660*0b57cec5SDimitry Andric 661*0b57cec5SDimitry Andric if (!isEligibleForITBlock(&MI)) 662*0b57cec5SDimitry Andric return false; 663*0b57cec5SDimitry Andric 664*0b57cec5SDimitry Andric const ARMFunctionInfo *AFI = 665*0b57cec5SDimitry Andric MI.getParent()->getParent()->getInfo<ARMFunctionInfo>(); 666*0b57cec5SDimitry Andric 667*0b57cec5SDimitry Andric // Neon instructions in Thumb2 IT blocks are deprecated, see ARMARM. 668*0b57cec5SDimitry Andric // In their ARM encoding, they can't be encoded in a conditional form. 669*0b57cec5SDimitry Andric if ((MI.getDesc().TSFlags & ARMII::DomainMask) == ARMII::DomainNEON) 670*0b57cec5SDimitry Andric return false; 671*0b57cec5SDimitry Andric 672*0b57cec5SDimitry Andric if (AFI->isThumb2Function()) { 673*0b57cec5SDimitry Andric if (getSubtarget().restrictIT()) 674*0b57cec5SDimitry Andric return isV8EligibleForIT(&MI); 675*0b57cec5SDimitry Andric } 676*0b57cec5SDimitry Andric 677*0b57cec5SDimitry Andric return true; 678*0b57cec5SDimitry Andric } 679*0b57cec5SDimitry Andric 680*0b57cec5SDimitry Andric namespace llvm { 681*0b57cec5SDimitry Andric 682*0b57cec5SDimitry Andric template <> bool IsCPSRDead<MachineInstr>(const MachineInstr *MI) { 683*0b57cec5SDimitry Andric for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 684*0b57cec5SDimitry Andric const MachineOperand &MO = MI->getOperand(i); 685*0b57cec5SDimitry Andric if (!MO.isReg() || MO.isUndef() || MO.isUse()) 686*0b57cec5SDimitry Andric continue; 687*0b57cec5SDimitry Andric if (MO.getReg() != ARM::CPSR) 688*0b57cec5SDimitry Andric continue; 689*0b57cec5SDimitry Andric if (!MO.isDead()) 690*0b57cec5SDimitry Andric return false; 691*0b57cec5SDimitry Andric } 
692*0b57cec5SDimitry Andric // all definitions of CPSR are dead 693*0b57cec5SDimitry Andric return true; 694*0b57cec5SDimitry Andric } 695*0b57cec5SDimitry Andric 696*0b57cec5SDimitry Andric } // end namespace llvm 697*0b57cec5SDimitry Andric 698*0b57cec5SDimitry Andric /// GetInstSize - Return the size of the specified MachineInstr. 699*0b57cec5SDimitry Andric /// 700*0b57cec5SDimitry Andric unsigned ARMBaseInstrInfo::getInstSizeInBytes(const MachineInstr &MI) const { 701*0b57cec5SDimitry Andric const MachineBasicBlock &MBB = *MI.getParent(); 702*0b57cec5SDimitry Andric const MachineFunction *MF = MBB.getParent(); 703*0b57cec5SDimitry Andric const MCAsmInfo *MAI = MF->getTarget().getMCAsmInfo(); 704*0b57cec5SDimitry Andric 705*0b57cec5SDimitry Andric const MCInstrDesc &MCID = MI.getDesc(); 706*0b57cec5SDimitry Andric if (MCID.getSize()) 707*0b57cec5SDimitry Andric return MCID.getSize(); 708*0b57cec5SDimitry Andric 709*0b57cec5SDimitry Andric switch (MI.getOpcode()) { 710*0b57cec5SDimitry Andric default: 711*0b57cec5SDimitry Andric // pseudo-instruction sizes are zero. 
712*0b57cec5SDimitry Andric return 0; 713*0b57cec5SDimitry Andric case TargetOpcode::BUNDLE: 714*0b57cec5SDimitry Andric return getInstBundleLength(MI); 715*0b57cec5SDimitry Andric case ARM::MOVi16_ga_pcrel: 716*0b57cec5SDimitry Andric case ARM::MOVTi16_ga_pcrel: 717*0b57cec5SDimitry Andric case ARM::t2MOVi16_ga_pcrel: 718*0b57cec5SDimitry Andric case ARM::t2MOVTi16_ga_pcrel: 719*0b57cec5SDimitry Andric return 4; 720*0b57cec5SDimitry Andric case ARM::MOVi32imm: 721*0b57cec5SDimitry Andric case ARM::t2MOVi32imm: 722*0b57cec5SDimitry Andric return 8; 723*0b57cec5SDimitry Andric case ARM::CONSTPOOL_ENTRY: 724*0b57cec5SDimitry Andric case ARM::JUMPTABLE_INSTS: 725*0b57cec5SDimitry Andric case ARM::JUMPTABLE_ADDRS: 726*0b57cec5SDimitry Andric case ARM::JUMPTABLE_TBB: 727*0b57cec5SDimitry Andric case ARM::JUMPTABLE_TBH: 728*0b57cec5SDimitry Andric // If this machine instr is a constant pool entry, its size is recorded as 729*0b57cec5SDimitry Andric // operand #2. 730*0b57cec5SDimitry Andric return MI.getOperand(2).getImm(); 731*0b57cec5SDimitry Andric case ARM::Int_eh_sjlj_longjmp: 732*0b57cec5SDimitry Andric return 16; 733*0b57cec5SDimitry Andric case ARM::tInt_eh_sjlj_longjmp: 734*0b57cec5SDimitry Andric return 10; 735*0b57cec5SDimitry Andric case ARM::tInt_WIN_eh_sjlj_longjmp: 736*0b57cec5SDimitry Andric return 12; 737*0b57cec5SDimitry Andric case ARM::Int_eh_sjlj_setjmp: 738*0b57cec5SDimitry Andric case ARM::Int_eh_sjlj_setjmp_nofp: 739*0b57cec5SDimitry Andric return 20; 740*0b57cec5SDimitry Andric case ARM::tInt_eh_sjlj_setjmp: 741*0b57cec5SDimitry Andric case ARM::t2Int_eh_sjlj_setjmp: 742*0b57cec5SDimitry Andric case ARM::t2Int_eh_sjlj_setjmp_nofp: 743*0b57cec5SDimitry Andric return 12; 744*0b57cec5SDimitry Andric case ARM::SPACE: 745*0b57cec5SDimitry Andric return MI.getOperand(1).getImm(); 746*0b57cec5SDimitry Andric case ARM::INLINEASM: 747*0b57cec5SDimitry Andric case ARM::INLINEASM_BR: { 748*0b57cec5SDimitry Andric // If this machine instr is an inline asm, 
measure it. 749*0b57cec5SDimitry Andric unsigned Size = getInlineAsmLength(MI.getOperand(0).getSymbolName(), *MAI); 750*0b57cec5SDimitry Andric if (!MF->getInfo<ARMFunctionInfo>()->isThumbFunction()) 751*0b57cec5SDimitry Andric Size = alignTo(Size, 4); 752*0b57cec5SDimitry Andric return Size; 753*0b57cec5SDimitry Andric } 754*0b57cec5SDimitry Andric } 755*0b57cec5SDimitry Andric } 756*0b57cec5SDimitry Andric 757*0b57cec5SDimitry Andric unsigned ARMBaseInstrInfo::getInstBundleLength(const MachineInstr &MI) const { 758*0b57cec5SDimitry Andric unsigned Size = 0; 759*0b57cec5SDimitry Andric MachineBasicBlock::const_instr_iterator I = MI.getIterator(); 760*0b57cec5SDimitry Andric MachineBasicBlock::const_instr_iterator E = MI.getParent()->instr_end(); 761*0b57cec5SDimitry Andric while (++I != E && I->isInsideBundle()) { 762*0b57cec5SDimitry Andric assert(!I->isBundle() && "No nested bundle!"); 763*0b57cec5SDimitry Andric Size += getInstSizeInBytes(*I); 764*0b57cec5SDimitry Andric } 765*0b57cec5SDimitry Andric return Size; 766*0b57cec5SDimitry Andric } 767*0b57cec5SDimitry Andric 768*0b57cec5SDimitry Andric void ARMBaseInstrInfo::copyFromCPSR(MachineBasicBlock &MBB, 769*0b57cec5SDimitry Andric MachineBasicBlock::iterator I, 770*0b57cec5SDimitry Andric unsigned DestReg, bool KillSrc, 771*0b57cec5SDimitry Andric const ARMSubtarget &Subtarget) const { 772*0b57cec5SDimitry Andric unsigned Opc = Subtarget.isThumb() 773*0b57cec5SDimitry Andric ? (Subtarget.isMClass() ? ARM::t2MRS_M : ARM::t2MRS_AR) 774*0b57cec5SDimitry Andric : ARM::MRS; 775*0b57cec5SDimitry Andric 776*0b57cec5SDimitry Andric MachineInstrBuilder MIB = 777*0b57cec5SDimitry Andric BuildMI(MBB, I, I->getDebugLoc(), get(Opc), DestReg); 778*0b57cec5SDimitry Andric 779*0b57cec5SDimitry Andric // There is only 1 A/R class MRS instruction, and it always refers to 780*0b57cec5SDimitry Andric // APSR. However, there are lots of other possibilities on M-class cores. 
781*0b57cec5SDimitry Andric if (Subtarget.isMClass()) 782*0b57cec5SDimitry Andric MIB.addImm(0x800); 783*0b57cec5SDimitry Andric 784*0b57cec5SDimitry Andric MIB.add(predOps(ARMCC::AL)) 785*0b57cec5SDimitry Andric .addReg(ARM::CPSR, RegState::Implicit | getKillRegState(KillSrc)); 786*0b57cec5SDimitry Andric } 787*0b57cec5SDimitry Andric 788*0b57cec5SDimitry Andric void ARMBaseInstrInfo::copyToCPSR(MachineBasicBlock &MBB, 789*0b57cec5SDimitry Andric MachineBasicBlock::iterator I, 790*0b57cec5SDimitry Andric unsigned SrcReg, bool KillSrc, 791*0b57cec5SDimitry Andric const ARMSubtarget &Subtarget) const { 792*0b57cec5SDimitry Andric unsigned Opc = Subtarget.isThumb() 793*0b57cec5SDimitry Andric ? (Subtarget.isMClass() ? ARM::t2MSR_M : ARM::t2MSR_AR) 794*0b57cec5SDimitry Andric : ARM::MSR; 795*0b57cec5SDimitry Andric 796*0b57cec5SDimitry Andric MachineInstrBuilder MIB = BuildMI(MBB, I, I->getDebugLoc(), get(Opc)); 797*0b57cec5SDimitry Andric 798*0b57cec5SDimitry Andric if (Subtarget.isMClass()) 799*0b57cec5SDimitry Andric MIB.addImm(0x800); 800*0b57cec5SDimitry Andric else 801*0b57cec5SDimitry Andric MIB.addImm(8); 802*0b57cec5SDimitry Andric 803*0b57cec5SDimitry Andric MIB.addReg(SrcReg, getKillRegState(KillSrc)) 804*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)) 805*0b57cec5SDimitry Andric .addReg(ARM::CPSR, RegState::Implicit | RegState::Define); 806*0b57cec5SDimitry Andric } 807*0b57cec5SDimitry Andric 808*0b57cec5SDimitry Andric void llvm::addUnpredicatedMveVpredNOp(MachineInstrBuilder &MIB) { 809*0b57cec5SDimitry Andric MIB.addImm(ARMVCC::None); 810*0b57cec5SDimitry Andric MIB.addReg(0); 811*0b57cec5SDimitry Andric } 812*0b57cec5SDimitry Andric 813*0b57cec5SDimitry Andric void llvm::addUnpredicatedMveVpredROp(MachineInstrBuilder &MIB, 814*0b57cec5SDimitry Andric unsigned DestReg) { 815*0b57cec5SDimitry Andric addUnpredicatedMveVpredNOp(MIB); 816*0b57cec5SDimitry Andric MIB.addReg(DestReg, RegState::Undef); 817*0b57cec5SDimitry Andric } 818*0b57cec5SDimitry 
Andric 819*0b57cec5SDimitry Andric void llvm::addPredicatedMveVpredNOp(MachineInstrBuilder &MIB, unsigned Cond) { 820*0b57cec5SDimitry Andric MIB.addImm(Cond); 821*0b57cec5SDimitry Andric MIB.addReg(ARM::VPR, RegState::Implicit); 822*0b57cec5SDimitry Andric } 823*0b57cec5SDimitry Andric 824*0b57cec5SDimitry Andric void llvm::addPredicatedMveVpredROp(MachineInstrBuilder &MIB, 825*0b57cec5SDimitry Andric unsigned Cond, unsigned Inactive) { 826*0b57cec5SDimitry Andric addPredicatedMveVpredNOp(MIB, Cond); 827*0b57cec5SDimitry Andric MIB.addReg(Inactive); 828*0b57cec5SDimitry Andric } 829*0b57cec5SDimitry Andric 830*0b57cec5SDimitry Andric void ARMBaseInstrInfo::copyPhysReg(MachineBasicBlock &MBB, 831*0b57cec5SDimitry Andric MachineBasicBlock::iterator I, 832*0b57cec5SDimitry Andric const DebugLoc &DL, unsigned DestReg, 833*0b57cec5SDimitry Andric unsigned SrcReg, bool KillSrc) const { 834*0b57cec5SDimitry Andric bool GPRDest = ARM::GPRRegClass.contains(DestReg); 835*0b57cec5SDimitry Andric bool GPRSrc = ARM::GPRRegClass.contains(SrcReg); 836*0b57cec5SDimitry Andric 837*0b57cec5SDimitry Andric if (GPRDest && GPRSrc) { 838*0b57cec5SDimitry Andric BuildMI(MBB, I, DL, get(ARM::MOVr), DestReg) 839*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(KillSrc)) 840*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)) 841*0b57cec5SDimitry Andric .add(condCodeOp()); 842*0b57cec5SDimitry Andric return; 843*0b57cec5SDimitry Andric } 844*0b57cec5SDimitry Andric 845*0b57cec5SDimitry Andric bool SPRDest = ARM::SPRRegClass.contains(DestReg); 846*0b57cec5SDimitry Andric bool SPRSrc = ARM::SPRRegClass.contains(SrcReg); 847*0b57cec5SDimitry Andric 848*0b57cec5SDimitry Andric unsigned Opc = 0; 849*0b57cec5SDimitry Andric if (SPRDest && SPRSrc) 850*0b57cec5SDimitry Andric Opc = ARM::VMOVS; 851*0b57cec5SDimitry Andric else if (GPRDest && SPRSrc) 852*0b57cec5SDimitry Andric Opc = ARM::VMOVRS; 853*0b57cec5SDimitry Andric else if (SPRDest && GPRSrc) 854*0b57cec5SDimitry Andric Opc = 
ARM::VMOVSR; 855*0b57cec5SDimitry Andric else if (ARM::DPRRegClass.contains(DestReg, SrcReg) && Subtarget.hasFP64()) 856*0b57cec5SDimitry Andric Opc = ARM::VMOVD; 857*0b57cec5SDimitry Andric else if (ARM::QPRRegClass.contains(DestReg, SrcReg)) 858*0b57cec5SDimitry Andric Opc = Subtarget.hasNEON() ? ARM::VORRq : ARM::MVE_VORR; 859*0b57cec5SDimitry Andric 860*0b57cec5SDimitry Andric if (Opc) { 861*0b57cec5SDimitry Andric MachineInstrBuilder MIB = BuildMI(MBB, I, DL, get(Opc), DestReg); 862*0b57cec5SDimitry Andric MIB.addReg(SrcReg, getKillRegState(KillSrc)); 863*0b57cec5SDimitry Andric if (Opc == ARM::VORRq || Opc == ARM::MVE_VORR) 864*0b57cec5SDimitry Andric MIB.addReg(SrcReg, getKillRegState(KillSrc)); 865*0b57cec5SDimitry Andric if (Opc == ARM::MVE_VORR) 866*0b57cec5SDimitry Andric addUnpredicatedMveVpredROp(MIB, DestReg); 867*0b57cec5SDimitry Andric else 868*0b57cec5SDimitry Andric MIB.add(predOps(ARMCC::AL)); 869*0b57cec5SDimitry Andric return; 870*0b57cec5SDimitry Andric } 871*0b57cec5SDimitry Andric 872*0b57cec5SDimitry Andric // Handle register classes that require multiple instructions. 873*0b57cec5SDimitry Andric unsigned BeginIdx = 0; 874*0b57cec5SDimitry Andric unsigned SubRegs = 0; 875*0b57cec5SDimitry Andric int Spacing = 1; 876*0b57cec5SDimitry Andric 877*0b57cec5SDimitry Andric // Use VORRq when possible. 878*0b57cec5SDimitry Andric if (ARM::QQPRRegClass.contains(DestReg, SrcReg)) { 879*0b57cec5SDimitry Andric Opc = Subtarget.hasNEON() ? ARM::VORRq : ARM::MVE_VORR; 880*0b57cec5SDimitry Andric BeginIdx = ARM::qsub_0; 881*0b57cec5SDimitry Andric SubRegs = 2; 882*0b57cec5SDimitry Andric } else if (ARM::QQQQPRRegClass.contains(DestReg, SrcReg)) { 883*0b57cec5SDimitry Andric Opc = Subtarget.hasNEON() ? ARM::VORRq : ARM::MVE_VORR; 884*0b57cec5SDimitry Andric BeginIdx = ARM::qsub_0; 885*0b57cec5SDimitry Andric SubRegs = 4; 886*0b57cec5SDimitry Andric // Fall back to VMOVD. 
887*0b57cec5SDimitry Andric } else if (ARM::DPairRegClass.contains(DestReg, SrcReg)) { 888*0b57cec5SDimitry Andric Opc = ARM::VMOVD; 889*0b57cec5SDimitry Andric BeginIdx = ARM::dsub_0; 890*0b57cec5SDimitry Andric SubRegs = 2; 891*0b57cec5SDimitry Andric } else if (ARM::DTripleRegClass.contains(DestReg, SrcReg)) { 892*0b57cec5SDimitry Andric Opc = ARM::VMOVD; 893*0b57cec5SDimitry Andric BeginIdx = ARM::dsub_0; 894*0b57cec5SDimitry Andric SubRegs = 3; 895*0b57cec5SDimitry Andric } else if (ARM::DQuadRegClass.contains(DestReg, SrcReg)) { 896*0b57cec5SDimitry Andric Opc = ARM::VMOVD; 897*0b57cec5SDimitry Andric BeginIdx = ARM::dsub_0; 898*0b57cec5SDimitry Andric SubRegs = 4; 899*0b57cec5SDimitry Andric } else if (ARM::GPRPairRegClass.contains(DestReg, SrcReg)) { 900*0b57cec5SDimitry Andric Opc = Subtarget.isThumb2() ? ARM::tMOVr : ARM::MOVr; 901*0b57cec5SDimitry Andric BeginIdx = ARM::gsub_0; 902*0b57cec5SDimitry Andric SubRegs = 2; 903*0b57cec5SDimitry Andric } else if (ARM::DPairSpcRegClass.contains(DestReg, SrcReg)) { 904*0b57cec5SDimitry Andric Opc = ARM::VMOVD; 905*0b57cec5SDimitry Andric BeginIdx = ARM::dsub_0; 906*0b57cec5SDimitry Andric SubRegs = 2; 907*0b57cec5SDimitry Andric Spacing = 2; 908*0b57cec5SDimitry Andric } else if (ARM::DTripleSpcRegClass.contains(DestReg, SrcReg)) { 909*0b57cec5SDimitry Andric Opc = ARM::VMOVD; 910*0b57cec5SDimitry Andric BeginIdx = ARM::dsub_0; 911*0b57cec5SDimitry Andric SubRegs = 3; 912*0b57cec5SDimitry Andric Spacing = 2; 913*0b57cec5SDimitry Andric } else if (ARM::DQuadSpcRegClass.contains(DestReg, SrcReg)) { 914*0b57cec5SDimitry Andric Opc = ARM::VMOVD; 915*0b57cec5SDimitry Andric BeginIdx = ARM::dsub_0; 916*0b57cec5SDimitry Andric SubRegs = 4; 917*0b57cec5SDimitry Andric Spacing = 2; 918*0b57cec5SDimitry Andric } else if (ARM::DPRRegClass.contains(DestReg, SrcReg) && 919*0b57cec5SDimitry Andric !Subtarget.hasFP64()) { 920*0b57cec5SDimitry Andric Opc = ARM::VMOVS; 921*0b57cec5SDimitry Andric BeginIdx = ARM::ssub_0; 
922*0b57cec5SDimitry Andric SubRegs = 2; 923*0b57cec5SDimitry Andric } else if (SrcReg == ARM::CPSR) { 924*0b57cec5SDimitry Andric copyFromCPSR(MBB, I, DestReg, KillSrc, Subtarget); 925*0b57cec5SDimitry Andric return; 926*0b57cec5SDimitry Andric } else if (DestReg == ARM::CPSR) { 927*0b57cec5SDimitry Andric copyToCPSR(MBB, I, SrcReg, KillSrc, Subtarget); 928*0b57cec5SDimitry Andric return; 929*0b57cec5SDimitry Andric } else if (DestReg == ARM::VPR) { 930*0b57cec5SDimitry Andric assert(ARM::GPRRegClass.contains(SrcReg)); 931*0b57cec5SDimitry Andric BuildMI(MBB, I, I->getDebugLoc(), get(ARM::VMSR_P0), DestReg) 932*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(KillSrc)) 933*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 934*0b57cec5SDimitry Andric return; 935*0b57cec5SDimitry Andric } else if (SrcReg == ARM::VPR) { 936*0b57cec5SDimitry Andric assert(ARM::GPRRegClass.contains(DestReg)); 937*0b57cec5SDimitry Andric BuildMI(MBB, I, I->getDebugLoc(), get(ARM::VMRS_P0), DestReg) 938*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(KillSrc)) 939*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 940*0b57cec5SDimitry Andric return; 941*0b57cec5SDimitry Andric } else if (DestReg == ARM::FPSCR_NZCV) { 942*0b57cec5SDimitry Andric assert(ARM::GPRRegClass.contains(SrcReg)); 943*0b57cec5SDimitry Andric BuildMI(MBB, I, I->getDebugLoc(), get(ARM::VMSR_FPSCR_NZCVQC), DestReg) 944*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(KillSrc)) 945*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 946*0b57cec5SDimitry Andric return; 947*0b57cec5SDimitry Andric } else if (SrcReg == ARM::FPSCR_NZCV) { 948*0b57cec5SDimitry Andric assert(ARM::GPRRegClass.contains(DestReg)); 949*0b57cec5SDimitry Andric BuildMI(MBB, I, I->getDebugLoc(), get(ARM::VMRS_FPSCR_NZCVQC), DestReg) 950*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(KillSrc)) 951*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 952*0b57cec5SDimitry Andric return; 953*0b57cec5SDimitry Andric } 
954*0b57cec5SDimitry Andric 955*0b57cec5SDimitry Andric assert(Opc && "Impossible reg-to-reg copy"); 956*0b57cec5SDimitry Andric 957*0b57cec5SDimitry Andric const TargetRegisterInfo *TRI = &getRegisterInfo(); 958*0b57cec5SDimitry Andric MachineInstrBuilder Mov; 959*0b57cec5SDimitry Andric 960*0b57cec5SDimitry Andric // Copy register tuples backward when the first Dest reg overlaps with SrcReg. 961*0b57cec5SDimitry Andric if (TRI->regsOverlap(SrcReg, TRI->getSubReg(DestReg, BeginIdx))) { 962*0b57cec5SDimitry Andric BeginIdx = BeginIdx + ((SubRegs - 1) * Spacing); 963*0b57cec5SDimitry Andric Spacing = -Spacing; 964*0b57cec5SDimitry Andric } 965*0b57cec5SDimitry Andric #ifndef NDEBUG 966*0b57cec5SDimitry Andric SmallSet<unsigned, 4> DstRegs; 967*0b57cec5SDimitry Andric #endif 968*0b57cec5SDimitry Andric for (unsigned i = 0; i != SubRegs; ++i) { 969*0b57cec5SDimitry Andric unsigned Dst = TRI->getSubReg(DestReg, BeginIdx + i * Spacing); 970*0b57cec5SDimitry Andric unsigned Src = TRI->getSubReg(SrcReg, BeginIdx + i * Spacing); 971*0b57cec5SDimitry Andric assert(Dst && Src && "Bad sub-register"); 972*0b57cec5SDimitry Andric #ifndef NDEBUG 973*0b57cec5SDimitry Andric assert(!DstRegs.count(Src) && "destructive vector copy"); 974*0b57cec5SDimitry Andric DstRegs.insert(Dst); 975*0b57cec5SDimitry Andric #endif 976*0b57cec5SDimitry Andric Mov = BuildMI(MBB, I, I->getDebugLoc(), get(Opc), Dst).addReg(Src); 977*0b57cec5SDimitry Andric // VORR (NEON or MVE) takes two source operands. 978*0b57cec5SDimitry Andric if (Opc == ARM::VORRq || Opc == ARM::MVE_VORR) { 979*0b57cec5SDimitry Andric Mov.addReg(Src); 980*0b57cec5SDimitry Andric } 981*0b57cec5SDimitry Andric // MVE VORR takes predicate operands in place of an ordinary condition. 
982*0b57cec5SDimitry Andric if (Opc == ARM::MVE_VORR) 983*0b57cec5SDimitry Andric addUnpredicatedMveVpredROp(Mov, Dst); 984*0b57cec5SDimitry Andric else 985*0b57cec5SDimitry Andric Mov = Mov.add(predOps(ARMCC::AL)); 986*0b57cec5SDimitry Andric // MOVr can set CC. 987*0b57cec5SDimitry Andric if (Opc == ARM::MOVr) 988*0b57cec5SDimitry Andric Mov = Mov.add(condCodeOp()); 989*0b57cec5SDimitry Andric } 990*0b57cec5SDimitry Andric // Add implicit super-register defs and kills to the last instruction. 991*0b57cec5SDimitry Andric Mov->addRegisterDefined(DestReg, TRI); 992*0b57cec5SDimitry Andric if (KillSrc) 993*0b57cec5SDimitry Andric Mov->addRegisterKilled(SrcReg, TRI); 994*0b57cec5SDimitry Andric } 995*0b57cec5SDimitry Andric 996*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::isCopyInstrImpl(const MachineInstr &MI, 997*0b57cec5SDimitry Andric const MachineOperand *&Src, 998*0b57cec5SDimitry Andric const MachineOperand *&Dest) const { 999*0b57cec5SDimitry Andric // VMOVRRD is also a copy instruction but it requires 1000*0b57cec5SDimitry Andric // special way of handling. It is more complex copy version 1001*0b57cec5SDimitry Andric // and since that we are not considering it. For recognition 1002*0b57cec5SDimitry Andric // of such instruction isExtractSubregLike MI interface fuction 1003*0b57cec5SDimitry Andric // could be used. 1004*0b57cec5SDimitry Andric // VORRq is considered as a move only if two inputs are 1005*0b57cec5SDimitry Andric // the same register. 
1006*0b57cec5SDimitry Andric if (!MI.isMoveReg() || 1007*0b57cec5SDimitry Andric (MI.getOpcode() == ARM::VORRq && 1008*0b57cec5SDimitry Andric MI.getOperand(1).getReg() != MI.getOperand(2).getReg())) 1009*0b57cec5SDimitry Andric return false; 1010*0b57cec5SDimitry Andric Dest = &MI.getOperand(0); 1011*0b57cec5SDimitry Andric Src = &MI.getOperand(1); 1012*0b57cec5SDimitry Andric return true; 1013*0b57cec5SDimitry Andric } 1014*0b57cec5SDimitry Andric 1015*0b57cec5SDimitry Andric const MachineInstrBuilder & 1016*0b57cec5SDimitry Andric ARMBaseInstrInfo::AddDReg(MachineInstrBuilder &MIB, unsigned Reg, 1017*0b57cec5SDimitry Andric unsigned SubIdx, unsigned State, 1018*0b57cec5SDimitry Andric const TargetRegisterInfo *TRI) const { 1019*0b57cec5SDimitry Andric if (!SubIdx) 1020*0b57cec5SDimitry Andric return MIB.addReg(Reg, State); 1021*0b57cec5SDimitry Andric 1022*0b57cec5SDimitry Andric if (TargetRegisterInfo::isPhysicalRegister(Reg)) 1023*0b57cec5SDimitry Andric return MIB.addReg(TRI->getSubReg(Reg, SubIdx), State); 1024*0b57cec5SDimitry Andric return MIB.addReg(Reg, State, SubIdx); 1025*0b57cec5SDimitry Andric } 1026*0b57cec5SDimitry Andric 1027*0b57cec5SDimitry Andric void ARMBaseInstrInfo:: 1028*0b57cec5SDimitry Andric storeRegToStackSlot(MachineBasicBlock &MBB, MachineBasicBlock::iterator I, 1029*0b57cec5SDimitry Andric unsigned SrcReg, bool isKill, int FI, 1030*0b57cec5SDimitry Andric const TargetRegisterClass *RC, 1031*0b57cec5SDimitry Andric const TargetRegisterInfo *TRI) const { 1032*0b57cec5SDimitry Andric MachineFunction &MF = *MBB.getParent(); 1033*0b57cec5SDimitry Andric MachineFrameInfo &MFI = MF.getFrameInfo(); 1034*0b57cec5SDimitry Andric unsigned Align = MFI.getObjectAlignment(FI); 1035*0b57cec5SDimitry Andric 1036*0b57cec5SDimitry Andric MachineMemOperand *MMO = MF.getMachineMemOperand( 1037*0b57cec5SDimitry Andric MachinePointerInfo::getFixedStack(MF, FI), MachineMemOperand::MOStore, 1038*0b57cec5SDimitry Andric MFI.getObjectSize(FI), Align); 
1039*0b57cec5SDimitry Andric 1040*0b57cec5SDimitry Andric switch (TRI->getSpillSize(*RC)) { 1041*0b57cec5SDimitry Andric case 2: 1042*0b57cec5SDimitry Andric if (ARM::HPRRegClass.hasSubClassEq(RC)) { 1043*0b57cec5SDimitry Andric BuildMI(MBB, I, DebugLoc(), get(ARM::VSTRH)) 1044*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(isKill)) 1045*0b57cec5SDimitry Andric .addFrameIndex(FI) 1046*0b57cec5SDimitry Andric .addImm(0) 1047*0b57cec5SDimitry Andric .addMemOperand(MMO) 1048*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 1049*0b57cec5SDimitry Andric } else 1050*0b57cec5SDimitry Andric llvm_unreachable("Unknown reg class!"); 1051*0b57cec5SDimitry Andric break; 1052*0b57cec5SDimitry Andric case 4: 1053*0b57cec5SDimitry Andric if (ARM::GPRRegClass.hasSubClassEq(RC)) { 1054*0b57cec5SDimitry Andric BuildMI(MBB, I, DebugLoc(), get(ARM::STRi12)) 1055*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(isKill)) 1056*0b57cec5SDimitry Andric .addFrameIndex(FI) 1057*0b57cec5SDimitry Andric .addImm(0) 1058*0b57cec5SDimitry Andric .addMemOperand(MMO) 1059*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 1060*0b57cec5SDimitry Andric } else if (ARM::SPRRegClass.hasSubClassEq(RC)) { 1061*0b57cec5SDimitry Andric BuildMI(MBB, I, DebugLoc(), get(ARM::VSTRS)) 1062*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(isKill)) 1063*0b57cec5SDimitry Andric .addFrameIndex(FI) 1064*0b57cec5SDimitry Andric .addImm(0) 1065*0b57cec5SDimitry Andric .addMemOperand(MMO) 1066*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 1067*0b57cec5SDimitry Andric } else if (ARM::VCCRRegClass.hasSubClassEq(RC)) { 1068*0b57cec5SDimitry Andric BuildMI(MBB, I, DebugLoc(), get(ARM::VSTR_P0_off)) 1069*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(isKill)) 1070*0b57cec5SDimitry Andric .addFrameIndex(FI) 1071*0b57cec5SDimitry Andric .addImm(0) 1072*0b57cec5SDimitry Andric .addMemOperand(MMO) 1073*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 1074*0b57cec5SDimitry Andric } else 
1075*0b57cec5SDimitry Andric llvm_unreachable("Unknown reg class!"); 1076*0b57cec5SDimitry Andric break; 1077*0b57cec5SDimitry Andric case 8: 1078*0b57cec5SDimitry Andric if (ARM::DPRRegClass.hasSubClassEq(RC)) { 1079*0b57cec5SDimitry Andric BuildMI(MBB, I, DebugLoc(), get(ARM::VSTRD)) 1080*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(isKill)) 1081*0b57cec5SDimitry Andric .addFrameIndex(FI) 1082*0b57cec5SDimitry Andric .addImm(0) 1083*0b57cec5SDimitry Andric .addMemOperand(MMO) 1084*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 1085*0b57cec5SDimitry Andric } else if (ARM::GPRPairRegClass.hasSubClassEq(RC)) { 1086*0b57cec5SDimitry Andric if (Subtarget.hasV5TEOps()) { 1087*0b57cec5SDimitry Andric MachineInstrBuilder MIB = BuildMI(MBB, I, DebugLoc(), get(ARM::STRD)); 1088*0b57cec5SDimitry Andric AddDReg(MIB, SrcReg, ARM::gsub_0, getKillRegState(isKill), TRI); 1089*0b57cec5SDimitry Andric AddDReg(MIB, SrcReg, ARM::gsub_1, 0, TRI); 1090*0b57cec5SDimitry Andric MIB.addFrameIndex(FI).addReg(0).addImm(0).addMemOperand(MMO) 1091*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 1092*0b57cec5SDimitry Andric } else { 1093*0b57cec5SDimitry Andric // Fallback to STM instruction, which has existed since the dawn of 1094*0b57cec5SDimitry Andric // time. 
1095*0b57cec5SDimitry Andric MachineInstrBuilder MIB = BuildMI(MBB, I, DebugLoc(), get(ARM::STMIA)) 1096*0b57cec5SDimitry Andric .addFrameIndex(FI) 1097*0b57cec5SDimitry Andric .addMemOperand(MMO) 1098*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 1099*0b57cec5SDimitry Andric AddDReg(MIB, SrcReg, ARM::gsub_0, getKillRegState(isKill), TRI); 1100*0b57cec5SDimitry Andric AddDReg(MIB, SrcReg, ARM::gsub_1, 0, TRI); 1101*0b57cec5SDimitry Andric } 1102*0b57cec5SDimitry Andric } else 1103*0b57cec5SDimitry Andric llvm_unreachable("Unknown reg class!"); 1104*0b57cec5SDimitry Andric break; 1105*0b57cec5SDimitry Andric case 16: 1106*0b57cec5SDimitry Andric if (ARM::DPairRegClass.hasSubClassEq(RC) && Subtarget.hasNEON()) { 1107*0b57cec5SDimitry Andric // Use aligned spills if the stack can be realigned. 1108*0b57cec5SDimitry Andric if (Align >= 16 && getRegisterInfo().canRealignStack(MF)) { 1109*0b57cec5SDimitry Andric BuildMI(MBB, I, DebugLoc(), get(ARM::VST1q64)) 1110*0b57cec5SDimitry Andric .addFrameIndex(FI) 1111*0b57cec5SDimitry Andric .addImm(16) 1112*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(isKill)) 1113*0b57cec5SDimitry Andric .addMemOperand(MMO) 1114*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 1115*0b57cec5SDimitry Andric } else { 1116*0b57cec5SDimitry Andric BuildMI(MBB, I, DebugLoc(), get(ARM::VSTMQIA)) 1117*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(isKill)) 1118*0b57cec5SDimitry Andric .addFrameIndex(FI) 1119*0b57cec5SDimitry Andric .addMemOperand(MMO) 1120*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 1121*0b57cec5SDimitry Andric } 1122*0b57cec5SDimitry Andric } else if (ARM::QPRRegClass.hasSubClassEq(RC) && 1123*0b57cec5SDimitry Andric Subtarget.hasMVEIntegerOps()) { 1124*0b57cec5SDimitry Andric auto MIB = BuildMI(MBB, I, DebugLoc(), get(ARM::MVE_VSTRWU32)); 1125*0b57cec5SDimitry Andric MIB.addReg(SrcReg, getKillRegState(isKill)) 1126*0b57cec5SDimitry Andric .addFrameIndex(FI) 1127*0b57cec5SDimitry Andric .addImm(0) 
1128*0b57cec5SDimitry Andric .addMemOperand(MMO); 1129*0b57cec5SDimitry Andric addUnpredicatedMveVpredNOp(MIB); 1130*0b57cec5SDimitry Andric } else 1131*0b57cec5SDimitry Andric llvm_unreachable("Unknown reg class!"); 1132*0b57cec5SDimitry Andric break; 1133*0b57cec5SDimitry Andric case 24: 1134*0b57cec5SDimitry Andric if (ARM::DTripleRegClass.hasSubClassEq(RC)) { 1135*0b57cec5SDimitry Andric // Use aligned spills if the stack can be realigned. 1136*0b57cec5SDimitry Andric if (Align >= 16 && getRegisterInfo().canRealignStack(MF)) { 1137*0b57cec5SDimitry Andric BuildMI(MBB, I, DebugLoc(), get(ARM::VST1d64TPseudo)) 1138*0b57cec5SDimitry Andric .addFrameIndex(FI) 1139*0b57cec5SDimitry Andric .addImm(16) 1140*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(isKill)) 1141*0b57cec5SDimitry Andric .addMemOperand(MMO) 1142*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 1143*0b57cec5SDimitry Andric } else { 1144*0b57cec5SDimitry Andric MachineInstrBuilder MIB = BuildMI(MBB, I, DebugLoc(), 1145*0b57cec5SDimitry Andric get(ARM::VSTMDIA)) 1146*0b57cec5SDimitry Andric .addFrameIndex(FI) 1147*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)) 1148*0b57cec5SDimitry Andric .addMemOperand(MMO); 1149*0b57cec5SDimitry Andric MIB = AddDReg(MIB, SrcReg, ARM::dsub_0, getKillRegState(isKill), TRI); 1150*0b57cec5SDimitry Andric MIB = AddDReg(MIB, SrcReg, ARM::dsub_1, 0, TRI); 1151*0b57cec5SDimitry Andric AddDReg(MIB, SrcReg, ARM::dsub_2, 0, TRI); 1152*0b57cec5SDimitry Andric } 1153*0b57cec5SDimitry Andric } else 1154*0b57cec5SDimitry Andric llvm_unreachable("Unknown reg class!"); 1155*0b57cec5SDimitry Andric break; 1156*0b57cec5SDimitry Andric case 32: 1157*0b57cec5SDimitry Andric if (ARM::QQPRRegClass.hasSubClassEq(RC) || ARM::DQuadRegClass.hasSubClassEq(RC)) { 1158*0b57cec5SDimitry Andric if (Align >= 16 && getRegisterInfo().canRealignStack(MF)) { 1159*0b57cec5SDimitry Andric // FIXME: It's possible to only store part of the QQ register if the 1160*0b57cec5SDimitry Andric // 
spilled def has a sub-register index. 1161*0b57cec5SDimitry Andric BuildMI(MBB, I, DebugLoc(), get(ARM::VST1d64QPseudo)) 1162*0b57cec5SDimitry Andric .addFrameIndex(FI) 1163*0b57cec5SDimitry Andric .addImm(16) 1164*0b57cec5SDimitry Andric .addReg(SrcReg, getKillRegState(isKill)) 1165*0b57cec5SDimitry Andric .addMemOperand(MMO) 1166*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 1167*0b57cec5SDimitry Andric } else { 1168*0b57cec5SDimitry Andric MachineInstrBuilder MIB = BuildMI(MBB, I, DebugLoc(), 1169*0b57cec5SDimitry Andric get(ARM::VSTMDIA)) 1170*0b57cec5SDimitry Andric .addFrameIndex(FI) 1171*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)) 1172*0b57cec5SDimitry Andric .addMemOperand(MMO); 1173*0b57cec5SDimitry Andric MIB = AddDReg(MIB, SrcReg, ARM::dsub_0, getKillRegState(isKill), TRI); 1174*0b57cec5SDimitry Andric MIB = AddDReg(MIB, SrcReg, ARM::dsub_1, 0, TRI); 1175*0b57cec5SDimitry Andric MIB = AddDReg(MIB, SrcReg, ARM::dsub_2, 0, TRI); 1176*0b57cec5SDimitry Andric AddDReg(MIB, SrcReg, ARM::dsub_3, 0, TRI); 1177*0b57cec5SDimitry Andric } 1178*0b57cec5SDimitry Andric } else 1179*0b57cec5SDimitry Andric llvm_unreachable("Unknown reg class!"); 1180*0b57cec5SDimitry Andric break; 1181*0b57cec5SDimitry Andric case 64: 1182*0b57cec5SDimitry Andric if (ARM::QQQQPRRegClass.hasSubClassEq(RC)) { 1183*0b57cec5SDimitry Andric MachineInstrBuilder MIB = BuildMI(MBB, I, DebugLoc(), get(ARM::VSTMDIA)) 1184*0b57cec5SDimitry Andric .addFrameIndex(FI) 1185*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)) 1186*0b57cec5SDimitry Andric .addMemOperand(MMO); 1187*0b57cec5SDimitry Andric MIB = AddDReg(MIB, SrcReg, ARM::dsub_0, getKillRegState(isKill), TRI); 1188*0b57cec5SDimitry Andric MIB = AddDReg(MIB, SrcReg, ARM::dsub_1, 0, TRI); 1189*0b57cec5SDimitry Andric MIB = AddDReg(MIB, SrcReg, ARM::dsub_2, 0, TRI); 1190*0b57cec5SDimitry Andric MIB = AddDReg(MIB, SrcReg, ARM::dsub_3, 0, TRI); 1191*0b57cec5SDimitry Andric MIB = AddDReg(MIB, SrcReg, ARM::dsub_4, 0, TRI); 
      MIB = AddDReg(MIB, SrcReg, ARM::dsub_5, 0, TRI);
      MIB = AddDReg(MIB, SrcReg, ARM::dsub_6, 0, TRI);
      AddDReg(MIB, SrcReg, ARM::dsub_7, 0, TRI);
    } else
      llvm_unreachable("Unknown reg class!");
    break;
  default:
    llvm_unreachable("Unknown reg class!");
  }
}

/// If \p MI is a store of a register directly into a stack slot (immediate
/// offset 0 and, for reg+reg forms, no index register), set \p FrameIndex to
/// the slot's frame index and return the stored register; otherwise return 0.
unsigned ARMBaseInstrInfo::isStoreToStackSlot(const MachineInstr &MI,
                                              int &FrameIndex) const {
  switch (MI.getOpcode()) {
  default: break;
  case ARM::STRrs:
  case ARM::t2STRs: // FIXME: don't use t2STRs to access frame.
    // reg+reg addressing: only a match when the index register is 0 and the
    // shift immediate is 0, i.e. the access is exactly [FI].
    if (MI.getOperand(1).isFI() && MI.getOperand(2).isReg() &&
        MI.getOperand(3).isImm() && MI.getOperand(2).getReg() == 0 &&
        MI.getOperand(3).getImm() == 0) {
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
    break;
  case ARM::STRi12:
  case ARM::t2STRi12:
  case ARM::tSTRspi:
  case ARM::VSTRD:
  case ARM::VSTRS:
    // reg+imm addressing: only offset 0 counts as a plain spill of the slot.
    if (MI.getOperand(1).isFI() && MI.getOperand(2).isImm() &&
        MI.getOperand(2).getImm() == 0) {
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
    break;
  case ARM::VSTR_P0_off:
    // VPR predicate store: the stored register is implicitly P0.
    if (MI.getOperand(0).isFI() && MI.getOperand(1).isImm() &&
        MI.getOperand(1).getImm() == 0) {
      FrameIndex = MI.getOperand(0).getIndex();
      return ARM::P0;
    }
    break;
  case ARM::VST1q64:
  case ARM::VST1d64TPseudo:
  case ARM::VST1d64QPseudo:
    // NEON stores put the address first; only a whole-register (no subreg)
    // store is a clean stack-slot store.
    if (MI.getOperand(0).isFI() && MI.getOperand(2).getSubReg() == 0) {
      FrameIndex = MI.getOperand(0).getIndex();
      return MI.getOperand(2).getReg();
    }
    break;
  case ARM::VSTMQIA:
    if (MI.getOperand(1).isFI() && MI.getOperand(0).getSubReg() == 0) {
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
    break;
  }

  return 0;
}

/// Post-frame-elimination variant: recognizes a store to a single fixed stack
/// slot via the instruction's memory operands.
/// NOTE(review): despite the unsigned return type this returns true (1), not
/// the stored register — callers appear to test only for non-zero; confirm.
unsigned ARMBaseInstrInfo::isStoreToStackSlotPostFE(const MachineInstr &MI,
                                                    int &FrameIndex) const {
  SmallVector<const MachineMemOperand *, 1> Accesses;
  if (MI.mayStore() && hasStoreToStackSlot(MI, Accesses) &&
      Accesses.size() == 1) {
    FrameIndex =
        cast<FixedStackPseudoSourceValue>(Accesses.front()->getPseudoValue())
            ->getFrameIndex();
    return true;
  }
  return false;
}

/// Emit instructions to reload register \p DestReg of class \p RC from stack
/// slot \p FI, inserting before \p I in \p MBB. The opcode is selected by the
/// register class's spill size; multi-register classes are reloaded either
/// with a single (possibly alignment-requiring) NEON load or decomposed into
/// an LDM of D/GPR sub-registers.
void ARMBaseInstrInfo::
loadRegFromStackSlot(MachineBasicBlock &MBB, MachineBasicBlock::iterator I,
                     unsigned DestReg, int FI,
                     const TargetRegisterClass *RC,
                     const TargetRegisterInfo *TRI) const {
  DebugLoc DL;
  if (I != MBB.end()) DL = I->getDebugLoc();
  MachineFunction &MF = *MBB.getParent();
  MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned Align = MFI.getObjectAlignment(FI);
  MachineMemOperand *MMO = MF.getMachineMemOperand(
      MachinePointerInfo::getFixedStack(MF, FI), MachineMemOperand::MOLoad,
      MFI.getObjectSize(FI), Align);

  switch (TRI->getSpillSize(*RC)) {
  case 2:
    if (ARM::HPRRegClass.hasSubClassEq(RC)) {
      BuildMI(MBB, I, DL, get(ARM::VLDRH), DestReg)
          .addFrameIndex(FI)
          .addImm(0)
          .addMemOperand(MMO)
          .add(predOps(ARMCC::AL));
    } else
      llvm_unreachable("Unknown reg class!");
    break;
  case 4:
    if (ARM::GPRRegClass.hasSubClassEq(RC)) {
      BuildMI(MBB, I, DL, get(ARM::LDRi12), DestReg)
          .addFrameIndex(FI)
          .addImm(0)
          .addMemOperand(MMO)
          .add(predOps(ARMCC::AL));
    } else if (ARM::SPRRegClass.hasSubClassEq(RC)) {
      BuildMI(MBB, I, DL, get(ARM::VLDRS), DestReg)
          .addFrameIndex(FI)
          .addImm(0)
          .addMemOperand(MMO)
          .add(predOps(ARMCC::AL));
    } else if (ARM::VCCRRegClass.hasSubClassEq(RC)) {
      BuildMI(MBB, I, DL, get(ARM::VLDR_P0_off), DestReg)
          .addFrameIndex(FI)
          .addImm(0)
          .addMemOperand(MMO)
          .add(predOps(ARMCC::AL));
    } else
      llvm_unreachable("Unknown reg class!");
    break;
  case 8:
    if (ARM::DPRRegClass.hasSubClassEq(RC)) {
      BuildMI(MBB, I, DL, get(ARM::VLDRD), DestReg)
          .addFrameIndex(FI)
          .addImm(0)
          .addMemOperand(MMO)
          .add(predOps(ARMCC::AL));
    } else if (ARM::GPRPairRegClass.hasSubClassEq(RC)) {
      MachineInstrBuilder MIB;

      if (Subtarget.hasV5TEOps()) {
        // LDRD reloads the pair as two GPR defs (gsub_0/gsub_1).
        MIB = BuildMI(MBB, I, DL, get(ARM::LDRD));
        AddDReg(MIB, DestReg, ARM::gsub_0, RegState::DefineNoRead, TRI);
        AddDReg(MIB, DestReg, ARM::gsub_1, RegState::DefineNoRead, TRI);
        MIB.addFrameIndex(FI).addReg(0).addImm(0).addMemOperand(MMO)
           .add(predOps(ARMCC::AL));
      } else {
        // Fallback to LDM instruction, which has existed since the dawn of
        // time.
        MIB = BuildMI(MBB, I, DL, get(ARM::LDMIA))
                  .addFrameIndex(FI)
                  .addMemOperand(MMO)
                  .add(predOps(ARMCC::AL));
        MIB = AddDReg(MIB, DestReg, ARM::gsub_0, RegState::DefineNoRead, TRI);
        MIB = AddDReg(MIB, DestReg, ARM::gsub_1, RegState::DefineNoRead, TRI);
      }

      // For a physical pair, mark the full register implicitly defined so the
      // sub-register defs are seen as covering it.
      if (TargetRegisterInfo::isPhysicalRegister(DestReg))
        MIB.addReg(DestReg, RegState::ImplicitDefine);
    } else
      llvm_unreachable("Unknown reg class!");
    break;
  case 16:
    if (ARM::DPairRegClass.hasSubClassEq(RC) && Subtarget.hasNEON()) {
      // Use the aligned NEON load only if the slot is 16-byte aligned and the
      // stack can be realigned; otherwise fall back to VLDM.
      if (Align >= 16 && getRegisterInfo().canRealignStack(MF)) {
        BuildMI(MBB, I, DL, get(ARM::VLD1q64), DestReg)
            .addFrameIndex(FI)
            .addImm(16)
            .addMemOperand(MMO)
            .add(predOps(ARMCC::AL));
      } else {
        BuildMI(MBB, I, DL, get(ARM::VLDMQIA), DestReg)
            .addFrameIndex(FI)
            .addMemOperand(MMO)
            .add(predOps(ARMCC::AL));
      }
    } else if (ARM::QPRRegClass.hasSubClassEq(RC) &&
               Subtarget.hasMVEIntegerOps()) {
      auto MIB = BuildMI(MBB, I, DL, get(ARM::MVE_VLDRWU32), DestReg);
      MIB.addFrameIndex(FI)
          .addImm(0)
          .addMemOperand(MMO);
      // MVE loads carry a vpred operand instead of the usual ARM predicate.
      addUnpredicatedMveVpredNOp(MIB);
    } else
      llvm_unreachable("Unknown reg class!");
    break;
  case 24:
    if (ARM::DTripleRegClass.hasSubClassEq(RC)) {
      if (Align >= 16 && getRegisterInfo().canRealignStack(MF)) {
        BuildMI(MBB, I, DL, get(ARM::VLD1d64TPseudo), DestReg)
            .addFrameIndex(FI)
            .addImm(16)
            .addMemOperand(MMO)
            .add(predOps(ARMCC::AL));
      } else {
        // Reload the triple as three D-register defs via VLDM.
        MachineInstrBuilder MIB = BuildMI(MBB, I, DL, get(ARM::VLDMDIA))
                                      .addFrameIndex(FI)
                                      .addMemOperand(MMO)
                                      .add(predOps(ARMCC::AL));
        MIB = AddDReg(MIB, DestReg, ARM::dsub_0, RegState::DefineNoRead, TRI);
        MIB = AddDReg(MIB, DestReg, ARM::dsub_1, RegState::DefineNoRead, TRI);
        MIB = AddDReg(MIB, DestReg, ARM::dsub_2, RegState::DefineNoRead, TRI);
        if (TargetRegisterInfo::isPhysicalRegister(DestReg))
          MIB.addReg(DestReg, RegState::ImplicitDefine);
      }
    } else
      llvm_unreachable("Unknown reg class!");
    break;
  case 32:
    if (ARM::QQPRRegClass.hasSubClassEq(RC) || ARM::DQuadRegClass.hasSubClassEq(RC)) {
      if (Align >= 16 && getRegisterInfo().canRealignStack(MF)) {
        BuildMI(MBB, I, DL, get(ARM::VLD1d64QPseudo), DestReg)
            .addFrameIndex(FI)
            .addImm(16)
            .addMemOperand(MMO)
            .add(predOps(ARMCC::AL));
      } else {
        MachineInstrBuilder MIB = BuildMI(MBB, I, DL, get(ARM::VLDMDIA))
                                      .addFrameIndex(FI)
                                      .add(predOps(ARMCC::AL))
                                      .addMemOperand(MMO);
        MIB = AddDReg(MIB, DestReg, ARM::dsub_0, RegState::DefineNoRead, TRI);
        MIB = AddDReg(MIB, DestReg, ARM::dsub_1, RegState::DefineNoRead, TRI);
        MIB = AddDReg(MIB, DestReg, ARM::dsub_2, RegState::DefineNoRead, TRI);
        MIB = AddDReg(MIB, DestReg, ARM::dsub_3, RegState::DefineNoRead, TRI);
        if (TargetRegisterInfo::isPhysicalRegister(DestReg))
          MIB.addReg(DestReg, RegState::ImplicitDefine);
      }
    } else
      llvm_unreachable("Unknown reg class!");
    break;
  case 64:
    if (ARM::QQQQPRRegClass.hasSubClassEq(RC)) {
      // Eight D registers: always VLDM, one def per sub-register.
      MachineInstrBuilder MIB = BuildMI(MBB, I, DL, get(ARM::VLDMDIA))
                                    .addFrameIndex(FI)
                                    .add(predOps(ARMCC::AL))
                                    .addMemOperand(MMO);
      MIB = AddDReg(MIB, DestReg, ARM::dsub_0, RegState::DefineNoRead, TRI);
      MIB = AddDReg(MIB, DestReg, ARM::dsub_1, RegState::DefineNoRead, TRI);
      MIB = AddDReg(MIB, DestReg, ARM::dsub_2, RegState::DefineNoRead, TRI);
      MIB = AddDReg(MIB, DestReg, ARM::dsub_3, RegState::DefineNoRead, TRI);
      MIB = AddDReg(MIB, DestReg, ARM::dsub_4, RegState::DefineNoRead, TRI);
      MIB = AddDReg(MIB, DestReg, ARM::dsub_5, RegState::DefineNoRead, TRI);
      MIB = AddDReg(MIB, DestReg, ARM::dsub_6, RegState::DefineNoRead, TRI);
      MIB = AddDReg(MIB, DestReg, ARM::dsub_7, RegState::DefineNoRead, TRI);
      if (TargetRegisterInfo::isPhysicalRegister(DestReg))
        MIB.addReg(DestReg, RegState::ImplicitDefine);
    } else
      llvm_unreachable("Unknown reg class!");
    break;
  default:
    llvm_unreachable("Unknown regclass!");
  }
}

/// If \p MI is a load of a register directly from a stack slot (offset 0, no
/// index register), set \p FrameIndex and return the loaded register;
/// otherwise return 0. Mirror of isStoreToStackSlot.
unsigned ARMBaseInstrInfo::isLoadFromStackSlot(const MachineInstr &MI,
                                               int &FrameIndex) const {
  switch (MI.getOpcode()) {
  default: break;
  case ARM::LDRrs:
  case ARM::t2LDRs: // FIXME: don't use t2LDRs to access frame.
    // reg+reg addressing: only a match when the index register is 0 and the
    // shift immediate is 0, i.e. the access is exactly [FI].
    if (MI.getOperand(1).isFI() && MI.getOperand(2).isReg() &&
        MI.getOperand(3).isImm() && MI.getOperand(2).getReg() == 0 &&
        MI.getOperand(3).getImm() == 0) {
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
    break;
  case ARM::LDRi12:
  case ARM::t2LDRi12:
  case ARM::tLDRspi:
  case ARM::VLDRD:
  case ARM::VLDRS:
    // reg+imm addressing: only offset 0 counts as a plain reload of the slot.
    if (MI.getOperand(1).isFI() && MI.getOperand(2).isImm() &&
        MI.getOperand(2).getImm() == 0) {
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
    break;
  case ARM::VLDR_P0_off:
    // VPR predicate reload: the destination is implicitly P0.
    if (MI.getOperand(0).isFI() && MI.getOperand(1).isImm() &&
        MI.getOperand(1).getImm() == 0) {
      FrameIndex = MI.getOperand(0).getIndex();
      return ARM::P0;
    }
    break;
  case ARM::VLD1q64:
  case ARM::VLD1d8TPseudo:
  case ARM::VLD1d16TPseudo:
  case ARM::VLD1d32TPseudo:
  case ARM::VLD1d64TPseudo:
  case ARM::VLD1d8QPseudo:
  case ARM::VLD1d16QPseudo:
  case ARM::VLD1d32QPseudo:
  case ARM::VLD1d64QPseudo:
    // Whole-register NEON reloads only (no sub-register def).
    if (MI.getOperand(1).isFI() && MI.getOperand(0).getSubReg() == 0) {
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
    break;
  case ARM::VLDMQIA:
    if (MI.getOperand(1).isFI() && MI.getOperand(0).getSubReg() == 0) {
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
    break;
  }

  return 0;
}

/// Post-frame-elimination variant: recognizes a load from a single fixed
/// stack slot via the instruction's memory operands.
/// NOTE(review): despite the unsigned return type this returns true (1), not
/// the loaded register — callers appear to test only for non-zero; confirm.
unsigned ARMBaseInstrInfo::isLoadFromStackSlotPostFE(const MachineInstr &MI,
                                                     int &FrameIndex) const {
  SmallVector<const MachineMemOperand *, 1> Accesses;
  if (MI.mayLoad() && hasLoadFromStackSlot(MI, Accesses) &&
      Accesses.size() == 1) {
    FrameIndex =
        cast<FixedStackPseudoSourceValue>(Accesses.front()->getPseudoValue())
            ->getFrameIndex();
    return true;
  }
  return false;
}

/// Expands MEMCPY to either LDMIA/STMIA or LDMIA_UPD/STMIA_UPD
/// depending on whether the result is used.
void ARMBaseInstrInfo::expandMEMCPY(MachineBasicBlock::iterator MI) const {
  bool isThumb1 = Subtarget.isThumb1Only();
  bool isThumb2 = Subtarget.isThumb2();
  const ARMBaseInstrInfo *TII = Subtarget.getInstrInfo();

  DebugLoc dl = MI->getDebugLoc();
  MachineBasicBlock *BB = MI->getParent();

  MachineInstrBuilder LDM, STM;
  // Operand 1 is the updated load base. Use the writeback (_UPD) form when
  // that result is live; Thumb1 always uses writeback (tLDMIA_UPD).
  if (isThumb1 || !MI->getOperand(1).isDead()) {
    MachineOperand LDWb(MI->getOperand(1));
    LDM = BuildMI(*BB, MI, dl, TII->get(isThumb2 ? ARM::t2LDMIA_UPD
                                                 : isThumb1 ? ARM::tLDMIA_UPD
                                                            : ARM::LDMIA_UPD))
              .add(LDWb);
  } else {
    LDM = BuildMI(*BB, MI, dl, TII->get(isThumb2 ? ARM::t2LDMIA : ARM::LDMIA));
  }

  // Operand 0 is the updated store base; same writeback choice as above.
  if (isThumb1 || !MI->getOperand(0).isDead()) {
    MachineOperand STWb(MI->getOperand(0));
    STM = BuildMI(*BB, MI, dl, TII->get(isThumb2 ? ARM::t2STMIA_UPD
                                                 : isThumb1 ? ARM::tSTMIA_UPD
                                                            : ARM::STMIA_UPD))
              .add(STWb);
  } else {
    STM = BuildMI(*BB, MI, dl, TII->get(isThumb2 ? ARM::t2STMIA : ARM::STMIA));
  }

  // Operands 3 and 2 are the incoming load/store base addresses.
  MachineOperand LDBase(MI->getOperand(3));
  LDM.add(LDBase).add(predOps(ARMCC::AL));

  MachineOperand STBase(MI->getOperand(2));
  STM.add(STBase).add(predOps(ARMCC::AL));

  // Sort the scratch registers into ascending order.
  const TargetRegisterInfo &TRI = getRegisterInfo();
  SmallVector<unsigned, 6> ScratchRegs;
  for(unsigned I = 5; I < MI->getNumOperands(); ++I)
    ScratchRegs.push_back(MI->getOperand(I).getReg());
  llvm::sort(ScratchRegs,
             [&TRI](const unsigned &Reg1, const unsigned &Reg2) -> bool {
               return TRI.getEncodingValue(Reg1) <
                      TRI.getEncodingValue(Reg2);
             });

  // The scratch registers form the LDM/STM register lists: defined by the
  // load, killed by the store.
  for (const auto &Reg : ScratchRegs) {
    LDM.addReg(Reg, RegState::Define);
    STM.addReg(Reg, RegState::Kill);
  }

  // Remove the original MEMCPY pseudo now that it has been expanded.
  BB->erase(MI);
}

/// Expand post-RA pseudo-instructions: LOAD_STACK_GUARD and MEMCPY, plus
/// opportunistic widening of S-register COPYs to VMOVD (see below).
bool ARMBaseInstrInfo::expandPostRAPseudo(MachineInstr &MI) const {
  if (MI.getOpcode() == TargetOpcode::LOAD_STACK_GUARD) {
    assert(getSubtarget().getTargetTriple().isOSBinFormatMachO() &&
           "LOAD_STACK_GUARD currently supported only for MachO.");
    expandLoadStackGuard(MI);
    MI.getParent()->erase(MI);
Andric return true; 1570*0b57cec5SDimitry Andric } 1571*0b57cec5SDimitry Andric 1572*0b57cec5SDimitry Andric if (MI.getOpcode() == ARM::MEMCPY) { 1573*0b57cec5SDimitry Andric expandMEMCPY(MI); 1574*0b57cec5SDimitry Andric return true; 1575*0b57cec5SDimitry Andric } 1576*0b57cec5SDimitry Andric 1577*0b57cec5SDimitry Andric // This hook gets to expand COPY instructions before they become 1578*0b57cec5SDimitry Andric // copyPhysReg() calls. Look for VMOVS instructions that can legally be 1579*0b57cec5SDimitry Andric // widened to VMOVD. We prefer the VMOVD when possible because it may be 1580*0b57cec5SDimitry Andric // changed into a VORR that can go down the NEON pipeline. 1581*0b57cec5SDimitry Andric if (!MI.isCopy() || Subtarget.dontWidenVMOVS() || !Subtarget.hasFP64()) 1582*0b57cec5SDimitry Andric return false; 1583*0b57cec5SDimitry Andric 1584*0b57cec5SDimitry Andric // Look for a copy between even S-registers. That is where we keep floats 1585*0b57cec5SDimitry Andric // when using NEON v2f32 instructions for f32 arithmetic. 1586*0b57cec5SDimitry Andric unsigned DstRegS = MI.getOperand(0).getReg(); 1587*0b57cec5SDimitry Andric unsigned SrcRegS = MI.getOperand(1).getReg(); 1588*0b57cec5SDimitry Andric if (!ARM::SPRRegClass.contains(DstRegS, SrcRegS)) 1589*0b57cec5SDimitry Andric return false; 1590*0b57cec5SDimitry Andric 1591*0b57cec5SDimitry Andric const TargetRegisterInfo *TRI = &getRegisterInfo(); 1592*0b57cec5SDimitry Andric unsigned DstRegD = TRI->getMatchingSuperReg(DstRegS, ARM::ssub_0, 1593*0b57cec5SDimitry Andric &ARM::DPRRegClass); 1594*0b57cec5SDimitry Andric unsigned SrcRegD = TRI->getMatchingSuperReg(SrcRegS, ARM::ssub_0, 1595*0b57cec5SDimitry Andric &ARM::DPRRegClass); 1596*0b57cec5SDimitry Andric if (!DstRegD || !SrcRegD) 1597*0b57cec5SDimitry Andric return false; 1598*0b57cec5SDimitry Andric 1599*0b57cec5SDimitry Andric // We want to widen this into a DstRegD = VMOVD SrcRegD copy. 
This is only 1600*0b57cec5SDimitry Andric // legal if the COPY already defines the full DstRegD, and it isn't a 1601*0b57cec5SDimitry Andric // sub-register insertion. 1602*0b57cec5SDimitry Andric if (!MI.definesRegister(DstRegD, TRI) || MI.readsRegister(DstRegD, TRI)) 1603*0b57cec5SDimitry Andric return false; 1604*0b57cec5SDimitry Andric 1605*0b57cec5SDimitry Andric // A dead copy shouldn't show up here, but reject it just in case. 1606*0b57cec5SDimitry Andric if (MI.getOperand(0).isDead()) 1607*0b57cec5SDimitry Andric return false; 1608*0b57cec5SDimitry Andric 1609*0b57cec5SDimitry Andric // All clear, widen the COPY. 1610*0b57cec5SDimitry Andric LLVM_DEBUG(dbgs() << "widening: " << MI); 1611*0b57cec5SDimitry Andric MachineInstrBuilder MIB(*MI.getParent()->getParent(), MI); 1612*0b57cec5SDimitry Andric 1613*0b57cec5SDimitry Andric // Get rid of the old implicit-def of DstRegD. Leave it if it defines a Q-reg 1614*0b57cec5SDimitry Andric // or some other super-register. 1615*0b57cec5SDimitry Andric int ImpDefIdx = MI.findRegisterDefOperandIdx(DstRegD); 1616*0b57cec5SDimitry Andric if (ImpDefIdx != -1) 1617*0b57cec5SDimitry Andric MI.RemoveOperand(ImpDefIdx); 1618*0b57cec5SDimitry Andric 1619*0b57cec5SDimitry Andric // Change the opcode and operands. 1620*0b57cec5SDimitry Andric MI.setDesc(get(ARM::VMOVD)); 1621*0b57cec5SDimitry Andric MI.getOperand(0).setReg(DstRegD); 1622*0b57cec5SDimitry Andric MI.getOperand(1).setReg(SrcRegD); 1623*0b57cec5SDimitry Andric MIB.add(predOps(ARMCC::AL)); 1624*0b57cec5SDimitry Andric 1625*0b57cec5SDimitry Andric // We are now reading SrcRegD instead of SrcRegS. This may upset the 1626*0b57cec5SDimitry Andric // register scavenger and machine verifier, so we need to indicate that we 1627*0b57cec5SDimitry Andric // are reading an undefined value from SrcRegD, but a proper value from 1628*0b57cec5SDimitry Andric // SrcRegS. 
  MI.getOperand(1).setIsUndef();
  MIB.addReg(SrcRegS, RegState::Implicit);

  // SrcRegD may actually contain an unrelated value in the ssub_1
  // sub-register. Don't kill it. Only kill the ssub_0 sub-register.
  if (MI.getOperand(1).isKill()) {
    MI.getOperand(1).setIsKill(false);
    MI.addRegisterKilled(SrcRegS, TRI, true);
  }

  LLVM_DEBUG(dbgs() << "replaced by: " << MI);
  return true;
}

/// Create a copy of a const pool value. Update CPI to the new index and return
/// the label UID.
static unsigned duplicateCPV(MachineFunction &MF, unsigned &CPI) {
  MachineConstantPool *MCP = MF.getConstantPool();
  ARMFunctionInfo *AFI = MF.getInfo<ARMFunctionInfo>();

  const MachineConstantPoolEntry &MCPE = MCP->getConstants()[CPI];
  assert(MCPE.isMachineConstantPoolEntry() &&
         "Expecting a machine constantpool entry!");
  ARMConstantPoolValue *ACPV =
    static_cast<ARMConstantPoolValue*>(MCPE.Val.MachineCPVal);

  // A fresh PIC label so the duplicated entry gets its own PC-relative base.
  unsigned PCLabelId = AFI->createPICLabelUId();
  ARMConstantPoolValue *NewCPV = nullptr;

  // FIXME: The below assumes PIC relocation model and that the function
  // is Thumb mode (t1 or t2). PCAdjustment would be 8 for ARM mode PIC, and
  // zero for non-PIC in ARM or Thumb. The callers are all of thumb LDR
  // instructions, so that's probably OK, but is PIC always correct when
  // we get here?
  if (ACPV->isGlobalValue())
    NewCPV = ARMConstantPoolConstant::Create(
        cast<ARMConstantPoolConstant>(ACPV)->getGV(), PCLabelId, ARMCP::CPValue,
        4, ACPV->getModifier(), ACPV->mustAddCurrentAddress());
  else if (ACPV->isExtSymbol())
    NewCPV = ARMConstantPoolSymbol::
      Create(MF.getFunction().getContext(),
             cast<ARMConstantPoolSymbol>(ACPV)->getSymbol(), PCLabelId, 4);
  else if (ACPV->isBlockAddress())
    NewCPV = ARMConstantPoolConstant::
      Create(cast<ARMConstantPoolConstant>(ACPV)->getBlockAddress(), PCLabelId,
             ARMCP::CPBlockAddress, 4);
  else if (ACPV->isLSDA())
    NewCPV = ARMConstantPoolConstant::Create(&MF.getFunction(), PCLabelId,
                                             ARMCP::CPLSDA, 4);
  else if (ACPV->isMachineBasicBlock())
    NewCPV = ARMConstantPoolMBB::
      Create(MF.getFunction().getContext(),
             cast<ARMConstantPoolMBB>(ACPV)->getMBB(), PCLabelId, 4);
  else
    llvm_unreachable("Unexpected ARM constantpool value type!!");
  CPI = MCP->getConstantPoolIndex(NewCPV, MCPE.getAlignment());
  return PCLabelId;
}

Andric void ARMBaseInstrInfo::reMaterialize(MachineBasicBlock &MBB, 1689*0b57cec5SDimitry Andric MachineBasicBlock::iterator I, 1690*0b57cec5SDimitry Andric unsigned DestReg, unsigned SubIdx, 1691*0b57cec5SDimitry Andric const MachineInstr &Orig, 1692*0b57cec5SDimitry Andric const TargetRegisterInfo &TRI) const { 1693*0b57cec5SDimitry Andric unsigned Opcode = Orig.getOpcode(); 1694*0b57cec5SDimitry Andric switch (Opcode) { 1695*0b57cec5SDimitry Andric default: { 1696*0b57cec5SDimitry Andric MachineInstr *MI = MBB.getParent()->CloneMachineInstr(&Orig); 1697*0b57cec5SDimitry Andric MI->substituteRegister(Orig.getOperand(0).getReg(), DestReg, SubIdx, TRI); 1698*0b57cec5SDimitry Andric MBB.insert(I, MI); 1699*0b57cec5SDimitry Andric break; 1700*0b57cec5SDimitry Andric } 1701*0b57cec5SDimitry Andric case ARM::tLDRpci_pic: 1702*0b57cec5SDimitry Andric case ARM::t2LDRpci_pic: { 1703*0b57cec5SDimitry Andric MachineFunction &MF = *MBB.getParent(); 1704*0b57cec5SDimitry Andric unsigned CPI = Orig.getOperand(1).getIndex(); 1705*0b57cec5SDimitry Andric unsigned PCLabelId = duplicateCPV(MF, CPI); 1706*0b57cec5SDimitry Andric BuildMI(MBB, I, Orig.getDebugLoc(), get(Opcode), DestReg) 1707*0b57cec5SDimitry Andric .addConstantPoolIndex(CPI) 1708*0b57cec5SDimitry Andric .addImm(PCLabelId) 1709*0b57cec5SDimitry Andric .cloneMemRefs(Orig); 1710*0b57cec5SDimitry Andric break; 1711*0b57cec5SDimitry Andric } 1712*0b57cec5SDimitry Andric } 1713*0b57cec5SDimitry Andric } 1714*0b57cec5SDimitry Andric 1715*0b57cec5SDimitry Andric MachineInstr & 1716*0b57cec5SDimitry Andric ARMBaseInstrInfo::duplicate(MachineBasicBlock &MBB, 1717*0b57cec5SDimitry Andric MachineBasicBlock::iterator InsertBefore, 1718*0b57cec5SDimitry Andric const MachineInstr &Orig) const { 1719*0b57cec5SDimitry Andric MachineInstr &Cloned = TargetInstrInfo::duplicate(MBB, InsertBefore, Orig); 1720*0b57cec5SDimitry Andric MachineBasicBlock::instr_iterator I = Cloned.getIterator(); 1721*0b57cec5SDimitry Andric for (;;) { 
1722*0b57cec5SDimitry Andric switch (I->getOpcode()) { 1723*0b57cec5SDimitry Andric case ARM::tLDRpci_pic: 1724*0b57cec5SDimitry Andric case ARM::t2LDRpci_pic: { 1725*0b57cec5SDimitry Andric MachineFunction &MF = *MBB.getParent(); 1726*0b57cec5SDimitry Andric unsigned CPI = I->getOperand(1).getIndex(); 1727*0b57cec5SDimitry Andric unsigned PCLabelId = duplicateCPV(MF, CPI); 1728*0b57cec5SDimitry Andric I->getOperand(1).setIndex(CPI); 1729*0b57cec5SDimitry Andric I->getOperand(2).setImm(PCLabelId); 1730*0b57cec5SDimitry Andric break; 1731*0b57cec5SDimitry Andric } 1732*0b57cec5SDimitry Andric } 1733*0b57cec5SDimitry Andric if (!I->isBundledWithSucc()) 1734*0b57cec5SDimitry Andric break; 1735*0b57cec5SDimitry Andric ++I; 1736*0b57cec5SDimitry Andric } 1737*0b57cec5SDimitry Andric return Cloned; 1738*0b57cec5SDimitry Andric } 1739*0b57cec5SDimitry Andric 1740*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::produceSameValue(const MachineInstr &MI0, 1741*0b57cec5SDimitry Andric const MachineInstr &MI1, 1742*0b57cec5SDimitry Andric const MachineRegisterInfo *MRI) const { 1743*0b57cec5SDimitry Andric unsigned Opcode = MI0.getOpcode(); 1744*0b57cec5SDimitry Andric if (Opcode == ARM::t2LDRpci || 1745*0b57cec5SDimitry Andric Opcode == ARM::t2LDRpci_pic || 1746*0b57cec5SDimitry Andric Opcode == ARM::tLDRpci || 1747*0b57cec5SDimitry Andric Opcode == ARM::tLDRpci_pic || 1748*0b57cec5SDimitry Andric Opcode == ARM::LDRLIT_ga_pcrel || 1749*0b57cec5SDimitry Andric Opcode == ARM::LDRLIT_ga_pcrel_ldr || 1750*0b57cec5SDimitry Andric Opcode == ARM::tLDRLIT_ga_pcrel || 1751*0b57cec5SDimitry Andric Opcode == ARM::MOV_ga_pcrel || 1752*0b57cec5SDimitry Andric Opcode == ARM::MOV_ga_pcrel_ldr || 1753*0b57cec5SDimitry Andric Opcode == ARM::t2MOV_ga_pcrel) { 1754*0b57cec5SDimitry Andric if (MI1.getOpcode() != Opcode) 1755*0b57cec5SDimitry Andric return false; 1756*0b57cec5SDimitry Andric if (MI0.getNumOperands() != MI1.getNumOperands()) 1757*0b57cec5SDimitry Andric return false; 
1758*0b57cec5SDimitry Andric 1759*0b57cec5SDimitry Andric const MachineOperand &MO0 = MI0.getOperand(1); 1760*0b57cec5SDimitry Andric const MachineOperand &MO1 = MI1.getOperand(1); 1761*0b57cec5SDimitry Andric if (MO0.getOffset() != MO1.getOffset()) 1762*0b57cec5SDimitry Andric return false; 1763*0b57cec5SDimitry Andric 1764*0b57cec5SDimitry Andric if (Opcode == ARM::LDRLIT_ga_pcrel || 1765*0b57cec5SDimitry Andric Opcode == ARM::LDRLIT_ga_pcrel_ldr || 1766*0b57cec5SDimitry Andric Opcode == ARM::tLDRLIT_ga_pcrel || 1767*0b57cec5SDimitry Andric Opcode == ARM::MOV_ga_pcrel || 1768*0b57cec5SDimitry Andric Opcode == ARM::MOV_ga_pcrel_ldr || 1769*0b57cec5SDimitry Andric Opcode == ARM::t2MOV_ga_pcrel) 1770*0b57cec5SDimitry Andric // Ignore the PC labels. 1771*0b57cec5SDimitry Andric return MO0.getGlobal() == MO1.getGlobal(); 1772*0b57cec5SDimitry Andric 1773*0b57cec5SDimitry Andric const MachineFunction *MF = MI0.getParent()->getParent(); 1774*0b57cec5SDimitry Andric const MachineConstantPool *MCP = MF->getConstantPool(); 1775*0b57cec5SDimitry Andric int CPI0 = MO0.getIndex(); 1776*0b57cec5SDimitry Andric int CPI1 = MO1.getIndex(); 1777*0b57cec5SDimitry Andric const MachineConstantPoolEntry &MCPE0 = MCP->getConstants()[CPI0]; 1778*0b57cec5SDimitry Andric const MachineConstantPoolEntry &MCPE1 = MCP->getConstants()[CPI1]; 1779*0b57cec5SDimitry Andric bool isARMCP0 = MCPE0.isMachineConstantPoolEntry(); 1780*0b57cec5SDimitry Andric bool isARMCP1 = MCPE1.isMachineConstantPoolEntry(); 1781*0b57cec5SDimitry Andric if (isARMCP0 && isARMCP1) { 1782*0b57cec5SDimitry Andric ARMConstantPoolValue *ACPV0 = 1783*0b57cec5SDimitry Andric static_cast<ARMConstantPoolValue*>(MCPE0.Val.MachineCPVal); 1784*0b57cec5SDimitry Andric ARMConstantPoolValue *ACPV1 = 1785*0b57cec5SDimitry Andric static_cast<ARMConstantPoolValue*>(MCPE1.Val.MachineCPVal); 1786*0b57cec5SDimitry Andric return ACPV0->hasSameValue(ACPV1); 1787*0b57cec5SDimitry Andric } else if (!isARMCP0 && !isARMCP1) { 
1788*0b57cec5SDimitry Andric return MCPE0.Val.ConstVal == MCPE1.Val.ConstVal; 1789*0b57cec5SDimitry Andric } 1790*0b57cec5SDimitry Andric return false; 1791*0b57cec5SDimitry Andric } else if (Opcode == ARM::PICLDR) { 1792*0b57cec5SDimitry Andric if (MI1.getOpcode() != Opcode) 1793*0b57cec5SDimitry Andric return false; 1794*0b57cec5SDimitry Andric if (MI0.getNumOperands() != MI1.getNumOperands()) 1795*0b57cec5SDimitry Andric return false; 1796*0b57cec5SDimitry Andric 1797*0b57cec5SDimitry Andric unsigned Addr0 = MI0.getOperand(1).getReg(); 1798*0b57cec5SDimitry Andric unsigned Addr1 = MI1.getOperand(1).getReg(); 1799*0b57cec5SDimitry Andric if (Addr0 != Addr1) { 1800*0b57cec5SDimitry Andric if (!MRI || 1801*0b57cec5SDimitry Andric !TargetRegisterInfo::isVirtualRegister(Addr0) || 1802*0b57cec5SDimitry Andric !TargetRegisterInfo::isVirtualRegister(Addr1)) 1803*0b57cec5SDimitry Andric return false; 1804*0b57cec5SDimitry Andric 1805*0b57cec5SDimitry Andric // This assumes SSA form. 1806*0b57cec5SDimitry Andric MachineInstr *Def0 = MRI->getVRegDef(Addr0); 1807*0b57cec5SDimitry Andric MachineInstr *Def1 = MRI->getVRegDef(Addr1); 1808*0b57cec5SDimitry Andric // Check if the loaded value, e.g. a constantpool of a global address, are 1809*0b57cec5SDimitry Andric // the same. 
1810*0b57cec5SDimitry Andric if (!produceSameValue(*Def0, *Def1, MRI)) 1811*0b57cec5SDimitry Andric return false; 1812*0b57cec5SDimitry Andric } 1813*0b57cec5SDimitry Andric 1814*0b57cec5SDimitry Andric for (unsigned i = 3, e = MI0.getNumOperands(); i != e; ++i) { 1815*0b57cec5SDimitry Andric // %12 = PICLDR %11, 0, 14, %noreg 1816*0b57cec5SDimitry Andric const MachineOperand &MO0 = MI0.getOperand(i); 1817*0b57cec5SDimitry Andric const MachineOperand &MO1 = MI1.getOperand(i); 1818*0b57cec5SDimitry Andric if (!MO0.isIdenticalTo(MO1)) 1819*0b57cec5SDimitry Andric return false; 1820*0b57cec5SDimitry Andric } 1821*0b57cec5SDimitry Andric return true; 1822*0b57cec5SDimitry Andric } 1823*0b57cec5SDimitry Andric 1824*0b57cec5SDimitry Andric return MI0.isIdenticalTo(MI1, MachineInstr::IgnoreVRegDefs); 1825*0b57cec5SDimitry Andric } 1826*0b57cec5SDimitry Andric 1827*0b57cec5SDimitry Andric /// areLoadsFromSameBasePtr - This is used by the pre-regalloc scheduler to 1828*0b57cec5SDimitry Andric /// determine if two loads are loading from the same base address. It should 1829*0b57cec5SDimitry Andric /// only return true if the base pointers are the same and the only differences 1830*0b57cec5SDimitry Andric /// between the two addresses is the offset. It also returns the offsets by 1831*0b57cec5SDimitry Andric /// reference. 1832*0b57cec5SDimitry Andric /// 1833*0b57cec5SDimitry Andric /// FIXME: remove this in favor of the MachineInstr interface once pre-RA-sched 1834*0b57cec5SDimitry Andric /// is permanently disabled. 1835*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::areLoadsFromSameBasePtr(SDNode *Load1, SDNode *Load2, 1836*0b57cec5SDimitry Andric int64_t &Offset1, 1837*0b57cec5SDimitry Andric int64_t &Offset2) const { 1838*0b57cec5SDimitry Andric // Don't worry about Thumb: just ARM and Thumb2. 
1839*0b57cec5SDimitry Andric if (Subtarget.isThumb1Only()) return false; 1840*0b57cec5SDimitry Andric 1841*0b57cec5SDimitry Andric if (!Load1->isMachineOpcode() || !Load2->isMachineOpcode()) 1842*0b57cec5SDimitry Andric return false; 1843*0b57cec5SDimitry Andric 1844*0b57cec5SDimitry Andric switch (Load1->getMachineOpcode()) { 1845*0b57cec5SDimitry Andric default: 1846*0b57cec5SDimitry Andric return false; 1847*0b57cec5SDimitry Andric case ARM::LDRi12: 1848*0b57cec5SDimitry Andric case ARM::LDRBi12: 1849*0b57cec5SDimitry Andric case ARM::LDRD: 1850*0b57cec5SDimitry Andric case ARM::LDRH: 1851*0b57cec5SDimitry Andric case ARM::LDRSB: 1852*0b57cec5SDimitry Andric case ARM::LDRSH: 1853*0b57cec5SDimitry Andric case ARM::VLDRD: 1854*0b57cec5SDimitry Andric case ARM::VLDRS: 1855*0b57cec5SDimitry Andric case ARM::t2LDRi8: 1856*0b57cec5SDimitry Andric case ARM::t2LDRBi8: 1857*0b57cec5SDimitry Andric case ARM::t2LDRDi8: 1858*0b57cec5SDimitry Andric case ARM::t2LDRSHi8: 1859*0b57cec5SDimitry Andric case ARM::t2LDRi12: 1860*0b57cec5SDimitry Andric case ARM::t2LDRBi12: 1861*0b57cec5SDimitry Andric case ARM::t2LDRSHi12: 1862*0b57cec5SDimitry Andric break; 1863*0b57cec5SDimitry Andric } 1864*0b57cec5SDimitry Andric 1865*0b57cec5SDimitry Andric switch (Load2->getMachineOpcode()) { 1866*0b57cec5SDimitry Andric default: 1867*0b57cec5SDimitry Andric return false; 1868*0b57cec5SDimitry Andric case ARM::LDRi12: 1869*0b57cec5SDimitry Andric case ARM::LDRBi12: 1870*0b57cec5SDimitry Andric case ARM::LDRD: 1871*0b57cec5SDimitry Andric case ARM::LDRH: 1872*0b57cec5SDimitry Andric case ARM::LDRSB: 1873*0b57cec5SDimitry Andric case ARM::LDRSH: 1874*0b57cec5SDimitry Andric case ARM::VLDRD: 1875*0b57cec5SDimitry Andric case ARM::VLDRS: 1876*0b57cec5SDimitry Andric case ARM::t2LDRi8: 1877*0b57cec5SDimitry Andric case ARM::t2LDRBi8: 1878*0b57cec5SDimitry Andric case ARM::t2LDRSHi8: 1879*0b57cec5SDimitry Andric case ARM::t2LDRi12: 1880*0b57cec5SDimitry Andric case ARM::t2LDRBi12: 
1881*0b57cec5SDimitry Andric case ARM::t2LDRSHi12: 1882*0b57cec5SDimitry Andric break; 1883*0b57cec5SDimitry Andric } 1884*0b57cec5SDimitry Andric 1885*0b57cec5SDimitry Andric // Check if base addresses and chain operands match. 1886*0b57cec5SDimitry Andric if (Load1->getOperand(0) != Load2->getOperand(0) || 1887*0b57cec5SDimitry Andric Load1->getOperand(4) != Load2->getOperand(4)) 1888*0b57cec5SDimitry Andric return false; 1889*0b57cec5SDimitry Andric 1890*0b57cec5SDimitry Andric // Index should be Reg0. 1891*0b57cec5SDimitry Andric if (Load1->getOperand(3) != Load2->getOperand(3)) 1892*0b57cec5SDimitry Andric return false; 1893*0b57cec5SDimitry Andric 1894*0b57cec5SDimitry Andric // Determine the offsets. 1895*0b57cec5SDimitry Andric if (isa<ConstantSDNode>(Load1->getOperand(1)) && 1896*0b57cec5SDimitry Andric isa<ConstantSDNode>(Load2->getOperand(1))) { 1897*0b57cec5SDimitry Andric Offset1 = cast<ConstantSDNode>(Load1->getOperand(1))->getSExtValue(); 1898*0b57cec5SDimitry Andric Offset2 = cast<ConstantSDNode>(Load2->getOperand(1))->getSExtValue(); 1899*0b57cec5SDimitry Andric return true; 1900*0b57cec5SDimitry Andric } 1901*0b57cec5SDimitry Andric 1902*0b57cec5SDimitry Andric return false; 1903*0b57cec5SDimitry Andric } 1904*0b57cec5SDimitry Andric 1905*0b57cec5SDimitry Andric /// shouldScheduleLoadsNear - This is a used by the pre-regalloc scheduler to 1906*0b57cec5SDimitry Andric /// determine (in conjunction with areLoadsFromSameBasePtr) if two loads should 1907*0b57cec5SDimitry Andric /// be scheduled togther. On some targets if two loads are loading from 1908*0b57cec5SDimitry Andric /// addresses in the same cache line, it's better if they are scheduled 1909*0b57cec5SDimitry Andric /// together. This function takes two integers that represent the load offsets 1910*0b57cec5SDimitry Andric /// from the common base address. It returns true if it decides it's desirable 1911*0b57cec5SDimitry Andric /// to schedule the two loads together. 
"NumLoads" is the number of loads that 1912*0b57cec5SDimitry Andric /// have already been scheduled after Load1. 1913*0b57cec5SDimitry Andric /// 1914*0b57cec5SDimitry Andric /// FIXME: remove this in favor of the MachineInstr interface once pre-RA-sched 1915*0b57cec5SDimitry Andric /// is permanently disabled. 1916*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::shouldScheduleLoadsNear(SDNode *Load1, SDNode *Load2, 1917*0b57cec5SDimitry Andric int64_t Offset1, int64_t Offset2, 1918*0b57cec5SDimitry Andric unsigned NumLoads) const { 1919*0b57cec5SDimitry Andric // Don't worry about Thumb: just ARM and Thumb2. 1920*0b57cec5SDimitry Andric if (Subtarget.isThumb1Only()) return false; 1921*0b57cec5SDimitry Andric 1922*0b57cec5SDimitry Andric assert(Offset2 > Offset1); 1923*0b57cec5SDimitry Andric 1924*0b57cec5SDimitry Andric if ((Offset2 - Offset1) / 8 > 64) 1925*0b57cec5SDimitry Andric return false; 1926*0b57cec5SDimitry Andric 1927*0b57cec5SDimitry Andric // Check if the machine opcodes are different. If they are different 1928*0b57cec5SDimitry Andric // then we consider them to not be of the same base address, 1929*0b57cec5SDimitry Andric // EXCEPT in the case of Thumb2 byte loads where one is LDRBi8 and the other LDRBi12. 1930*0b57cec5SDimitry Andric // In this case, they are considered to be the same because they are different 1931*0b57cec5SDimitry Andric // encoding forms of the same basic instruction. 1932*0b57cec5SDimitry Andric if ((Load1->getMachineOpcode() != Load2->getMachineOpcode()) && 1933*0b57cec5SDimitry Andric !((Load1->getMachineOpcode() == ARM::t2LDRBi8 && 1934*0b57cec5SDimitry Andric Load2->getMachineOpcode() == ARM::t2LDRBi12) || 1935*0b57cec5SDimitry Andric (Load1->getMachineOpcode() == ARM::t2LDRBi12 && 1936*0b57cec5SDimitry Andric Load2->getMachineOpcode() == ARM::t2LDRBi8))) 1937*0b57cec5SDimitry Andric return false; // FIXME: overly conservative? 
1938*0b57cec5SDimitry Andric 1939*0b57cec5SDimitry Andric // Four loads in a row should be sufficient. 1940*0b57cec5SDimitry Andric if (NumLoads >= 3) 1941*0b57cec5SDimitry Andric return false; 1942*0b57cec5SDimitry Andric 1943*0b57cec5SDimitry Andric return true; 1944*0b57cec5SDimitry Andric } 1945*0b57cec5SDimitry Andric 1946*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::isSchedulingBoundary(const MachineInstr &MI, 1947*0b57cec5SDimitry Andric const MachineBasicBlock *MBB, 1948*0b57cec5SDimitry Andric const MachineFunction &MF) const { 1949*0b57cec5SDimitry Andric // Debug info is never a scheduling boundary. It's necessary to be explicit 1950*0b57cec5SDimitry Andric // due to the special treatment of IT instructions below, otherwise a 1951*0b57cec5SDimitry Andric // dbg_value followed by an IT will result in the IT instruction being 1952*0b57cec5SDimitry Andric // considered a scheduling hazard, which is wrong. It should be the actual 1953*0b57cec5SDimitry Andric // instruction preceding the dbg_value instruction(s), just like it is 1954*0b57cec5SDimitry Andric // when debug info is not present. 1955*0b57cec5SDimitry Andric if (MI.isDebugInstr()) 1956*0b57cec5SDimitry Andric return false; 1957*0b57cec5SDimitry Andric 1958*0b57cec5SDimitry Andric // Terminators and labels can't be scheduled around. 1959*0b57cec5SDimitry Andric if (MI.isTerminator() || MI.isPosition()) 1960*0b57cec5SDimitry Andric return true; 1961*0b57cec5SDimitry Andric 1962*0b57cec5SDimitry Andric // Treat the start of the IT block as a scheduling boundary, but schedule 1963*0b57cec5SDimitry Andric // t2IT along with all instructions following it. 1964*0b57cec5SDimitry Andric // FIXME: This is a big hammer. But the alternative is to add all potential 1965*0b57cec5SDimitry Andric // true and anti dependencies to IT block instructions as implicit operands 1966*0b57cec5SDimitry Andric // to the t2IT instruction. 
The added compile time and complexity does not 1967*0b57cec5SDimitry Andric // seem worth it. 1968*0b57cec5SDimitry Andric MachineBasicBlock::const_iterator I = MI; 1969*0b57cec5SDimitry Andric // Make sure to skip any debug instructions 1970*0b57cec5SDimitry Andric while (++I != MBB->end() && I->isDebugInstr()) 1971*0b57cec5SDimitry Andric ; 1972*0b57cec5SDimitry Andric if (I != MBB->end() && I->getOpcode() == ARM::t2IT) 1973*0b57cec5SDimitry Andric return true; 1974*0b57cec5SDimitry Andric 1975*0b57cec5SDimitry Andric // Don't attempt to schedule around any instruction that defines 1976*0b57cec5SDimitry Andric // a stack-oriented pointer, as it's unlikely to be profitable. This 1977*0b57cec5SDimitry Andric // saves compile time, because it doesn't require every single 1978*0b57cec5SDimitry Andric // stack slot reference to depend on the instruction that does the 1979*0b57cec5SDimitry Andric // modification. 1980*0b57cec5SDimitry Andric // Calls don't actually change the stack pointer, even if they have imp-defs. 1981*0b57cec5SDimitry Andric // No ARM calling conventions change the stack pointer. (X86 calling 1982*0b57cec5SDimitry Andric // conventions sometimes do). 
1983*0b57cec5SDimitry Andric if (!MI.isCall() && MI.definesRegister(ARM::SP)) 1984*0b57cec5SDimitry Andric return true; 1985*0b57cec5SDimitry Andric 1986*0b57cec5SDimitry Andric return false; 1987*0b57cec5SDimitry Andric } 1988*0b57cec5SDimitry Andric 1989*0b57cec5SDimitry Andric bool ARMBaseInstrInfo:: 1990*0b57cec5SDimitry Andric isProfitableToIfCvt(MachineBasicBlock &MBB, 1991*0b57cec5SDimitry Andric unsigned NumCycles, unsigned ExtraPredCycles, 1992*0b57cec5SDimitry Andric BranchProbability Probability) const { 1993*0b57cec5SDimitry Andric if (!NumCycles) 1994*0b57cec5SDimitry Andric return false; 1995*0b57cec5SDimitry Andric 1996*0b57cec5SDimitry Andric // If we are optimizing for size, see if the branch in the predecessor can be 1997*0b57cec5SDimitry Andric // lowered to cbn?z by the constant island lowering pass, and return false if 1998*0b57cec5SDimitry Andric // so. This results in a shorter instruction sequence. 1999*0b57cec5SDimitry Andric if (MBB.getParent()->getFunction().hasOptSize()) { 2000*0b57cec5SDimitry Andric MachineBasicBlock *Pred = *MBB.pred_begin(); 2001*0b57cec5SDimitry Andric if (!Pred->empty()) { 2002*0b57cec5SDimitry Andric MachineInstr *LastMI = &*Pred->rbegin(); 2003*0b57cec5SDimitry Andric if (LastMI->getOpcode() == ARM::t2Bcc) { 2004*0b57cec5SDimitry Andric const TargetRegisterInfo *TRI = &getRegisterInfo(); 2005*0b57cec5SDimitry Andric MachineInstr *CmpMI = findCMPToFoldIntoCBZ(LastMI, TRI); 2006*0b57cec5SDimitry Andric if (CmpMI) 2007*0b57cec5SDimitry Andric return false; 2008*0b57cec5SDimitry Andric } 2009*0b57cec5SDimitry Andric } 2010*0b57cec5SDimitry Andric } 2011*0b57cec5SDimitry Andric return isProfitableToIfCvt(MBB, NumCycles, ExtraPredCycles, 2012*0b57cec5SDimitry Andric MBB, 0, 0, Probability); 2013*0b57cec5SDimitry Andric } 2014*0b57cec5SDimitry Andric 2015*0b57cec5SDimitry Andric bool ARMBaseInstrInfo:: 2016*0b57cec5SDimitry Andric isProfitableToIfCvt(MachineBasicBlock &TBB, 2017*0b57cec5SDimitry Andric unsigned TCycles, 
unsigned TExtra, 2018*0b57cec5SDimitry Andric MachineBasicBlock &FBB, 2019*0b57cec5SDimitry Andric unsigned FCycles, unsigned FExtra, 2020*0b57cec5SDimitry Andric BranchProbability Probability) const { 2021*0b57cec5SDimitry Andric if (!TCycles) 2022*0b57cec5SDimitry Andric return false; 2023*0b57cec5SDimitry Andric 2024*0b57cec5SDimitry Andric // In thumb code we often end up trading one branch for a IT block, and 2025*0b57cec5SDimitry Andric // if we are cloning the instruction can increase code size. Prevent 2026*0b57cec5SDimitry Andric // blocks with multiple predecesors from being ifcvted to prevent this 2027*0b57cec5SDimitry Andric // cloning. 2028*0b57cec5SDimitry Andric if (Subtarget.isThumb2() && TBB.getParent()->getFunction().hasMinSize()) { 2029*0b57cec5SDimitry Andric if (TBB.pred_size() != 1 || FBB.pred_size() != 1) 2030*0b57cec5SDimitry Andric return false; 2031*0b57cec5SDimitry Andric } 2032*0b57cec5SDimitry Andric 2033*0b57cec5SDimitry Andric // Attempt to estimate the relative costs of predication versus branching. 2034*0b57cec5SDimitry Andric // Here we scale up each component of UnpredCost to avoid precision issue when 2035*0b57cec5SDimitry Andric // scaling TCycles/FCycles by Probability. 2036*0b57cec5SDimitry Andric const unsigned ScalingUpFactor = 1024; 2037*0b57cec5SDimitry Andric 2038*0b57cec5SDimitry Andric unsigned PredCost = (TCycles + FCycles + TExtra + FExtra) * ScalingUpFactor; 2039*0b57cec5SDimitry Andric unsigned UnpredCost; 2040*0b57cec5SDimitry Andric if (!Subtarget.hasBranchPredictor()) { 2041*0b57cec5SDimitry Andric // When we don't have a branch predictor it's always cheaper to not take a 2042*0b57cec5SDimitry Andric // branch than take it, so we have to take that into account. 
2043*0b57cec5SDimitry Andric unsigned NotTakenBranchCost = 1; 2044*0b57cec5SDimitry Andric unsigned TakenBranchCost = Subtarget.getMispredictionPenalty(); 2045*0b57cec5SDimitry Andric unsigned TUnpredCycles, FUnpredCycles; 2046*0b57cec5SDimitry Andric if (!FCycles) { 2047*0b57cec5SDimitry Andric // Triangle: TBB is the fallthrough 2048*0b57cec5SDimitry Andric TUnpredCycles = TCycles + NotTakenBranchCost; 2049*0b57cec5SDimitry Andric FUnpredCycles = TakenBranchCost; 2050*0b57cec5SDimitry Andric } else { 2051*0b57cec5SDimitry Andric // Diamond: TBB is the block that is branched to, FBB is the fallthrough 2052*0b57cec5SDimitry Andric TUnpredCycles = TCycles + TakenBranchCost; 2053*0b57cec5SDimitry Andric FUnpredCycles = FCycles + NotTakenBranchCost; 2054*0b57cec5SDimitry Andric // The branch at the end of FBB will disappear when it's predicated, so 2055*0b57cec5SDimitry Andric // discount it from PredCost. 2056*0b57cec5SDimitry Andric PredCost -= 1 * ScalingUpFactor; 2057*0b57cec5SDimitry Andric } 2058*0b57cec5SDimitry Andric // The total cost is the cost of each path scaled by their probabilites 2059*0b57cec5SDimitry Andric unsigned TUnpredCost = Probability.scale(TUnpredCycles * ScalingUpFactor); 2060*0b57cec5SDimitry Andric unsigned FUnpredCost = Probability.getCompl().scale(FUnpredCycles * ScalingUpFactor); 2061*0b57cec5SDimitry Andric UnpredCost = TUnpredCost + FUnpredCost; 2062*0b57cec5SDimitry Andric // When predicating assume that the first IT can be folded away but later 2063*0b57cec5SDimitry Andric // ones cost one cycle each 2064*0b57cec5SDimitry Andric if (Subtarget.isThumb2() && TCycles + FCycles > 4) { 2065*0b57cec5SDimitry Andric PredCost += ((TCycles + FCycles - 4) / 4) * ScalingUpFactor; 2066*0b57cec5SDimitry Andric } 2067*0b57cec5SDimitry Andric } else { 2068*0b57cec5SDimitry Andric unsigned TUnpredCost = Probability.scale(TCycles * ScalingUpFactor); 2069*0b57cec5SDimitry Andric unsigned FUnpredCost = 2070*0b57cec5SDimitry Andric 
Probability.getCompl().scale(FCycles * ScalingUpFactor); 2071*0b57cec5SDimitry Andric UnpredCost = TUnpredCost + FUnpredCost; 2072*0b57cec5SDimitry Andric UnpredCost += 1 * ScalingUpFactor; // The branch itself 2073*0b57cec5SDimitry Andric UnpredCost += Subtarget.getMispredictionPenalty() * ScalingUpFactor / 10; 2074*0b57cec5SDimitry Andric } 2075*0b57cec5SDimitry Andric 2076*0b57cec5SDimitry Andric return PredCost <= UnpredCost; 2077*0b57cec5SDimitry Andric } 2078*0b57cec5SDimitry Andric 2079*0b57cec5SDimitry Andric bool 2080*0b57cec5SDimitry Andric ARMBaseInstrInfo::isProfitableToUnpredicate(MachineBasicBlock &TMBB, 2081*0b57cec5SDimitry Andric MachineBasicBlock &FMBB) const { 2082*0b57cec5SDimitry Andric // Reduce false anti-dependencies to let the target's out-of-order execution 2083*0b57cec5SDimitry Andric // engine do its thing. 2084*0b57cec5SDimitry Andric return Subtarget.isProfitableToUnpredicate(); 2085*0b57cec5SDimitry Andric } 2086*0b57cec5SDimitry Andric 2087*0b57cec5SDimitry Andric /// getInstrPredicate - If instruction is predicated, returns its predicate 2088*0b57cec5SDimitry Andric /// condition, otherwise returns AL. It also returns the condition code 2089*0b57cec5SDimitry Andric /// register by reference. 
2090*0b57cec5SDimitry Andric ARMCC::CondCodes llvm::getInstrPredicate(const MachineInstr &MI, 2091*0b57cec5SDimitry Andric unsigned &PredReg) { 2092*0b57cec5SDimitry Andric int PIdx = MI.findFirstPredOperandIdx(); 2093*0b57cec5SDimitry Andric if (PIdx == -1) { 2094*0b57cec5SDimitry Andric PredReg = 0; 2095*0b57cec5SDimitry Andric return ARMCC::AL; 2096*0b57cec5SDimitry Andric } 2097*0b57cec5SDimitry Andric 2098*0b57cec5SDimitry Andric PredReg = MI.getOperand(PIdx+1).getReg(); 2099*0b57cec5SDimitry Andric return (ARMCC::CondCodes)MI.getOperand(PIdx).getImm(); 2100*0b57cec5SDimitry Andric } 2101*0b57cec5SDimitry Andric 2102*0b57cec5SDimitry Andric unsigned llvm::getMatchingCondBranchOpcode(unsigned Opc) { 2103*0b57cec5SDimitry Andric if (Opc == ARM::B) 2104*0b57cec5SDimitry Andric return ARM::Bcc; 2105*0b57cec5SDimitry Andric if (Opc == ARM::tB) 2106*0b57cec5SDimitry Andric return ARM::tBcc; 2107*0b57cec5SDimitry Andric if (Opc == ARM::t2B) 2108*0b57cec5SDimitry Andric return ARM::t2Bcc; 2109*0b57cec5SDimitry Andric 2110*0b57cec5SDimitry Andric llvm_unreachable("Unknown unconditional branch opcode!"); 2111*0b57cec5SDimitry Andric } 2112*0b57cec5SDimitry Andric 2113*0b57cec5SDimitry Andric MachineInstr *ARMBaseInstrInfo::commuteInstructionImpl(MachineInstr &MI, 2114*0b57cec5SDimitry Andric bool NewMI, 2115*0b57cec5SDimitry Andric unsigned OpIdx1, 2116*0b57cec5SDimitry Andric unsigned OpIdx2) const { 2117*0b57cec5SDimitry Andric switch (MI.getOpcode()) { 2118*0b57cec5SDimitry Andric case ARM::MOVCCr: 2119*0b57cec5SDimitry Andric case ARM::t2MOVCCr: { 2120*0b57cec5SDimitry Andric // MOVCC can be commuted by inverting the condition. 2121*0b57cec5SDimitry Andric unsigned PredReg = 0; 2122*0b57cec5SDimitry Andric ARMCC::CondCodes CC = getInstrPredicate(MI, PredReg); 2123*0b57cec5SDimitry Andric // MOVCC AL can't be inverted. Shouldn't happen. 
// (continuation of ARMBaseInstrInfo::commuteInstructionImpl — MOVCC case)
    // A MOVCC with an always-true condition, or with a predicate register
    // other than CPSR, is not handled here.
    if (CC == ARMCC::AL || PredReg != ARM::CPSR)
      return nullptr;
    MachineInstr *CommutedMI =
        TargetInstrInfo::commuteInstructionImpl(MI, NewMI, OpIdx1, OpIdx2);
    if (!CommutedMI)
      return nullptr;
    // After swapping the MOVCC operands, also invert the condition.
    CommutedMI->getOperand(CommutedMI->findFirstPredOperandIdx())
        .setImm(ARMCC::getOppositeCondition(CC));
    return CommutedMI;
  }
  }
  return TargetInstrInfo::commuteInstructionImpl(MI, NewMI, OpIdx1, OpIdx2);
}

/// Identify instructions that can be folded into a MOVCC instruction, and
/// return the defining instruction.
MachineInstr *
ARMBaseInstrInfo::canFoldIntoMOVCC(unsigned Reg, const MachineRegisterInfo &MRI,
                                   const TargetInstrInfo *TII) const {
  // Only a virtual register with a single non-debug use can be folded:
  // folding removes the def, so nothing else may depend on it.
  if (!TargetRegisterInfo::isVirtualRegister(Reg))
    return nullptr;
  if (!MRI.hasOneNonDBGUse(Reg))
    return nullptr;
  MachineInstr *MI = MRI.getVRegDef(Reg);
  if (!MI)
    return nullptr;
  // Check if MI can be predicated and folded into the MOVCC.
  if (!isPredicable(*MI))
    return nullptr;
  // Check if MI has any non-dead defs or physreg uses. This also detects
  // predicated instructions which will be reading CPSR.
  for (unsigned i = 1, e = MI->getNumOperands(); i != e; ++i) {
    const MachineOperand &MO = MI->getOperand(i);
    // Reject frame index operands, PEI can't handle the predicated pseudos.
    if (MO.isFI() || MO.isCPI() || MO.isJTI())
      return nullptr;
    if (!MO.isReg())
      continue;
    // MI can't have any tied operands, that would conflict with predication.
    if (MO.isTied())
      return nullptr;
    if (TargetRegisterInfo::isPhysicalRegister(MO.getReg()))
      return nullptr;
    if (MO.isDef() && !MO.isDead())
      return nullptr;
  }
  // The def will be moved to the MOVCC's position, so it must be safe to
  // sink past intervening instructions (conservatively: no stores crossed).
  bool DontMoveAcrossStores = true;
  if (!MI->isSafeToMove(/* AliasAnalysis = */ nullptr, DontMoveAcrossStores))
    return nullptr;
  return MI;
}

/// Describe a MOVCC/t2MOVCCr select: operand 1 is the true value, operand 2
/// the false value, and operands 3/4 are the condition code and CPSR use,
/// which are pushed onto Cond. Always reports the select as analyzable
/// (returns false) and optimizable.
bool ARMBaseInstrInfo::analyzeSelect(const MachineInstr &MI,
                                     SmallVectorImpl<MachineOperand> &Cond,
                                     unsigned &TrueOp, unsigned &FalseOp,
                                     bool &Optimizable) const {
  assert((MI.getOpcode() == ARM::MOVCCr || MI.getOpcode() == ARM::t2MOVCCr) &&
         "Unknown select instruction");
  // MOVCC operands:
  // 0: Def.
  // 1: True use.
  // 2: False use.
  // 3: Condition code.
  // 4: CPSR use.
  TrueOp = 1;
  FalseOp = 2;
  Cond.push_back(MI.getOperand(3));
  Cond.push_back(MI.getOperand(4));
  // We can always fold a def.
  Optimizable = true;
  return false;
}

/// Replace a MOVCC by a predicated copy of the instruction defining one of
/// its inputs. Tries the false-value def first, then the true-value def with
/// the condition inverted. Returns the new predicated instruction, or nullptr
/// if neither def can be folded. The caller erases MI; DefMI is erased here.
MachineInstr *
ARMBaseInstrInfo::optimizeSelect(MachineInstr &MI,
                                 SmallPtrSetImpl<MachineInstr *> &SeenMIs,
                                 bool PreferFalse) const {
  assert((MI.getOpcode() == ARM::MOVCCr || MI.getOpcode() == ARM::t2MOVCCr) &&
         "Unknown select instruction");
  MachineRegisterInfo &MRI = MI.getParent()->getParent()->getRegInfo();
  MachineInstr *DefMI = canFoldIntoMOVCC(MI.getOperand(2).getReg(), MRI, this);
  bool Invert = !DefMI;
  if (!DefMI)
    DefMI = canFoldIntoMOVCC(MI.getOperand(1).getReg(), MRI, this);
  if (!DefMI)
    return nullptr;

  // Find new register class to use.
  MachineOperand FalseReg = MI.getOperand(Invert ? 2 : 1);
  unsigned DestReg = MI.getOperand(0).getReg();
  const TargetRegisterClass *PreviousClass = MRI.getRegClass(FalseReg.getReg());
  if (!MRI.constrainRegClass(DestReg, PreviousClass))
    return nullptr;

  // Create a new predicated version of DefMI.
  // Rfalse is the first use.
  MachineInstrBuilder NewMI =
      BuildMI(*MI.getParent(), MI, MI.getDebugLoc(), DefMI->getDesc(), DestReg);

  // Copy all the DefMI operands, excluding its (null) predicate.
  const MCInstrDesc &DefDesc = DefMI->getDesc();
  for (unsigned i = 1, e = DefDesc.getNumOperands();
       i != e && !DefDesc.OpInfo[i].isPredicate(); ++i)
    NewMI.add(DefMI->getOperand(i));

  unsigned CondCode = MI.getOperand(3).getImm();
  if (Invert)
    NewMI.addImm(ARMCC::getOppositeCondition(ARMCC::CondCodes(CondCode)));
  else
    NewMI.addImm(CondCode);
  NewMI.add(MI.getOperand(4));

  // DefMI is not the -S version that sets CPSR, so add an optional %noreg.
  if (NewMI->hasOptionalDef())
    NewMI.add(condCodeOp());

  // The output register value when the predicate is false is an implicit
  // register operand tied to the first def.
  // The tie makes the register allocator ensure the FalseReg is allocated the
  // same register as operand 0.
  FalseReg.setImplicit();
  NewMI.add(FalseReg);
  NewMI->tieOperands(0, NewMI->getNumOperands() - 1);

  // Update SeenMIs set: register newly created MI and erase removed DefMI.
  SeenMIs.insert(NewMI);
  SeenMIs.erase(DefMI);

  // If MI is inside a loop, and DefMI is outside the loop, then kill flags on
  // DefMI would be invalid when tranferred inside the loop. Checking for a
  // loop is expensive, but at least remove kill flags if they are in different
  // BBs.
  if (DefMI->getParent() != MI.getParent())
    NewMI->clearKillInfo();

  // The caller will erase MI, but not DefMI.
  DefMI->eraseFromParent();
  return NewMI;
}

/// Map pseudo instructions that imply an 'S' bit onto real opcodes. Whether the
/// instruction is encoded with an 'S' bit is determined by the optional CPSR
/// def operand.
///
/// This will go away once we can teach tblgen how to set the optional CPSR def
/// operand itself.
// Pairs a flag-setting pseudo opcode with the real machine opcode it lowers
// to (see doc comment above).
struct AddSubFlagsOpcodePair {
  uint16_t PseudoOpc;
  uint16_t MachineOpc;
};

static const AddSubFlagsOpcodePair AddSubFlagsOpcodeMap[] = {
    {ARM::ADDSri, ARM::ADDri},
    {ARM::ADDSrr, ARM::ADDrr},
    {ARM::ADDSrsi, ARM::ADDrsi},
    {ARM::ADDSrsr, ARM::ADDrsr},

    {ARM::SUBSri, ARM::SUBri},
    {ARM::SUBSrr, ARM::SUBrr},
    {ARM::SUBSrsi, ARM::SUBrsi},
    {ARM::SUBSrsr, ARM::SUBrsr},

    {ARM::RSBSri, ARM::RSBri},
    {ARM::RSBSrsi, ARM::RSBrsi},
    {ARM::RSBSrsr, ARM::RSBrsr},

    {ARM::tADDSi3, ARM::tADDi3},
    {ARM::tADDSi8, ARM::tADDi8},
    {ARM::tADDSrr, ARM::tADDrr},
    {ARM::tADCS, ARM::tADC},

    {ARM::tSUBSi3, ARM::tSUBi3},
    {ARM::tSUBSi8, ARM::tSUBi8},
    {ARM::tSUBSrr, ARM::tSUBrr},
    {ARM::tSBCS, ARM::tSBC},
    {ARM::tRSBS, ARM::tRSB},

    {ARM::t2ADDSri, ARM::t2ADDri},
    {ARM::t2ADDSrr, ARM::t2ADDrr},
    {ARM::t2ADDSrs, ARM::t2ADDrs},

    {ARM::t2SUBSri, ARM::t2SUBri},
    {ARM::t2SUBSrr, ARM::t2SUBrr},
    {ARM::t2SUBSrs, ARM::t2SUBrs},

    {ARM::t2RSBSri, ARM::t2RSBri},
    {ARM::t2RSBSrs, ARM::t2RSBrs},
};

/// Translate a flag-setting add/sub pseudo opcode to its real machine opcode
/// via AddSubFlagsOpcodeMap. Returns 0 if OldOpc is not one of the pseudos.
unsigned llvm::convertAddSubFlagsOpcode(unsigned OldOpc) {
  for (unsigned i = 0, e = array_lengthof(AddSubFlagsOpcodeMap); i != e; ++i)
    if (OldOpc == AddSubFlagsOpcodeMap[i].PseudoOpc)
      return AddSubFlagsOpcodeMap[i].MachineOpc;
  return 0;
}

/// Emit instructions (before MBBI) computing DestReg = BaseReg + NumBytes,
/// predicated on Pred/PredReg. The immediate is split into chunks that each
/// fit an ARM so_imm (rotated 8-bit) field, emitting one ADDri/SUBri per
/// chunk; a zero adjustment with distinct registers becomes a plain MOVr.
void llvm::emitARMRegPlusImmediate(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator &MBBI,
                                   const DebugLoc &dl, unsigned DestReg,
                                   unsigned BaseReg, int NumBytes,
                                   ARMCC::CondCodes Pred, unsigned PredReg,
                                   const ARMBaseInstrInfo &TII,
                                   unsigned MIFlags) {
  if (NumBytes == 0 && DestReg != BaseReg) {
    BuildMI(MBB, MBBI, dl, TII.get(ARM::MOVr), DestReg)
        .addReg(BaseReg, RegState::Kill)
        .add(predOps(Pred, PredReg))
        .add(condCodeOp())
        .setMIFlags(MIFlags);
    return;
  }

  bool isSub = NumBytes < 0;
  if (isSub) NumBytes = -NumBytes;

  while (NumBytes) {
    // Peel off the largest chunk of NumBytes expressible as a rotated
    // 8-bit immediate.
    unsigned RotAmt = ARM_AM::getSOImmValRotate(NumBytes);
    unsigned ThisVal = NumBytes & ARM_AM::rotr32(0xFF, RotAmt);
    assert(ThisVal && "Didn't extract field correctly");

    // We will handle these bits from offset, clear them.
    NumBytes &= ~ThisVal;

    assert(ARM_AM::getSOImmVal(ThisVal) != -1 && "Bit extraction didn't work?");

    // Build the new ADD / SUB.
    unsigned Opc = isSub ? ARM::SUBri : ARM::ADDri;
    BuildMI(MBB, MBBI, dl, TII.get(Opc), DestReg)
        .addReg(BaseReg, RegState::Kill)
        .addImm(ThisVal)
        .add(predOps(Pred, PredReg))
        .add(condCodeOp())
        .setMIFlags(MIFlags);
    // Subsequent chunks accumulate on top of the partial result.
    BaseReg = DestReg;
  }
}

/// Try to absorb an SP adjustment of NumBytes into the push/pop instruction
/// MI by widening its register list (only worthwhile at minsize). Returns
/// true and rewrites MI's register list on success.
bool llvm::tryFoldSPUpdateIntoPushPop(const ARMSubtarget &Subtarget,
                                      MachineFunction &MF, MachineInstr *MI,
                                      unsigned NumBytes) {
  // This optimisation potentially adds lots of load and store
  // micro-operations, it's only really a great benefit to code-size.
  if (!Subtarget.hasMinSize())
    return false;

  // If only one register is pushed/popped, LLVM can use an LDR/STR
  // instead. We can't modify those so make sure we're dealing with an
  // instruction we understand.
// (continuation of llvm::tryFoldSPUpdateIntoPushPop)
  bool IsPop = isPopOpcode(MI->getOpcode());
  bool IsPush = isPushOpcode(MI->getOpcode());
  if (!IsPush && !IsPop)
    return false;

  bool IsVFPPushPop = MI->getOpcode() == ARM::VSTMDDB_UPD ||
                      MI->getOpcode() == ARM::VLDMDIA_UPD;
  bool IsT1PushPop = MI->getOpcode() == ARM::tPUSH ||
                     MI->getOpcode() == ARM::tPOP ||
                     MI->getOpcode() == ARM::tPOP_RET;

  assert((IsT1PushPop || (MI->getOperand(0).getReg() == ARM::SP &&
                          MI->getOperand(1).getReg() == ARM::SP)) &&
         "trying to fold sp update into non-sp-updating push/pop");

  // The VFP push & pop act on D-registers, so we can only fold an adjustment
  // by a multiple of 8 bytes in correctly. Similarly rN is 4-bytes. Don't try
  // if this is violated.
  if (NumBytes % (IsVFPPushPop ? 8 : 4) != 0)
    return false;

  // ARM and Thumb2 push/pop insts have explicit "sp, sp" operands (+
  // pred) so the list starts at 4. Thumb1 starts after the predicate.
  int RegListIdx = IsT1PushPop ? 2 : 4;

  // Calculate the space we'll need in terms of registers.
  unsigned RegsNeeded;
  const TargetRegisterClass *RegClass;
  if (IsVFPPushPop) {
    RegsNeeded = NumBytes / 8;
    RegClass = &ARM::DPRRegClass;
  } else {
    RegsNeeded = NumBytes / 4;
    RegClass = &ARM::GPRRegClass;
  }

  // We're going to have to strip all list operands off before
  // re-adding them since the order matters, so save the existing ones
  // for later.
  SmallVector<MachineOperand, 4> RegList;

  // We're also going to need the first register transferred by this
  // instruction, which won't necessarily be the first register in the list.
  // Initialized to -1 (UINT_MAX) so any real encoding value is smaller.
  unsigned FirstRegEnc = -1;

  const TargetRegisterInfo *TRI = MF.getRegInfo().getTargetRegisterInfo();
  for (int i = MI->getNumOperands() - 1; i >= RegListIdx; --i) {
    MachineOperand &MO = MI->getOperand(i);
    RegList.push_back(MO);

    if (MO.isReg() && TRI->getEncodingValue(MO.getReg()) < FirstRegEnc)
      FirstRegEnc = TRI->getEncodingValue(MO.getReg());
  }

  const MCPhysReg *CSRegs = TRI->getCalleeSavedRegs(&MF);

  // Now try to find enough space in the reglist to allocate NumBytes.
  for (int CurRegEnc = FirstRegEnc - 1; CurRegEnc >= 0 && RegsNeeded;
       --CurRegEnc) {
    unsigned CurReg = RegClass->getRegister(CurRegEnc);
    // Thumb1 push/pop can only encode R0-R7 in the register list.
    if (IsT1PushPop && CurReg > ARM::R7)
      continue;
    if (!IsPop) {
      // Pushing any register is completely harmless, mark the register involved
      // as undef since we don't care about its value and must not restore it
      // during stack unwinding.
      RegList.push_back(MachineOperand::CreateReg(CurReg, false, false,
                                                  false, false, true));
      --RegsNeeded;
      continue;
    }

    // However, we can only pop an extra register if it's not live. For
    // registers live within the function we might clobber a return value
    // register; the other way a register can be live here is if it's
    // callee-saved.
    if (isCalleeSavedRegister(CurReg, CSRegs) ||
        MI->getParent()->computeRegisterLiveness(TRI, CurReg, MI) !=
            MachineBasicBlock::LQR_Dead) {
      // VFP pops don't allow holes in the register list, so any skip is fatal
      // for our transformation. GPR pops do, so we should just keep looking.
      if (IsVFPPushPop)
        return false;
      else
        continue;
    }

    // Mark the unimportant registers as <def,dead> in the POP.
    RegList.push_back(MachineOperand::CreateReg(CurReg, true, false, false,
                                                true));
    --RegsNeeded;
  }

  if (RegsNeeded > 0)
    return false;

  // Finally we know we can profitably perform the optimisation so go
  // ahead: strip all existing registers off and add them back again
  // in the right order.
  for (int i = MI->getNumOperands() - 1; i >= RegListIdx; --i)
    MI->RemoveOperand(i);

  // Add the complete list back in.
  // RegList was collected in reverse, so iterate it backwards to restore
  // the original operand order.
  MachineInstrBuilder MIB(MF, &*MI);
  for (int i = RegList.size() - 1; i >= 0; --i)
    MIB.add(RegList[i]);

  return true;
}

/// Rewrite MI's frame-index operand (at FrameRegIdx) to be relative to
/// FrameReg, folding as much of Offset as the instruction's addressing mode
/// can encode. On return, Offset holds any residual that could not be folded;
/// returns true iff the offset was folded completely.
bool llvm::rewriteARMFrameIndex(MachineInstr &MI, unsigned FrameRegIdx,
                                unsigned FrameReg, int &Offset,
                                const ARMBaseInstrInfo &TII) {
  unsigned Opcode = MI.getOpcode();
  const MCInstrDesc &Desc = MI.getDesc();
  unsigned AddrMode = (Desc.TSFlags & ARMII::AddrModeMask);
  bool isSub = false;

  // Memory operands in inline assembly always use AddrMode2.
// (continuation of llvm::rewriteARMFrameIndex)
  if (Opcode == ARM::INLINEASM || Opcode == ARM::INLINEASM_BR)
    AddrMode = ARMII::AddrMode2;

  if (Opcode == ARM::ADDri) {
    Offset += MI.getOperand(FrameRegIdx+1).getImm();
    if (Offset == 0) {
      // Turn it into a move.
      MI.setDesc(TII.get(ARM::MOVr));
      MI.getOperand(FrameRegIdx).ChangeToRegister(FrameReg, false);
      MI.RemoveOperand(FrameRegIdx+1);
      Offset = 0;
      return true;
    } else if (Offset < 0) {
      // Negative offset: flip to SUBri and work with the magnitude.
      Offset = -Offset;
      isSub = true;
      MI.setDesc(TII.get(ARM::SUBri));
    }

    // Common case: small offset, fits into instruction.
    if (ARM_AM::getSOImmVal(Offset) != -1) {
      // Replace the FrameIndex with sp / fp
      MI.getOperand(FrameRegIdx).ChangeToRegister(FrameReg, false);
      MI.getOperand(FrameRegIdx+1).ChangeToImmediate(Offset);
      Offset = 0;
      return true;
    }

    // Otherwise, pull as much of the immedidate into this ADDri/SUBri
    // as possible.
    unsigned RotAmt = ARM_AM::getSOImmValRotate(Offset);
    unsigned ThisImmVal = Offset & ARM_AM::rotr32(0xFF, RotAmt);

    // We will handle these bits from offset, clear them.
    Offset &= ~ThisImmVal;

    // Get the properly encoded SOImmVal field.
    assert(ARM_AM::getSOImmVal(ThisImmVal) != -1 &&
           "Bit extraction didn't work?");
    MI.getOperand(FrameRegIdx+1).ChangeToImmediate(ThisImmVal);
  } else {
    // Memory access: decode the addressing mode to find where the immediate
    // lives (ImmIdx), its current value (InstrOffs), and how many bits /
    // what scale the encoding allows.
    unsigned ImmIdx = 0;
    int InstrOffs = 0;
    unsigned NumBits = 0;
    unsigned Scale = 1;
    switch (AddrMode) {
    case ARMII::AddrMode_i12:
      ImmIdx = FrameRegIdx + 1;
      InstrOffs = MI.getOperand(ImmIdx).getImm();
      NumBits = 12;
      break;
    case ARMII::AddrMode2:
      ImmIdx = FrameRegIdx+2;
      InstrOffs = ARM_AM::getAM2Offset(MI.getOperand(ImmIdx).getImm());
      if (ARM_AM::getAM2Op(MI.getOperand(ImmIdx).getImm()) == ARM_AM::sub)
        InstrOffs *= -1;
      NumBits = 12;
      break;
    case ARMII::AddrMode3:
      ImmIdx = FrameRegIdx+2;
      InstrOffs = ARM_AM::getAM3Offset(MI.getOperand(ImmIdx).getImm());
      if (ARM_AM::getAM3Op(MI.getOperand(ImmIdx).getImm()) == ARM_AM::sub)
        InstrOffs *= -1;
      NumBits = 8;
      break;
    case ARMII::AddrMode4:
    case ARMII::AddrMode6:
      // Can't fold any offset even if it's zero.
      return false;
    case ARMII::AddrMode5:
      ImmIdx = FrameRegIdx+1;
      InstrOffs = ARM_AM::getAM5Offset(MI.getOperand(ImmIdx).getImm());
      if (ARM_AM::getAM5Op(MI.getOperand(ImmIdx).getImm()) == ARM_AM::sub)
        InstrOffs *= -1;
      NumBits = 8;
      Scale = 4;
      break;
    case ARMII::AddrMode5FP16:
      ImmIdx = FrameRegIdx+1;
      InstrOffs = ARM_AM::getAM5Offset(MI.getOperand(ImmIdx).getImm());
      if (ARM_AM::getAM5Op(MI.getOperand(ImmIdx).getImm()) == ARM_AM::sub)
        InstrOffs *= -1;
      NumBits = 8;
      Scale = 2;
      break;
    case ARMII::AddrModeT2_i7:
    case ARMII::AddrModeT2_i7s2:
    case ARMII::AddrModeT2_i7s4:
      ImmIdx = FrameRegIdx+1;
      InstrOffs = MI.getOperand(ImmIdx).getImm();
      NumBits = 7;
      Scale = (AddrMode == ARMII::AddrModeT2_i7s2 ? 2 :
               AddrMode == ARMII::AddrModeT2_i7s4 ? 4 : 1);
      break;
    default:
      llvm_unreachable("Unsupported addressing mode!");
    }

    Offset += InstrOffs * Scale;
    assert((Offset & (Scale-1)) == 0 && "Can't encode this offset!");
    if (Offset < 0) {
      Offset = -Offset;
      isSub = true;
    }

    // Attempt to fold address comp. if opcode has offset bits
    if (NumBits > 0) {
      // Common case: small offset, fits into instruction.
      MachineOperand &ImmOp = MI.getOperand(ImmIdx);
      int ImmedOffset = Offset / Scale;
      unsigned Mask = (1 << NumBits) - 1;
      if ((unsigned)Offset <= Mask * Scale) {
        // Replace the FrameIndex with sp
        MI.getOperand(FrameRegIdx).ChangeToRegister(FrameReg, false);
        // FIXME: When addrmode2 goes away, this will simplify (like the
        // T2 version), as the LDR.i12 versions don't need the encoding
        // tricks for the offset value.
        if (isSub) {
          if (AddrMode == ARMII::AddrMode_i12)
            ImmedOffset = -ImmedOffset;
          else
            ImmedOffset |= 1 << NumBits;
        }
        ImmOp.ChangeToImmediate(ImmedOffset);
        Offset = 0;
        return true;
      }

      // Otherwise, it didn't fit. Pull in what we can to simplify the immed.
      ImmedOffset = ImmedOffset & Mask;
      if (isSub) {
        if (AddrMode == ARMII::AddrMode_i12)
          ImmedOffset = -ImmedOffset;
        else
          ImmedOffset |= 1 << NumBits;
      }
      ImmOp.ChangeToImmediate(ImmedOffset);
      Offset &= ~(Mask*Scale);
    }
  }

  // Report back any residual (re-signed) offset; folded fully iff zero.
  Offset = (isSub) ? -Offset : Offset;
  return Offset == 0;
}

/// analyzeCompare - For a comparison instruction, return the source registers
/// in SrcReg and SrcReg2 if having two register operands, and the value it
/// compares against in CmpValue. Return true if the comparison instruction
/// can be analyzed.
bool ARMBaseInstrInfo::analyzeCompare(const MachineInstr &MI, unsigned &SrcReg,
                                      unsigned &SrcReg2, int &CmpMask,
                                      int &CmpValue) const {
  switch (MI.getOpcode()) {
  default: break;
  case ARM::CMPri:
  case ARM::t2CMPri:
  case ARM::tCMPi8:
    // Register-immediate compare: SrcReg2 unused, full mask.
    SrcReg = MI.getOperand(0).getReg();
    SrcReg2 = 0;
    CmpMask = ~0;
    CmpValue = MI.getOperand(1).getImm();
    return true;
  case ARM::CMPrr:
  case ARM::t2CMPrr:
  case ARM::tCMPr:
    // Register-register compare: no immediate, full mask.
    SrcReg = MI.getOperand(0).getReg();
    SrcReg2 = MI.getOperand(1).getReg();
    CmpMask = ~0;
    CmpValue = 0;
    return true;
  case ARM::TSTri:
  case ARM::t2TSTri:
    // TST: the immediate acts as a bit mask rather than a compared value.
    SrcReg = MI.getOperand(0).getReg();
    SrcReg2 = 0;
    CmpMask = MI.getOperand(1).getImm();
    CmpValue = 0;
    return true;
  }

  return false;
}

/// isSuitableForMask - Identify a suitable 'and' instruction that
/// operates on the given source register and applies the same mask
/// as a 'tst' instruction. Provide a limited look-through for copies.
/// When successful, MI will hold the found instruction.
static bool isSuitableForMask(MachineInstr *&MI, unsigned SrcReg,
                              int CmpMask, bool CommonUse) {
  switch (MI->getOpcode()) {
    case ARM::ANDri:
    case ARM::t2ANDri:
      if (CmpMask != MI->getOperand(2).getImm())
        return false;
      // When CommonUse is set, match SrcReg against the AND's input
      // (operand 1) instead of its result (operand 0).
      if (SrcReg == MI->getOperand(CommonUse ? 1 : 0).getReg())
        return true;
      break;
  }

  return false;
}

/// getSwappedCondition - assume the flags are set by MI(a,b), return
/// the condition code if we modify the instructions such that flags are
/// set by MI(b,a).
inline static ARMCC::CondCodes getSwappedCondition(ARMCC::CondCodes CC) {
  switch (CC) {
  default: return ARMCC::AL;
  case ARMCC::EQ: return ARMCC::EQ;
  case ARMCC::NE: return ARMCC::NE;
  case ARMCC::HS: return ARMCC::LS;
  case ARMCC::LO: return ARMCC::HI;
  case ARMCC::HI: return ARMCC::LO;
  case ARMCC::LS: return ARMCC::HS;
  case ARMCC::GE: return ARMCC::LE;
  case ARMCC::LT: return ARMCC::GT;
  case ARMCC::GT: return ARMCC::LT;
  case ARMCC::LE: return ARMCC::GE;
  }
}

/// getCmpToAddCondition - assume the flags are set by CMP(a,b), return
/// the condition code if we modify the instructions such that flags are
/// set by ADD(a,b,X).
inline static ARMCC::CondCodes getCmpToAddCondition(ARMCC::CondCodes CC) {
  switch (CC) {
  default: return ARMCC::AL;
  case ARMCC::HS: return ARMCC::LO;
  case ARMCC::LO: return ARMCC::HS;
  case ARMCC::VS: return ARMCC::VS;
  case ARMCC::VC: return ARMCC::VC;
  }
}

/// isRedundantFlagInstr - check whether the first instruction, whose only
/// purpose is to update flags, can be made redundant.
/// CMPrr can be made redundant by SUBrr if the operands are the same.
/// CMPri can be made redundant by SUBri if the operands are the same.
/// CMPrr(r0, r1) can be made redundant by ADDr[ri](r0, r1, X).
/// This function can be extended later on.
/// Sets IsThumb1 to tell the caller which instruction family matched.
inline static bool isRedundantFlagInstr(const MachineInstr *CmpI,
                                        unsigned SrcReg, unsigned SrcReg2,
                                        int ImmValue, const MachineInstr *OI,
                                        bool &IsThumb1) {
  // CMPrr vs SUBrr: either operand order is acceptable.
  if ((CmpI->getOpcode() == ARM::CMPrr || CmpI->getOpcode() == ARM::t2CMPrr) &&
      (OI->getOpcode() == ARM::SUBrr || OI->getOpcode() == ARM::t2SUBrr) &&
      ((OI->getOperand(1).getReg() == SrcReg &&
        OI->getOperand(2).getReg() == SrcReg2) ||
       (OI->getOperand(1).getReg() == SrcReg2 &&
        OI->getOperand(2).getReg() == SrcReg))) {
    IsThumb1 = false;
    return true;
  }

  // Thumb1 variant: note the source operands sit at indices 2/3 here,
  // not 1/2 as in the ARM/Thumb2 forms above.
  if (CmpI->getOpcode() == ARM::tCMPr && OI->getOpcode() == ARM::tSUBrr &&
      ((OI->getOperand(2).getReg() == SrcReg &&
        OI->getOperand(3).getReg() == SrcReg2) ||
       (OI->getOperand(2).getReg() == SrcReg2 &&
        OI->getOperand(3).getReg() == SrcReg))) {
    IsThumb1 = true;
    return true;
  }

  // CMPri vs SUBri with the same register and immediate.
  if ((CmpI->getOpcode() == ARM::CMPri || CmpI->getOpcode() == ARM::t2CMPri) &&
      (OI->getOpcode() == ARM::SUBri || OI->getOpcode() == ARM::t2SUBri) &&
      OI->getOperand(1).getReg() == SrcReg &&
      OI->getOperand(2).getImm() == ImmValue) {
    IsThumb1 = false;
    return true;
  }

  if (CmpI->getOpcode() == ARM::tCMPi8 &&
      (OI->getOpcode() == ARM::tSUBi8 || OI->getOpcode() == ARM::tSUBi3) &&
      OI->getOperand(2).getReg() == SrcReg &&
      OI->getOperand(3).getImm() == ImmValue) {
    IsThumb1 = true;
    return true;
  }

  // CMPrr(r0, r1) vs ADDr[ri](r0, r1, X): operand order matters here, the
  // caller is expected to translate the condition via getCmpToAddCondition.
  if ((CmpI->getOpcode() == ARM::CMPrr || CmpI->getOpcode() == ARM::t2CMPrr) &&
      (OI->getOpcode() == ARM::ADDrr || OI->getOpcode() == ARM::t2ADDrr ||
       OI->getOpcode() == ARM::ADDri || OI->getOpcode() == ARM::t2ADDri) &&
      OI->getOperand(0).isReg() && OI->getOperand(1).isReg() &&
      OI->getOperand(0).getReg() == SrcReg &&
      OI->getOperand(1).getReg() == SrcReg2) {
    IsThumb1 = false;
    return true;
  }

  if (CmpI->getOpcode() == ARM::tCMPr &&
      (OI->getOpcode() == ARM::tADDi3 || OI->getOpcode() == ARM::tADDi8 ||
       OI->getOpcode() == ARM::tADDrr) &&
      OI->getOperand(0).getReg() == SrcReg &&
      OI->getOperand(2).getReg() == SrcReg2) {
    IsThumb1 = true;
    return true;
  }

  return false;
}

/// Return true if MI is an instruction whose S-suffixed (flag-setting) form
/// could replace a following compare. Thumb1 opcodes fall through into the
/// shared cases and additionally set IsThumb1.
static bool isOptimizeCompareCandidate(MachineInstr *MI, bool &IsThumb1) {
  switch (MI->getOpcode()) {
  default: return false;
  case ARM::tLSLri:
  case ARM::tLSRri:
  case ARM::tLSLrr:
  case ARM::tLSRrr:
  case ARM::tSUBrr:
  case ARM::tADDrr:
  case ARM::tADDi3:
  case ARM::tADDi8:
  case ARM::tSUBi3:
  case ARM::tSUBi8:
  case ARM::tMUL:
  case ARM::tADC:
  case ARM::tSBC:
  case ARM::tRSB:
  case ARM::tAND:
  case ARM::tORR:
  case ARM::tEOR:
  case ARM::tBIC:
  case ARM::tMVN:
  case ARM::tASRri:
  case ARM::tASRrr:
  case ARM::tROR:
    IsThumb1 = true;
    LLVM_FALLTHROUGH;
  case ARM::RSBrr:
  case ARM::RSBri:
  case ARM::RSCrr:
  case ARM::RSCri:
  case ARM::ADDrr:
  case ARM::ADDri:
  case ARM::ADCrr:
  case ARM::ADCri:
  case ARM::SUBrr:
  case ARM::SUBri:
  case ARM::SBCrr:
  case ARM::SBCri:
  case ARM::t2RSBri:
2831*0b57cec5SDimitry Andric case ARM::t2ADDrr: 2832*0b57cec5SDimitry Andric case ARM::t2ADDri: 2833*0b57cec5SDimitry Andric case ARM::t2ADCrr: 2834*0b57cec5SDimitry Andric case ARM::t2ADCri: 2835*0b57cec5SDimitry Andric case ARM::t2SUBrr: 2836*0b57cec5SDimitry Andric case ARM::t2SUBri: 2837*0b57cec5SDimitry Andric case ARM::t2SBCrr: 2838*0b57cec5SDimitry Andric case ARM::t2SBCri: 2839*0b57cec5SDimitry Andric case ARM::ANDrr: 2840*0b57cec5SDimitry Andric case ARM::ANDri: 2841*0b57cec5SDimitry Andric case ARM::t2ANDrr: 2842*0b57cec5SDimitry Andric case ARM::t2ANDri: 2843*0b57cec5SDimitry Andric case ARM::ORRrr: 2844*0b57cec5SDimitry Andric case ARM::ORRri: 2845*0b57cec5SDimitry Andric case ARM::t2ORRrr: 2846*0b57cec5SDimitry Andric case ARM::t2ORRri: 2847*0b57cec5SDimitry Andric case ARM::EORrr: 2848*0b57cec5SDimitry Andric case ARM::EORri: 2849*0b57cec5SDimitry Andric case ARM::t2EORrr: 2850*0b57cec5SDimitry Andric case ARM::t2EORri: 2851*0b57cec5SDimitry Andric case ARM::t2LSRri: 2852*0b57cec5SDimitry Andric case ARM::t2LSRrr: 2853*0b57cec5SDimitry Andric case ARM::t2LSLri: 2854*0b57cec5SDimitry Andric case ARM::t2LSLrr: 2855*0b57cec5SDimitry Andric return true; 2856*0b57cec5SDimitry Andric } 2857*0b57cec5SDimitry Andric } 2858*0b57cec5SDimitry Andric 2859*0b57cec5SDimitry Andric /// optimizeCompareInstr - Convert the instruction supplying the argument to the 2860*0b57cec5SDimitry Andric /// comparison into one that sets the zero bit in the flags register; 2861*0b57cec5SDimitry Andric /// Remove a redundant Compare instruction if an earlier instruction can set the 2862*0b57cec5SDimitry Andric /// flags in the same way as Compare. 2863*0b57cec5SDimitry Andric /// E.g. SUBrr(r1,r2) and CMPrr(r1,r2). We also handle the case where two 2864*0b57cec5SDimitry Andric /// operands are swapped: SUBrr(r1,r2) and CMPrr(r2,r1), by updating the 2865*0b57cec5SDimitry Andric /// condition code of instructions which use the flags. 
2866*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::optimizeCompareInstr( 2867*0b57cec5SDimitry Andric MachineInstr &CmpInstr, unsigned SrcReg, unsigned SrcReg2, int CmpMask, 2868*0b57cec5SDimitry Andric int CmpValue, const MachineRegisterInfo *MRI) const { 2869*0b57cec5SDimitry Andric // Get the unique definition of SrcReg. 2870*0b57cec5SDimitry Andric MachineInstr *MI = MRI->getUniqueVRegDef(SrcReg); 2871*0b57cec5SDimitry Andric if (!MI) return false; 2872*0b57cec5SDimitry Andric 2873*0b57cec5SDimitry Andric // Masked compares sometimes use the same register as the corresponding 'and'. 2874*0b57cec5SDimitry Andric if (CmpMask != ~0) { 2875*0b57cec5SDimitry Andric if (!isSuitableForMask(MI, SrcReg, CmpMask, false) || isPredicated(*MI)) { 2876*0b57cec5SDimitry Andric MI = nullptr; 2877*0b57cec5SDimitry Andric for (MachineRegisterInfo::use_instr_iterator 2878*0b57cec5SDimitry Andric UI = MRI->use_instr_begin(SrcReg), UE = MRI->use_instr_end(); 2879*0b57cec5SDimitry Andric UI != UE; ++UI) { 2880*0b57cec5SDimitry Andric if (UI->getParent() != CmpInstr.getParent()) 2881*0b57cec5SDimitry Andric continue; 2882*0b57cec5SDimitry Andric MachineInstr *PotentialAND = &*UI; 2883*0b57cec5SDimitry Andric if (!isSuitableForMask(PotentialAND, SrcReg, CmpMask, true) || 2884*0b57cec5SDimitry Andric isPredicated(*PotentialAND)) 2885*0b57cec5SDimitry Andric continue; 2886*0b57cec5SDimitry Andric MI = PotentialAND; 2887*0b57cec5SDimitry Andric break; 2888*0b57cec5SDimitry Andric } 2889*0b57cec5SDimitry Andric if (!MI) return false; 2890*0b57cec5SDimitry Andric } 2891*0b57cec5SDimitry Andric } 2892*0b57cec5SDimitry Andric 2893*0b57cec5SDimitry Andric // Get ready to iterate backward from CmpInstr. 2894*0b57cec5SDimitry Andric MachineBasicBlock::iterator I = CmpInstr, E = MI, 2895*0b57cec5SDimitry Andric B = CmpInstr.getParent()->begin(); 2896*0b57cec5SDimitry Andric 2897*0b57cec5SDimitry Andric // Early exit if CmpInstr is at the beginning of the BB. 
2898*0b57cec5SDimitry Andric if (I == B) return false; 2899*0b57cec5SDimitry Andric 2900*0b57cec5SDimitry Andric // There are two possible candidates which can be changed to set CPSR: 2901*0b57cec5SDimitry Andric // One is MI, the other is a SUB or ADD instruction. 2902*0b57cec5SDimitry Andric // For CMPrr(r1,r2), we are looking for SUB(r1,r2), SUB(r2,r1), or 2903*0b57cec5SDimitry Andric // ADDr[ri](r1, r2, X). 2904*0b57cec5SDimitry Andric // For CMPri(r1, CmpValue), we are looking for SUBri(r1, CmpValue). 2905*0b57cec5SDimitry Andric MachineInstr *SubAdd = nullptr; 2906*0b57cec5SDimitry Andric if (SrcReg2 != 0) 2907*0b57cec5SDimitry Andric // MI is not a candidate for CMPrr. 2908*0b57cec5SDimitry Andric MI = nullptr; 2909*0b57cec5SDimitry Andric else if (MI->getParent() != CmpInstr.getParent() || CmpValue != 0) { 2910*0b57cec5SDimitry Andric // Conservatively refuse to convert an instruction which isn't in the same 2911*0b57cec5SDimitry Andric // BB as the comparison. 2912*0b57cec5SDimitry Andric // For CMPri w/ CmpValue != 0, a SubAdd may still be a candidate. 2913*0b57cec5SDimitry Andric // Thus we cannot return here. 2914*0b57cec5SDimitry Andric if (CmpInstr.getOpcode() == ARM::CMPri || 2915*0b57cec5SDimitry Andric CmpInstr.getOpcode() == ARM::t2CMPri || 2916*0b57cec5SDimitry Andric CmpInstr.getOpcode() == ARM::tCMPi8) 2917*0b57cec5SDimitry Andric MI = nullptr; 2918*0b57cec5SDimitry Andric else 2919*0b57cec5SDimitry Andric return false; 2920*0b57cec5SDimitry Andric } 2921*0b57cec5SDimitry Andric 2922*0b57cec5SDimitry Andric bool IsThumb1 = false; 2923*0b57cec5SDimitry Andric if (MI && !isOptimizeCompareCandidate(MI, IsThumb1)) 2924*0b57cec5SDimitry Andric return false; 2925*0b57cec5SDimitry Andric 2926*0b57cec5SDimitry Andric // We also want to do this peephole for cases like this: if (a*b == 0), 2927*0b57cec5SDimitry Andric // and optimise away the CMP instruction from the generated code sequence: 2928*0b57cec5SDimitry Andric // MULS, MOVS, MOVS, CMP. 
Here the MOVS instructions load the boolean values 2929*0b57cec5SDimitry Andric // resulting from the select instruction, but these MOVS instructions for 2930*0b57cec5SDimitry Andric // Thumb1 (V6M) are flag setting and are thus preventing this optimisation. 2931*0b57cec5SDimitry Andric // However, if we only have MOVS instructions in between the CMP and the 2932*0b57cec5SDimitry Andric // other instruction (the MULS in this example), then the CPSR is dead so we 2933*0b57cec5SDimitry Andric // can safely reorder the sequence into: MOVS, MOVS, MULS, CMP. We do this 2934*0b57cec5SDimitry Andric // reordering and then continue the analysis hoping we can eliminate the 2935*0b57cec5SDimitry Andric // CMP. This peephole works on the vregs, so is still in SSA form. As a 2936*0b57cec5SDimitry Andric // consequence, the movs won't redefine/kill the MUL operands which would 2937*0b57cec5SDimitry Andric // make this reordering illegal. 2938*0b57cec5SDimitry Andric const TargetRegisterInfo *TRI = &getRegisterInfo(); 2939*0b57cec5SDimitry Andric if (MI && IsThumb1) { 2940*0b57cec5SDimitry Andric --I; 2941*0b57cec5SDimitry Andric if (I != E && !MI->readsRegister(ARM::CPSR, TRI)) { 2942*0b57cec5SDimitry Andric bool CanReorder = true; 2943*0b57cec5SDimitry Andric for (; I != E; --I) { 2944*0b57cec5SDimitry Andric if (I->getOpcode() != ARM::tMOVi8) { 2945*0b57cec5SDimitry Andric CanReorder = false; 2946*0b57cec5SDimitry Andric break; 2947*0b57cec5SDimitry Andric } 2948*0b57cec5SDimitry Andric } 2949*0b57cec5SDimitry Andric if (CanReorder) { 2950*0b57cec5SDimitry Andric MI = MI->removeFromParent(); 2951*0b57cec5SDimitry Andric E = CmpInstr; 2952*0b57cec5SDimitry Andric CmpInstr.getParent()->insert(E, MI); 2953*0b57cec5SDimitry Andric } 2954*0b57cec5SDimitry Andric } 2955*0b57cec5SDimitry Andric I = CmpInstr; 2956*0b57cec5SDimitry Andric E = MI; 2957*0b57cec5SDimitry Andric } 2958*0b57cec5SDimitry Andric 2959*0b57cec5SDimitry Andric // Check that CPSR isn't set between the comparison 
instruction and the one we 2960*0b57cec5SDimitry Andric // want to change. At the same time, search for SubAdd. 2961*0b57cec5SDimitry Andric bool SubAddIsThumb1 = false; 2962*0b57cec5SDimitry Andric do { 2963*0b57cec5SDimitry Andric const MachineInstr &Instr = *--I; 2964*0b57cec5SDimitry Andric 2965*0b57cec5SDimitry Andric // Check whether CmpInstr can be made redundant by the current instruction. 2966*0b57cec5SDimitry Andric if (isRedundantFlagInstr(&CmpInstr, SrcReg, SrcReg2, CmpValue, &Instr, 2967*0b57cec5SDimitry Andric SubAddIsThumb1)) { 2968*0b57cec5SDimitry Andric SubAdd = &*I; 2969*0b57cec5SDimitry Andric break; 2970*0b57cec5SDimitry Andric } 2971*0b57cec5SDimitry Andric 2972*0b57cec5SDimitry Andric // Allow E (which was initially MI) to be SubAdd but do not search before E. 2973*0b57cec5SDimitry Andric if (I == E) 2974*0b57cec5SDimitry Andric break; 2975*0b57cec5SDimitry Andric 2976*0b57cec5SDimitry Andric if (Instr.modifiesRegister(ARM::CPSR, TRI) || 2977*0b57cec5SDimitry Andric Instr.readsRegister(ARM::CPSR, TRI)) 2978*0b57cec5SDimitry Andric // This instruction modifies or uses CPSR after the one we want to 2979*0b57cec5SDimitry Andric // change. We can't do this transformation. 2980*0b57cec5SDimitry Andric return false; 2981*0b57cec5SDimitry Andric 2982*0b57cec5SDimitry Andric if (I == B) { 2983*0b57cec5SDimitry Andric // In some cases, we scan the use-list of an instruction for an AND; 2984*0b57cec5SDimitry Andric // that AND is in the same BB, but may not be scheduled before the 2985*0b57cec5SDimitry Andric // corresponding TST. In that case, bail out. 2986*0b57cec5SDimitry Andric // 2987*0b57cec5SDimitry Andric // FIXME: We could try to reschedule the AND. 2988*0b57cec5SDimitry Andric return false; 2989*0b57cec5SDimitry Andric } 2990*0b57cec5SDimitry Andric } while (true); 2991*0b57cec5SDimitry Andric 2992*0b57cec5SDimitry Andric // Return false if no candidates exist. 
2993*0b57cec5SDimitry Andric if (!MI && !SubAdd) 2994*0b57cec5SDimitry Andric return false; 2995*0b57cec5SDimitry Andric 2996*0b57cec5SDimitry Andric // If we found a SubAdd, use it as it will be closer to the CMP 2997*0b57cec5SDimitry Andric if (SubAdd) { 2998*0b57cec5SDimitry Andric MI = SubAdd; 2999*0b57cec5SDimitry Andric IsThumb1 = SubAddIsThumb1; 3000*0b57cec5SDimitry Andric } 3001*0b57cec5SDimitry Andric 3002*0b57cec5SDimitry Andric // We can't use a predicated instruction - it doesn't always write the flags. 3003*0b57cec5SDimitry Andric if (isPredicated(*MI)) 3004*0b57cec5SDimitry Andric return false; 3005*0b57cec5SDimitry Andric 3006*0b57cec5SDimitry Andric // Scan forward for the use of CPSR 3007*0b57cec5SDimitry Andric // When checking against MI: if it's a conditional code that requires 3008*0b57cec5SDimitry Andric // checking of the V bit or C bit, then this is not safe to do. 3009*0b57cec5SDimitry Andric // It is safe to remove CmpInstr if CPSR is redefined or killed. 3010*0b57cec5SDimitry Andric // If we are done with the basic block, we need to check whether CPSR is 3011*0b57cec5SDimitry Andric // live-out. 
3012*0b57cec5SDimitry Andric SmallVector<std::pair<MachineOperand*, ARMCC::CondCodes>, 4> 3013*0b57cec5SDimitry Andric OperandsToUpdate; 3014*0b57cec5SDimitry Andric bool isSafe = false; 3015*0b57cec5SDimitry Andric I = CmpInstr; 3016*0b57cec5SDimitry Andric E = CmpInstr.getParent()->end(); 3017*0b57cec5SDimitry Andric while (!isSafe && ++I != E) { 3018*0b57cec5SDimitry Andric const MachineInstr &Instr = *I; 3019*0b57cec5SDimitry Andric for (unsigned IO = 0, EO = Instr.getNumOperands(); 3020*0b57cec5SDimitry Andric !isSafe && IO != EO; ++IO) { 3021*0b57cec5SDimitry Andric const MachineOperand &MO = Instr.getOperand(IO); 3022*0b57cec5SDimitry Andric if (MO.isRegMask() && MO.clobbersPhysReg(ARM::CPSR)) { 3023*0b57cec5SDimitry Andric isSafe = true; 3024*0b57cec5SDimitry Andric break; 3025*0b57cec5SDimitry Andric } 3026*0b57cec5SDimitry Andric if (!MO.isReg() || MO.getReg() != ARM::CPSR) 3027*0b57cec5SDimitry Andric continue; 3028*0b57cec5SDimitry Andric if (MO.isDef()) { 3029*0b57cec5SDimitry Andric isSafe = true; 3030*0b57cec5SDimitry Andric break; 3031*0b57cec5SDimitry Andric } 3032*0b57cec5SDimitry Andric // Condition code is after the operand before CPSR except for VSELs. 
3033*0b57cec5SDimitry Andric ARMCC::CondCodes CC; 3034*0b57cec5SDimitry Andric bool IsInstrVSel = true; 3035*0b57cec5SDimitry Andric switch (Instr.getOpcode()) { 3036*0b57cec5SDimitry Andric default: 3037*0b57cec5SDimitry Andric IsInstrVSel = false; 3038*0b57cec5SDimitry Andric CC = (ARMCC::CondCodes)Instr.getOperand(IO - 1).getImm(); 3039*0b57cec5SDimitry Andric break; 3040*0b57cec5SDimitry Andric case ARM::VSELEQD: 3041*0b57cec5SDimitry Andric case ARM::VSELEQS: 3042*0b57cec5SDimitry Andric CC = ARMCC::EQ; 3043*0b57cec5SDimitry Andric break; 3044*0b57cec5SDimitry Andric case ARM::VSELGTD: 3045*0b57cec5SDimitry Andric case ARM::VSELGTS: 3046*0b57cec5SDimitry Andric CC = ARMCC::GT; 3047*0b57cec5SDimitry Andric break; 3048*0b57cec5SDimitry Andric case ARM::VSELGED: 3049*0b57cec5SDimitry Andric case ARM::VSELGES: 3050*0b57cec5SDimitry Andric CC = ARMCC::GE; 3051*0b57cec5SDimitry Andric break; 3052*0b57cec5SDimitry Andric case ARM::VSELVSS: 3053*0b57cec5SDimitry Andric case ARM::VSELVSD: 3054*0b57cec5SDimitry Andric CC = ARMCC::VS; 3055*0b57cec5SDimitry Andric break; 3056*0b57cec5SDimitry Andric } 3057*0b57cec5SDimitry Andric 3058*0b57cec5SDimitry Andric if (SubAdd) { 3059*0b57cec5SDimitry Andric // If we have SUB(r1, r2) and CMP(r2, r1), the condition code based 3060*0b57cec5SDimitry Andric // on CMP needs to be updated to be based on SUB. 3061*0b57cec5SDimitry Andric // If we have ADD(r1, r2, X) and CMP(r1, r2), the condition code also 3062*0b57cec5SDimitry Andric // needs to be modified. 3063*0b57cec5SDimitry Andric // Push the condition code operands to OperandsToUpdate. 3064*0b57cec5SDimitry Andric // If it is safe to remove CmpInstr, the condition code of these 3065*0b57cec5SDimitry Andric // operands will be modified. 
3066*0b57cec5SDimitry Andric unsigned Opc = SubAdd->getOpcode(); 3067*0b57cec5SDimitry Andric bool IsSub = Opc == ARM::SUBrr || Opc == ARM::t2SUBrr || 3068*0b57cec5SDimitry Andric Opc == ARM::SUBri || Opc == ARM::t2SUBri || 3069*0b57cec5SDimitry Andric Opc == ARM::tSUBrr || Opc == ARM::tSUBi3 || 3070*0b57cec5SDimitry Andric Opc == ARM::tSUBi8; 3071*0b57cec5SDimitry Andric unsigned OpI = Opc != ARM::tSUBrr ? 1 : 2; 3072*0b57cec5SDimitry Andric if (!IsSub || 3073*0b57cec5SDimitry Andric (SrcReg2 != 0 && SubAdd->getOperand(OpI).getReg() == SrcReg2 && 3074*0b57cec5SDimitry Andric SubAdd->getOperand(OpI + 1).getReg() == SrcReg)) { 3075*0b57cec5SDimitry Andric // VSel doesn't support condition code update. 3076*0b57cec5SDimitry Andric if (IsInstrVSel) 3077*0b57cec5SDimitry Andric return false; 3078*0b57cec5SDimitry Andric // Ensure we can swap the condition. 3079*0b57cec5SDimitry Andric ARMCC::CondCodes NewCC = (IsSub ? getSwappedCondition(CC) : getCmpToAddCondition(CC)); 3080*0b57cec5SDimitry Andric if (NewCC == ARMCC::AL) 3081*0b57cec5SDimitry Andric return false; 3082*0b57cec5SDimitry Andric OperandsToUpdate.push_back( 3083*0b57cec5SDimitry Andric std::make_pair(&((*I).getOperand(IO - 1)), NewCC)); 3084*0b57cec5SDimitry Andric } 3085*0b57cec5SDimitry Andric } else { 3086*0b57cec5SDimitry Andric // No SubAdd, so this is x = <op> y, z; cmp x, 0. 3087*0b57cec5SDimitry Andric switch (CC) { 3088*0b57cec5SDimitry Andric case ARMCC::EQ: // Z 3089*0b57cec5SDimitry Andric case ARMCC::NE: // Z 3090*0b57cec5SDimitry Andric case ARMCC::MI: // N 3091*0b57cec5SDimitry Andric case ARMCC::PL: // N 3092*0b57cec5SDimitry Andric case ARMCC::AL: // none 3093*0b57cec5SDimitry Andric // CPSR can be used multiple times, we should continue. 
3094*0b57cec5SDimitry Andric break; 3095*0b57cec5SDimitry Andric case ARMCC::HS: // C 3096*0b57cec5SDimitry Andric case ARMCC::LO: // C 3097*0b57cec5SDimitry Andric case ARMCC::VS: // V 3098*0b57cec5SDimitry Andric case ARMCC::VC: // V 3099*0b57cec5SDimitry Andric case ARMCC::HI: // C Z 3100*0b57cec5SDimitry Andric case ARMCC::LS: // C Z 3101*0b57cec5SDimitry Andric case ARMCC::GE: // N V 3102*0b57cec5SDimitry Andric case ARMCC::LT: // N V 3103*0b57cec5SDimitry Andric case ARMCC::GT: // Z N V 3104*0b57cec5SDimitry Andric case ARMCC::LE: // Z N V 3105*0b57cec5SDimitry Andric // The instruction uses the V bit or C bit which is not safe. 3106*0b57cec5SDimitry Andric return false; 3107*0b57cec5SDimitry Andric } 3108*0b57cec5SDimitry Andric } 3109*0b57cec5SDimitry Andric } 3110*0b57cec5SDimitry Andric } 3111*0b57cec5SDimitry Andric 3112*0b57cec5SDimitry Andric // If CPSR is not killed nor re-defined, we should check whether it is 3113*0b57cec5SDimitry Andric // live-out. If it is live-out, do not optimize. 
3114*0b57cec5SDimitry Andric if (!isSafe) { 3115*0b57cec5SDimitry Andric MachineBasicBlock *MBB = CmpInstr.getParent(); 3116*0b57cec5SDimitry Andric for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(), 3117*0b57cec5SDimitry Andric SE = MBB->succ_end(); SI != SE; ++SI) 3118*0b57cec5SDimitry Andric if ((*SI)->isLiveIn(ARM::CPSR)) 3119*0b57cec5SDimitry Andric return false; 3120*0b57cec5SDimitry Andric } 3121*0b57cec5SDimitry Andric 3122*0b57cec5SDimitry Andric // Toggle the optional operand to CPSR (if it exists - in Thumb1 we always 3123*0b57cec5SDimitry Andric // set CPSR so this is represented as an explicit output) 3124*0b57cec5SDimitry Andric if (!IsThumb1) { 3125*0b57cec5SDimitry Andric MI->getOperand(5).setReg(ARM::CPSR); 3126*0b57cec5SDimitry Andric MI->getOperand(5).setIsDef(true); 3127*0b57cec5SDimitry Andric } 3128*0b57cec5SDimitry Andric assert(!isPredicated(*MI) && "Can't use flags from predicated instruction"); 3129*0b57cec5SDimitry Andric CmpInstr.eraseFromParent(); 3130*0b57cec5SDimitry Andric 3131*0b57cec5SDimitry Andric // Modify the condition code of operands in OperandsToUpdate. 3132*0b57cec5SDimitry Andric // Since we have SUB(r1, r2) and CMP(r2, r1), the condition code needs to 3133*0b57cec5SDimitry Andric // be changed from r2 > r1 to r1 < r2, from r2 < r1 to r1 > r2, etc. 3134*0b57cec5SDimitry Andric for (unsigned i = 0, e = OperandsToUpdate.size(); i < e; i++) 3135*0b57cec5SDimitry Andric OperandsToUpdate[i].first->setImm(OperandsToUpdate[i].second); 3136*0b57cec5SDimitry Andric 3137*0b57cec5SDimitry Andric MI->clearRegisterDeads(ARM::CPSR); 3138*0b57cec5SDimitry Andric 3139*0b57cec5SDimitry Andric return true; 3140*0b57cec5SDimitry Andric } 3141*0b57cec5SDimitry Andric 3142*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::shouldSink(const MachineInstr &MI) const { 3143*0b57cec5SDimitry Andric // Do not sink MI if it might be used to optimize a redundant compare. 
3144*0b57cec5SDimitry Andric // We heuristically only look at the instruction immediately following MI to 3145*0b57cec5SDimitry Andric // avoid potentially searching the entire basic block. 3146*0b57cec5SDimitry Andric if (isPredicated(MI)) 3147*0b57cec5SDimitry Andric return true; 3148*0b57cec5SDimitry Andric MachineBasicBlock::const_iterator Next = &MI; 3149*0b57cec5SDimitry Andric ++Next; 3150*0b57cec5SDimitry Andric unsigned SrcReg, SrcReg2; 3151*0b57cec5SDimitry Andric int CmpMask, CmpValue; 3152*0b57cec5SDimitry Andric bool IsThumb1; 3153*0b57cec5SDimitry Andric if (Next != MI.getParent()->end() && 3154*0b57cec5SDimitry Andric analyzeCompare(*Next, SrcReg, SrcReg2, CmpMask, CmpValue) && 3155*0b57cec5SDimitry Andric isRedundantFlagInstr(&*Next, SrcReg, SrcReg2, CmpValue, &MI, IsThumb1)) 3156*0b57cec5SDimitry Andric return false; 3157*0b57cec5SDimitry Andric return true; 3158*0b57cec5SDimitry Andric } 3159*0b57cec5SDimitry Andric 3160*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::FoldImmediate(MachineInstr &UseMI, MachineInstr &DefMI, 3161*0b57cec5SDimitry Andric unsigned Reg, 3162*0b57cec5SDimitry Andric MachineRegisterInfo *MRI) const { 3163*0b57cec5SDimitry Andric // Fold large immediates into add, sub, or, xor. 
3164*0b57cec5SDimitry Andric unsigned DefOpc = DefMI.getOpcode(); 3165*0b57cec5SDimitry Andric if (DefOpc != ARM::t2MOVi32imm && DefOpc != ARM::MOVi32imm) 3166*0b57cec5SDimitry Andric return false; 3167*0b57cec5SDimitry Andric if (!DefMI.getOperand(1).isImm()) 3168*0b57cec5SDimitry Andric // Could be t2MOVi32imm @xx 3169*0b57cec5SDimitry Andric return false; 3170*0b57cec5SDimitry Andric 3171*0b57cec5SDimitry Andric if (!MRI->hasOneNonDBGUse(Reg)) 3172*0b57cec5SDimitry Andric return false; 3173*0b57cec5SDimitry Andric 3174*0b57cec5SDimitry Andric const MCInstrDesc &DefMCID = DefMI.getDesc(); 3175*0b57cec5SDimitry Andric if (DefMCID.hasOptionalDef()) { 3176*0b57cec5SDimitry Andric unsigned NumOps = DefMCID.getNumOperands(); 3177*0b57cec5SDimitry Andric const MachineOperand &MO = DefMI.getOperand(NumOps - 1); 3178*0b57cec5SDimitry Andric if (MO.getReg() == ARM::CPSR && !MO.isDead()) 3179*0b57cec5SDimitry Andric // If DefMI defines CPSR and it is not dead, it's obviously not safe 3180*0b57cec5SDimitry Andric // to delete DefMI. 3181*0b57cec5SDimitry Andric return false; 3182*0b57cec5SDimitry Andric } 3183*0b57cec5SDimitry Andric 3184*0b57cec5SDimitry Andric const MCInstrDesc &UseMCID = UseMI.getDesc(); 3185*0b57cec5SDimitry Andric if (UseMCID.hasOptionalDef()) { 3186*0b57cec5SDimitry Andric unsigned NumOps = UseMCID.getNumOperands(); 3187*0b57cec5SDimitry Andric if (UseMI.getOperand(NumOps - 1).getReg() == ARM::CPSR) 3188*0b57cec5SDimitry Andric // If the instruction sets the flag, do not attempt this optimization 3189*0b57cec5SDimitry Andric // since it may change the semantics of the code. 
3190*0b57cec5SDimitry Andric return false; 3191*0b57cec5SDimitry Andric } 3192*0b57cec5SDimitry Andric 3193*0b57cec5SDimitry Andric unsigned UseOpc = UseMI.getOpcode(); 3194*0b57cec5SDimitry Andric unsigned NewUseOpc = 0; 3195*0b57cec5SDimitry Andric uint32_t ImmVal = (uint32_t)DefMI.getOperand(1).getImm(); 3196*0b57cec5SDimitry Andric uint32_t SOImmValV1 = 0, SOImmValV2 = 0; 3197*0b57cec5SDimitry Andric bool Commute = false; 3198*0b57cec5SDimitry Andric switch (UseOpc) { 3199*0b57cec5SDimitry Andric default: return false; 3200*0b57cec5SDimitry Andric case ARM::SUBrr: 3201*0b57cec5SDimitry Andric case ARM::ADDrr: 3202*0b57cec5SDimitry Andric case ARM::ORRrr: 3203*0b57cec5SDimitry Andric case ARM::EORrr: 3204*0b57cec5SDimitry Andric case ARM::t2SUBrr: 3205*0b57cec5SDimitry Andric case ARM::t2ADDrr: 3206*0b57cec5SDimitry Andric case ARM::t2ORRrr: 3207*0b57cec5SDimitry Andric case ARM::t2EORrr: { 3208*0b57cec5SDimitry Andric Commute = UseMI.getOperand(2).getReg() != Reg; 3209*0b57cec5SDimitry Andric switch (UseOpc) { 3210*0b57cec5SDimitry Andric default: break; 3211*0b57cec5SDimitry Andric case ARM::ADDrr: 3212*0b57cec5SDimitry Andric case ARM::SUBrr: 3213*0b57cec5SDimitry Andric if (UseOpc == ARM::SUBrr && Commute) 3214*0b57cec5SDimitry Andric return false; 3215*0b57cec5SDimitry Andric 3216*0b57cec5SDimitry Andric // ADD/SUB are special because they're essentially the same operation, so 3217*0b57cec5SDimitry Andric // we can handle a larger range of immediates. 3218*0b57cec5SDimitry Andric if (ARM_AM::isSOImmTwoPartVal(ImmVal)) 3219*0b57cec5SDimitry Andric NewUseOpc = UseOpc == ARM::ADDrr ? ARM::ADDri : ARM::SUBri; 3220*0b57cec5SDimitry Andric else if (ARM_AM::isSOImmTwoPartVal(-ImmVal)) { 3221*0b57cec5SDimitry Andric ImmVal = -ImmVal; 3222*0b57cec5SDimitry Andric NewUseOpc = UseOpc == ARM::ADDrr ? 
ARM::SUBri : ARM::ADDri; 3223*0b57cec5SDimitry Andric } else 3224*0b57cec5SDimitry Andric return false; 3225*0b57cec5SDimitry Andric SOImmValV1 = (uint32_t)ARM_AM::getSOImmTwoPartFirst(ImmVal); 3226*0b57cec5SDimitry Andric SOImmValV2 = (uint32_t)ARM_AM::getSOImmTwoPartSecond(ImmVal); 3227*0b57cec5SDimitry Andric break; 3228*0b57cec5SDimitry Andric case ARM::ORRrr: 3229*0b57cec5SDimitry Andric case ARM::EORrr: 3230*0b57cec5SDimitry Andric if (!ARM_AM::isSOImmTwoPartVal(ImmVal)) 3231*0b57cec5SDimitry Andric return false; 3232*0b57cec5SDimitry Andric SOImmValV1 = (uint32_t)ARM_AM::getSOImmTwoPartFirst(ImmVal); 3233*0b57cec5SDimitry Andric SOImmValV2 = (uint32_t)ARM_AM::getSOImmTwoPartSecond(ImmVal); 3234*0b57cec5SDimitry Andric switch (UseOpc) { 3235*0b57cec5SDimitry Andric default: break; 3236*0b57cec5SDimitry Andric case ARM::ORRrr: NewUseOpc = ARM::ORRri; break; 3237*0b57cec5SDimitry Andric case ARM::EORrr: NewUseOpc = ARM::EORri; break; 3238*0b57cec5SDimitry Andric } 3239*0b57cec5SDimitry Andric break; 3240*0b57cec5SDimitry Andric case ARM::t2ADDrr: 3241*0b57cec5SDimitry Andric case ARM::t2SUBrr: 3242*0b57cec5SDimitry Andric if (UseOpc == ARM::t2SUBrr && Commute) 3243*0b57cec5SDimitry Andric return false; 3244*0b57cec5SDimitry Andric 3245*0b57cec5SDimitry Andric // ADD/SUB are special because they're essentially the same operation, so 3246*0b57cec5SDimitry Andric // we can handle a larger range of immediates. 3247*0b57cec5SDimitry Andric if (ARM_AM::isT2SOImmTwoPartVal(ImmVal)) 3248*0b57cec5SDimitry Andric NewUseOpc = UseOpc == ARM::t2ADDrr ? ARM::t2ADDri : ARM::t2SUBri; 3249*0b57cec5SDimitry Andric else if (ARM_AM::isT2SOImmTwoPartVal(-ImmVal)) { 3250*0b57cec5SDimitry Andric ImmVal = -ImmVal; 3251*0b57cec5SDimitry Andric NewUseOpc = UseOpc == ARM::t2ADDrr ? 
ARM::t2SUBri : ARM::t2ADDri; 3252*0b57cec5SDimitry Andric } else 3253*0b57cec5SDimitry Andric return false; 3254*0b57cec5SDimitry Andric SOImmValV1 = (uint32_t)ARM_AM::getT2SOImmTwoPartFirst(ImmVal); 3255*0b57cec5SDimitry Andric SOImmValV2 = (uint32_t)ARM_AM::getT2SOImmTwoPartSecond(ImmVal); 3256*0b57cec5SDimitry Andric break; 3257*0b57cec5SDimitry Andric case ARM::t2ORRrr: 3258*0b57cec5SDimitry Andric case ARM::t2EORrr: 3259*0b57cec5SDimitry Andric if (!ARM_AM::isT2SOImmTwoPartVal(ImmVal)) 3260*0b57cec5SDimitry Andric return false; 3261*0b57cec5SDimitry Andric SOImmValV1 = (uint32_t)ARM_AM::getT2SOImmTwoPartFirst(ImmVal); 3262*0b57cec5SDimitry Andric SOImmValV2 = (uint32_t)ARM_AM::getT2SOImmTwoPartSecond(ImmVal); 3263*0b57cec5SDimitry Andric switch (UseOpc) { 3264*0b57cec5SDimitry Andric default: break; 3265*0b57cec5SDimitry Andric case ARM::t2ORRrr: NewUseOpc = ARM::t2ORRri; break; 3266*0b57cec5SDimitry Andric case ARM::t2EORrr: NewUseOpc = ARM::t2EORri; break; 3267*0b57cec5SDimitry Andric } 3268*0b57cec5SDimitry Andric break; 3269*0b57cec5SDimitry Andric } 3270*0b57cec5SDimitry Andric } 3271*0b57cec5SDimitry Andric } 3272*0b57cec5SDimitry Andric 3273*0b57cec5SDimitry Andric unsigned OpIdx = Commute ? 
2 : 1;
  unsigned Reg1 = UseMI.getOperand(OpIdx).getReg();
  bool isKill = UseMI.getOperand(OpIdx).isKill();
  // Materialize the first immediate half into a fresh vreg, then rewrite
  // UseMI in place into its immediate form consuming the second half.
  unsigned NewReg = MRI->createVirtualRegister(MRI->getRegClass(Reg));
  BuildMI(*UseMI.getParent(), UseMI, UseMI.getDebugLoc(), get(NewUseOpc),
          NewReg)
      .addReg(Reg1, getKillRegState(isKill))
      .addImm(SOImmValV1)
      .add(predOps(ARMCC::AL))
      .add(condCodeOp());
  UseMI.setDesc(get(NewUseOpc));
  UseMI.getOperand(1).setReg(NewReg);
  UseMI.getOperand(1).setIsKill();
  UseMI.getOperand(2).ChangeToImmediate(SOImmValV2);
  // The original def is now dead; its immediate has been folded into UseMI.
  DefMI.eraseFromParent();
  return true;
}

/// Return the micro-op count for a load/store instruction when scheduling
/// for Swift (dispatched to from getNumMicroOps when Subtarget.isSwift()).
/// Opcodes not special-cased below fall back to the itinerary's own count.
///
/// Recurring pattern: for AM2 register-offset forms, a non-subtracted offset
/// with no shift, or an LSL by 1..3, is the cheap encoding (presumably it
/// folds into the address generation); anything else costs one extra uop.
/// Pre/post-indexed loads whose destination equals the updated base register
/// also cost an extra uop.
static unsigned getNumMicroOpsSwiftLdSt(const InstrItineraryData *ItinData,
                                        const MachineInstr &MI) {
  switch (MI.getOpcode()) {
  default: {
    // Not special-cased: trust the itinerary.
    const MCInstrDesc &Desc = MI.getDesc();
    int UOps = ItinData->getNumMicroOps(Desc.getSchedClass());
    assert(UOps >= 0 && "bad # UOps");
    return UOps;
  }

  case ARM::LDRrs:
  case ARM::LDRBrs:
  case ARM::STRrs:
  case ARM::STRBrs: {
    unsigned ShOpVal = MI.getOperand(3).getImm();
    bool isSub = ARM_AM::getAM2Op(ShOpVal) == ARM_AM::sub;
    unsigned ShImm = ARM_AM::getAM2Offset(ShOpVal);
    if (!isSub &&
        (ShImm == 0 ||
         ((ShImm == 1 || ShImm == 2 || ShImm == 3) &&
          ARM_AM::getAM2ShiftOpc(ShOpVal) == ARM_AM::lsl)))
      return 1;
    return 2;
  }

  case ARM::LDRH:
  case ARM::STRH: {
    // No offset register => immediate-offset form, single uop.
    if (!MI.getOperand(2).getReg())
      return 1;

    unsigned ShOpVal = MI.getOperand(3).getImm();
    bool isSub = ARM_AM::getAM2Op(ShOpVal) == ARM_AM::sub;
    unsigned ShImm = ARM_AM::getAM2Offset(ShOpVal);
    if (!isSub &&
        (ShImm == 0 ||
         ((ShImm == 1 || ShImm == 2 || ShImm == 3) &&
          ARM_AM::getAM2ShiftOpc(ShOpVal) == ARM_AM::lsl)))
      return 1;
    return 2;
  }

  case ARM::LDRSB:
  case ARM::LDRSH:
    // AM3: subtracted offset costs an extra uop.
    return (ARM_AM::getAM3Op(MI.getOperand(3).getImm()) == ARM_AM::sub) ? 3 : 2;

  case ARM::LDRSB_POST:
  case ARM::LDRSH_POST: {
    unsigned Rt = MI.getOperand(0).getReg();
    unsigned Rm = MI.getOperand(3).getReg();
    // Destination aliasing the offset register costs an extra uop.
    return (Rt == Rm) ? 4 : 3;
  }

  case ARM::LDR_PRE_REG:
  case ARM::LDRB_PRE_REG: {
    unsigned Rt = MI.getOperand(0).getReg();
    unsigned Rm = MI.getOperand(3).getReg();
    if (Rt == Rm)
      return 3;
    unsigned ShOpVal = MI.getOperand(4).getImm();
    bool isSub = ARM_AM::getAM2Op(ShOpVal) == ARM_AM::sub;
    unsigned ShImm = ARM_AM::getAM2Offset(ShOpVal);
    if (!isSub &&
        (ShImm == 0 ||
         ((ShImm == 1 || ShImm == 2 || ShImm == 3) &&
          ARM_AM::getAM2ShiftOpc(ShOpVal) == ARM_AM::lsl)))
      return 2;
    return 3;
  }

  case ARM::STR_PRE_REG:
  case ARM::STRB_PRE_REG: {
    unsigned ShOpVal = MI.getOperand(4).getImm();
    bool isSub = ARM_AM::getAM2Op(ShOpVal) == ARM_AM::sub;
    unsigned ShImm = ARM_AM::getAM2Offset(ShOpVal);
    if (!isSub &&
        (ShImm == 0 ||
         ((ShImm == 1 || ShImm == 2 || ShImm == 3) &&
          ARM_AM::getAM2ShiftOpc(ShOpVal) == ARM_AM::lsl)))
      return 2;
    return 3;
  }

  case ARM::LDRH_PRE:
  case ARM::STRH_PRE: {
    unsigned Rt = MI.getOperand(0).getReg();
    unsigned Rm = MI.getOperand(3).getReg();
    if (!Rm)
      return 2;            // Immediate-offset form.
    if (Rt == Rm)
      return 3;            // Destination aliases the offset register.
    return (ARM_AM::getAM3Op(MI.getOperand(4).getImm()) == ARM_AM::sub) ? 3 : 2;
  }

  case ARM::LDR_POST_REG:
  case ARM::LDRB_POST_REG:
  case ARM::LDRH_POST: {
    unsigned Rt = MI.getOperand(0).getReg();
    unsigned Rm = MI.getOperand(3).getReg();
    return (Rt == Rm) ? 3 : 2;
  }

  case ARM::LDR_PRE_IMM:
  case ARM::LDRB_PRE_IMM:
  case ARM::LDR_POST_IMM:
  case ARM::LDRB_POST_IMM:
  case ARM::STRB_POST_IMM:
  case ARM::STRB_POST_REG:
  case ARM::STRB_PRE_IMM:
  case ARM::STRH_POST:
  case ARM::STR_POST_IMM:
  case ARM::STR_POST_REG:
  case ARM::STR_PRE_IMM:
    return 2;

  case ARM::LDRSB_PRE:
  case ARM::LDRSH_PRE: {
    unsigned Rm = MI.getOperand(3).getReg();
    if (Rm == 0)
      return 3;            // Immediate-offset form.
    unsigned Rt = MI.getOperand(0).getReg();
    if (Rt == Rm)
      return 4;            // Destination aliases the offset register.
    unsigned ShOpVal = MI.getOperand(4).getImm();
    bool isSub = ARM_AM::getAM2Op(ShOpVal) == ARM_AM::sub;
    unsigned ShImm = ARM_AM::getAM2Offset(ShOpVal);
    if (!isSub &&
        (ShImm == 0 ||
         ((ShImm == 1 || ShImm == 2 || ShImm == 3) &&
          ARM_AM::getAM2ShiftOpc(ShOpVal) == ARM_AM::lsl)))
      return 3;
    return 4;
  }

  case ARM::LDRD: {
    unsigned Rt = MI.getOperand(0).getReg();
    unsigned Rn = MI.getOperand(2).getReg();
    unsigned Rm = MI.getOperand(3).getReg();
    if (Rm)
      return (ARM_AM::getAM3Op(MI.getOperand(4).getImm()) == ARM_AM::sub) ? 4
                                                                          : 3;
    return (Rt == Rn) ? 3 : 2;
  }

  case ARM::STRD: {
    unsigned Rm = MI.getOperand(3).getReg();
    if (Rm)
      return (ARM_AM::getAM3Op(MI.getOperand(4).getImm()) == ARM_AM::sub) ? 4
                                                                          : 3;
    return 2;
  }

  case ARM::LDRD_POST:
  case ARM::t2LDRD_POST:
    return 3;

  case ARM::STRD_POST:
  case ARM::t2STRD_POST:
    return 4;

  case ARM::LDRD_PRE: {
    unsigned Rt = MI.getOperand(0).getReg();
    unsigned Rn = MI.getOperand(3).getReg();
    unsigned Rm = MI.getOperand(4).getReg();
    if (Rm)
      return (ARM_AM::getAM3Op(MI.getOperand(5).getImm()) == ARM_AM::sub) ? 5
                                                                          : 4;
    return (Rt == Rn) ? 4 : 3;
  }

  case ARM::t2LDRD_PRE: {
    unsigned Rt = MI.getOperand(0).getReg();
    unsigned Rn = MI.getOperand(3).getReg();
    return (Rt == Rn) ? 4 : 3;
  }

  case ARM::STRD_PRE: {
    unsigned Rm = MI.getOperand(4).getReg();
    if (Rm)
      return (ARM_AM::getAM3Op(MI.getOperand(5).getImm()) == ARM_AM::sub) ? 5
                                                                          : 4;
    return 3;
  }

  case ARM::t2STRD_PRE:
    return 3;

  case ARM::t2LDR_POST:
  case ARM::t2LDRB_POST:
  case ARM::t2LDRB_PRE:
  case ARM::t2LDRSBi12:
  case ARM::t2LDRSBi8:
  case ARM::t2LDRSBpci:
  case ARM::t2LDRSBs:
  case ARM::t2LDRH_POST:
  case ARM::t2LDRH_PRE:
  case ARM::t2LDRSBT:
  case ARM::t2LDRSB_POST:
  case ARM::t2LDRSB_PRE:
  case ARM::t2LDRSH_POST:
  case ARM::t2LDRSH_PRE:
  case ARM::t2LDRSHi12:
  case ARM::t2LDRSHi8:
  case ARM::t2LDRSHpci:
  case ARM::t2LDRSHs:
    return 2;

  case ARM::t2LDRDi8: {
    unsigned Rt = MI.getOperand(0).getReg();
    unsigned Rn = MI.getOperand(2).getReg();
    return (Rt == Rn) ? 3 : 2;
  }

  case ARM::t2STRB_POST:
  case ARM::t2STRB_PRE:
  case ARM::t2STRBs:
  case ARM::t2STRDi8:
  case ARM::t2STRH_POST:
  case ARM::t2STRH_PRE:
  case ARM::t2STRHs:
  case ARM::t2STR_POST:
  case ARM::t2STR_PRE:
  case ARM::t2STRs:
    return 2;
  }
}

// Return the number of 32-bit words loaded by LDM or stored by STM. If this
// can't be easily determined return 0 (missing MachineMemOperand).
//
// FIXME: The current MachineInstr design does not support relying on machine
// mem operands to determine the width of a memory access. Instead, we expect
// the target to provide this information based on the instruction opcode and
// operands. However, using MachineMemOperand is the best solution now for
// two reasons:
//
// 1) getNumMicroOps tries to infer LDM memory width from the total number of MI
// operands. This is much more dangerous than using the MachineMemOperand
// sizes because CodeGen passes can insert/remove optional machine operands. In
// fact, it's totally incorrect for preRA passes and appears to be wrong for
// postRA passes as well.
//
// 2) getNumLDMAddresses is only used by the scheduling machine model and any
// machine model that calls this should handle the unknown (zero size) case.
//
// Long term, we should require a target hook that verifies MachineMemOperand
// sizes during MC lowering. That target hook should be local to MC lowering
// because we can't ensure that it is aware of other MI forms. Doing this will
// ensure that MachineMemOperands are correctly propagated through all passes.
unsigned ARMBaseInstrInfo::getNumLDMAddresses(const MachineInstr &MI) const {
  // Sum the byte sizes of all attached memory operands, then convert bytes
  // to 32-bit words (clamped to 16, see FIXME below).
  unsigned Size = 0;
  for (MachineInstr::mmo_iterator I = MI.memoperands_begin(),
                                  E = MI.memoperands_end();
       I != E; ++I) {
    Size += (*I)->getSize();
  }
  // FIXME: The scheduler currently can't handle values larger than 16. But
  // the values can actually go up to 32 for floating-point load/store
  // multiple (VLDMIA etc.). Also, the way this code is reasoning about memory
  // operations isn't right; we could end up with "extra" memory operands for
  // various reasons, like tail merge merging two memory operations.
  return std::min(Size / 4, 16U);
}

/// Micro-op count for a load/store-multiple on a subtarget whose ld/st
/// multiple timing is SingleIssuePlusExtras: one uop per register, one for
/// address computation, plus extras for base writeback and/or a PC write.
static unsigned getNumMicroOpsSingleIssuePlusExtras(unsigned Opc,
                                                    unsigned NumRegs) {
  unsigned UOps = 1 + NumRegs; // 1 for address computation.
  switch (Opc) {
  default:
    break;
  case ARM::VLDMDIA_UPD:
  case ARM::VLDMDDB_UPD:
  case ARM::VLDMSIA_UPD:
  case ARM::VLDMSDB_UPD:
  case ARM::VSTMDIA_UPD:
  case ARM::VSTMDDB_UPD:
  case ARM::VSTMSIA_UPD:
  case ARM::VSTMSDB_UPD:
  case ARM::LDMIA_UPD:
  case ARM::LDMDA_UPD:
  case ARM::LDMDB_UPD:
  case ARM::LDMIB_UPD:
  case ARM::STMIA_UPD:
  case ARM::STMDA_UPD:
  case ARM::STMDB_UPD:
  case ARM::STMIB_UPD:
  case ARM::tLDMIA_UPD:
  case ARM::tSTMIA_UPD:
  case ARM::t2LDMIA_UPD:
  case ARM::t2LDMDB_UPD:
  case ARM::t2STMIA_UPD:
  case ARM::t2STMDB_UPD:
    ++UOps; // One for base register writeback.
    break;
  case ARM::LDMIA_RET:
  case ARM::tPOP_RET:
  case ARM::t2LDMIA_RET:
    UOps += 2; // One for base reg wb, one for write to pc.
    break;
  }
  return UOps;
}

unsigned ARMBaseInstrInfo::getNumMicroOps(const InstrItineraryData *ItinData,
                                          const MachineInstr &MI) const {
  // Without an itinerary, assume every instruction is a single micro-op.
  if (!ItinData || ItinData->isEmpty())
    return 1;

  const MCInstrDesc &Desc = MI.getDesc();
  unsigned Class = Desc.getSchedClass();
  int ItinUOps = ItinData->getNumMicroOps(Class);
  if (ItinUOps >= 0) {
    // Swift refines load/store micro-op counts beyond the itinerary.
    if (Subtarget.isSwift() && (Desc.mayLoad() || Desc.mayStore()))
      return getNumMicroOpsSwiftLdSt(ItinData, MI);

    return ItinUOps;
  }

  // Negative itinerary count: the uop count is dynamic (variable_ops
  // load/store multiple) and must be computed from the operand list.
  unsigned Opc = MI.getOpcode();
  switch (Opc) {
  default:
    llvm_unreachable("Unexpected multi-uops instruction!");
  case ARM::VLDMQIA:
  case ARM::VSTMQIA:
    return 2;

  // The number of uOps for load / store multiple are determined by the number
  // registers.
  //
  // On Cortex-A8, each pair of register loads / stores can be scheduled on the
  // same cycle. The scheduling for the first load / store must be done
  // separately by assuming the address is not 64-bit aligned.
  //
  // On Cortex-A9, the formula is simply (#reg / 2) + (#reg % 2). If the address
  // is not 64-bit aligned, then AGU would take an extra cycle. For VFP / NEON
  // load / store multiple, the formula is (#reg / 2) + (#reg % 2) + 1.
  case ARM::VLDMDIA:
  case ARM::VLDMDIA_UPD:
  case ARM::VLDMDDB_UPD:
  case ARM::VLDMSIA:
  case ARM::VLDMSIA_UPD:
  case ARM::VLDMSDB_UPD:
  case ARM::VSTMDIA:
  case ARM::VSTMDIA_UPD:
  case ARM::VSTMDDB_UPD:
  case ARM::VSTMSIA:
  case ARM::VSTMSIA_UPD:
  case ARM::VSTMSDB_UPD: {
    // Register-list length = the variable_ops tail of the operand list.
    unsigned NumRegs = MI.getNumOperands() - Desc.getNumOperands();
    return (NumRegs / 2) + (NumRegs % 2) + 1;
  }

  case ARM::LDMIA_RET:
  case ARM::LDMIA:
  case ARM::LDMDA:
  case ARM::LDMDB:
  case ARM::LDMIB:
  case ARM::LDMIA_UPD:
  case ARM::LDMDA_UPD:
  case ARM::LDMDB_UPD:
  case ARM::LDMIB_UPD:
  case ARM::STMIA:
  case ARM::STMDA:
  case ARM::STMDB:
  case ARM::STMIB:
  case ARM::STMIA_UPD:
  case ARM::STMDA_UPD:
  case ARM::STMDB_UPD:
  case ARM::STMIB_UPD:
  case ARM::tLDMIA:
  case ARM::tLDMIA_UPD:
  case ARM::tSTMIA_UPD:
  case ARM::tPOP_RET:
  case ARM::tPOP:
  case ARM::tPUSH:
  case ARM::t2LDMIA_RET:
  case ARM::t2LDMIA:
  case ARM::t2LDMDB:
  case ARM::t2LDMIA_UPD:
  case ARM::t2LDMDB_UPD:
  case ARM::t2STMIA:
  case ARM::t2STMDB:
  case ARM::t2STMIA_UPD:
  case ARM::t2STMDB_UPD: {
    unsigned NumRegs = MI.getNumOperands() - Desc.getNumOperands() + 1;
    switch (Subtarget.getLdStMultipleTiming()) {
    case ARMSubtarget::SingleIssuePlusExtras:
      return getNumMicroOpsSingleIssuePlusExtras(Opc, NumRegs);
    case ARMSubtarget::SingleIssue:
      // Assume the worst.
      return NumRegs;
    case ARMSubtarget::DoubleIssue: {
      if (NumRegs < 4)
        return 2;
      // 4 registers would be issued: 2, 2.
      // 5 registers would be issued: 2, 2, 1.
      unsigned UOps = (NumRegs / 2);
      if (NumRegs % 2)
        ++UOps;
      return UOps;
    }
    case ARMSubtarget::DoubleIssueCheckUnalignedAccess: {
      unsigned UOps = (NumRegs / 2);
      // If there are odd number of registers or if it's not 64-bit aligned,
      // then it takes an extra AGU (Address Generation Unit) cycle.
      if ((NumRegs % 2) || !MI.hasOneMemOperand() ||
          (*MI.memoperands_begin())->getAlignment() < 8)
        ++UOps;
      return UOps;
    }
    }
  }
  }
  llvm_unreachable("Didn't find the number of microops");
}

int
ARMBaseInstrInfo::getVLDMDefCycle(const InstrItineraryData *ItinData,
                                  const MCInstrDesc &DefMCID,
                                  unsigned DefClass,
                                  unsigned DefIdx, unsigned DefAlign) const {
  // Position of this def inside the variable_ops register list; <= 0 means
  // the def is not a loaded register.
  int RegNo = (int)(DefIdx+1) - DefMCID.getNumOperands() + 1;
  if (RegNo <= 0)
    // Def is the address writeback.
    return ItinData->getOperandCycle(DefClass, DefIdx);

  int DefCycle;
  if (Subtarget.isCortexA8() || Subtarget.isCortexA7()) {
    // (regno / 2) + (regno % 2) + 1
    DefCycle = RegNo / 2 + 1;
    if (RegNo % 2)
      ++DefCycle;
  } else if (Subtarget.isLikeA9() || Subtarget.isSwift()) {
    DefCycle = RegNo;
    bool isSLoad = false;

    switch (DefMCID.getOpcode()) {
    default: break;
    case ARM::VLDMSIA:
    case ARM::VLDMSIA_UPD:
    case ARM::VLDMSDB_UPD:
      isSLoad = true;
      break;
    }

    // If there are odd number of 'S' registers or if it's not 64-bit aligned,
    // then it takes an extra cycle.
    if ((isSLoad && (RegNo % 2)) || DefAlign < 8)
      ++DefCycle;
  } else {
    // Assume the worst.
    DefCycle = RegNo + 2;
  }

  return DefCycle;
}

/// Return true if the base register (operand 0) of this instruction also
/// appears among the later register operands (e.g. inside the register list
/// of a load-multiple).
bool ARMBaseInstrInfo::isLDMBaseRegInList(const MachineInstr &MI) const {
  unsigned BaseReg = MI.getOperand(0).getReg();
  for (unsigned i = 1, sz = MI.getNumOperands(); i < sz; ++i) {
    const auto &Op = MI.getOperand(i);
    if (Op.isReg() && Op.getReg() == BaseReg)
      return true;
  }
  return false;
}

/// Number of registers in the variable_ops register list of a load-multiple,
/// derived from the difference between actual and declared operand counts.
unsigned
ARMBaseInstrInfo::getLDMVariableDefsSize(const MachineInstr &MI) const {
  // ins GPR:$Rn, $p (2xOp), reglist:$regs, variable_ops
  // (outs GPR:$wb), (ins GPR:$Rn, $p (2xOp), reglist:$regs, variable_ops)
  return MI.getNumOperands() + 1 - MI.getDesc().getNumOperands();
}

int
ARMBaseInstrInfo::getLDMDefCycle(const InstrItineraryData *ItinData,
                                 const MCInstrDesc &DefMCID,
                                 unsigned DefClass,
                                 unsigned DefIdx, unsigned DefAlign) const {
  // Position of this def inside the variable_ops register list; <= 0 means
  // the def is not a loaded register.
  int RegNo = (int)(DefIdx+1) - DefMCID.getNumOperands() + 1;
  if (RegNo <= 0)
    // Def is the address writeback.
    return ItinData->getOperandCycle(DefClass, DefIdx);

  int DefCycle;
  if (Subtarget.isCortexA8() || Subtarget.isCortexA7()) {
    // 4 registers would be issued: 1, 2, 1.
    // 5 registers would be issued: 1, 2, 2.
    DefCycle = RegNo / 2;
    if (DefCycle < 1)
      DefCycle = 1;
    // Result latency is issue cycle + 2: E2.
    DefCycle += 2;
  } else if (Subtarget.isLikeA9() || Subtarget.isSwift()) {
    DefCycle = (RegNo / 2);
    // If there are odd number of registers or if it's not 64-bit aligned,
    // then it takes an extra AGU (Address Generation Unit) cycle.
    if ((RegNo % 2) || DefAlign < 8)
      ++DefCycle;
    // Result latency is AGU cycles + 2.
    DefCycle += 2;
  } else {
    // Assume the worst.
    DefCycle = RegNo + 2;
  }

  return DefCycle;
}

int
ARMBaseInstrInfo::getVSTMUseCycle(const InstrItineraryData *ItinData,
                                  const MCInstrDesc &UseMCID,
                                  unsigned UseClass,
                                  unsigned UseIdx, unsigned UseAlign) const {
  // Position of this use inside the variable_ops register list; <= 0 means
  // the use is not a stored register.
  int RegNo = (int)(UseIdx+1) - UseMCID.getNumOperands() + 1;
  if (RegNo <= 0)
    return ItinData->getOperandCycle(UseClass, UseIdx);

  int UseCycle;
  if (Subtarget.isCortexA8() || Subtarget.isCortexA7()) {
    // (regno / 2) + (regno % 2) + 1
    UseCycle = RegNo / 2 + 1;
    if (RegNo % 2)
      ++UseCycle;
  } else if (Subtarget.isLikeA9() || Subtarget.isSwift()) {
    UseCycle = RegNo;
    bool isSStore = false;

    switch (UseMCID.getOpcode()) {
    default: break;
    case ARM::VSTMSIA:
    case ARM::VSTMSIA_UPD:
    case ARM::VSTMSDB_UPD:
      isSStore = true;
      break;
    }

    // If there are odd number of 'S' registers or if it's not 64-bit aligned,
    // then it takes an extra cycle.
    if ((isSStore && (RegNo % 2)) || UseAlign < 8)
      ++UseCycle;
  } else {
    // Assume the worst.
    UseCycle = RegNo + 2;
  }

  return UseCycle;
}

int
ARMBaseInstrInfo::getSTMUseCycle(const InstrItineraryData *ItinData,
                                 const MCInstrDesc &UseMCID,
                                 unsigned UseClass,
                                 unsigned UseIdx, unsigned UseAlign) const {
  // Position of this use inside the variable_ops register list; <= 0 means
  // the use is not a stored register.
  int RegNo = (int)(UseIdx+1) - UseMCID.getNumOperands() + 1;
  if (RegNo <= 0)
    return ItinData->getOperandCycle(UseClass, UseIdx);

  int UseCycle;
  if (Subtarget.isCortexA8() || Subtarget.isCortexA7()) {
    UseCycle = RegNo / 2;
    if (UseCycle < 2)
      UseCycle = 2;
    // Read in E3.
    UseCycle += 2;
  } else if (Subtarget.isLikeA9() || Subtarget.isSwift()) {
    UseCycle = (RegNo / 2);
    // If there are odd number of registers or if it's not 64-bit aligned,
    // then it takes an extra AGU (Address Generation Unit) cycle.
    if ((RegNo % 2) || UseAlign < 8)
      ++UseCycle;
  } else {
    // Assume the worst.
    UseCycle = 1;
  }
  return UseCycle;
}

int
ARMBaseInstrInfo::getOperandLatency(const InstrItineraryData *ItinData,
                                    const MCInstrDesc &DefMCID,
                                    unsigned DefIdx, unsigned DefAlign,
                                    const MCInstrDesc &UseMCID,
                                    unsigned UseIdx, unsigned UseAlign) const {
  unsigned DefClass = DefMCID.getSchedClass();
  unsigned UseClass = UseMCID.getSchedClass();

  // Fast path: both operands are statically described, let the itinerary
  // answer directly.
  if (DefIdx < DefMCID.getNumDefs() && UseIdx < UseMCID.getNumOperands())
    return ItinData->getOperandLatency(DefClass, DefIdx, UseClass, UseIdx);

  // This may be a def / use of a variable_ops instruction, the operand
  // latency might be determinable dynamically. Let the target try to
  // figure it out.
  int DefCycle = -1;
  bool LdmBypass = false;
  switch (DefMCID.getOpcode()) {
  default:
    DefCycle = ItinData->getOperandCycle(DefClass, DefIdx);
    break;

  case ARM::VLDMDIA:
  case ARM::VLDMDIA_UPD:
  case ARM::VLDMDDB_UPD:
  case ARM::VLDMSIA:
  case ARM::VLDMSIA_UPD:
  case ARM::VLDMSDB_UPD:
    DefCycle = getVLDMDefCycle(ItinData, DefMCID, DefClass, DefIdx, DefAlign);
    break;

  case ARM::LDMIA_RET:
  case ARM::LDMIA:
  case ARM::LDMDA:
  case ARM::LDMDB:
  case ARM::LDMIB:
  case ARM::LDMIA_UPD:
  case ARM::LDMDA_UPD:
  case ARM::LDMDB_UPD:
  case ARM::LDMIB_UPD:
  case ARM::tLDMIA:
  case ARM::tLDMIA_UPD:
  case ARM::tPUSH:
  case ARM::t2LDMIA_RET:
  case ARM::t2LDMIA:
  case ARM::t2LDMDB:
  case ARM::t2LDMIA_UPD:
  case ARM::t2LDMDB_UPD:
    LdmBypass = true;
    DefCycle = getLDMDefCycle(ItinData, DefMCID, DefClass, DefIdx, DefAlign);
    break;
  }

  if (DefCycle == -1)
    // We can't seem to determine the result latency of the def, assume it's 2.
    DefCycle = 2;

  int UseCycle = -1;
  switch (UseMCID.getOpcode()) {
  default:
    UseCycle = ItinData->getOperandCycle(UseClass, UseIdx);
    break;

  case ARM::VSTMDIA:
  case ARM::VSTMDIA_UPD:
  case ARM::VSTMDDB_UPD:
  case ARM::VSTMSIA:
  case ARM::VSTMSIA_UPD:
  case ARM::VSTMSDB_UPD:
    UseCycle = getVSTMUseCycle(ItinData, UseMCID, UseClass, UseIdx, UseAlign);
    break;

  case ARM::STMIA:
  case ARM::STMDA:
  case ARM::STMDB:
  case ARM::STMIB:
  case ARM::STMIA_UPD:
  case ARM::STMDA_UPD:
  case ARM::STMDB_UPD:
  case ARM::STMIB_UPD:
  case ARM::tSTMIA_UPD:
  case ARM::tPOP_RET:
  case ARM::tPOP:
  case ARM::t2STMIA:
  case ARM::t2STMDB:
  case ARM::t2STMIA_UPD:
  case ARM::t2STMDB_UPD:
    UseCycle = getSTMUseCycle(ItinData, UseMCID, UseClass, UseIdx, UseAlign);
    break;
  }

  if (UseCycle == -1)
    // Assume it's read in the first stage.
    UseCycle = 1;

  // Latency = def-ready cycle minus use-read cycle (+1), reduced by one when
  // the itinerary reports a forwarding (bypass) path between the operands.
  UseCycle = DefCycle - UseCycle + 1;
  if (UseCycle > 0) {
    if (LdmBypass) {
      // It's a variable_ops instruction so we can't use DefIdx here. Just use
      // first def operand.
      if (ItinData->hasPipelineForwarding(DefClass, DefMCID.getNumOperands()-1,
                                          UseClass, UseIdx))
        --UseCycle;
    } else if (ItinData->hasPipelineForwarding(DefClass, DefIdx,
                                               UseClass, UseIdx)) {
      --UseCycle;
    }
  }

  return UseCycle;
}

// Walk backwards through the bundle containing MI looking for the
// instruction that defines Reg, counting in Dist how many bundled
// instructions were skipped.  (Definition continues beyond this chunk;
// return-value handling is not visible here.)
static const MachineInstr *getBundledDefMI(const TargetRegisterInfo *TRI,
                                           const MachineInstr *MI, unsigned Reg,
                                           unsigned &DefIdx, unsigned &Dist) {
  Dist = 0;

  // Start from the last instruction inside MI's bundle.
  MachineBasicBlock::const_iterator I = MI; ++I;
  MachineBasicBlock::const_instr_iterator II = std::prev(I.getInstrIterator());
  assert(II->isInsideBundle() && "Empty bundle?");

  int Idx = -1;
  while (II->isInsideBundle()) {
    Idx = II->findRegisterDefOperandIdx(Reg, false, true, TRI);
    if (Idx != -1)
      break;
    --II;
    ++Dist;
  }

  assert(Idx != -1
&& "Cannot find bundled definition!"); 3998*0b57cec5SDimitry Andric DefIdx = Idx; 3999*0b57cec5SDimitry Andric return &*II; 4000*0b57cec5SDimitry Andric } 4001*0b57cec5SDimitry Andric 4002*0b57cec5SDimitry Andric static const MachineInstr *getBundledUseMI(const TargetRegisterInfo *TRI, 4003*0b57cec5SDimitry Andric const MachineInstr &MI, unsigned Reg, 4004*0b57cec5SDimitry Andric unsigned &UseIdx, unsigned &Dist) { 4005*0b57cec5SDimitry Andric Dist = 0; 4006*0b57cec5SDimitry Andric 4007*0b57cec5SDimitry Andric MachineBasicBlock::const_instr_iterator II = ++MI.getIterator(); 4008*0b57cec5SDimitry Andric assert(II->isInsideBundle() && "Empty bundle?"); 4009*0b57cec5SDimitry Andric MachineBasicBlock::const_instr_iterator E = MI.getParent()->instr_end(); 4010*0b57cec5SDimitry Andric 4011*0b57cec5SDimitry Andric // FIXME: This doesn't properly handle multiple uses. 4012*0b57cec5SDimitry Andric int Idx = -1; 4013*0b57cec5SDimitry Andric while (II != E && II->isInsideBundle()) { 4014*0b57cec5SDimitry Andric Idx = II->findRegisterUseOperandIdx(Reg, false, TRI); 4015*0b57cec5SDimitry Andric if (Idx != -1) 4016*0b57cec5SDimitry Andric break; 4017*0b57cec5SDimitry Andric if (II->getOpcode() != ARM::t2IT) 4018*0b57cec5SDimitry Andric ++Dist; 4019*0b57cec5SDimitry Andric ++II; 4020*0b57cec5SDimitry Andric } 4021*0b57cec5SDimitry Andric 4022*0b57cec5SDimitry Andric if (Idx == -1) { 4023*0b57cec5SDimitry Andric Dist = 0; 4024*0b57cec5SDimitry Andric return nullptr; 4025*0b57cec5SDimitry Andric } 4026*0b57cec5SDimitry Andric 4027*0b57cec5SDimitry Andric UseIdx = Idx; 4028*0b57cec5SDimitry Andric return &*II; 4029*0b57cec5SDimitry Andric } 4030*0b57cec5SDimitry Andric 4031*0b57cec5SDimitry Andric /// Return the number of cycles to add to (or subtract from) the static 4032*0b57cec5SDimitry Andric /// itinerary based on the def opcode and alignment. The caller will ensure that 4033*0b57cec5SDimitry Andric /// adjusted latency is at least one cycle. 
static int adjustDefLatency(const ARMSubtarget &Subtarget,
                            const MachineInstr &DefMI,
                            const MCInstrDesc &DefMCID, unsigned DefAlign) {
  int Adjust = 0;
  if (Subtarget.isCortexA8() || Subtarget.isLikeA9() || Subtarget.isCortexA7()) {
    // FIXME: Shifter op hack: no shift (i.e. [r +/- r]) or [r + r << 2]
    // variants are one cycle cheaper.
    switch (DefMCID.getOpcode()) {
    default: break;
    case ARM::LDRrs:
    case ARM::LDRBrs: {
      unsigned ShOpVal = DefMI.getOperand(3).getImm();
      unsigned ShImm = ARM_AM::getAM2Offset(ShOpVal);
      if (ShImm == 0 ||
          (ShImm == 2 && ARM_AM::getAM2ShiftOpc(ShOpVal) == ARM_AM::lsl))
        --Adjust;
      break;
    }
    case ARM::t2LDRs:
    case ARM::t2LDRBs:
    case ARM::t2LDRHs:
    case ARM::t2LDRSHs: {
      // Thumb2 mode: lsl only.
      unsigned ShAmt = DefMI.getOperand(3).getImm();
      if (ShAmt == 0 || ShAmt == 2)
        --Adjust;
      break;
    }
    }
  } else if (Subtarget.isSwift()) {
    // FIXME: Properly handle all of the latency adjustments for address
    // writeback.
    switch (DefMCID.getOpcode()) {
    default: break;
    case ARM::LDRrs:
    case ARM::LDRBrs: {
      unsigned ShOpVal = DefMI.getOperand(3).getImm();
      bool isSub = ARM_AM::getAM2Op(ShOpVal) == ARM_AM::sub;
      unsigned ShImm = ARM_AM::getAM2Offset(ShOpVal);
      // On Swift, a no-shift or small left-shift addressing mode is two
      // cycles cheaper; lsr #1 is one cycle cheaper. Subtracting offsets
      // gets no discount.
      if (!isSub &&
          (ShImm == 0 ||
           ((ShImm == 1 || ShImm == 2 || ShImm == 3) &&
            ARM_AM::getAM2ShiftOpc(ShOpVal) == ARM_AM::lsl)))
        Adjust -= 2;
      else if (!isSub &&
               ShImm == 1 && ARM_AM::getAM2ShiftOpc(ShOpVal) == ARM_AM::lsr)
        --Adjust;
      break;
    }
    case ARM::t2LDRs:
    case ARM::t2LDRBs:
    case ARM::t2LDRHs:
    case ARM::t2LDRSHs: {
      // Thumb2 mode: lsl only.
      unsigned ShAmt = DefMI.getOperand(3).getImm();
      if (ShAmt == 0 || ShAmt == 1 || ShAmt == 2 || ShAmt == 3)
        Adjust -= 2;
      break;
    }
    }
  }

  // NEON loads from addresses not known to be 64-bit aligned pay an extra
  // cycle on subtargets where this matters.
  if (DefAlign < 8 && Subtarget.checkVLDnAccessAlignment()) {
    switch (DefMCID.getOpcode()) {
    default: break;
    case ARM::VLD1q8:
    case ARM::VLD1q16:
    case ARM::VLD1q32:
    case ARM::VLD1q64:
    case ARM::VLD1q8wb_fixed:
    case ARM::VLD1q16wb_fixed:
    case ARM::VLD1q32wb_fixed:
    case ARM::VLD1q64wb_fixed:
    case ARM::VLD1q8wb_register:
    case ARM::VLD1q16wb_register:
    case ARM::VLD1q32wb_register:
    case ARM::VLD1q64wb_register:
    case ARM::VLD2d8:
    case ARM::VLD2d16:
    case ARM::VLD2d32:
    case ARM::VLD2q8:
    case ARM::VLD2q16:
    case ARM::VLD2q32:
    case ARM::VLD2d8wb_fixed:
    case ARM::VLD2d16wb_fixed:
    case ARM::VLD2d32wb_fixed:
    case ARM::VLD2q8wb_fixed:
    case ARM::VLD2q16wb_fixed:
    case ARM::VLD2q32wb_fixed:
    case ARM::VLD2d8wb_register:
    case ARM::VLD2d16wb_register:
    case ARM::VLD2d32wb_register:
    case ARM::VLD2q8wb_register:
    case ARM::VLD2q16wb_register:
    case ARM::VLD2q32wb_register:
    case ARM::VLD3d8:
    case ARM::VLD3d16:
    case ARM::VLD3d32:
    case ARM::VLD1d64T:
    case ARM::VLD3d8_UPD:
    case ARM::VLD3d16_UPD:
    case ARM::VLD3d32_UPD:
    case ARM::VLD1d64Twb_fixed:
    case ARM::VLD1d64Twb_register:
    case ARM::VLD3q8_UPD:
    case ARM::VLD3q16_UPD:
    case ARM::VLD3q32_UPD:
    case ARM::VLD4d8:
    case ARM::VLD4d16:
    case ARM::VLD4d32:
    case ARM::VLD1d64Q:
    case ARM::VLD4d8_UPD:
    case ARM::VLD4d16_UPD:
    case ARM::VLD4d32_UPD:
    case ARM::VLD1d64Qwb_fixed:
    case ARM::VLD1d64Qwb_register:
    case ARM::VLD4q8_UPD:
    case ARM::VLD4q16_UPD:
    case ARM::VLD4q32_UPD:
    case ARM::VLD1DUPq8:
    case ARM::VLD1DUPq16:
    case ARM::VLD1DUPq32:
    case ARM::VLD1DUPq8wb_fixed:
    case ARM::VLD1DUPq16wb_fixed:
    case ARM::VLD1DUPq32wb_fixed:
    case ARM::VLD1DUPq8wb_register:
    case ARM::VLD1DUPq16wb_register:
    case ARM::VLD1DUPq32wb_register:
    case ARM::VLD2DUPd8:
    case ARM::VLD2DUPd16:
    case ARM::VLD2DUPd32:
    case ARM::VLD2DUPd8wb_fixed:
    case ARM::VLD2DUPd16wb_fixed:
    case ARM::VLD2DUPd32wb_fixed:
    case ARM::VLD2DUPd8wb_register:
    case ARM::VLD2DUPd16wb_register:
    case ARM::VLD2DUPd32wb_register:
    case ARM::VLD4DUPd8:
    case ARM::VLD4DUPd16:
    case ARM::VLD4DUPd32:
    case ARM::VLD4DUPd8_UPD:
    case ARM::VLD4DUPd16_UPD:
    case ARM::VLD4DUPd32_UPD:
    case ARM::VLD1LNd8:
    case ARM::VLD1LNd16:
    case ARM::VLD1LNd32:
    case ARM::VLD1LNd8_UPD:
    case ARM::VLD1LNd16_UPD:
    case ARM::VLD1LNd32_UPD:
    case ARM::VLD2LNd8:
    case ARM::VLD2LNd16:
    case ARM::VLD2LNd32:
    case ARM::VLD2LNq16:
    case ARM::VLD2LNq32:
    case ARM::VLD2LNd8_UPD:
    case ARM::VLD2LNd16_UPD:
    case ARM::VLD2LNd32_UPD:
    case ARM::VLD2LNq16_UPD:
    case ARM::VLD2LNq32_UPD:
    case ARM::VLD4LNd8:
    case ARM::VLD4LNd16:
    case ARM::VLD4LNd32:
    case ARM::VLD4LNq16:
    case ARM::VLD4LNq32:
    case ARM::VLD4LNd8_UPD:
    case ARM::VLD4LNd16_UPD:
    case ARM::VLD4LNd32_UPD:
    case ARM::VLD4LNq16_UPD:
    case ARM::VLD4LNq32_UPD:
      // If the address is not 64-bit aligned, the latencies of these
      // instructions increases by one.
      ++Adjust;
      break;
    }
  }
  return Adjust;
}

/// Compute the operand latency between a def in DefMI and a use in UseMI.
/// Resolves bundles on either side to the actual defining/using instruction
/// first, then delegates to getOperandLatencyImpl. Returns -1 when no operand
/// latency can be determined (caller may fall back to getInstrLatency).
int ARMBaseInstrInfo::getOperandLatency(const InstrItineraryData *ItinData,
                                        const MachineInstr &DefMI,
                                        unsigned DefIdx,
                                        const MachineInstr &UseMI,
                                        unsigned UseIdx) const {
  // No operand latency. The caller may fall back to getInstrLatency.
  if (!ItinData || ItinData->isEmpty())
    return -1;

  const MachineOperand &DefMO = DefMI.getOperand(DefIdx);
  unsigned Reg = DefMO.getReg();

  // If the def is a bundle header, find the real def inside the bundle.
  // DefAdj accumulates the intra-bundle distance to fold into the latency.
  const MachineInstr *ResolvedDefMI = &DefMI;
  unsigned DefAdj = 0;
  if (DefMI.isBundle())
    ResolvedDefMI =
        getBundledDefMI(&getRegisterInfo(), &DefMI, Reg, DefIdx, DefAdj);
  if (ResolvedDefMI->isCopyLike() || ResolvedDefMI->isInsertSubreg() ||
      ResolvedDefMI->isRegSequence() || ResolvedDefMI->isImplicitDef()) {
    return 1;
  }

  // Same resolution on the use side; a bundle with no use of Reg yields -1.
  const MachineInstr *ResolvedUseMI = &UseMI;
  unsigned UseAdj = 0;
  if (UseMI.isBundle()) {
    ResolvedUseMI =
        getBundledUseMI(&getRegisterInfo(), UseMI, Reg, UseIdx, UseAdj);
    if (!ResolvedUseMI)
      return -1;
  }

  return getOperandLatencyImpl(
      ItinData, *ResolvedDefMI, DefIdx, ResolvedDefMI->getDesc(), DefAdj, DefMO,
      Reg, *ResolvedUseMI, UseIdx, ResolvedUseMI->getDesc(), UseAdj);
}

/// Worker for the MachineInstr-based getOperandLatency above, operating on
/// already-resolved (unbundled) def/use instructions. DefAdj/UseAdj carry the
/// intra-bundle distances accumulated by the caller. Returns -1 when no
/// operand latency can be determined.
int ARMBaseInstrInfo::getOperandLatencyImpl(
    const InstrItineraryData *ItinData, const MachineInstr &DefMI,
    unsigned DefIdx, const MCInstrDesc &DefMCID, unsigned DefAdj,
    const MachineOperand &DefMO, unsigned Reg, const MachineInstr &UseMI,
    unsigned UseIdx, const MCInstrDesc &UseMCID, unsigned UseAdj) const {
  if (Reg == ARM::CPSR) {
    if (DefMI.getOpcode() == ARM::FMSTAT) {
      // fpscr -> cpsr stalls over 20 cycles on A8 (and earlier?)
      return Subtarget.isLikeA9() ? 1 : 20;
    }

    // CPSR set and branch can be paired in the same cycle.
    if (UseMI.isBranch())
      return 0;

    // Otherwise it takes the instruction latency (generally one).
    unsigned Latency = getInstrLatency(ItinData, DefMI);

    // For Thumb2 and -Os, prefer scheduling CPSR setting instruction close to
    // its uses. Instructions which are otherwise scheduled between them may
    // incur a code size penalty (not able to use the CPSR setting 16-bit
    // instructions).
    if (Latency > 0 && Subtarget.isThumb2()) {
      const MachineFunction *MF = DefMI.getParent()->getParent();
      // FIXME: Use Function::hasOptSize().
      if (MF->getFunction().hasFnAttribute(Attribute::OptimizeForSize))
        --Latency;
    }
    return Latency;
  }

  // Implicit operands have no itinerary entry to consult.
  if (DefMO.isImplicit() || UseMI.getOperand(UseIdx).isImplicit())
    return -1;

  // Alignment is only meaningful when the instruction has exactly one memop.
  unsigned DefAlign = DefMI.hasOneMemOperand()
                          ? (*DefMI.memoperands_begin())->getAlignment()
                          : 0;
  unsigned UseAlign = UseMI.hasOneMemOperand()
                          ? (*UseMI.memoperands_begin())->getAlignment()
                          : 0;

  // Get the itinerary's latency if possible, and handle variable_ops.
  int Latency = getOperandLatency(ItinData, DefMCID, DefIdx, DefAlign, UseMCID,
                                  UseIdx, UseAlign);
  // Unable to find operand latency. The caller may resort to getInstrLatency.
  if (Latency < 0)
    return Latency;

  // Adjust for IT block position.
  int Adj = DefAdj + UseAdj;

  // Adjust for dynamic def-side opcode variants not captured by the itinerary.
  Adj += adjustDefLatency(Subtarget, DefMI, DefMCID, DefAlign);
  if (Adj >= 0 || (int)Latency > -Adj) {
    return Latency + Adj;
  }
  // Return the itinerary latency, which may be zero but not less than zero.
  return Latency;
}

int
ARMBaseInstrInfo::getOperandLatency(const InstrItineraryData *ItinData,
                                    SDNode *DefNode, unsigned DefIdx,
                                    SDNode *UseNode, unsigned UseIdx) const {
  if (!DefNode->isMachineOpcode())
    return 1;

  const MCInstrDesc &DefMCID = get(DefNode->getMachineOpcode());

  if (isZeroCost(DefMCID.Opcode))
    return 0;

  if (!ItinData || ItinData->isEmpty())
    return DefMCID.mayLoad() ? 3 : 1;

  if (!UseNode->isMachineOpcode()) {
    int Latency = ItinData->getOperandCycle(DefMCID.getSchedClass(), DefIdx);
    int Adj = Subtarget.getPreISelOperandLatencyAdjustment();
    int Threshold = 1 + Adj;
    return Latency <= Threshold ?
1 : Latency - Adj; 4328*0b57cec5SDimitry Andric } 4329*0b57cec5SDimitry Andric 4330*0b57cec5SDimitry Andric const MCInstrDesc &UseMCID = get(UseNode->getMachineOpcode()); 4331*0b57cec5SDimitry Andric const MachineSDNode *DefMN = dyn_cast<MachineSDNode>(DefNode); 4332*0b57cec5SDimitry Andric unsigned DefAlign = !DefMN->memoperands_empty() 4333*0b57cec5SDimitry Andric ? (*DefMN->memoperands_begin())->getAlignment() : 0; 4334*0b57cec5SDimitry Andric const MachineSDNode *UseMN = dyn_cast<MachineSDNode>(UseNode); 4335*0b57cec5SDimitry Andric unsigned UseAlign = !UseMN->memoperands_empty() 4336*0b57cec5SDimitry Andric ? (*UseMN->memoperands_begin())->getAlignment() : 0; 4337*0b57cec5SDimitry Andric int Latency = getOperandLatency(ItinData, DefMCID, DefIdx, DefAlign, 4338*0b57cec5SDimitry Andric UseMCID, UseIdx, UseAlign); 4339*0b57cec5SDimitry Andric 4340*0b57cec5SDimitry Andric if (Latency > 1 && 4341*0b57cec5SDimitry Andric (Subtarget.isCortexA8() || Subtarget.isLikeA9() || 4342*0b57cec5SDimitry Andric Subtarget.isCortexA7())) { 4343*0b57cec5SDimitry Andric // FIXME: Shifter op hack: no shift (i.e. [r +/- r]) or [r + r << 2] 4344*0b57cec5SDimitry Andric // variants are one cycle cheaper. 
4345*0b57cec5SDimitry Andric switch (DefMCID.getOpcode()) { 4346*0b57cec5SDimitry Andric default: break; 4347*0b57cec5SDimitry Andric case ARM::LDRrs: 4348*0b57cec5SDimitry Andric case ARM::LDRBrs: { 4349*0b57cec5SDimitry Andric unsigned ShOpVal = 4350*0b57cec5SDimitry Andric cast<ConstantSDNode>(DefNode->getOperand(2))->getZExtValue(); 4351*0b57cec5SDimitry Andric unsigned ShImm = ARM_AM::getAM2Offset(ShOpVal); 4352*0b57cec5SDimitry Andric if (ShImm == 0 || 4353*0b57cec5SDimitry Andric (ShImm == 2 && ARM_AM::getAM2ShiftOpc(ShOpVal) == ARM_AM::lsl)) 4354*0b57cec5SDimitry Andric --Latency; 4355*0b57cec5SDimitry Andric break; 4356*0b57cec5SDimitry Andric } 4357*0b57cec5SDimitry Andric case ARM::t2LDRs: 4358*0b57cec5SDimitry Andric case ARM::t2LDRBs: 4359*0b57cec5SDimitry Andric case ARM::t2LDRHs: 4360*0b57cec5SDimitry Andric case ARM::t2LDRSHs: { 4361*0b57cec5SDimitry Andric // Thumb2 mode: lsl only. 4362*0b57cec5SDimitry Andric unsigned ShAmt = 4363*0b57cec5SDimitry Andric cast<ConstantSDNode>(DefNode->getOperand(2))->getZExtValue(); 4364*0b57cec5SDimitry Andric if (ShAmt == 0 || ShAmt == 2) 4365*0b57cec5SDimitry Andric --Latency; 4366*0b57cec5SDimitry Andric break; 4367*0b57cec5SDimitry Andric } 4368*0b57cec5SDimitry Andric } 4369*0b57cec5SDimitry Andric } else if (DefIdx == 0 && Latency > 2 && Subtarget.isSwift()) { 4370*0b57cec5SDimitry Andric // FIXME: Properly handle all of the latency adjustments for address 4371*0b57cec5SDimitry Andric // writeback. 
4372*0b57cec5SDimitry Andric switch (DefMCID.getOpcode()) { 4373*0b57cec5SDimitry Andric default: break; 4374*0b57cec5SDimitry Andric case ARM::LDRrs: 4375*0b57cec5SDimitry Andric case ARM::LDRBrs: { 4376*0b57cec5SDimitry Andric unsigned ShOpVal = 4377*0b57cec5SDimitry Andric cast<ConstantSDNode>(DefNode->getOperand(2))->getZExtValue(); 4378*0b57cec5SDimitry Andric unsigned ShImm = ARM_AM::getAM2Offset(ShOpVal); 4379*0b57cec5SDimitry Andric if (ShImm == 0 || 4380*0b57cec5SDimitry Andric ((ShImm == 1 || ShImm == 2 || ShImm == 3) && 4381*0b57cec5SDimitry Andric ARM_AM::getAM2ShiftOpc(ShOpVal) == ARM_AM::lsl)) 4382*0b57cec5SDimitry Andric Latency -= 2; 4383*0b57cec5SDimitry Andric else if (ShImm == 1 && ARM_AM::getAM2ShiftOpc(ShOpVal) == ARM_AM::lsr) 4384*0b57cec5SDimitry Andric --Latency; 4385*0b57cec5SDimitry Andric break; 4386*0b57cec5SDimitry Andric } 4387*0b57cec5SDimitry Andric case ARM::t2LDRs: 4388*0b57cec5SDimitry Andric case ARM::t2LDRBs: 4389*0b57cec5SDimitry Andric case ARM::t2LDRHs: 4390*0b57cec5SDimitry Andric case ARM::t2LDRSHs: 4391*0b57cec5SDimitry Andric // Thumb2 mode: lsl 0-3 only. 
4392*0b57cec5SDimitry Andric Latency -= 2; 4393*0b57cec5SDimitry Andric break; 4394*0b57cec5SDimitry Andric } 4395*0b57cec5SDimitry Andric } 4396*0b57cec5SDimitry Andric 4397*0b57cec5SDimitry Andric if (DefAlign < 8 && Subtarget.checkVLDnAccessAlignment()) 4398*0b57cec5SDimitry Andric switch (DefMCID.getOpcode()) { 4399*0b57cec5SDimitry Andric default: break; 4400*0b57cec5SDimitry Andric case ARM::VLD1q8: 4401*0b57cec5SDimitry Andric case ARM::VLD1q16: 4402*0b57cec5SDimitry Andric case ARM::VLD1q32: 4403*0b57cec5SDimitry Andric case ARM::VLD1q64: 4404*0b57cec5SDimitry Andric case ARM::VLD1q8wb_register: 4405*0b57cec5SDimitry Andric case ARM::VLD1q16wb_register: 4406*0b57cec5SDimitry Andric case ARM::VLD1q32wb_register: 4407*0b57cec5SDimitry Andric case ARM::VLD1q64wb_register: 4408*0b57cec5SDimitry Andric case ARM::VLD1q8wb_fixed: 4409*0b57cec5SDimitry Andric case ARM::VLD1q16wb_fixed: 4410*0b57cec5SDimitry Andric case ARM::VLD1q32wb_fixed: 4411*0b57cec5SDimitry Andric case ARM::VLD1q64wb_fixed: 4412*0b57cec5SDimitry Andric case ARM::VLD2d8: 4413*0b57cec5SDimitry Andric case ARM::VLD2d16: 4414*0b57cec5SDimitry Andric case ARM::VLD2d32: 4415*0b57cec5SDimitry Andric case ARM::VLD2q8Pseudo: 4416*0b57cec5SDimitry Andric case ARM::VLD2q16Pseudo: 4417*0b57cec5SDimitry Andric case ARM::VLD2q32Pseudo: 4418*0b57cec5SDimitry Andric case ARM::VLD2d8wb_fixed: 4419*0b57cec5SDimitry Andric case ARM::VLD2d16wb_fixed: 4420*0b57cec5SDimitry Andric case ARM::VLD2d32wb_fixed: 4421*0b57cec5SDimitry Andric case ARM::VLD2q8PseudoWB_fixed: 4422*0b57cec5SDimitry Andric case ARM::VLD2q16PseudoWB_fixed: 4423*0b57cec5SDimitry Andric case ARM::VLD2q32PseudoWB_fixed: 4424*0b57cec5SDimitry Andric case ARM::VLD2d8wb_register: 4425*0b57cec5SDimitry Andric case ARM::VLD2d16wb_register: 4426*0b57cec5SDimitry Andric case ARM::VLD2d32wb_register: 4427*0b57cec5SDimitry Andric case ARM::VLD2q8PseudoWB_register: 4428*0b57cec5SDimitry Andric case ARM::VLD2q16PseudoWB_register: 4429*0b57cec5SDimitry 
Andric case ARM::VLD2q32PseudoWB_register: 4430*0b57cec5SDimitry Andric case ARM::VLD3d8Pseudo: 4431*0b57cec5SDimitry Andric case ARM::VLD3d16Pseudo: 4432*0b57cec5SDimitry Andric case ARM::VLD3d32Pseudo: 4433*0b57cec5SDimitry Andric case ARM::VLD1d8TPseudo: 4434*0b57cec5SDimitry Andric case ARM::VLD1d16TPseudo: 4435*0b57cec5SDimitry Andric case ARM::VLD1d32TPseudo: 4436*0b57cec5SDimitry Andric case ARM::VLD1d64TPseudo: 4437*0b57cec5SDimitry Andric case ARM::VLD1d64TPseudoWB_fixed: 4438*0b57cec5SDimitry Andric case ARM::VLD1d64TPseudoWB_register: 4439*0b57cec5SDimitry Andric case ARM::VLD3d8Pseudo_UPD: 4440*0b57cec5SDimitry Andric case ARM::VLD3d16Pseudo_UPD: 4441*0b57cec5SDimitry Andric case ARM::VLD3d32Pseudo_UPD: 4442*0b57cec5SDimitry Andric case ARM::VLD3q8Pseudo_UPD: 4443*0b57cec5SDimitry Andric case ARM::VLD3q16Pseudo_UPD: 4444*0b57cec5SDimitry Andric case ARM::VLD3q32Pseudo_UPD: 4445*0b57cec5SDimitry Andric case ARM::VLD3q8oddPseudo: 4446*0b57cec5SDimitry Andric case ARM::VLD3q16oddPseudo: 4447*0b57cec5SDimitry Andric case ARM::VLD3q32oddPseudo: 4448*0b57cec5SDimitry Andric case ARM::VLD3q8oddPseudo_UPD: 4449*0b57cec5SDimitry Andric case ARM::VLD3q16oddPseudo_UPD: 4450*0b57cec5SDimitry Andric case ARM::VLD3q32oddPseudo_UPD: 4451*0b57cec5SDimitry Andric case ARM::VLD4d8Pseudo: 4452*0b57cec5SDimitry Andric case ARM::VLD4d16Pseudo: 4453*0b57cec5SDimitry Andric case ARM::VLD4d32Pseudo: 4454*0b57cec5SDimitry Andric case ARM::VLD1d8QPseudo: 4455*0b57cec5SDimitry Andric case ARM::VLD1d16QPseudo: 4456*0b57cec5SDimitry Andric case ARM::VLD1d32QPseudo: 4457*0b57cec5SDimitry Andric case ARM::VLD1d64QPseudo: 4458*0b57cec5SDimitry Andric case ARM::VLD1d64QPseudoWB_fixed: 4459*0b57cec5SDimitry Andric case ARM::VLD1d64QPseudoWB_register: 4460*0b57cec5SDimitry Andric case ARM::VLD1q8HighQPseudo: 4461*0b57cec5SDimitry Andric case ARM::VLD1q8LowQPseudo_UPD: 4462*0b57cec5SDimitry Andric case ARM::VLD1q8HighTPseudo: 4463*0b57cec5SDimitry Andric case ARM::VLD1q8LowTPseudo_UPD: 
4464*0b57cec5SDimitry Andric case ARM::VLD1q16HighQPseudo: 4465*0b57cec5SDimitry Andric case ARM::VLD1q16LowQPseudo_UPD: 4466*0b57cec5SDimitry Andric case ARM::VLD1q16HighTPseudo: 4467*0b57cec5SDimitry Andric case ARM::VLD1q16LowTPseudo_UPD: 4468*0b57cec5SDimitry Andric case ARM::VLD1q32HighQPseudo: 4469*0b57cec5SDimitry Andric case ARM::VLD1q32LowQPseudo_UPD: 4470*0b57cec5SDimitry Andric case ARM::VLD1q32HighTPseudo: 4471*0b57cec5SDimitry Andric case ARM::VLD1q32LowTPseudo_UPD: 4472*0b57cec5SDimitry Andric case ARM::VLD1q64HighQPseudo: 4473*0b57cec5SDimitry Andric case ARM::VLD1q64LowQPseudo_UPD: 4474*0b57cec5SDimitry Andric case ARM::VLD1q64HighTPseudo: 4475*0b57cec5SDimitry Andric case ARM::VLD1q64LowTPseudo_UPD: 4476*0b57cec5SDimitry Andric case ARM::VLD4d8Pseudo_UPD: 4477*0b57cec5SDimitry Andric case ARM::VLD4d16Pseudo_UPD: 4478*0b57cec5SDimitry Andric case ARM::VLD4d32Pseudo_UPD: 4479*0b57cec5SDimitry Andric case ARM::VLD4q8Pseudo_UPD: 4480*0b57cec5SDimitry Andric case ARM::VLD4q16Pseudo_UPD: 4481*0b57cec5SDimitry Andric case ARM::VLD4q32Pseudo_UPD: 4482*0b57cec5SDimitry Andric case ARM::VLD4q8oddPseudo: 4483*0b57cec5SDimitry Andric case ARM::VLD4q16oddPseudo: 4484*0b57cec5SDimitry Andric case ARM::VLD4q32oddPseudo: 4485*0b57cec5SDimitry Andric case ARM::VLD4q8oddPseudo_UPD: 4486*0b57cec5SDimitry Andric case ARM::VLD4q16oddPseudo_UPD: 4487*0b57cec5SDimitry Andric case ARM::VLD4q32oddPseudo_UPD: 4488*0b57cec5SDimitry Andric case ARM::VLD1DUPq8: 4489*0b57cec5SDimitry Andric case ARM::VLD1DUPq16: 4490*0b57cec5SDimitry Andric case ARM::VLD1DUPq32: 4491*0b57cec5SDimitry Andric case ARM::VLD1DUPq8wb_fixed: 4492*0b57cec5SDimitry Andric case ARM::VLD1DUPq16wb_fixed: 4493*0b57cec5SDimitry Andric case ARM::VLD1DUPq32wb_fixed: 4494*0b57cec5SDimitry Andric case ARM::VLD1DUPq8wb_register: 4495*0b57cec5SDimitry Andric case ARM::VLD1DUPq16wb_register: 4496*0b57cec5SDimitry Andric case ARM::VLD1DUPq32wb_register: 4497*0b57cec5SDimitry Andric case ARM::VLD2DUPd8: 
4498*0b57cec5SDimitry Andric case ARM::VLD2DUPd16: 4499*0b57cec5SDimitry Andric case ARM::VLD2DUPd32: 4500*0b57cec5SDimitry Andric case ARM::VLD2DUPd8wb_fixed: 4501*0b57cec5SDimitry Andric case ARM::VLD2DUPd16wb_fixed: 4502*0b57cec5SDimitry Andric case ARM::VLD2DUPd32wb_fixed: 4503*0b57cec5SDimitry Andric case ARM::VLD2DUPd8wb_register: 4504*0b57cec5SDimitry Andric case ARM::VLD2DUPd16wb_register: 4505*0b57cec5SDimitry Andric case ARM::VLD2DUPd32wb_register: 4506*0b57cec5SDimitry Andric case ARM::VLD2DUPq8EvenPseudo: 4507*0b57cec5SDimitry Andric case ARM::VLD2DUPq8OddPseudo: 4508*0b57cec5SDimitry Andric case ARM::VLD2DUPq16EvenPseudo: 4509*0b57cec5SDimitry Andric case ARM::VLD2DUPq16OddPseudo: 4510*0b57cec5SDimitry Andric case ARM::VLD2DUPq32EvenPseudo: 4511*0b57cec5SDimitry Andric case ARM::VLD2DUPq32OddPseudo: 4512*0b57cec5SDimitry Andric case ARM::VLD3DUPq8EvenPseudo: 4513*0b57cec5SDimitry Andric case ARM::VLD3DUPq8OddPseudo: 4514*0b57cec5SDimitry Andric case ARM::VLD3DUPq16EvenPseudo: 4515*0b57cec5SDimitry Andric case ARM::VLD3DUPq16OddPseudo: 4516*0b57cec5SDimitry Andric case ARM::VLD3DUPq32EvenPseudo: 4517*0b57cec5SDimitry Andric case ARM::VLD3DUPq32OddPseudo: 4518*0b57cec5SDimitry Andric case ARM::VLD4DUPd8Pseudo: 4519*0b57cec5SDimitry Andric case ARM::VLD4DUPd16Pseudo: 4520*0b57cec5SDimitry Andric case ARM::VLD4DUPd32Pseudo: 4521*0b57cec5SDimitry Andric case ARM::VLD4DUPd8Pseudo_UPD: 4522*0b57cec5SDimitry Andric case ARM::VLD4DUPd16Pseudo_UPD: 4523*0b57cec5SDimitry Andric case ARM::VLD4DUPd32Pseudo_UPD: 4524*0b57cec5SDimitry Andric case ARM::VLD4DUPq8EvenPseudo: 4525*0b57cec5SDimitry Andric case ARM::VLD4DUPq8OddPseudo: 4526*0b57cec5SDimitry Andric case ARM::VLD4DUPq16EvenPseudo: 4527*0b57cec5SDimitry Andric case ARM::VLD4DUPq16OddPseudo: 4528*0b57cec5SDimitry Andric case ARM::VLD4DUPq32EvenPseudo: 4529*0b57cec5SDimitry Andric case ARM::VLD4DUPq32OddPseudo: 4530*0b57cec5SDimitry Andric case ARM::VLD1LNq8Pseudo: 4531*0b57cec5SDimitry Andric case 
ARM::VLD1LNq16Pseudo: 4532*0b57cec5SDimitry Andric case ARM::VLD1LNq32Pseudo: 4533*0b57cec5SDimitry Andric case ARM::VLD1LNq8Pseudo_UPD: 4534*0b57cec5SDimitry Andric case ARM::VLD1LNq16Pseudo_UPD: 4535*0b57cec5SDimitry Andric case ARM::VLD1LNq32Pseudo_UPD: 4536*0b57cec5SDimitry Andric case ARM::VLD2LNd8Pseudo: 4537*0b57cec5SDimitry Andric case ARM::VLD2LNd16Pseudo: 4538*0b57cec5SDimitry Andric case ARM::VLD2LNd32Pseudo: 4539*0b57cec5SDimitry Andric case ARM::VLD2LNq16Pseudo: 4540*0b57cec5SDimitry Andric case ARM::VLD2LNq32Pseudo: 4541*0b57cec5SDimitry Andric case ARM::VLD2LNd8Pseudo_UPD: 4542*0b57cec5SDimitry Andric case ARM::VLD2LNd16Pseudo_UPD: 4543*0b57cec5SDimitry Andric case ARM::VLD2LNd32Pseudo_UPD: 4544*0b57cec5SDimitry Andric case ARM::VLD2LNq16Pseudo_UPD: 4545*0b57cec5SDimitry Andric case ARM::VLD2LNq32Pseudo_UPD: 4546*0b57cec5SDimitry Andric case ARM::VLD4LNd8Pseudo: 4547*0b57cec5SDimitry Andric case ARM::VLD4LNd16Pseudo: 4548*0b57cec5SDimitry Andric case ARM::VLD4LNd32Pseudo: 4549*0b57cec5SDimitry Andric case ARM::VLD4LNq16Pseudo: 4550*0b57cec5SDimitry Andric case ARM::VLD4LNq32Pseudo: 4551*0b57cec5SDimitry Andric case ARM::VLD4LNd8Pseudo_UPD: 4552*0b57cec5SDimitry Andric case ARM::VLD4LNd16Pseudo_UPD: 4553*0b57cec5SDimitry Andric case ARM::VLD4LNd32Pseudo_UPD: 4554*0b57cec5SDimitry Andric case ARM::VLD4LNq16Pseudo_UPD: 4555*0b57cec5SDimitry Andric case ARM::VLD4LNq32Pseudo_UPD: 4556*0b57cec5SDimitry Andric // If the address is not 64-bit aligned, the latencies of these 4557*0b57cec5SDimitry Andric // instructions increases by one. 
    ++Latency;
    break;
  }

  return Latency;
}

/// Return the extra latency cost of predicating MI.
/// Calls, and instructions that implicitly define CPSR on subtargets where
/// such defs are not cheap to predicate, pay one extra cycle; copy-like
/// instructions (copies, subreg inserts, reg sequences, implicit defs) and
/// bundle headers cost nothing extra.
unsigned ARMBaseInstrInfo::getPredicationCost(const MachineInstr &MI) const {
  // Copy-like pseudos are lowered away and never pay a predication penalty.
  if (MI.isCopyLike() || MI.isInsertSubreg() || MI.isRegSequence() ||
      MI.isImplicitDef())
    return 0;

  // A bundle header itself carries no cost; its contents are costed
  // individually when queried.
  if (MI.isBundle())
    return 0;

  const MCInstrDesc &MCID = MI.getDesc();

  if (MCID.isCall() || (MCID.hasImplicitDefOfPhysReg(ARM::CPSR) &&
                        !Subtarget.cheapPredicableCPSRDef())) {
    // When predicated, CPSR is an additional source operand for CPSR updating
    // instructions, this apparently increases their latencies.
    return 1;
  }
  return 0;
}

/// Compute the latency of MI in cycles.
/// Copy-like instructions cost 1. For a bundle, the latency is the sum over
/// the bundled instructions, excluding the t2IT marker. Otherwise the
/// itinerary supplies the stage latency (adjusted by adjustDefLatency for
/// alignment/subtarget-specific variants); with no itinerary at all, loads
/// are assumed to take 3 cycles and everything else 1.
/// If PredCost is non-null it is set to the extra cost of predicating MI
/// (see getPredicationCost).
unsigned ARMBaseInstrInfo::getInstrLatency(const InstrItineraryData *ItinData,
                                           const MachineInstr &MI,
                                           unsigned *PredCost) const {
  if (MI.isCopyLike() || MI.isInsertSubreg() || MI.isRegSequence() ||
      MI.isImplicitDef())
    return 1;

  // An instruction scheduler typically runs on unbundled instructions, however
  // other passes may query the latency of a bundled instruction.
  if (MI.isBundle()) {
    unsigned Latency = 0;
    MachineBasicBlock::const_instr_iterator I = MI.getIterator();
    MachineBasicBlock::const_instr_iterator E = MI.getParent()->instr_end();
    while (++I != E && I->isInsideBundle()) {
      // The IT marker instruction is excluded: it models predication, not
      // issue latency.
      if (I->getOpcode() != ARM::t2IT)
        Latency += getInstrLatency(ItinData, *I, PredCost);
    }
    return Latency;
  }

  const MCInstrDesc &MCID = MI.getDesc();
  if (PredCost && (MCID.isCall() || (MCID.hasImplicitDefOfPhysReg(ARM::CPSR) &&
                                     !Subtarget.cheapPredicableCPSRDef()))) {
    // When predicated, CPSR is an additional source operand for CPSR updating
    // instructions, this apparently increases their latencies.
    *PredCost = 1;
  }
  // Be sure to call getStageLatency for an empty itinerary in case it has a
  // valid MinLatency property.
  if (!ItinData)
    return MI.mayLoad() ? 3 : 1;

  unsigned Class = MCID.getSchedClass();

  // For instructions with variable uops, use uops as latency.
  if (!ItinData->isEmpty() && ItinData->getNumMicroOps(Class) < 0)
    return getNumMicroOps(ItinData, MI);

  // For the common case, fall back on the itinerary's latency.
  unsigned Latency = ItinData->getStageLatency(Class);

  // Adjust for dynamic def-side opcode variants not captured by the itinerary.
  unsigned DefAlign =
      MI.hasOneMemOperand() ? (*MI.memoperands_begin())->getAlignment() : 0;
  int Adj = adjustDefLatency(Subtarget, MI, MCID, DefAlign);
  // Only apply a negative adjustment if it does not drive the latency
  // non-positive.
  if (Adj >= 0 || (int)Latency > -Adj) {
    return Latency + Adj;
  }
  return Latency;
}

/// SelectionDAG variant of getInstrLatency: return the stage latency for a
/// machine node, defaulting to 1 when no itinerary data is available.
/// The VLDMQIA/VSTMQIA pseudos are special-cased to 2 cycles.
int ARMBaseInstrInfo::getInstrLatency(const InstrItineraryData *ItinData,
                                      SDNode *Node) const {
  if (!Node->isMachineOpcode())
    return 1;

  if (!ItinData || ItinData->isEmpty())
    return 1;

  unsigned Opcode = Node->getMachineOpcode();
  switch (Opcode) {
  default:
    return ItinData->getStageLatency(get(Opcode).getSchedClass());
  case ARM::VLDMQIA:
  case ARM::VSTMQIA:
    return 2;
  }
}

/// Return true when the DefMI -> UseMI operand dependency is expensive enough
/// that hoisting DefMI out of a loop is worthwhile: any VFP dependency on a
/// non-pipelined VFP unit, or a VFP/NEON dependency with latency of 4 cycles
/// or more.
bool ARMBaseInstrInfo::hasHighOperandLatency(const TargetSchedModel &SchedModel,
                                             const MachineRegisterInfo *MRI,
                                             const MachineInstr &DefMI,
                                             unsigned DefIdx,
                                             const MachineInstr &UseMI,
                                             unsigned UseIdx) const {
  unsigned DDomain = DefMI.getDesc().TSFlags & ARMII::DomainMask;
  unsigned UDomain = UseMI.getDesc().TSFlags & ARMII::DomainMask;
  if (Subtarget.nonpipelinedVFP() &&
      (DDomain == ARMII::DomainVFP || UDomain == ARMII::DomainVFP))
    return true;

  // Hoist VFP / NEON instructions with 4 or higher latency.
  unsigned Latency =
      SchedModel.computeOperandLatency(&DefMI, DefIdx, &UseMI, UseIdx);
  if (Latency <= 3)
    return false;
  return DDomain == ARMII::DomainVFP || DDomain == ARMII::DomainNEON ||
         UDomain == ARMII::DomainVFP || UDomain == ARMII::DomainNEON;
}

/// Return true when DefMI's result at DefIdx is available quickly (within 2
/// cycles per the itinerary). Only general-domain (core integer) instructions
/// qualify; without itinerary data the answer is conservatively false.
bool ARMBaseInstrInfo::hasLowDefLatency(const TargetSchedModel &SchedModel,
                                        const MachineInstr &DefMI,
                                        unsigned DefIdx) const {
  const InstrItineraryData *ItinData = SchedModel.getInstrItineraries();
  if (!ItinData || ItinData->isEmpty())
    return false;

  unsigned DDomain = DefMI.getDesc().TSFlags & ARMII::DomainMask;
  if (DDomain == ARMII::DomainGeneral) {
    unsigned DefClass = DefMI.getDesc().getSchedClass();
    int DefCycle = ItinData->getOperandCycle(DefClass, DefIdx);
    return (DefCycle != -1 && DefCycle <= 2);
  }
  return false;
}

Andric bool ARMBaseInstrInfo::verifyInstruction(const MachineInstr &MI, 4691*0b57cec5SDimitry Andric StringRef &ErrInfo) const { 4692*0b57cec5SDimitry Andric if (convertAddSubFlagsOpcode(MI.getOpcode())) { 4693*0b57cec5SDimitry Andric ErrInfo = "Pseudo flag setting opcodes only exist in Selection DAG"; 4694*0b57cec5SDimitry Andric return false; 4695*0b57cec5SDimitry Andric } 4696*0b57cec5SDimitry Andric if (MI.getOpcode() == ARM::tMOVr && !Subtarget.hasV6Ops()) { 4697*0b57cec5SDimitry Andric // Make sure we don't generate a lo-lo mov that isn't supported. 4698*0b57cec5SDimitry Andric if (!ARM::hGPRRegClass.contains(MI.getOperand(0).getReg()) && 4699*0b57cec5SDimitry Andric !ARM::hGPRRegClass.contains(MI.getOperand(1).getReg())) { 4700*0b57cec5SDimitry Andric ErrInfo = "Non-flag-setting Thumb1 mov is v6-only"; 4701*0b57cec5SDimitry Andric return false; 4702*0b57cec5SDimitry Andric } 4703*0b57cec5SDimitry Andric } 4704*0b57cec5SDimitry Andric if (MI.getOpcode() == ARM::tPUSH || 4705*0b57cec5SDimitry Andric MI.getOpcode() == ARM::tPOP || 4706*0b57cec5SDimitry Andric MI.getOpcode() == ARM::tPOP_RET) { 4707*0b57cec5SDimitry Andric for (int i = 2, e = MI.getNumOperands(); i < e; ++i) { 4708*0b57cec5SDimitry Andric if (MI.getOperand(i).isImplicit() || 4709*0b57cec5SDimitry Andric !MI.getOperand(i).isReg()) 4710*0b57cec5SDimitry Andric continue; 4711*0b57cec5SDimitry Andric unsigned Reg = MI.getOperand(i).getReg(); 4712*0b57cec5SDimitry Andric if (Reg < ARM::R0 || Reg > ARM::R7) { 4713*0b57cec5SDimitry Andric if (!(MI.getOpcode() == ARM::tPUSH && Reg == ARM::LR) && 4714*0b57cec5SDimitry Andric !(MI.getOpcode() == ARM::tPOP_RET && Reg == ARM::PC)) { 4715*0b57cec5SDimitry Andric ErrInfo = "Unsupported register in Thumb1 push/pop"; 4716*0b57cec5SDimitry Andric return false; 4717*0b57cec5SDimitry Andric } 4718*0b57cec5SDimitry Andric } 4719*0b57cec5SDimitry Andric } 4720*0b57cec5SDimitry Andric } 4721*0b57cec5SDimitry Andric return true; 4722*0b57cec5SDimitry Andric } 

// LoadStackGuard has so far only been implemented for MachO. Different code
// sequence is needed for other targets.
//
// Expands the LOAD_STACK_GUARD pseudo at MI into:
//   LoadImmOpc Reg, @__stack_chk_guard (MO_NONLAZY)
//   [LoadOpc Reg, [Reg]   -- extra GOT indirection when GV is indirect]
//   LoadOpc Reg, [Reg]
void ARMBaseInstrInfo::expandLoadStackGuardBase(MachineBasicBlock::iterator MI,
                                                unsigned LoadImmOpc,
                                                unsigned LoadOpc) const {
  assert(!Subtarget.isROPI() && !Subtarget.isRWPI() &&
         "ROPI/RWPI not currently supported with stack guard");

  MachineBasicBlock &MBB = *MI->getParent();
  DebugLoc DL = MI->getDebugLoc();
  unsigned Reg = MI->getOperand(0).getReg();
  // The stack-guard global is carried on the pseudo's memory operand.
  const GlobalValue *GV =
      cast<GlobalValue>((*MI->memoperands_begin())->getValue());
  MachineInstrBuilder MIB;

  // Materialize the address of the guard variable.
  BuildMI(MBB, MI, DL, get(LoadImmOpc), Reg)
      .addGlobalAddress(GV, 0, ARMII::MO_NONLAZY);

  if (Subtarget.isGVIndirectSymbol(GV)) {
    // Indirect symbol: load the real address through the GOT first.
    MIB = BuildMI(MBB, MI, DL, get(LoadOpc), Reg);
    MIB.addReg(Reg, RegState::Kill).addImm(0);
    auto Flags = MachineMemOperand::MOLoad |
                 MachineMemOperand::MODereferenceable |
                 MachineMemOperand::MOInvariant;
    MachineMemOperand *MMO = MBB.getParent()->getMachineMemOperand(
        MachinePointerInfo::getGOT(*MBB.getParent()), Flags, 4, 4);
    MIB.addMemOperand(MMO).add(predOps(ARMCC::AL));
  }

  // Finally load the guard value itself, reusing the pseudo's mem operands.
  MIB = BuildMI(MBB, MI, DL, get(LoadOpc), Reg);
  MIB.addReg(Reg, RegState::Kill)
      .addImm(0)
      .cloneMemRefs(*MI)
      .add(predOps(ARMCC::AL));
}

/// If Opcode is a floating-point multiply-accumulate pseudo listed in the MLx
/// table, decompose it: MulOpc/AddSubOpc receive the equivalent separate
/// multiply and add/sub opcodes, NegAcc is set when the accumulator is
/// negated (i.e. a multiply-subtract form), and HasLane when the multiply
/// takes a lane index. Returns false if Opcode is not an MLx pseudo.
bool
ARMBaseInstrInfo::isFpMLxInstruction(unsigned Opcode, unsigned &MulOpc,
                                     unsigned &AddSubOpc,
                                     bool &NegAcc, bool &HasLane) const {
  DenseMap<unsigned, unsigned>::const_iterator I = MLxEntryMap.find(Opcode);
  if (I == MLxEntryMap.end())
    return false;

  const ARM_MLxEntry &Entry = ARM_MLxTable[I->second];
  MulOpc = Entry.MulOpc;
  AddSubOpc = Entry.AddSubOpc;
  NegAcc = Entry.NegAcc;
  HasLane = Entry.HasLane;
  return true;
}

//===----------------------------------------------------------------------===//
// Execution domains.
//===----------------------------------------------------------------------===//
//
// Some instructions go down the NEON pipeline, some go down the VFP pipeline,
// and some can go down both. The vmov instructions go down the VFP pipeline,
// but they can be changed to vorr equivalents that are executed by the NEON
// pipeline.
//
// We use the following execution domain numbering:
//
enum ARMExeDomain {
  ExeGeneric = 0,
  ExeVFP = 1,
  ExeNEON = 2
};

//
// Also see ARMInstrFormats.td and Domain* enums in ARMBaseInfo.h
//
// Returns (current domain, bitmask of domains MI could be swizzled to).
// A zero mask means the instruction cannot be moved to another domain.
std::pair<uint16_t, uint16_t>
ARMBaseInstrInfo::getExecutionDomain(const MachineInstr &MI) const {
  // If we don't have access to NEON instructions then we won't be able
  // to swizzle anything to the NEON domain. Check to make sure.
  if (Subtarget.hasNEON()) {
    // VMOVD, VMOVRS and VMOVSR are VFP instructions, but can be changed to NEON
    // if they are not predicated.
    if (MI.getOpcode() == ARM::VMOVD && !isPredicated(MI))
      return std::make_pair(ExeVFP, (1 << ExeVFP) | (1 << ExeNEON));

    // CortexA9 is particularly picky about mixing the two and wants these
    // converted.
    if (Subtarget.useNEONForFPMovs() && !isPredicated(MI) &&
        (MI.getOpcode() == ARM::VMOVRS || MI.getOpcode() == ARM::VMOVSR ||
         MI.getOpcode() == ARM::VMOVS))
      return std::make_pair(ExeVFP, (1 << ExeVFP) | (1 << ExeNEON));
  }
  // No other instructions can be swizzled, so just determine their domain.
  unsigned Domain = MI.getDesc().TSFlags & ARMII::DomainMask;

  if (Domain & ARMII::DomainNEON)
    return std::make_pair(ExeNEON, 0);

  // Certain instructions can go either way on Cortex-A8.
  // Treat them as NEON instructions.
  if ((Domain & ARMII::DomainNEONA8) && Subtarget.isCortexA8())
    return std::make_pair(ExeNEON, 0);

  if (Domain & ARMII::DomainVFP)
    return std::make_pair(ExeVFP, 0);

  return std::make_pair(ExeGeneric, 0);
}

// Map an S register to its containing D register; Lane receives 0 or 1
// depending on whether SReg is the low or high half of the D register.
static unsigned getCorrespondingDRegAndLane(const TargetRegisterInfo *TRI,
                                            unsigned SReg, unsigned &Lane) {
  unsigned DReg = TRI->getMatchingSuperReg(SReg, ARM::ssub_0, &ARM::DPRRegClass);
  Lane = 0;

  if (DReg != ARM::NoRegister)
    return DReg;

  Lane = 1;
  DReg = TRI->getMatchingSuperReg(SReg, ARM::ssub_1, &ARM::DPRRegClass);

  assert(DReg && "S-register with no D super-register?");
  return DReg;
}

/// getImplicitSPRUseForDPRUse - Given a use of a DPR register and lane,
/// set ImplicitSReg to a register number that must be marked as implicit-use or
/// zero if no register needs to be defined as implicit-use.
///
/// If the function cannot determine if an SPR should be marked implicit use or
/// not, it returns false.
///
/// This function handles cases where an instruction is being modified from taking
/// an SPR to a DPR[Lane]. A use of the DPR is being added, which may conflict
/// with an earlier def of an SPR corresponding to DPR[Lane^1] (i.e. the other
/// lane of the DPR).
///
/// If the other SPR is defined, an implicit-use of it should be added. Else,
/// (including the case where the DPR itself is defined), it should not.
///
static bool getImplicitSPRUseForDPRUse(const TargetRegisterInfo *TRI,
                                       MachineInstr &MI, unsigned DReg,
                                       unsigned Lane, unsigned &ImplicitSReg) {
  // If the DPR is defined or used already, the other SPR lane will be chained
  // correctly, so there is nothing to be done.
  if (MI.definesRegister(DReg, TRI) || MI.readsRegister(DReg, TRI)) {
    ImplicitSReg = 0;
    return true;
  }

  // Otherwise we need to go searching to see if the SPR is set explicitly.
  ImplicitSReg = TRI->getSubReg(DReg,
                                (Lane & 1) ? ARM::ssub_0 : ARM::ssub_1);
  MachineBasicBlock::LivenessQueryResult LQR =
      MI.getParent()->computeRegisterLiveness(TRI, ImplicitSReg, MI);

  if (LQR == MachineBasicBlock::LQR_Live)
    return true;
  else if (LQR == MachineBasicBlock::LQR_Unknown)
    return false;

  // If the register is known not to be live, there is no need to add an
  // implicit-use.
  ImplicitSReg = 0;
  return true;
}

/// Rewrite MI in place into the requested execution domain. Only the
/// swizzles advertised by getExecutionDomain are handled:
///   VMOVD  -> VORRd      (D-reg move via NEON OR)
///   VMOVRS -> VGETLNi32  (GPR <- D[lane])
///   VMOVSR -> VSETLNi32  (D[lane] <- GPR)
///   VMOVS  -> VDUPLN32d, or a pair of VEXTd32 when src and dst live in
///             different D registers
void ARMBaseInstrInfo::setExecutionDomain(MachineInstr &MI,
                                          unsigned Domain) const {
  unsigned DstReg, SrcReg, DReg;
  unsigned Lane;
  MachineInstrBuilder MIB(*MI.getParent()->getParent(), MI);
  const TargetRegisterInfo *TRI = &getRegisterInfo();
  switch (MI.getOpcode()) {
  default:
    llvm_unreachable("cannot handle opcode!");
    break;
  case ARM::VMOVD:
    if (Domain != ExeNEON)
      break;

    // Zap the predicate operands.
    assert(!isPredicated(MI) && "Cannot predicate a VORRd");

    // Make sure we've got NEON instructions.
    assert(Subtarget.hasNEON() && "VORRd requires NEON");

    // Source instruction is %DDst = VMOVD %DSrc, 14, %noreg (; implicits)
    DstReg = MI.getOperand(0).getReg();
    SrcReg = MI.getOperand(1).getReg();

    for (unsigned i = MI.getDesc().getNumOperands(); i; --i)
      MI.RemoveOperand(i - 1);

    // Change to a %DDst = VORRd %DSrc, %DSrc, 14, %noreg (; implicits)
    MI.setDesc(get(ARM::VORRd));
    MIB.addReg(DstReg, RegState::Define)
        .addReg(SrcReg)
        .addReg(SrcReg)
        .add(predOps(ARMCC::AL));
    break;
  case ARM::VMOVRS:
    if (Domain != ExeNEON)
      break;
    assert(!isPredicated(MI) && "Cannot predicate a VGETLN");

    // Source instruction is %RDst = VMOVRS %SSrc, 14, %noreg (; implicits)
    DstReg = MI.getOperand(0).getReg();
    SrcReg = MI.getOperand(1).getReg();

    for (unsigned i = MI.getDesc().getNumOperands(); i; --i)
      MI.RemoveOperand(i - 1);

    DReg = getCorrespondingDRegAndLane(TRI, SrcReg, Lane);

    // Convert to %RDst = VGETLNi32 %DSrc, Lane, 14, %noreg (; imps)
    // Note that DSrc has been widened and the other lane may be undef, which
    // contaminates the entire register.
    MI.setDesc(get(ARM::VGETLNi32));
    MIB.addReg(DstReg, RegState::Define)
        .addReg(DReg, RegState::Undef)
        .addImm(Lane)
        .add(predOps(ARMCC::AL));

    // The old source should be an implicit use, otherwise we might think it
    // was dead before here.
    MIB.addReg(SrcReg, RegState::Implicit);
    break;
  case ARM::VMOVSR: {
    if (Domain != ExeNEON)
      break;
    assert(!isPredicated(MI) && "Cannot predicate a VSETLN");

    // Source instruction is %SDst = VMOVSR %RSrc, 14, %noreg (; implicits)
    DstReg = MI.getOperand(0).getReg();
    SrcReg = MI.getOperand(1).getReg();

    DReg = getCorrespondingDRegAndLane(TRI, DstReg, Lane);

    unsigned ImplicitSReg;
    if (!getImplicitSPRUseForDPRUse(TRI, MI, DReg, Lane, ImplicitSReg))
      break;

    for (unsigned i = MI.getDesc().getNumOperands(); i; --i)
      MI.RemoveOperand(i - 1);

    // Convert to %DDst = VSETLNi32 %DDst, %RSrc, Lane, 14, %noreg (; imps)
    // Again DDst may be undefined at the beginning of this instruction.
    MI.setDesc(get(ARM::VSETLNi32));
    MIB.addReg(DReg, RegState::Define)
        .addReg(DReg, getUndefRegState(!MI.readsRegister(DReg, TRI)))
        .addReg(SrcReg)
        .addImm(Lane)
        .add(predOps(ARMCC::AL));

    // The narrower destination must be marked as set to keep previous chains
    // in place.
    MIB.addReg(DstReg, RegState::Define | RegState::Implicit);
    if (ImplicitSReg != 0)
      MIB.addReg(ImplicitSReg, RegState::Implicit);
    break;
  }
  case ARM::VMOVS: {
    if (Domain != ExeNEON)
      break;

    // Source instruction is %SDst = VMOVS %SSrc, 14, %noreg (; implicits)
    DstReg = MI.getOperand(0).getReg();
    SrcReg = MI.getOperand(1).getReg();

    unsigned DstLane = 0, SrcLane = 0, DDst, DSrc;
    DDst = getCorrespondingDRegAndLane(TRI, DstReg, DstLane);
    DSrc = getCorrespondingDRegAndLane(TRI, SrcReg, SrcLane);

    unsigned ImplicitSReg;
    if (!getImplicitSPRUseForDPRUse(TRI, MI, DSrc, SrcLane, ImplicitSReg))
      break;

    for (unsigned i = MI.getDesc().getNumOperands(); i; --i)
      MI.RemoveOperand(i - 1);

    if (DSrc == DDst) {
      // Destination can be:
      //     %DDst = VDUPLN32d %DDst, Lane, 14, %noreg (; implicits)
      MI.setDesc(get(ARM::VDUPLN32d));
      MIB.addReg(DDst, RegState::Define)
          .addReg(DDst, getUndefRegState(!MI.readsRegister(DDst, TRI)))
          .addImm(SrcLane)
          .add(predOps(ARMCC::AL));

      // Neither the source or the destination are naturally represented any
      // more, so add them in manually.
      MIB.addReg(DstReg, RegState::Implicit | RegState::Define);
      MIB.addReg(SrcReg, RegState::Implicit);
      if (ImplicitSReg != 0)
        MIB.addReg(ImplicitSReg, RegState::Implicit);
      break;
    }

    // In general there's no single instruction that can perform an S <-> S
    // move in NEON space, but a pair of VEXT instructions *can* do the
    // job. It turns out that the VEXTs needed will only use DSrc once, with
    // the position based purely on the combination of lane-0 and lane-1
    // involved. For example
    //     vmov s0, s2 -> vext.32 d0, d0, d1, #1  vext.32 d0, d0, d0, #1
    //     vmov s1, s3 -> vext.32 d0, d1, d0, #1  vext.32 d0, d0, d0, #1
    //     vmov s0, s3 -> vext.32 d0, d0, d0, #1  vext.32 d0, d1, d0, #1
    //     vmov s1, s2 -> vext.32 d0, d0, d0, #1  vext.32 d0, d0, d1, #1
    //
    // Pattern of the MachineInstrs is:
    //     %DDst = VEXTd32 %DSrc1, %DSrc2, Lane, 14, %noreg (;implicits)
    MachineInstrBuilder NewMIB;
    NewMIB = BuildMI(*MI.getParent(), MI, MI.getDebugLoc(), get(ARM::VEXTd32),
                     DDst);

    // On the first instruction, both DSrc and DDst may be undef if present.
    // Specifically when the original instruction didn't have them as an
    // <imp-use>.
    unsigned CurReg = SrcLane == 1 && DstLane == 1 ? DSrc : DDst;
    bool CurUndef = !MI.readsRegister(CurReg, TRI);
    NewMIB.addReg(CurReg, getUndefRegState(CurUndef));

    CurReg = SrcLane == 0 && DstLane == 0 ? DSrc : DDst;
    CurUndef = !MI.readsRegister(CurReg, TRI);
    NewMIB.addReg(CurReg, getUndefRegState(CurUndef))
        .addImm(1)
        .add(predOps(ARMCC::AL));

    if (SrcLane == DstLane)
      NewMIB.addReg(SrcReg, RegState::Implicit);

    // MI itself is recycled as the second VEXT of the pair.
    MI.setDesc(get(ARM::VEXTd32));
    MIB.addReg(DDst, RegState::Define);

    // On the second instruction, DDst has definitely been defined above, so
    // it is not undef. DSrc, if present, can be undef as above.
    CurReg = SrcLane == 1 && DstLane == 0 ? DSrc : DDst;
    CurUndef = CurReg == DSrc && !MI.readsRegister(CurReg, TRI);
    MIB.addReg(CurReg, getUndefRegState(CurUndef));

    CurReg = SrcLane == 0 && DstLane == 1 ? DSrc : DDst;
    CurUndef = CurReg == DSrc && !MI.readsRegister(CurReg, TRI);
    MIB.addReg(CurReg, getUndefRegState(CurUndef))
        .addImm(1)
        .add(predOps(ARMCC::AL));

    if (SrcLane != DstLane)
      MIB.addReg(SrcReg, RegState::Implicit);

    // As before, the original destination is no longer represented, add it
    // implicitly.
    MIB.addReg(DstReg, RegState::Define | RegState::Implicit);
    if (ImplicitSReg != 0)
      MIB.addReg(ImplicitSReg, RegState::Implicit);
    break;
  }
  }
}

//===----------------------------------------------------------------------===//
// Partial register updates
//===----------------------------------------------------------------------===//
//
// Swift renames NEON registers with 64-bit granularity. That means any
// instruction writing an S-reg implicitly reads the containing D-reg. The
// problem is mostly avoided by translating f32 operations to v2f32 operations
// on D-registers, but f32 loads are still a problem.
//
// These instructions can load an f32 into a NEON register:
//
// VLDRS - Only writes S, partial D update.
// VLD1LNd32 - Writes all D-regs, explicit partial D update, 2 uops.
// VLD1DUPd32 - Writes all D-regs, no partial reg update, 2 uops.
//
// FCONSTD can be used as a dependency-breaking instruction.
5095*0b57cec5SDimitry Andric unsigned ARMBaseInstrInfo::getPartialRegUpdateClearance( 5096*0b57cec5SDimitry Andric const MachineInstr &MI, unsigned OpNum, 5097*0b57cec5SDimitry Andric const TargetRegisterInfo *TRI) const { 5098*0b57cec5SDimitry Andric auto PartialUpdateClearance = Subtarget.getPartialUpdateClearance(); 5099*0b57cec5SDimitry Andric if (!PartialUpdateClearance) 5100*0b57cec5SDimitry Andric return 0; 5101*0b57cec5SDimitry Andric 5102*0b57cec5SDimitry Andric assert(TRI && "Need TRI instance"); 5103*0b57cec5SDimitry Andric 5104*0b57cec5SDimitry Andric const MachineOperand &MO = MI.getOperand(OpNum); 5105*0b57cec5SDimitry Andric if (MO.readsReg()) 5106*0b57cec5SDimitry Andric return 0; 5107*0b57cec5SDimitry Andric unsigned Reg = MO.getReg(); 5108*0b57cec5SDimitry Andric int UseOp = -1; 5109*0b57cec5SDimitry Andric 5110*0b57cec5SDimitry Andric switch (MI.getOpcode()) { 5111*0b57cec5SDimitry Andric // Normal instructions writing only an S-register. 5112*0b57cec5SDimitry Andric case ARM::VLDRS: 5113*0b57cec5SDimitry Andric case ARM::FCONSTS: 5114*0b57cec5SDimitry Andric case ARM::VMOVSR: 5115*0b57cec5SDimitry Andric case ARM::VMOVv8i8: 5116*0b57cec5SDimitry Andric case ARM::VMOVv4i16: 5117*0b57cec5SDimitry Andric case ARM::VMOVv2i32: 5118*0b57cec5SDimitry Andric case ARM::VMOVv2f32: 5119*0b57cec5SDimitry Andric case ARM::VMOVv1i64: 5120*0b57cec5SDimitry Andric UseOp = MI.findRegisterUseOperandIdx(Reg, false, TRI); 5121*0b57cec5SDimitry Andric break; 5122*0b57cec5SDimitry Andric 5123*0b57cec5SDimitry Andric // Explicitly reads the dependency. 5124*0b57cec5SDimitry Andric case ARM::VLD1LNd32: 5125*0b57cec5SDimitry Andric UseOp = 3; 5126*0b57cec5SDimitry Andric break; 5127*0b57cec5SDimitry Andric default: 5128*0b57cec5SDimitry Andric return 0; 5129*0b57cec5SDimitry Andric } 5130*0b57cec5SDimitry Andric 5131*0b57cec5SDimitry Andric // If this instruction actually reads a value from Reg, there is no unwanted 5132*0b57cec5SDimitry Andric // dependency. 
5133*0b57cec5SDimitry Andric if (UseOp != -1 && MI.getOperand(UseOp).readsReg()) 5134*0b57cec5SDimitry Andric return 0; 5135*0b57cec5SDimitry Andric 5136*0b57cec5SDimitry Andric // We must be able to clobber the whole D-reg. 5137*0b57cec5SDimitry Andric if (TargetRegisterInfo::isVirtualRegister(Reg)) { 5138*0b57cec5SDimitry Andric // Virtual register must be a def undef foo:ssub_0 operand. 5139*0b57cec5SDimitry Andric if (!MO.getSubReg() || MI.readsVirtualRegister(Reg)) 5140*0b57cec5SDimitry Andric return 0; 5141*0b57cec5SDimitry Andric } else if (ARM::SPRRegClass.contains(Reg)) { 5142*0b57cec5SDimitry Andric // Physical register: MI must define the full D-reg. 5143*0b57cec5SDimitry Andric unsigned DReg = TRI->getMatchingSuperReg(Reg, ARM::ssub_0, 5144*0b57cec5SDimitry Andric &ARM::DPRRegClass); 5145*0b57cec5SDimitry Andric if (!DReg || !MI.definesRegister(DReg, TRI)) 5146*0b57cec5SDimitry Andric return 0; 5147*0b57cec5SDimitry Andric } 5148*0b57cec5SDimitry Andric 5149*0b57cec5SDimitry Andric // MI has an unwanted D-register dependency. 5150*0b57cec5SDimitry Andric // Avoid defs in the previous N instructrions. 5151*0b57cec5SDimitry Andric return PartialUpdateClearance; 5152*0b57cec5SDimitry Andric } 5153*0b57cec5SDimitry Andric 5154*0b57cec5SDimitry Andric // Break a partial register dependency after getPartialRegUpdateClearance 5155*0b57cec5SDimitry Andric // returned non-zero. 
5156*0b57cec5SDimitry Andric void ARMBaseInstrInfo::breakPartialRegDependency( 5157*0b57cec5SDimitry Andric MachineInstr &MI, unsigned OpNum, const TargetRegisterInfo *TRI) const { 5158*0b57cec5SDimitry Andric assert(OpNum < MI.getDesc().getNumDefs() && "OpNum is not a def"); 5159*0b57cec5SDimitry Andric assert(TRI && "Need TRI instance"); 5160*0b57cec5SDimitry Andric 5161*0b57cec5SDimitry Andric const MachineOperand &MO = MI.getOperand(OpNum); 5162*0b57cec5SDimitry Andric unsigned Reg = MO.getReg(); 5163*0b57cec5SDimitry Andric assert(TargetRegisterInfo::isPhysicalRegister(Reg) && 5164*0b57cec5SDimitry Andric "Can't break virtual register dependencies."); 5165*0b57cec5SDimitry Andric unsigned DReg = Reg; 5166*0b57cec5SDimitry Andric 5167*0b57cec5SDimitry Andric // If MI defines an S-reg, find the corresponding D super-register. 5168*0b57cec5SDimitry Andric if (ARM::SPRRegClass.contains(Reg)) { 5169*0b57cec5SDimitry Andric DReg = ARM::D0 + (Reg - ARM::S0) / 2; 5170*0b57cec5SDimitry Andric assert(TRI->isSuperRegister(Reg, DReg) && "Register enums broken"); 5171*0b57cec5SDimitry Andric } 5172*0b57cec5SDimitry Andric 5173*0b57cec5SDimitry Andric assert(ARM::DPRRegClass.contains(DReg) && "Can only break D-reg deps"); 5174*0b57cec5SDimitry Andric assert(MI.definesRegister(DReg, TRI) && "MI doesn't clobber full D-reg"); 5175*0b57cec5SDimitry Andric 5176*0b57cec5SDimitry Andric // FIXME: In some cases, VLDRS can be changed to a VLD1DUPd32 which defines 5177*0b57cec5SDimitry Andric // the full D-register by loading the same value to both lanes. The 5178*0b57cec5SDimitry Andric // instruction is micro-coded with 2 uops, so don't do this until we can 5179*0b57cec5SDimitry Andric // properly schedule micro-coded instructions. The dispatcher stalls cause 5180*0b57cec5SDimitry Andric // too big regressions. 5181*0b57cec5SDimitry Andric 5182*0b57cec5SDimitry Andric // Insert the dependency-breaking FCONSTD before MI. 
5183*0b57cec5SDimitry Andric // 96 is the encoding of 0.5, but the actual value doesn't matter here. 5184*0b57cec5SDimitry Andric BuildMI(*MI.getParent(), MI, MI.getDebugLoc(), get(ARM::FCONSTD), DReg) 5185*0b57cec5SDimitry Andric .addImm(96) 5186*0b57cec5SDimitry Andric .add(predOps(ARMCC::AL)); 5187*0b57cec5SDimitry Andric MI.addRegisterKilled(DReg, TRI, true); 5188*0b57cec5SDimitry Andric } 5189*0b57cec5SDimitry Andric 5190*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::hasNOP() const { 5191*0b57cec5SDimitry Andric return Subtarget.getFeatureBits()[ARM::HasV6KOps]; 5192*0b57cec5SDimitry Andric } 5193*0b57cec5SDimitry Andric 5194*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::isSwiftFastImmShift(const MachineInstr *MI) const { 5195*0b57cec5SDimitry Andric if (MI->getNumOperands() < 4) 5196*0b57cec5SDimitry Andric return true; 5197*0b57cec5SDimitry Andric unsigned ShOpVal = MI->getOperand(3).getImm(); 5198*0b57cec5SDimitry Andric unsigned ShImm = ARM_AM::getSORegOffset(ShOpVal); 5199*0b57cec5SDimitry Andric // Swift supports faster shifts for: lsl 2, lsl 1, and lsr 1. 
5200*0b57cec5SDimitry Andric if ((ShImm == 1 && ARM_AM::getSORegShOp(ShOpVal) == ARM_AM::lsr) || 5201*0b57cec5SDimitry Andric ((ShImm == 1 || ShImm == 2) && 5202*0b57cec5SDimitry Andric ARM_AM::getSORegShOp(ShOpVal) == ARM_AM::lsl)) 5203*0b57cec5SDimitry Andric return true; 5204*0b57cec5SDimitry Andric 5205*0b57cec5SDimitry Andric return false; 5206*0b57cec5SDimitry Andric } 5207*0b57cec5SDimitry Andric 5208*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::getRegSequenceLikeInputs( 5209*0b57cec5SDimitry Andric const MachineInstr &MI, unsigned DefIdx, 5210*0b57cec5SDimitry Andric SmallVectorImpl<RegSubRegPairAndIdx> &InputRegs) const { 5211*0b57cec5SDimitry Andric assert(DefIdx < MI.getDesc().getNumDefs() && "Invalid definition index"); 5212*0b57cec5SDimitry Andric assert(MI.isRegSequenceLike() && "Invalid kind of instruction"); 5213*0b57cec5SDimitry Andric 5214*0b57cec5SDimitry Andric switch (MI.getOpcode()) { 5215*0b57cec5SDimitry Andric case ARM::VMOVDRR: 5216*0b57cec5SDimitry Andric // dX = VMOVDRR rY, rZ 5217*0b57cec5SDimitry Andric // is the same as: 5218*0b57cec5SDimitry Andric // dX = REG_SEQUENCE rY, ssub_0, rZ, ssub_1 5219*0b57cec5SDimitry Andric // Populate the InputRegs accordingly. 
5220*0b57cec5SDimitry Andric // rY 5221*0b57cec5SDimitry Andric const MachineOperand *MOReg = &MI.getOperand(1); 5222*0b57cec5SDimitry Andric if (!MOReg->isUndef()) 5223*0b57cec5SDimitry Andric InputRegs.push_back(RegSubRegPairAndIdx(MOReg->getReg(), 5224*0b57cec5SDimitry Andric MOReg->getSubReg(), ARM::ssub_0)); 5225*0b57cec5SDimitry Andric // rZ 5226*0b57cec5SDimitry Andric MOReg = &MI.getOperand(2); 5227*0b57cec5SDimitry Andric if (!MOReg->isUndef()) 5228*0b57cec5SDimitry Andric InputRegs.push_back(RegSubRegPairAndIdx(MOReg->getReg(), 5229*0b57cec5SDimitry Andric MOReg->getSubReg(), ARM::ssub_1)); 5230*0b57cec5SDimitry Andric return true; 5231*0b57cec5SDimitry Andric } 5232*0b57cec5SDimitry Andric llvm_unreachable("Target dependent opcode missing"); 5233*0b57cec5SDimitry Andric } 5234*0b57cec5SDimitry Andric 5235*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::getExtractSubregLikeInputs( 5236*0b57cec5SDimitry Andric const MachineInstr &MI, unsigned DefIdx, 5237*0b57cec5SDimitry Andric RegSubRegPairAndIdx &InputReg) const { 5238*0b57cec5SDimitry Andric assert(DefIdx < MI.getDesc().getNumDefs() && "Invalid definition index"); 5239*0b57cec5SDimitry Andric assert(MI.isExtractSubregLike() && "Invalid kind of instruction"); 5240*0b57cec5SDimitry Andric 5241*0b57cec5SDimitry Andric switch (MI.getOpcode()) { 5242*0b57cec5SDimitry Andric case ARM::VMOVRRD: 5243*0b57cec5SDimitry Andric // rX, rY = VMOVRRD dZ 5244*0b57cec5SDimitry Andric // is the same as: 5245*0b57cec5SDimitry Andric // rX = EXTRACT_SUBREG dZ, ssub_0 5246*0b57cec5SDimitry Andric // rY = EXTRACT_SUBREG dZ, ssub_1 5247*0b57cec5SDimitry Andric const MachineOperand &MOReg = MI.getOperand(2); 5248*0b57cec5SDimitry Andric if (MOReg.isUndef()) 5249*0b57cec5SDimitry Andric return false; 5250*0b57cec5SDimitry Andric InputReg.Reg = MOReg.getReg(); 5251*0b57cec5SDimitry Andric InputReg.SubReg = MOReg.getSubReg(); 5252*0b57cec5SDimitry Andric InputReg.SubIdx = DefIdx == 0 ? 
ARM::ssub_0 : ARM::ssub_1; 5253*0b57cec5SDimitry Andric return true; 5254*0b57cec5SDimitry Andric } 5255*0b57cec5SDimitry Andric llvm_unreachable("Target dependent opcode missing"); 5256*0b57cec5SDimitry Andric } 5257*0b57cec5SDimitry Andric 5258*0b57cec5SDimitry Andric bool ARMBaseInstrInfo::getInsertSubregLikeInputs( 5259*0b57cec5SDimitry Andric const MachineInstr &MI, unsigned DefIdx, RegSubRegPair &BaseReg, 5260*0b57cec5SDimitry Andric RegSubRegPairAndIdx &InsertedReg) const { 5261*0b57cec5SDimitry Andric assert(DefIdx < MI.getDesc().getNumDefs() && "Invalid definition index"); 5262*0b57cec5SDimitry Andric assert(MI.isInsertSubregLike() && "Invalid kind of instruction"); 5263*0b57cec5SDimitry Andric 5264*0b57cec5SDimitry Andric switch (MI.getOpcode()) { 5265*0b57cec5SDimitry Andric case ARM::VSETLNi32: 5266*0b57cec5SDimitry Andric // dX = VSETLNi32 dY, rZ, imm 5267*0b57cec5SDimitry Andric const MachineOperand &MOBaseReg = MI.getOperand(1); 5268*0b57cec5SDimitry Andric const MachineOperand &MOInsertedReg = MI.getOperand(2); 5269*0b57cec5SDimitry Andric if (MOInsertedReg.isUndef()) 5270*0b57cec5SDimitry Andric return false; 5271*0b57cec5SDimitry Andric const MachineOperand &MOIndex = MI.getOperand(3); 5272*0b57cec5SDimitry Andric BaseReg.Reg = MOBaseReg.getReg(); 5273*0b57cec5SDimitry Andric BaseReg.SubReg = MOBaseReg.getSubReg(); 5274*0b57cec5SDimitry Andric 5275*0b57cec5SDimitry Andric InsertedReg.Reg = MOInsertedReg.getReg(); 5276*0b57cec5SDimitry Andric InsertedReg.SubReg = MOInsertedReg.getSubReg(); 5277*0b57cec5SDimitry Andric InsertedReg.SubIdx = MOIndex.getImm() == 0 ? 
ARM::ssub_0 : ARM::ssub_1; 5278*0b57cec5SDimitry Andric return true; 5279*0b57cec5SDimitry Andric } 5280*0b57cec5SDimitry Andric llvm_unreachable("Target dependent opcode missing"); 5281*0b57cec5SDimitry Andric } 5282*0b57cec5SDimitry Andric 5283*0b57cec5SDimitry Andric std::pair<unsigned, unsigned> 5284*0b57cec5SDimitry Andric ARMBaseInstrInfo::decomposeMachineOperandsTargetFlags(unsigned TF) const { 5285*0b57cec5SDimitry Andric const unsigned Mask = ARMII::MO_OPTION_MASK; 5286*0b57cec5SDimitry Andric return std::make_pair(TF & Mask, TF & ~Mask); 5287*0b57cec5SDimitry Andric } 5288*0b57cec5SDimitry Andric 5289*0b57cec5SDimitry Andric ArrayRef<std::pair<unsigned, const char *>> 5290*0b57cec5SDimitry Andric ARMBaseInstrInfo::getSerializableDirectMachineOperandTargetFlags() const { 5291*0b57cec5SDimitry Andric using namespace ARMII; 5292*0b57cec5SDimitry Andric 5293*0b57cec5SDimitry Andric static const std::pair<unsigned, const char *> TargetFlags[] = { 5294*0b57cec5SDimitry Andric {MO_LO16, "arm-lo16"}, {MO_HI16, "arm-hi16"}}; 5295*0b57cec5SDimitry Andric return makeArrayRef(TargetFlags); 5296*0b57cec5SDimitry Andric } 5297*0b57cec5SDimitry Andric 5298*0b57cec5SDimitry Andric ArrayRef<std::pair<unsigned, const char *>> 5299*0b57cec5SDimitry Andric ARMBaseInstrInfo::getSerializableBitmaskMachineOperandTargetFlags() const { 5300*0b57cec5SDimitry Andric using namespace ARMII; 5301*0b57cec5SDimitry Andric 5302*0b57cec5SDimitry Andric static const std::pair<unsigned, const char *> TargetFlags[] = { 5303*0b57cec5SDimitry Andric {MO_COFFSTUB, "arm-coffstub"}, 5304*0b57cec5SDimitry Andric {MO_GOT, "arm-got"}, 5305*0b57cec5SDimitry Andric {MO_SBREL, "arm-sbrel"}, 5306*0b57cec5SDimitry Andric {MO_DLLIMPORT, "arm-dllimport"}, 5307*0b57cec5SDimitry Andric {MO_SECREL, "arm-secrel"}, 5308*0b57cec5SDimitry Andric {MO_NONLAZY, "arm-nonlazy"}}; 5309*0b57cec5SDimitry Andric return makeArrayRef(TargetFlags); 5310*0b57cec5SDimitry Andric } 5311*0b57cec5SDimitry Andric 
5312*0b57cec5SDimitry Andric bool llvm::registerDefinedBetween(unsigned Reg, 5313*0b57cec5SDimitry Andric MachineBasicBlock::iterator From, 5314*0b57cec5SDimitry Andric MachineBasicBlock::iterator To, 5315*0b57cec5SDimitry Andric const TargetRegisterInfo *TRI) { 5316*0b57cec5SDimitry Andric for (auto I = From; I != To; ++I) 5317*0b57cec5SDimitry Andric if (I->modifiesRegister(Reg, TRI)) 5318*0b57cec5SDimitry Andric return true; 5319*0b57cec5SDimitry Andric return false; 5320*0b57cec5SDimitry Andric } 5321*0b57cec5SDimitry Andric 5322*0b57cec5SDimitry Andric MachineInstr *llvm::findCMPToFoldIntoCBZ(MachineInstr *Br, 5323*0b57cec5SDimitry Andric const TargetRegisterInfo *TRI) { 5324*0b57cec5SDimitry Andric // Search backwards to the instruction that defines CSPR. This may or not 5325*0b57cec5SDimitry Andric // be a CMP, we check that after this loop. If we find another instruction 5326*0b57cec5SDimitry Andric // that reads cpsr, we return nullptr. 5327*0b57cec5SDimitry Andric MachineBasicBlock::iterator CmpMI = Br; 5328*0b57cec5SDimitry Andric while (CmpMI != Br->getParent()->begin()) { 5329*0b57cec5SDimitry Andric --CmpMI; 5330*0b57cec5SDimitry Andric if (CmpMI->modifiesRegister(ARM::CPSR, TRI)) 5331*0b57cec5SDimitry Andric break; 5332*0b57cec5SDimitry Andric if (CmpMI->readsRegister(ARM::CPSR, TRI)) 5333*0b57cec5SDimitry Andric break; 5334*0b57cec5SDimitry Andric } 5335*0b57cec5SDimitry Andric 5336*0b57cec5SDimitry Andric // Check that this inst is a CMP r[0-7], #0 and that the register 5337*0b57cec5SDimitry Andric // is not redefined between the cmp and the br. 
5338*0b57cec5SDimitry Andric if (CmpMI->getOpcode() != ARM::tCMPi8 && CmpMI->getOpcode() != ARM::t2CMPri) 5339*0b57cec5SDimitry Andric return nullptr; 5340*0b57cec5SDimitry Andric unsigned Reg = CmpMI->getOperand(0).getReg(); 5341*0b57cec5SDimitry Andric unsigned PredReg = 0; 5342*0b57cec5SDimitry Andric ARMCC::CondCodes Pred = getInstrPredicate(*CmpMI, PredReg); 5343*0b57cec5SDimitry Andric if (Pred != ARMCC::AL || CmpMI->getOperand(1).getImm() != 0) 5344*0b57cec5SDimitry Andric return nullptr; 5345*0b57cec5SDimitry Andric if (!isARMLowRegister(Reg)) 5346*0b57cec5SDimitry Andric return nullptr; 5347*0b57cec5SDimitry Andric if (registerDefinedBetween(Reg, CmpMI->getNextNode(), Br, TRI)) 5348*0b57cec5SDimitry Andric return nullptr; 5349*0b57cec5SDimitry Andric 5350*0b57cec5SDimitry Andric return &*CmpMI; 5351*0b57cec5SDimitry Andric } 5352