//===- AArch64FastISel.cpp - AArch64 FastISel implementation -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the AArch64-specific support for the FastISel class. Some
// of the target-specific code is generated by tablegen in the file
// AArch64GenFastISel.inc, which is #included here.
//
//===----------------------------------------------------------------------===//

#include "AArch64.h"
#include "AArch64CallingConvention.h"
#include "AArch64MachineFunctionInfo.h"
#include "AArch64RegisterInfo.h"
#include "AArch64Subtarget.h"
#include "MCTargetDesc/AArch64AddressingModes.h"
#include "Utils/AArch64BaseInfo.h"
#include "llvm/ADT/APFloat.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/FastISel.h"
#include "llvm/CodeGen/FunctionLoweringInfo.h"
#include "llvm/CodeGen/ISDOpcodes.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineConstantPool.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/RuntimeLibcallUtil.h"
#include "llvm/CodeGen/ValueTypes.h"
#include "llvm/CodeGenTypes/MachineValueType.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/IntrinsicsAArch64.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/MC/MCInstrDesc.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCSymbol.h"
#include "llvm/Support/AtomicOrdering.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CodeGen.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

namespace {

class AArch64FastISel final : public FastISel {
  class Address {
  public:
    using BaseKind = enum {
      RegBase,
      FrameIndexBase
    };

  private:
    BaseKind Kind = RegBase;
    AArch64_AM::ShiftExtendType ExtType = AArch64_AM::InvalidShiftExtend;
    union {
      unsigned Reg;
      int FI;
    } Base;
    unsigned OffsetReg = 0;
    unsigned Shift = 0;
    int64_t Offset = 0;
    const GlobalValue *GV = nullptr;

  public:
    Address() { Base.Reg = 0; }

    void setKind(BaseKind K) { Kind = K; }
    BaseKind getKind() const { return Kind; }
    void setExtendType(AArch64_AM::ShiftExtendType E) { ExtType = E; }
    AArch64_AM::ShiftExtendType getExtendType() const { return ExtType; }
    bool isRegBase() const { return Kind == RegBase; }
    bool isFIBase() const { return Kind == FrameIndexBase; }

    void setReg(unsigned Reg) {
      assert(isRegBase() && "Invalid base register access!");
      Base.Reg = Reg;
    }

    unsigned getReg() const {
      assert(isRegBase() && "Invalid base register access!");
      return Base.Reg;
    }

    void setOffsetReg(unsigned Reg) {
      OffsetReg = Reg;
    }

    unsigned getOffsetReg() const {
      return OffsetReg;
    }

    void setFI(unsigned FI) {
      assert(isFIBase() && "Invalid base frame index access!");
      Base.FI = FI;
    }

    unsigned getFI() const {
      assert(isFIBase() && "Invalid base frame index access!");
      return Base.FI;
    }

    void setOffset(int64_t O) { Offset = O; }
    int64_t getOffset() { return Offset; }
    void setShift(unsigned S) { Shift = S; }
    unsigned getShift() { return Shift; }

    void setGlobalValue(const GlobalValue *G) { GV = G; }
    const GlobalValue *getGlobalValue() { return GV; }
  };

  /// Subtarget - Keep a pointer to the AArch64Subtarget around so that we can
  /// make the right decision when generating code for different targets.
  const AArch64Subtarget *Subtarget;
  LLVMContext *Context;

  bool fastLowerArguments() override;
  bool fastLowerCall(CallLoweringInfo &CLI) override;
  bool fastLowerIntrinsicCall(const IntrinsicInst *II) override;

private:
  // Selection routines.
  bool selectAddSub(const Instruction *I);
  bool selectLogicalOp(const Instruction *I);
  bool selectLoad(const Instruction *I);
  bool selectStore(const Instruction *I);
  bool selectBranch(const Instruction *I);
  bool selectIndirectBr(const Instruction *I);
  bool selectCmp(const Instruction *I);
  bool selectSelect(const Instruction *I);
  bool selectFPExt(const Instruction *I);
  bool selectFPTrunc(const Instruction *I);
  bool selectFPToInt(const Instruction *I, bool Signed);
  bool selectIntToFP(const Instruction *I, bool Signed);
  bool selectRem(const Instruction *I, unsigned ISDOpcode);
  bool selectRet(const Instruction *I);
  bool selectTrunc(const Instruction *I);
  bool selectIntExt(const Instruction *I);
  bool selectMul(const Instruction *I);
  bool selectShift(const Instruction *I);
  bool selectBitCast(const Instruction *I);
  bool selectFRem(const Instruction *I);
  bool selectSDiv(const Instruction *I);
  bool selectGetElementPtr(const Instruction *I);
  bool selectAtomicCmpXchg(const AtomicCmpXchgInst *I);

  // Utility helper routines.
  bool isTypeLegal(Type *Ty, MVT &VT);
  bool isTypeSupported(Type *Ty, MVT &VT, bool IsVectorAllowed = false);
  bool isValueAvailable(const Value *V) const;
  bool computeAddress(const Value *Obj, Address &Addr, Type *Ty = nullptr);
  bool computeCallAddress(const Value *V, Address &Addr);
  bool simplifyAddress(Address &Addr, MVT VT);
  void addLoadStoreOperands(Address &Addr, const MachineInstrBuilder &MIB,
                            MachineMemOperand::Flags Flags,
                            unsigned ScaleFactor, MachineMemOperand *MMO);
  bool isMemCpySmall(uint64_t Len, MaybeAlign Alignment);
  bool tryEmitSmallMemCpy(Address Dest, Address Src, uint64_t Len,
                          MaybeAlign Alignment);
  bool foldXALUIntrinsic(AArch64CC::CondCode &CC, const Instruction *I,
                         const Value *Cond);
  bool optimizeIntExtLoad(const Instruction *I, MVT RetVT, MVT SrcVT);
  bool optimizeSelect(const SelectInst *SI);
  unsigned getRegForGEPIndex(const Value *Idx);

  // Emit helper routines.
  unsigned emitAddSub(bool UseAdd, MVT RetVT, const Value *LHS,
                      const Value *RHS, bool SetFlags = false,
                      bool WantResult = true, bool IsZExt = false);
  unsigned emitAddSub_rr(bool UseAdd, MVT RetVT, unsigned LHSReg,
                         unsigned RHSReg, bool SetFlags = false,
                         bool WantResult = true);
  unsigned emitAddSub_ri(bool UseAdd, MVT RetVT, unsigned LHSReg,
                         uint64_t Imm, bool SetFlags = false,
                         bool WantResult = true);
  unsigned emitAddSub_rs(bool UseAdd, MVT RetVT, unsigned LHSReg,
                         unsigned RHSReg, AArch64_AM::ShiftExtendType ShiftType,
                         uint64_t ShiftImm, bool SetFlags = false,
                         bool WantResult = true);
  unsigned emitAddSub_rx(bool UseAdd, MVT RetVT, unsigned LHSReg,
                         unsigned RHSReg, AArch64_AM::ShiftExtendType ExtType,
                         uint64_t ShiftImm, bool SetFlags = false,
                         bool WantResult = true);

  // Emit functions.
  bool emitCompareAndBranch(const BranchInst *BI);
  bool emitCmp(const Value *LHS, const Value *RHS, bool IsZExt);
  bool emitICmp(MVT RetVT, const Value *LHS, const Value *RHS, bool IsZExt);
  bool emitICmp_ri(MVT RetVT, unsigned LHSReg, uint64_t Imm);
  bool emitFCmp(MVT RetVT, const Value *LHS, const Value *RHS);
  unsigned emitLoad(MVT VT, MVT ResultVT, Address Addr, bool WantZExt = true,
                    MachineMemOperand *MMO = nullptr);
  bool emitStore(MVT VT, unsigned SrcReg, Address Addr,
                 MachineMemOperand *MMO = nullptr);
  bool emitStoreRelease(MVT VT, unsigned SrcReg, unsigned AddrReg,
                        MachineMemOperand *MMO = nullptr);
  unsigned emitIntExt(MVT SrcVT, unsigned SrcReg, MVT DestVT, bool isZExt);
  unsigned emiti1Ext(unsigned SrcReg, MVT DestVT, bool isZExt);
  unsigned emitAdd(MVT RetVT, const Value *LHS, const Value *RHS,
                   bool SetFlags = false, bool WantResult = true,
                   bool IsZExt = false);
  unsigned emitAdd_ri_(MVT VT, unsigned Op0, int64_t Imm);
  unsigned emitSub(MVT RetVT, const Value *LHS, const Value *RHS,
                   bool SetFlags = false, bool WantResult = true,
                   bool IsZExt = false);
  unsigned emitSubs_rr(MVT RetVT, unsigned LHSReg, unsigned RHSReg,
                       bool WantResult = true);
  unsigned emitSubs_rs(MVT RetVT, unsigned LHSReg, unsigned RHSReg,
                       AArch64_AM::ShiftExtendType ShiftType, uint64_t ShiftImm,
                       bool WantResult = true);
  unsigned emitLogicalOp(unsigned ISDOpc, MVT RetVT, const Value *LHS,
                         const Value *RHS);
  unsigned emitLogicalOp_ri(unsigned ISDOpc, MVT RetVT, unsigned LHSReg,
                            uint64_t Imm);
  unsigned emitLogicalOp_rs(unsigned ISDOpc, MVT RetVT, unsigned LHSReg,
                            unsigned RHSReg, uint64_t ShiftImm);
  unsigned emitAnd_ri(MVT RetVT, unsigned LHSReg, uint64_t Imm);
  unsigned emitMul_rr(MVT RetVT, unsigned Op0, unsigned Op1);
  unsigned emitSMULL_rr(MVT RetVT, unsigned Op0, unsigned Op1);
  unsigned emitUMULL_rr(MVT RetVT, unsigned Op0, unsigned Op1);
  unsigned emitLSL_rr(MVT RetVT, unsigned Op0Reg, unsigned Op1Reg);
  unsigned emitLSL_ri(MVT RetVT, MVT SrcVT, unsigned Op0Reg, uint64_t Imm,
                      bool IsZExt = true);
  unsigned emitLSR_rr(MVT RetVT, unsigned Op0Reg, unsigned Op1Reg);
  unsigned emitLSR_ri(MVT RetVT, MVT SrcVT, unsigned Op0Reg, uint64_t Imm,
                      bool IsZExt = true);
  unsigned emitASR_rr(MVT RetVT, unsigned Op0Reg, unsigned Op1Reg);
  unsigned emitASR_ri(MVT RetVT, MVT SrcVT, unsigned Op0Reg, uint64_t Imm,
                      bool IsZExt = false);

  unsigned materializeInt(const ConstantInt *CI, MVT VT);
  unsigned materializeFP(const ConstantFP *CFP, MVT VT);
  unsigned materializeGV(const GlobalValue *GV);

  // Call handling routines.
private:
  CCAssignFn *CCAssignFnForCall(CallingConv::ID CC) const;
  bool processCallArgs(CallLoweringInfo &CLI, SmallVectorImpl<MVT> &ArgVTs,
                       unsigned &NumBytes);
  bool finishCall(CallLoweringInfo &CLI, unsigned NumBytes);

public:
  // Backend specific FastISel code.
  unsigned fastMaterializeAlloca(const AllocaInst *AI) override;
  unsigned fastMaterializeConstant(const Constant *C) override;
  unsigned fastMaterializeFloatZero(const ConstantFP* CF) override;

  explicit AArch64FastISel(FunctionLoweringInfo &FuncInfo,
                           const TargetLibraryInfo *LibInfo)
      : FastISel(FuncInfo, LibInfo, /*SkipTargetIndependentISel=*/true) {
    Subtarget = &FuncInfo.MF->getSubtarget<AArch64Subtarget>();
    Context = &FuncInfo.Fn->getContext();
  }

  bool fastSelectInstruction(const Instruction *I) override;

#include "AArch64GenFastISel.inc"
};

} // end anonymous namespace

/// Check if the sign-/zero-extend will be a noop.
static bool isIntExtFree(const Instruction *I) {
  assert((isa<ZExtInst>(I) || isa<SExtInst>(I)) &&
         "Unexpected integer extend instruction.");
  assert(!I->getType()->isVectorTy() && I->getType()->isIntegerTy() &&
         "Unexpected value type.");
  bool IsZExt = isa<ZExtInst>(I);

  if (const auto *LI = dyn_cast<LoadInst>(I->getOperand(0)))
    if (LI->hasOneUse())
      return true;

  if (const auto *Arg = dyn_cast<Argument>(I->getOperand(0)))
    if ((IsZExt && Arg->hasZExtAttr()) || (!IsZExt && Arg->hasSExtAttr()))
      return true;

  return false;
}

/// Determine the implicit scale factor that is applied by a memory
/// operation for a given value type.
static unsigned getImplicitScaleFactor(MVT VT) {
  switch (VT.SimpleTy) {
  default:
    return 0; // invalid
  case MVT::i1: // fall-through
  case MVT::i8:
    return 1;
  case MVT::i16:
    return 2;
  case MVT::i32: // fall-through
  case MVT::f32:
    return 4;
  case MVT::i64: // fall-through
  case MVT::f64:
    return 8;
  }
}

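/// Pick the CCAssignFn FastISel should use when lowering outgoing call
/// arguments for the given calling convention.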
CCAssignFn *AArch64FastISel::CCAssignFnForCall(CallingConv::ID CC) const {
  if (CC == CallingConv::GHC)
    return CC_AArch64_GHC;
  if (CC == CallingConv::CFGuard_Check)
    return CC_AArch64_Win64_CFGuard_Check;
  if (Subtarget->isTargetDarwin())
    return CC_AArch64_DarwinPCS;
  if (Subtarget->isTargetWindows())
    return CC_AArch64_Win64PCS;
  return CC_AArch64_AAPCS;
}

unsigned AArch64FastISel::fastMaterializeAlloca(const AllocaInst *AI) {
  assert(TLI.getValueType(DL, AI->getType(), true) == MVT::i64 &&
         "Alloca should always return a pointer.");

  // Don't handle dynamic allocas.
  if (!FuncInfo.StaticAllocaMap.count(AI))
    return 0;

  DenseMap<const AllocaInst *, int>::iterator SI =
      FuncInfo.StaticAllocaMap.find(AI);

  if (SI != FuncInfo.StaticAllocaMap.end()) {
    Register ResultReg = createResultReg(&AArch64::GPR64spRegClass);
    BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::ADDXri),
            ResultReg)
        .addFrameIndex(SI->second)
        .addImm(0)
        .addImm(0);
    return ResultReg;
  }

  return 0;
}

unsigned AArch64FastISel::materializeInt(const ConstantInt *CI, MVT VT) {
  if (VT > MVT::i64)
    return 0;

  if (!CI->isZero())
    return fastEmit_i(VT, VT, ISD::Constant, CI->getZExtValue());

  // Create a copy from the zero register to materialize a "0" value.
  const TargetRegisterClass *RC = (VT == MVT::i64) ? &AArch64::GPR64RegClass
                                                   : &AArch64::GPR32RegClass;
  unsigned ZeroReg = (VT == MVT::i64) ? AArch64::XZR : AArch64::WZR;
  Register ResultReg = createResultReg(RC);
  BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(TargetOpcode::COPY),
          ResultReg).addReg(ZeroReg, getKillRegState(true));
  return ResultReg;
}

unsigned AArch64FastISel::materializeFP(const ConstantFP *CFP, MVT VT) {
  // Positive zero (+0.0) has to be materialized with a fmov from the zero
  // register, because the immediate version of fmov cannot encode zero.
  if (CFP->isNullValue())
    return fastMaterializeFloatZero(CFP);

  if (VT != MVT::f32 && VT != MVT::f64)
    return 0;

  const APFloat Val = CFP->getValueAPF();
  bool Is64Bit = (VT == MVT::f64);
  // This checks to see if we can use FMOV instructions to materialize
  // a constant, otherwise we have to materialize via the constant pool.
  int Imm =
      Is64Bit ? AArch64_AM::getFP64Imm(Val) : AArch64_AM::getFP32Imm(Val);
  if (Imm != -1) {
    unsigned Opc = Is64Bit ? AArch64::FMOVDi : AArch64::FMOVSi;
    return fastEmitInst_i(Opc, TLI.getRegClassFor(VT), Imm);
  }

  // For the large code model materialize the FP constant in code.
  if (TM.getCodeModel() == CodeModel::Large) {
    unsigned Opc1 = Is64Bit ? AArch64::MOVi64imm : AArch64::MOVi32imm;
    const TargetRegisterClass *RC = Is64Bit ?
        &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;

    Register TmpReg = createResultReg(RC);
    BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc1), TmpReg)
        .addImm(CFP->getValueAPF().bitcastToAPInt().getZExtValue());

    Register ResultReg = createResultReg(TLI.getRegClassFor(VT));
    BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
            TII.get(TargetOpcode::COPY), ResultReg)
        .addReg(TmpReg, getKillRegState(true));

    return ResultReg;
  }

  // Materialize via constant pool. MachineConstantPool wants an explicit
  // alignment.
  Align Alignment = DL.getPrefTypeAlign(CFP->getType());

  unsigned CPI = MCP.getConstantPoolIndex(cast<Constant>(CFP), Alignment);
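  // ADRP materializes the 4KiB-aligned page address of the constant-pool
  // entry; the load below then supplies the low 12 bits as the page offset.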
  Register ADRPReg = createResultReg(&AArch64::GPR64commonRegClass);
  BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::ADRP),
          ADRPReg).addConstantPoolIndex(CPI, 0, AArch64II::MO_PAGE);

  unsigned Opc = Is64Bit ? AArch64::LDRDui : AArch64::LDRSui;
  Register ResultReg = createResultReg(TLI.getRegClassFor(VT));
  BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc), ResultReg)
      .addReg(ADRPReg)
      .addConstantPoolIndex(CPI, 0, AArch64II::MO_PAGEOFF | AArch64II::MO_NC);
  return ResultReg;
}

unsigned AArch64FastISel::materializeGV(const GlobalValue *GV) {
  // We can't handle thread-local variables quickly yet.
  if (GV->isThreadLocal())
    return 0;

  // MachO still uses GOT for large code-model accesses, but ELF requires
  // movz/movk sequences, which FastISel doesn't handle yet.
  if (!Subtarget->useSmallAddressing() && !Subtarget->isTargetMachO())
    return 0;

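  // Ask the subtarget how this global has to be referenced; the flags (e.g.
  // MO_GOT) select between the ADRP+LDR and ADRP+ADD sequences below.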
  unsigned OpFlags = Subtarget->ClassifyGlobalReference(GV, TM);

  EVT DestEVT = TLI.getValueType(DL, GV->getType(), true);
  if (!DestEVT.isSimple())
    return 0;

  Register ADRPReg = createResultReg(&AArch64::GPR64commonRegClass);
  unsigned ResultReg;

  if (OpFlags & AArch64II::MO_GOT) {
    // ADRP + LDRX
    BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::ADRP),
            ADRPReg)
        .addGlobalAddress(GV, 0, AArch64II::MO_PAGE | OpFlags);

    unsigned LdrOpc;
    if (Subtarget->isTargetILP32()) {
      ResultReg = createResultReg(&AArch64::GPR32RegClass);
      LdrOpc = AArch64::LDRWui;
    } else {
      ResultReg = createResultReg(&AArch64::GPR64RegClass);
      LdrOpc = AArch64::LDRXui;
    }
    BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(LdrOpc),
            ResultReg)
        .addReg(ADRPReg)
        .addGlobalAddress(GV, 0, AArch64II::MO_GOT | AArch64II::MO_PAGEOFF |
                          AArch64II::MO_NC | OpFlags);
    if (!Subtarget->isTargetILP32())
      return ResultReg;

    // LDRWui produces a 32-bit register, but pointers in-register are 64-bits
    // so we must extend the result on ILP32.
    Register Result64 = createResultReg(&AArch64::GPR64RegClass);
    BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
            TII.get(TargetOpcode::SUBREG_TO_REG))
        .addDef(Result64)
        .addImm(0)
        .addReg(ResultReg, RegState::Kill)
        .addImm(AArch64::sub_32);
    return Result64;
  } else {
    // ADRP + ADDX
    BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::ADRP),
            ADRPReg)
        .addGlobalAddress(GV, 0, AArch64II::MO_PAGE | OpFlags);

    if (OpFlags & AArch64II::MO_TAGGED) {
      // MO_TAGGED on the page indicates a tagged address. Set the tag now.
      // We do so by creating a MOVK that sets bits 48-63 of the register to
      // (global address + 0x100000000 - PC) >> 48. This assumes that we're in
      // the small code model so we can assume a binary size of <= 4GB, which
      // makes the untagged PC relative offset positive. The binary must also be
      // loaded into address range [0, 2^48). Both of these properties need to
      // be ensured at runtime when using tagged addresses.
      //
      // TODO: There is duplicate logic in AArch64ExpandPseudoInsts.cpp that
      // also uses BuildMI for making an ADRP (+ MOVK) + ADD, but the operands
      // are not exactly 1:1 with FastISel so we cannot easily abstract this
      // out. At some point, it would be nice to find a way to not have this
      // duplicate code.
      unsigned DstReg = createResultReg(&AArch64::GPR64commonRegClass);
      BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::MOVKXi),
              DstReg)
          .addReg(ADRPReg)
          .addGlobalAddress(GV, /*Offset=*/0x100000000,
                            AArch64II::MO_PREL | AArch64II::MO_G3)
          .addImm(48);
      ADRPReg = DstReg;
    }

    ResultReg = createResultReg(&AArch64::GPR64spRegClass);
    BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::ADDXri),
            ResultReg)
        .addReg(ADRPReg)
        .addGlobalAddress(GV, 0,
                          AArch64II::MO_PAGEOFF | AArch64II::MO_NC | OpFlags)
        .addImm(0);
  }
  return ResultReg;
}

unsigned AArch64FastISel::fastMaterializeConstant(const Constant *C) {
  EVT CEVT = TLI.getValueType(DL, C->getType(), true);

  // Only handle simple types.
  if (!CEVT.isSimple())
    return 0;
  MVT VT = CEVT.getSimpleVT();
  // arm64_32 has 32-bit pointers held in 64-bit registers. Because of that,
  // 'null' pointers need somewhat special treatment.
  if (isa<ConstantPointerNull>(C)) {
    assert(VT == MVT::i64 && "Expected 64-bit pointers");
    return materializeInt(ConstantInt::get(Type::getInt64Ty(*Context), 0), VT);
  }

  if (const auto *CI = dyn_cast<ConstantInt>(C))
    return materializeInt(CI, VT);
  else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(C))
    return materializeFP(CFP, VT);
  else if (const GlobalValue *GV = dyn_cast<GlobalValue>(C))
    return materializeGV(GV);

  return 0;
}

unsigned AArch64FastISel::fastMaterializeFloatZero(const ConstantFP* CFP) {
  assert(CFP->isNullValue() &&
         "Floating-point constant is not a positive zero.");
  MVT VT;
  if (!isTypeLegal(CFP->getType(), VT))
    return 0;

  if (VT != MVT::f32 && VT != MVT::f64)
    return 0;

  bool Is64Bit = (VT == MVT::f64);
  unsigned ZReg = Is64Bit ? AArch64::XZR : AArch64::WZR;
  unsigned Opc = Is64Bit ? AArch64::FMOVXDr : AArch64::FMOVWSr;
  return fastEmitInst_r(Opc, TLI.getRegClassFor(VT), ZReg);
}

/// Check if the multiply is by a power-of-2 constant.
static bool isMulPowOf2(const Value *I) {
  if (const auto *MI = dyn_cast<MulOperator>(I)) {
    if (const auto *C = dyn_cast<ConstantInt>(MI->getOperand(0)))
      if (C->getValue().isPowerOf2())
        return true;
    if (const auto *C = dyn_cast<ConstantInt>(MI->getOperand(1)))
      if (C->getValue().isPowerOf2())
        return true;
  }
  return false;
}

// Computes the address to get to an object.
bool AArch64FastISel::computeAddress(const Value *Obj, Address &Addr, Type *Ty)
{
  const User *U = nullptr;
  unsigned Opcode = Instruction::UserOp1;
  if (const Instruction *I = dyn_cast<Instruction>(Obj)) {
    // Don't walk into other basic blocks unless the object is an alloca from
    // another block, otherwise it may not have a virtual register assigned.
    if (FuncInfo.StaticAllocaMap.count(static_cast<const AllocaInst *>(Obj)) ||
        FuncInfo.MBBMap[I->getParent()] == FuncInfo.MBB) {
      Opcode = I->getOpcode();
      U = I;
    }
  } else if (const ConstantExpr *C = dyn_cast<ConstantExpr>(Obj)) {
    Opcode = C->getOpcode();
    U = C;
  }

  if (auto *Ty = dyn_cast<PointerType>(Obj->getType()))
    if (Ty->getAddressSpace() > 255)
      // Fast instruction selection doesn't support the special
      // address spaces.
      return false;

  switch (Opcode) {
  default:
    break;
  case Instruction::BitCast:
    // Look through bitcasts.
    return computeAddress(U->getOperand(0), Addr, Ty);

  case Instruction::IntToPtr:
    // Look past no-op inttoptrs.
    if (TLI.getValueType(DL, U->getOperand(0)->getType()) ==
        TLI.getPointerTy(DL))
      return computeAddress(U->getOperand(0), Addr, Ty);
    break;

  case Instruction::PtrToInt:
    // Look past no-op ptrtoints.
    if (TLI.getValueType(DL, U->getType()) == TLI.getPointerTy(DL))
      return computeAddress(U->getOperand(0), Addr, Ty);
    break;

  case Instruction::GetElementPtr: {
    Address SavedAddr = Addr;
    uint64_t TmpOffset = Addr.getOffset();

    // Iterate through the GEP folding the constants into offsets where
    // we can.
    for (gep_type_iterator GTI = gep_type_begin(U), E = gep_type_end(U);
         GTI != E; ++GTI) {
      const Value *Op = GTI.getOperand();
      if (StructType *STy = GTI.getStructTypeOrNull()) {
        const StructLayout *SL = DL.getStructLayout(STy);
        unsigned Idx = cast<ConstantInt>(Op)->getZExtValue();
        TmpOffset += SL->getElementOffset(Idx);
      } else {
        uint64_t S = GTI.getSequentialElementStride(DL);
        while (true) {
          if (const ConstantInt *CI = dyn_cast<ConstantInt>(Op)) {
            // Constant-offset addressing.
            TmpOffset += CI->getSExtValue() * S;
            break;
          }
          if (canFoldAddIntoGEP(U, Op)) {
            // A compatible add with a constant operand. Fold the constant.
            ConstantInt *CI =
                cast<ConstantInt>(cast<AddOperator>(Op)->getOperand(1));
            TmpOffset += CI->getSExtValue() * S;
            // Iterate on the other operand.
            Op = cast<AddOperator>(Op)->getOperand(0);
            continue;
          }
          // Unsupported
          goto unsupported_gep;
        }
      }
    }

    // Try to grab the base operand now.
    Addr.setOffset(TmpOffset);
    if (computeAddress(U->getOperand(0), Addr, Ty))
      return true;

    // We failed, restore everything and try the other options.
    Addr = SavedAddr;

  unsupported_gep:
    break;
  }
  case Instruction::Alloca: {
    const AllocaInst *AI = cast<AllocaInst>(Obj);
    DenseMap<const AllocaInst *, int>::iterator SI =
        FuncInfo.StaticAllocaMap.find(AI);
    if (SI != FuncInfo.StaticAllocaMap.end()) {
      Addr.setKind(Address::FrameIndexBase);
      Addr.setFI(SI->second);
      return true;
    }
    break;
  }
  case Instruction::Add: {
    // Adds of constants are common and easy enough.
    const Value *LHS = U->getOperand(0);
    const Value *RHS = U->getOperand(1);

    if (isa<ConstantInt>(LHS))
      std::swap(LHS, RHS);

    if (const ConstantInt *CI = dyn_cast<ConstantInt>(RHS)) {
      Addr.setOffset(Addr.getOffset() + CI->getSExtValue());
      return computeAddress(LHS, Addr, Ty);
    }

    Address Backup = Addr;
    if (computeAddress(LHS, Addr, Ty) && computeAddress(RHS, Addr, Ty))
      return true;
    Addr = Backup;

    break;
  }
  case Instruction::Sub: {
    // Subs of constants are common and easy enough.
    const Value *LHS = U->getOperand(0);
    const Value *RHS = U->getOperand(1);

    if (const ConstantInt *CI = dyn_cast<ConstantInt>(RHS)) {
      Addr.setOffset(Addr.getOffset() - CI->getSExtValue());
      return computeAddress(LHS, Addr, Ty);
    }
    break;
  }
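  // A shift left by log2(access size) can be folded into the scaled
  // register-offset form of the load/store addressing mode.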
  case Instruction::Shl: {
    if (Addr.getOffsetReg())
      break;

    const auto *CI = dyn_cast<ConstantInt>(U->getOperand(1));
    if (!CI)
      break;

    unsigned Val = CI->getZExtValue();
    if (Val < 1 || Val > 3)
      break;

    uint64_t NumBytes = 0;
    if (Ty && Ty->isSized()) {
      uint64_t NumBits = DL.getTypeSizeInBits(Ty);
      NumBytes = NumBits / 8;
      if (!isPowerOf2_64(NumBits))
        NumBytes = 0;
    }

    if (NumBytes != (1ULL << Val))
      break;

    Addr.setShift(Val);
    Addr.setExtendType(AArch64_AM::LSL);

    const Value *Src = U->getOperand(0);
    if (const auto *I = dyn_cast<Instruction>(Src)) {
      if (FuncInfo.MBBMap[I->getParent()] == FuncInfo.MBB) {
        // Fold the zext or sext when it won't become a noop.
        if (const auto *ZE = dyn_cast<ZExtInst>(I)) {
          if (!isIntExtFree(ZE) &&
              ZE->getOperand(0)->getType()->isIntegerTy(32)) {
            Addr.setExtendType(AArch64_AM::UXTW);
            Src = ZE->getOperand(0);
          }
        } else if (const auto *SE = dyn_cast<SExtInst>(I)) {
          if (!isIntExtFree(SE) &&
              SE->getOperand(0)->getType()->isIntegerTy(32)) {
            Addr.setExtendType(AArch64_AM::SXTW);
            Src = SE->getOperand(0);
          }
        }
      }
    }

    if (const auto *AI = dyn_cast<BinaryOperator>(Src))
      if (AI->getOpcode() == Instruction::And) {
        const Value *LHS = AI->getOperand(0);
        const Value *RHS = AI->getOperand(1);

        if (const auto *C = dyn_cast<ConstantInt>(LHS))
          if (C->getValue() == 0xffffffff)
            std::swap(LHS, RHS);

        if (const auto *C = dyn_cast<ConstantInt>(RHS))
          if (C->getValue() == 0xffffffff) {
            Addr.setExtendType(AArch64_AM::UXTW);
            Register Reg = getRegForValue(LHS);
            if (!Reg)
              return false;
            Reg = fastEmitInst_extractsubreg(MVT::i32, Reg, AArch64::sub_32);
            Addr.setOffsetReg(Reg);
            return true;
          }
      }

    Register Reg = getRegForValue(Src);
    if (!Reg)
      return false;
    Addr.setOffsetReg(Reg);
    return true;
  }
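  // A multiply by a power of two is the same folding opportunity as the Shl
  // case above, with the shift amount taken from log2 of the constant.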
  case Instruction::Mul: {
    if (Addr.getOffsetReg())
      break;

    if (!isMulPowOf2(U))
      break;

    const Value *LHS = U->getOperand(0);
    const Value *RHS = U->getOperand(1);

    // Canonicalize power-of-2 value to the RHS.
    if (const auto *C = dyn_cast<ConstantInt>(LHS))
      if (C->getValue().isPowerOf2())
        std::swap(LHS, RHS);

    assert(isa<ConstantInt>(RHS) && "Expected a ConstantInt.");
    const auto *C = cast<ConstantInt>(RHS);
    unsigned Val = C->getValue().logBase2();
    if (Val < 1 || Val > 3)
      break;

    uint64_t NumBytes = 0;
    if (Ty && Ty->isSized()) {
      uint64_t NumBits = DL.getTypeSizeInBits(Ty);
      NumBytes = NumBits / 8;
      if (!isPowerOf2_64(NumBits))
        NumBytes = 0;
    }

    if (NumBytes != (1ULL << Val))
      break;

    Addr.setShift(Val);
    Addr.setExtendType(AArch64_AM::LSL);

    const Value *Src = LHS;
    if (const auto *I = dyn_cast<Instruction>(Src)) {
      if (FuncInfo.MBBMap[I->getParent()] == FuncInfo.MBB) {
        // Fold the zext or sext when it won't become a noop.
        if (const auto *ZE = dyn_cast<ZExtInst>(I)) {
          if (!isIntExtFree(ZE) &&
              ZE->getOperand(0)->getType()->isIntegerTy(32)) {
            Addr.setExtendType(AArch64_AM::UXTW);
            Src = ZE->getOperand(0);
          }
        } else if (const auto *SE = dyn_cast<SExtInst>(I)) {
          if (!isIntExtFree(SE) &&
              SE->getOperand(0)->getType()->isIntegerTy(32)) {
            Addr.setExtendType(AArch64_AM::SXTW);
            Src = SE->getOperand(0);
          }
        }
      }
    }

    Register Reg = getRegForValue(Src);
    if (!Reg)
      return false;
    Addr.setOffsetReg(Reg);
    return true;
  }
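  // Masking with 0xffffffff is a zero-extend of the 32-bit sub-register and
  // maps onto the UXTW extend of the register-offset addressing mode.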
  case Instruction::And: {
    if (Addr.getOffsetReg())
      break;

    if (!Ty || DL.getTypeSizeInBits(Ty) != 8)
      break;

    const Value *LHS = U->getOperand(0);
    const Value *RHS = U->getOperand(1);

    if (const auto *C = dyn_cast<ConstantInt>(LHS))
      if (C->getValue() == 0xffffffff)
        std::swap(LHS, RHS);

    if (const auto *C = dyn_cast<ConstantInt>(RHS))
      if (C->getValue() == 0xffffffff) {
        Addr.setShift(0);
        Addr.setExtendType(AArch64_AM::LSL);
        Addr.setExtendType(AArch64_AM::UXTW);

        Register Reg = getRegForValue(LHS);
        if (!Reg)
          return false;
        Reg = fastEmitInst_extractsubreg(MVT::i32, Reg, AArch64::sub_32);
        Addr.setOffsetReg(Reg);
        return true;
      }
    break;
  }
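  // A plain zext/sext from i32 feeding the address can likewise become the
  // extend part of the register-offset addressing mode.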
8870b57cec5SDimitry Andric case Instruction::SExt:
8880b57cec5SDimitry Andric case Instruction::ZExt: {
8890b57cec5SDimitry Andric if (!Addr.getReg() || Addr.getOffsetReg())
8900b57cec5SDimitry Andric break;
8910b57cec5SDimitry Andric
8920b57cec5SDimitry Andric const Value *Src = nullptr;
8930b57cec5SDimitry Andric // Fold the zext or sext when it won't become a noop.
8940b57cec5SDimitry Andric if (const auto *ZE = dyn_cast<ZExtInst>(U)) {
8950b57cec5SDimitry Andric if (!isIntExtFree(ZE) && ZE->getOperand(0)->getType()->isIntegerTy(32)) {
8960b57cec5SDimitry Andric Addr.setExtendType(AArch64_AM::UXTW);
8970b57cec5SDimitry Andric Src = ZE->getOperand(0);
8980b57cec5SDimitry Andric }
8990b57cec5SDimitry Andric } else if (const auto *SE = dyn_cast<SExtInst>(U)) {
9000b57cec5SDimitry Andric if (!isIntExtFree(SE) && SE->getOperand(0)->getType()->isIntegerTy(32)) {
9010b57cec5SDimitry Andric Addr.setExtendType(AArch64_AM::SXTW);
9020b57cec5SDimitry Andric Src = SE->getOperand(0);
9030b57cec5SDimitry Andric }
9040b57cec5SDimitry Andric }
9050b57cec5SDimitry Andric
9060b57cec5SDimitry Andric if (!Src)
9070b57cec5SDimitry Andric break;
9080b57cec5SDimitry Andric
9090b57cec5SDimitry Andric Addr.setShift(0);
91004eeddc0SDimitry Andric Register Reg = getRegForValue(Src);
9110b57cec5SDimitry Andric if (!Reg)
9120b57cec5SDimitry Andric return false;
9130b57cec5SDimitry Andric Addr.setOffsetReg(Reg);
9140b57cec5SDimitry Andric return true;
9150b57cec5SDimitry Andric }
9160b57cec5SDimitry Andric } // end switch
9170b57cec5SDimitry Andric
9180b57cec5SDimitry Andric if (Addr.isRegBase() && !Addr.getReg()) {
91904eeddc0SDimitry Andric Register Reg = getRegForValue(Obj);
9200b57cec5SDimitry Andric if (!Reg)
9210b57cec5SDimitry Andric return false;
9220b57cec5SDimitry Andric Addr.setReg(Reg);
9230b57cec5SDimitry Andric return true;
9240b57cec5SDimitry Andric }
9250b57cec5SDimitry Andric
9260b57cec5SDimitry Andric if (!Addr.getOffsetReg()) {
92704eeddc0SDimitry Andric Register Reg = getRegForValue(Obj);
9280b57cec5SDimitry Andric if (!Reg)
9290b57cec5SDimitry Andric return false;
9300b57cec5SDimitry Andric Addr.setOffsetReg(Reg);
9310b57cec5SDimitry Andric return true;
9320b57cec5SDimitry Andric }
9330b57cec5SDimitry Andric
9340b57cec5SDimitry Andric return false;
9350b57cec5SDimitry Andric }
9360b57cec5SDimitry Andric
computeCallAddress(const Value * V,Address & Addr)9370b57cec5SDimitry Andric bool AArch64FastISel::computeCallAddress(const Value *V, Address &Addr) {
9380b57cec5SDimitry Andric const User *U = nullptr;
9390b57cec5SDimitry Andric unsigned Opcode = Instruction::UserOp1;
9400b57cec5SDimitry Andric bool InMBB = true;
9410b57cec5SDimitry Andric
9420b57cec5SDimitry Andric if (const auto *I = dyn_cast<Instruction>(V)) {
9430b57cec5SDimitry Andric Opcode = I->getOpcode();
9440b57cec5SDimitry Andric U = I;
9450b57cec5SDimitry Andric InMBB = I->getParent() == FuncInfo.MBB->getBasicBlock();
9460b57cec5SDimitry Andric } else if (const auto *C = dyn_cast<ConstantExpr>(V)) {
9470b57cec5SDimitry Andric Opcode = C->getOpcode();
9480b57cec5SDimitry Andric U = C;
9490b57cec5SDimitry Andric }
9500b57cec5SDimitry Andric
9510b57cec5SDimitry Andric switch (Opcode) {
9520b57cec5SDimitry Andric default: break;
9530b57cec5SDimitry Andric case Instruction::BitCast:
9540b57cec5SDimitry Andric // Look past bitcasts if its operand is in the same BB.
9550b57cec5SDimitry Andric if (InMBB)
9560b57cec5SDimitry Andric return computeCallAddress(U->getOperand(0), Addr);
9570b57cec5SDimitry Andric break;
9580b57cec5SDimitry Andric case Instruction::IntToPtr:
9590b57cec5SDimitry Andric // Look past no-op inttoptrs if its operand is in the same BB.
9600b57cec5SDimitry Andric if (InMBB &&
9610b57cec5SDimitry Andric TLI.getValueType(DL, U->getOperand(0)->getType()) ==
9620b57cec5SDimitry Andric TLI.getPointerTy(DL))
9630b57cec5SDimitry Andric return computeCallAddress(U->getOperand(0), Addr);
9640b57cec5SDimitry Andric break;
9650b57cec5SDimitry Andric case Instruction::PtrToInt:
9660b57cec5SDimitry Andric // Look past no-op ptrtoints if its operand is in the same BB.
9670b57cec5SDimitry Andric if (InMBB && TLI.getValueType(DL, U->getType()) == TLI.getPointerTy(DL))
9680b57cec5SDimitry Andric return computeCallAddress(U->getOperand(0), Addr);
9690b57cec5SDimitry Andric break;
9700b57cec5SDimitry Andric }
9710b57cec5SDimitry Andric
9720b57cec5SDimitry Andric if (const GlobalValue *GV = dyn_cast<GlobalValue>(V)) {
9730b57cec5SDimitry Andric Addr.setGlobalValue(GV);
9740b57cec5SDimitry Andric return true;
9750b57cec5SDimitry Andric }
9760b57cec5SDimitry Andric
9770b57cec5SDimitry Andric // If all else fails, try to materialize the value in a register.
9780b57cec5SDimitry Andric if (!Addr.getGlobalValue()) {
9790b57cec5SDimitry Andric Addr.setReg(getRegForValue(V));
9800b57cec5SDimitry Andric return Addr.getReg() != 0;
9810b57cec5SDimitry Andric }
9820b57cec5SDimitry Andric
9830b57cec5SDimitry Andric return false;
9840b57cec5SDimitry Andric }
9850b57cec5SDimitry Andric
9860b57cec5SDimitry Andric bool AArch64FastISel::isTypeLegal(Type *Ty, MVT &VT) {
9870b57cec5SDimitry Andric EVT evt = TLI.getValueType(DL, Ty, true);
9880b57cec5SDimitry Andric
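  // ILP32 pointer types are not handled by this FastISel path; treat them as
  // unsupported so selection of such values is left to the usual fallback.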
9898bcb0991SDimitry Andric if (Subtarget->isTargetILP32() && Ty->isPointerTy())
9908bcb0991SDimitry Andric return false;
9918bcb0991SDimitry Andric
9920b57cec5SDimitry Andric // Only handle simple types.
9930b57cec5SDimitry Andric if (evt == MVT::Other || !evt.isSimple())
9940b57cec5SDimitry Andric return false;
9950b57cec5SDimitry Andric VT = evt.getSimpleVT();
9960b57cec5SDimitry Andric
9970b57cec5SDimitry Andric // This is a legal type, but it's not something we handle in fast-isel.
9980b57cec5SDimitry Andric if (VT == MVT::f128)
9990b57cec5SDimitry Andric return false;
10000b57cec5SDimitry Andric
10010b57cec5SDimitry Andric // Handle all other legal types, i.e. a register that will directly hold this
10020b57cec5SDimitry Andric // value.
10030b57cec5SDimitry Andric return TLI.isTypeLegal(VT);
10040b57cec5SDimitry Andric }
10050b57cec5SDimitry Andric
10060b57cec5SDimitry Andric /// Determine if the value type is supported by FastISel.
10070b57cec5SDimitry Andric ///
10080b57cec5SDimitry Andric /// FastISel for AArch64 can handle more value types than are legal. This adds
10090b57cec5SDimitry Andric /// simple value types such as i1, i8, and i16.
10100b57cec5SDimitry Andric bool AArch64FastISel::isTypeSupported(Type *Ty, MVT &VT, bool IsVectorAllowed) {
10110b57cec5SDimitry Andric if (Ty->isVectorTy() && !IsVectorAllowed)
10120b57cec5SDimitry Andric return false;
10130b57cec5SDimitry Andric
10140b57cec5SDimitry Andric if (isTypeLegal(Ty, VT))
10150b57cec5SDimitry Andric return true;
10160b57cec5SDimitry Andric
10170b57cec5SDimitry Andric // If this is a type that can be sign- or zero-extended to a basic operation,
10180b57cec5SDimitry Andric // go ahead and accept it now.
10190b57cec5SDimitry Andric if (VT == MVT::i1 || VT == MVT::i8 || VT == MVT::i16)
10200b57cec5SDimitry Andric return true;
10210b57cec5SDimitry Andric
10220b57cec5SDimitry Andric return false;
10230b57cec5SDimitry Andric }
10240b57cec5SDimitry Andric
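/// A value is available for folding only if it is not an instruction at all
/// (e.g. a constant or argument) or if it is defined in the basic block
/// currently being selected.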
10250b57cec5SDimitry Andric bool AArch64FastISel::isValueAvailable(const Value *V) const {
10260b57cec5SDimitry Andric if (!isa<Instruction>(V))
10270b57cec5SDimitry Andric return true;
10280b57cec5SDimitry Andric
10290b57cec5SDimitry Andric const auto *I = cast<Instruction>(V);
10300b57cec5SDimitry Andric return FuncInfo.MBBMap[I->getParent()] == FuncInfo.MBB;
10310b57cec5SDimitry Andric }
10320b57cec5SDimitry Andric
10330b57cec5SDimitry Andric bool AArch64FastISel::simplifyAddress(Address &Addr, MVT VT) {
10348bcb0991SDimitry Andric if (Subtarget->isTargetILP32())
10358bcb0991SDimitry Andric return false;
10368bcb0991SDimitry Andric
10370b57cec5SDimitry Andric unsigned ScaleFactor = getImplicitScaleFactor(VT);
10380b57cec5SDimitry Andric if (!ScaleFactor)
10390b57cec5SDimitry Andric return false;
10400b57cec5SDimitry Andric
10410b57cec5SDimitry Andric bool ImmediateOffsetNeedsLowering = false;
10420b57cec5SDimitry Andric bool RegisterOffsetNeedsLowering = false;
10430b57cec5SDimitry Andric int64_t Offset = Addr.getOffset();
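  // AArch64 loads/stores take either a signed 9-bit unscaled immediate or an
  // unsigned 12-bit immediate scaled by the access size; offsets that fit
  // neither form must be lowered with explicit address arithmetic below.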
10440b57cec5SDimitry Andric if (((Offset < 0) || (Offset & (ScaleFactor - 1))) && !isInt<9>(Offset))
10450b57cec5SDimitry Andric ImmediateOffsetNeedsLowering = true;
10460b57cec5SDimitry Andric else if (Offset > 0 && !(Offset & (ScaleFactor - 1)) &&
10470b57cec5SDimitry Andric !isUInt<12>(Offset / ScaleFactor))
10480b57cec5SDimitry Andric ImmediateOffsetNeedsLowering = true;
10490b57cec5SDimitry Andric
10500b57cec5SDimitry Andric // Cannot encode an offset register and an immediate offset in the same
10510b57cec5SDimitry Andric // instruction. Fold the immediate offset into the load/store instruction and
10520b57cec5SDimitry Andric // emit an additional add to take care of the offset register.
10530b57cec5SDimitry Andric if (!ImmediateOffsetNeedsLowering && Addr.getOffset() && Addr.getOffsetReg())
10540b57cec5SDimitry Andric RegisterOffsetNeedsLowering = true;
10550b57cec5SDimitry Andric
10560b57cec5SDimitry Andric // Cannot encode zero register as base.
10570b57cec5SDimitry Andric if (Addr.isRegBase() && Addr.getOffsetReg() && !Addr.getReg())
10580b57cec5SDimitry Andric RegisterOffsetNeedsLowering = true;
10590b57cec5SDimitry Andric
10600b57cec5SDimitry Andric // If this is a stack pointer and the offset needs to be simplified then put
10610b57cec5SDimitry Andric // the alloca address into a register, set the base type back to register and
10620b57cec5SDimitry Andric // continue. This should almost never happen.
10630b57cec5SDimitry Andric if ((ImmediateOffsetNeedsLowering || Addr.getOffsetReg()) && Addr.isFIBase())
10640b57cec5SDimitry Andric {
106504eeddc0SDimitry Andric Register ResultReg = createResultReg(&AArch64::GPR64spRegClass);
1066bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::ADDXri),
10670b57cec5SDimitry Andric ResultReg)
10680b57cec5SDimitry Andric .addFrameIndex(Addr.getFI())
10690b57cec5SDimitry Andric .addImm(0)
10700b57cec5SDimitry Andric .addImm(0);
10710b57cec5SDimitry Andric Addr.setKind(Address::RegBase);
10720b57cec5SDimitry Andric Addr.setReg(ResultReg);
10730b57cec5SDimitry Andric }
10740b57cec5SDimitry Andric
10750b57cec5SDimitry Andric if (RegisterOffsetNeedsLowering) {
10760b57cec5SDimitry Andric unsigned ResultReg = 0;
10770b57cec5SDimitry Andric if (Addr.getReg()) {
10780b57cec5SDimitry Andric if (Addr.getExtendType() == AArch64_AM::SXTW ||
10790b57cec5SDimitry Andric Addr.getExtendType() == AArch64_AM::UXTW )
10800b57cec5SDimitry Andric ResultReg = emitAddSub_rx(/*UseAdd=*/true, MVT::i64, Addr.getReg(),
1081fe6060f1SDimitry Andric Addr.getOffsetReg(), Addr.getExtendType(),
10820b57cec5SDimitry Andric Addr.getShift());
10830b57cec5SDimitry Andric else
10840b57cec5SDimitry Andric ResultReg = emitAddSub_rs(/*UseAdd=*/true, MVT::i64, Addr.getReg(),
1085fe6060f1SDimitry Andric Addr.getOffsetReg(), AArch64_AM::LSL,
10860b57cec5SDimitry Andric Addr.getShift());
10870b57cec5SDimitry Andric } else {
10880b57cec5SDimitry Andric if (Addr.getExtendType() == AArch64_AM::UXTW)
10890b57cec5SDimitry Andric ResultReg = emitLSL_ri(MVT::i64, MVT::i32, Addr.getOffsetReg(),
1090fe6060f1SDimitry Andric Addr.getShift(), /*IsZExt=*/true);
10910b57cec5SDimitry Andric else if (Addr.getExtendType() == AArch64_AM::SXTW)
10920b57cec5SDimitry Andric ResultReg = emitLSL_ri(MVT::i64, MVT::i32, Addr.getOffsetReg(),
1093fe6060f1SDimitry Andric Addr.getShift(), /*IsZExt=*/false);
10940b57cec5SDimitry Andric else
10950b57cec5SDimitry Andric ResultReg = emitLSL_ri(MVT::i64, MVT::i64, Addr.getOffsetReg(),
1096fe6060f1SDimitry Andric Addr.getShift());
10970b57cec5SDimitry Andric }
10980b57cec5SDimitry Andric if (!ResultReg)
10990b57cec5SDimitry Andric return false;
11000b57cec5SDimitry Andric
11010b57cec5SDimitry Andric Addr.setReg(ResultReg);
11020b57cec5SDimitry Andric Addr.setOffsetReg(0);
11030b57cec5SDimitry Andric Addr.setShift(0);
11040b57cec5SDimitry Andric Addr.setExtendType(AArch64_AM::InvalidShiftExtend);
11050b57cec5SDimitry Andric }
11060b57cec5SDimitry Andric
11070b57cec5SDimitry Andric // Since the offset is too large for the load/store instruction, get the
11080b57cec5SDimitry Andric // reg+offset into a register.
11090b57cec5SDimitry Andric if (ImmediateOffsetNeedsLowering) {
11100b57cec5SDimitry Andric unsigned ResultReg;
11110b57cec5SDimitry Andric if (Addr.getReg())
11120b57cec5SDimitry Andric // Try to fold the immediate into the add instruction.
1113fe6060f1SDimitry Andric ResultReg = emitAdd_ri_(MVT::i64, Addr.getReg(), Offset);
11140b57cec5SDimitry Andric else
11150b57cec5SDimitry Andric ResultReg = fastEmit_i(MVT::i64, MVT::i64, ISD::Constant, Offset);
11160b57cec5SDimitry Andric
11170b57cec5SDimitry Andric if (!ResultReg)
11180b57cec5SDimitry Andric return false;
11190b57cec5SDimitry Andric Addr.setReg(ResultReg);
11200b57cec5SDimitry Andric Addr.setOffset(0);
11210b57cec5SDimitry Andric }
11220b57cec5SDimitry Andric return true;
11230b57cec5SDimitry Andric }
11240b57cec5SDimitry Andric
11250b57cec5SDimitry Andric void AArch64FastISel::addLoadStoreOperands(Address &Addr,
11260b57cec5SDimitry Andric const MachineInstrBuilder &MIB,
11270b57cec5SDimitry Andric MachineMemOperand::Flags Flags,
11280b57cec5SDimitry Andric unsigned ScaleFactor,
11290b57cec5SDimitry Andric MachineMemOperand *MMO) {
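  // Scaled addressing modes encode the immediate offset in units of the
  // access size.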
11300b57cec5SDimitry Andric int64_t Offset = Addr.getOffset() / ScaleFactor;
11310b57cec5SDimitry Andric // Frame base works a bit differently. Handle it separately.
11320b57cec5SDimitry Andric if (Addr.isFIBase()) {
11330b57cec5SDimitry Andric int FI = Addr.getFI();
11340b57cec5SDimitry Andric // FIXME: We shouldn't be using getObjectSize/getObjectAlignment. The size
11350b57cec5SDimitry Andric // and alignment should be based on the VT.
11360b57cec5SDimitry Andric MMO = FuncInfo.MF->getMachineMemOperand(
11370b57cec5SDimitry Andric MachinePointerInfo::getFixedStack(*FuncInfo.MF, FI, Offset), Flags,
11385ffd83dbSDimitry Andric MFI.getObjectSize(FI), MFI.getObjectAlign(FI));
11390b57cec5SDimitry Andric // Now add the rest of the operands.
11400b57cec5SDimitry Andric MIB.addFrameIndex(FI).addImm(Offset);
11410b57cec5SDimitry Andric } else {
11420b57cec5SDimitry Andric assert(Addr.isRegBase() && "Unexpected address kind.");
11430b57cec5SDimitry Andric const MCInstrDesc &II = MIB->getDesc();
11440b57cec5SDimitry Andric unsigned Idx = (Flags & MachineMemOperand::MOStore) ? 1 : 0;
11450b57cec5SDimitry Andric Addr.setReg(
11460b57cec5SDimitry Andric constrainOperandRegClass(II, Addr.getReg(), II.getNumDefs()+Idx));
11470b57cec5SDimitry Andric Addr.setOffsetReg(
11480b57cec5SDimitry Andric constrainOperandRegClass(II, Addr.getOffsetReg(), II.getNumDefs()+Idx+1));
11490b57cec5SDimitry Andric if (Addr.getOffsetReg()) {
11500b57cec5SDimitry Andric assert(Addr.getOffset() == 0 && "Unexpected offset");
11510b57cec5SDimitry Andric bool IsSigned = Addr.getExtendType() == AArch64_AM::SXTW ||
11520b57cec5SDimitry Andric Addr.getExtendType() == AArch64_AM::SXTX;
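  // Register-offset form: base register, offset register, a signed/unsigned
  // extend flag, and whether the index is shifted by the access size.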
11530b57cec5SDimitry Andric MIB.addReg(Addr.getReg());
11540b57cec5SDimitry Andric MIB.addReg(Addr.getOffsetReg());
11550b57cec5SDimitry Andric MIB.addImm(IsSigned);
11560b57cec5SDimitry Andric MIB.addImm(Addr.getShift() != 0);
11570b57cec5SDimitry Andric } else
11580b57cec5SDimitry Andric MIB.addReg(Addr.getReg()).addImm(Offset);
11590b57cec5SDimitry Andric }
11600b57cec5SDimitry Andric
11610b57cec5SDimitry Andric if (MMO)
11620b57cec5SDimitry Andric MIB.addMemOperand(MMO);
11630b57cec5SDimitry Andric }
11640b57cec5SDimitry Andric
11650b57cec5SDimitry Andric unsigned AArch64FastISel::emitAddSub(bool UseAdd, MVT RetVT, const Value *LHS,
11660b57cec5SDimitry Andric const Value *RHS, bool SetFlags,
11670b57cec5SDimitry Andric bool WantResult, bool IsZExt) {
11680b57cec5SDimitry Andric AArch64_AM::ShiftExtendType ExtendType = AArch64_AM::InvalidShiftExtend;
11690b57cec5SDimitry Andric bool NeedExtend = false;
11700b57cec5SDimitry Andric switch (RetVT.SimpleTy) {
11710b57cec5SDimitry Andric default:
11720b57cec5SDimitry Andric return 0;
11730b57cec5SDimitry Andric case MVT::i1:
11740b57cec5SDimitry Andric NeedExtend = true;
11750b57cec5SDimitry Andric break;
11760b57cec5SDimitry Andric case MVT::i8:
11770b57cec5SDimitry Andric NeedExtend = true;
11780b57cec5SDimitry Andric ExtendType = IsZExt ? AArch64_AM::UXTB : AArch64_AM::SXTB;
11790b57cec5SDimitry Andric break;
11800b57cec5SDimitry Andric case MVT::i16:
11810b57cec5SDimitry Andric NeedExtend = true;
11820b57cec5SDimitry Andric ExtendType = IsZExt ? AArch64_AM::UXTH : AArch64_AM::SXTH;
11830b57cec5SDimitry Andric break;
11840b57cec5SDimitry Andric case MVT::i32: // fall-through
11850b57cec5SDimitry Andric case MVT::i64:
11860b57cec5SDimitry Andric break;
11870b57cec5SDimitry Andric }
11880b57cec5SDimitry Andric MVT SrcVT = RetVT;
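  // Sub-word operations are performed on 32-bit registers; SrcVT keeps the
  // original width for the explicit extends below.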
11890b57cec5SDimitry Andric RetVT.SimpleTy = std::max(RetVT.SimpleTy, MVT::i32);
11900b57cec5SDimitry Andric
11910b57cec5SDimitry Andric // Canonicalize immediates to the RHS first.
11920b57cec5SDimitry Andric if (UseAdd && isa<Constant>(LHS) && !isa<Constant>(RHS))
11930b57cec5SDimitry Andric std::swap(LHS, RHS);
11940b57cec5SDimitry Andric
11950b57cec5SDimitry Andric // Canonicalize mul by power of 2 to the RHS.
11960b57cec5SDimitry Andric if (UseAdd && LHS->hasOneUse() && isValueAvailable(LHS))
11970b57cec5SDimitry Andric if (isMulPowOf2(LHS))
11980b57cec5SDimitry Andric std::swap(LHS, RHS);
11990b57cec5SDimitry Andric
12000b57cec5SDimitry Andric // Canonicalize shift immediate to the RHS.
12010b57cec5SDimitry Andric if (UseAdd && LHS->hasOneUse() && isValueAvailable(LHS))
12020b57cec5SDimitry Andric if (const auto *SI = dyn_cast<BinaryOperator>(LHS))
12030b57cec5SDimitry Andric if (isa<ConstantInt>(SI->getOperand(1)))
12040b57cec5SDimitry Andric if (SI->getOpcode() == Instruction::Shl ||
12050b57cec5SDimitry Andric SI->getOpcode() == Instruction::LShr ||
12060b57cec5SDimitry Andric SI->getOpcode() == Instruction::AShr )
12070b57cec5SDimitry Andric std::swap(LHS, RHS);
12080b57cec5SDimitry Andric
120904eeddc0SDimitry Andric Register LHSReg = getRegForValue(LHS);
12100b57cec5SDimitry Andric if (!LHSReg)
12110b57cec5SDimitry Andric return 0;
12120b57cec5SDimitry Andric
12130b57cec5SDimitry Andric if (NeedExtend)
12140b57cec5SDimitry Andric LHSReg = emitIntExt(SrcVT, LHSReg, RetVT, IsZExt);
12150b57cec5SDimitry Andric
12160b57cec5SDimitry Andric unsigned ResultReg = 0;
12170b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(RHS)) {
12180b57cec5SDimitry Andric uint64_t Imm = IsZExt ? C->getZExtValue() : C->getSExtValue();
12190b57cec5SDimitry Andric if (C->isNegative())
1220fe6060f1SDimitry Andric ResultReg = emitAddSub_ri(!UseAdd, RetVT, LHSReg, -Imm, SetFlags,
1221fe6060f1SDimitry Andric WantResult);
12220b57cec5SDimitry Andric else
1223fe6060f1SDimitry Andric ResultReg = emitAddSub_ri(UseAdd, RetVT, LHSReg, Imm, SetFlags,
12240b57cec5SDimitry Andric WantResult);
12250b57cec5SDimitry Andric } else if (const auto *C = dyn_cast<Constant>(RHS))
12260b57cec5SDimitry Andric if (C->isNullValue())
1227fe6060f1SDimitry Andric ResultReg = emitAddSub_ri(UseAdd, RetVT, LHSReg, 0, SetFlags, WantResult);
12280b57cec5SDimitry Andric
12290b57cec5SDimitry Andric if (ResultReg)
12300b57cec5SDimitry Andric return ResultReg;
12310b57cec5SDimitry Andric
12320b57cec5SDimitry Andric // Only extend the RHS within the instruction if there is a valid extend type.
12330b57cec5SDimitry Andric if (ExtendType != AArch64_AM::InvalidShiftExtend && RHS->hasOneUse() &&
12340b57cec5SDimitry Andric isValueAvailable(RHS)) {
123504eeddc0SDimitry Andric Register RHSReg = getRegForValue(RHS);
12360b57cec5SDimitry Andric if (!RHSReg)
12370b57cec5SDimitry Andric return 0;
1238fe6060f1SDimitry Andric return emitAddSub_rx(UseAdd, RetVT, LHSReg, RHSReg, ExtendType, 0,
1239fe6060f1SDimitry Andric SetFlags, WantResult);
12400b57cec5SDimitry Andric }
12410b57cec5SDimitry Andric
12420b57cec5SDimitry Andric // Check if the mul can be folded into the instruction.
12430b57cec5SDimitry Andric if (RHS->hasOneUse() && isValueAvailable(RHS)) {
12440b57cec5SDimitry Andric if (isMulPowOf2(RHS)) {
12450b57cec5SDimitry Andric const Value *MulLHS = cast<MulOperator>(RHS)->getOperand(0);
12460b57cec5SDimitry Andric const Value *MulRHS = cast<MulOperator>(RHS)->getOperand(1);
12470b57cec5SDimitry Andric
12480b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(MulLHS))
12490b57cec5SDimitry Andric if (C->getValue().isPowerOf2())
12500b57cec5SDimitry Andric std::swap(MulLHS, MulRHS);
12510b57cec5SDimitry Andric
12520b57cec5SDimitry Andric assert(isa<ConstantInt>(MulRHS) && "Expected a ConstantInt.");
12530b57cec5SDimitry Andric uint64_t ShiftVal = cast<ConstantInt>(MulRHS)->getValue().logBase2();
125404eeddc0SDimitry Andric Register RHSReg = getRegForValue(MulLHS);
12550b57cec5SDimitry Andric if (!RHSReg)
12560b57cec5SDimitry Andric return 0;
1257fe6060f1SDimitry Andric ResultReg = emitAddSub_rs(UseAdd, RetVT, LHSReg, RHSReg, AArch64_AM::LSL,
1258fe6060f1SDimitry Andric ShiftVal, SetFlags, WantResult);
12590b57cec5SDimitry Andric if (ResultReg)
12600b57cec5SDimitry Andric return ResultReg;
12610b57cec5SDimitry Andric }
12620b57cec5SDimitry Andric }
12630b57cec5SDimitry Andric
12640b57cec5SDimitry Andric // Check if the shift can be folded into the instruction.
12650b57cec5SDimitry Andric if (RHS->hasOneUse() && isValueAvailable(RHS)) {
12660b57cec5SDimitry Andric if (const auto *SI = dyn_cast<BinaryOperator>(RHS)) {
12670b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(SI->getOperand(1))) {
12680b57cec5SDimitry Andric AArch64_AM::ShiftExtendType ShiftType = AArch64_AM::InvalidShiftExtend;
12690b57cec5SDimitry Andric switch (SI->getOpcode()) {
12700b57cec5SDimitry Andric default: break;
12710b57cec5SDimitry Andric case Instruction::Shl: ShiftType = AArch64_AM::LSL; break;
12720b57cec5SDimitry Andric case Instruction::LShr: ShiftType = AArch64_AM::LSR; break;
12730b57cec5SDimitry Andric case Instruction::AShr: ShiftType = AArch64_AM::ASR; break;
12740b57cec5SDimitry Andric }
12750b57cec5SDimitry Andric uint64_t ShiftVal = C->getZExtValue();
12760b57cec5SDimitry Andric if (ShiftType != AArch64_AM::InvalidShiftExtend) {
127704eeddc0SDimitry Andric Register RHSReg = getRegForValue(SI->getOperand(0));
12780b57cec5SDimitry Andric if (!RHSReg)
12790b57cec5SDimitry Andric return 0;
1280fe6060f1SDimitry Andric ResultReg = emitAddSub_rs(UseAdd, RetVT, LHSReg, RHSReg, ShiftType,
1281fe6060f1SDimitry Andric ShiftVal, SetFlags, WantResult);
12820b57cec5SDimitry Andric if (ResultReg)
12830b57cec5SDimitry Andric return ResultReg;
12840b57cec5SDimitry Andric }
12850b57cec5SDimitry Andric }
12860b57cec5SDimitry Andric }
12870b57cec5SDimitry Andric }
12880b57cec5SDimitry Andric
128904eeddc0SDimitry Andric Register RHSReg = getRegForValue(RHS);
12900b57cec5SDimitry Andric if (!RHSReg)
12910b57cec5SDimitry Andric return 0;
12920b57cec5SDimitry Andric
12930b57cec5SDimitry Andric if (NeedExtend)
12940b57cec5SDimitry Andric RHSReg = emitIntExt(SrcVT, RHSReg, RetVT, IsZExt);
12950b57cec5SDimitry Andric
1296fe6060f1SDimitry Andric return emitAddSub_rr(UseAdd, RetVT, LHSReg, RHSReg, SetFlags, WantResult);
12970b57cec5SDimitry Andric }
12980b57cec5SDimitry Andric
12990b57cec5SDimitry Andric unsigned AArch64FastISel::emitAddSub_rr(bool UseAdd, MVT RetVT, unsigned LHSReg,
1300fe6060f1SDimitry Andric unsigned RHSReg, bool SetFlags,
13010b57cec5SDimitry Andric bool WantResult) {
13020b57cec5SDimitry Andric assert(LHSReg && RHSReg && "Invalid register number.");
13030b57cec5SDimitry Andric
13040b57cec5SDimitry Andric if (LHSReg == AArch64::SP || LHSReg == AArch64::WSP ||
13050b57cec5SDimitry Andric RHSReg == AArch64::SP || RHSReg == AArch64::WSP)
13060b57cec5SDimitry Andric return 0;
13070b57cec5SDimitry Andric
13080b57cec5SDimitry Andric if (RetVT != MVT::i32 && RetVT != MVT::i64)
13090b57cec5SDimitry Andric return 0;
13100b57cec5SDimitry Andric
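  // Opcode table indexed by [SetFlags][UseAdd][Is64Bit].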
13110b57cec5SDimitry Andric static const unsigned OpcTable[2][2][2] = {
13120b57cec5SDimitry Andric { { AArch64::SUBWrr, AArch64::SUBXrr },
13130b57cec5SDimitry Andric { AArch64::ADDWrr, AArch64::ADDXrr } },
13140b57cec5SDimitry Andric { { AArch64::SUBSWrr, AArch64::SUBSXrr },
13150b57cec5SDimitry Andric { AArch64::ADDSWrr, AArch64::ADDSXrr } }
13160b57cec5SDimitry Andric };
13170b57cec5SDimitry Andric bool Is64Bit = RetVT == MVT::i64;
13180b57cec5SDimitry Andric unsigned Opc = OpcTable[SetFlags][UseAdd][Is64Bit];
13190b57cec5SDimitry Andric const TargetRegisterClass *RC =
13200b57cec5SDimitry Andric Is64Bit ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
13210b57cec5SDimitry Andric unsigned ResultReg;
13220b57cec5SDimitry Andric if (WantResult)
13230b57cec5SDimitry Andric ResultReg = createResultReg(RC);
13240b57cec5SDimitry Andric else
13250b57cec5SDimitry Andric ResultReg = Is64Bit ? AArch64::XZR : AArch64::WZR;
13260b57cec5SDimitry Andric
13270b57cec5SDimitry Andric const MCInstrDesc &II = TII.get(Opc);
13280b57cec5SDimitry Andric LHSReg = constrainOperandRegClass(II, LHSReg, II.getNumDefs());
13290b57cec5SDimitry Andric RHSReg = constrainOperandRegClass(II, RHSReg, II.getNumDefs() + 1);
1330bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, II, ResultReg)
1331fe6060f1SDimitry Andric .addReg(LHSReg)
1332fe6060f1SDimitry Andric .addReg(RHSReg);
13330b57cec5SDimitry Andric return ResultReg;
13340b57cec5SDimitry Andric }
13350b57cec5SDimitry Andric
13360b57cec5SDimitry Andric unsigned AArch64FastISel::emitAddSub_ri(bool UseAdd, MVT RetVT, unsigned LHSReg,
1337fe6060f1SDimitry Andric uint64_t Imm, bool SetFlags,
1338fe6060f1SDimitry Andric bool WantResult) {
13390b57cec5SDimitry Andric assert(LHSReg && "Invalid register number.");
13400b57cec5SDimitry Andric
13410b57cec5SDimitry Andric if (RetVT != MVT::i32 && RetVT != MVT::i64)
13420b57cec5SDimitry Andric return 0;
13430b57cec5SDimitry Andric
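  // The arithmetic immediate is an unsigned 12-bit value, optionally shifted
  // left by 12 bits.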
13440b57cec5SDimitry Andric unsigned ShiftImm;
13450b57cec5SDimitry Andric if (isUInt<12>(Imm))
13460b57cec5SDimitry Andric ShiftImm = 0;
13470b57cec5SDimitry Andric else if ((Imm & 0xfff000) == Imm) {
13480b57cec5SDimitry Andric ShiftImm = 12;
13490b57cec5SDimitry Andric Imm >>= 12;
13500b57cec5SDimitry Andric } else
13510b57cec5SDimitry Andric return 0;
13520b57cec5SDimitry Andric
13530b57cec5SDimitry Andric static const unsigned OpcTable[2][2][2] = {
13540b57cec5SDimitry Andric { { AArch64::SUBWri, AArch64::SUBXri },
13550b57cec5SDimitry Andric { AArch64::ADDWri, AArch64::ADDXri } },
13560b57cec5SDimitry Andric { { AArch64::SUBSWri, AArch64::SUBSXri },
13570b57cec5SDimitry Andric { AArch64::ADDSWri, AArch64::ADDSXri } }
13580b57cec5SDimitry Andric };
13590b57cec5SDimitry Andric bool Is64Bit = RetVT == MVT::i64;
13600b57cec5SDimitry Andric unsigned Opc = OpcTable[SetFlags][UseAdd][Is64Bit];
13610b57cec5SDimitry Andric const TargetRegisterClass *RC;
13620b57cec5SDimitry Andric if (SetFlags)
13630b57cec5SDimitry Andric RC = Is64Bit ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
13640b57cec5SDimitry Andric else
13650b57cec5SDimitry Andric RC = Is64Bit ? &AArch64::GPR64spRegClass : &AArch64::GPR32spRegClass;
13660b57cec5SDimitry Andric unsigned ResultReg;
13670b57cec5SDimitry Andric if (WantResult)
13680b57cec5SDimitry Andric ResultReg = createResultReg(RC);
13690b57cec5SDimitry Andric else
13700b57cec5SDimitry Andric ResultReg = Is64Bit ? AArch64::XZR : AArch64::WZR;
13710b57cec5SDimitry Andric
13720b57cec5SDimitry Andric const MCInstrDesc &II = TII.get(Opc);
13730b57cec5SDimitry Andric LHSReg = constrainOperandRegClass(II, LHSReg, II.getNumDefs());
1374bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, II, ResultReg)
1375fe6060f1SDimitry Andric .addReg(LHSReg)
13760b57cec5SDimitry Andric .addImm(Imm)
13770b57cec5SDimitry Andric .addImm(getShifterImm(AArch64_AM::LSL, ShiftImm));
13780b57cec5SDimitry Andric return ResultReg;
13790b57cec5SDimitry Andric }
13800b57cec5SDimitry Andric
13810b57cec5SDimitry Andric unsigned AArch64FastISel::emitAddSub_rs(bool UseAdd, MVT RetVT, unsigned LHSReg,
1382fe6060f1SDimitry Andric unsigned RHSReg,
13830b57cec5SDimitry Andric AArch64_AM::ShiftExtendType ShiftType,
13840b57cec5SDimitry Andric uint64_t ShiftImm, bool SetFlags,
13850b57cec5SDimitry Andric bool WantResult) {
13860b57cec5SDimitry Andric assert(LHSReg && RHSReg && "Invalid register number.");
13870b57cec5SDimitry Andric assert(LHSReg != AArch64::SP && LHSReg != AArch64::WSP &&
13880b57cec5SDimitry Andric RHSReg != AArch64::SP && RHSReg != AArch64::WSP);
13890b57cec5SDimitry Andric
13900b57cec5SDimitry Andric if (RetVT != MVT::i32 && RetVT != MVT::i64)
13910b57cec5SDimitry Andric return 0;
13920b57cec5SDimitry Andric
13930b57cec5SDimitry Andric // Don't deal with undefined shifts.
13940b57cec5SDimitry Andric if (ShiftImm >= RetVT.getSizeInBits())
13950b57cec5SDimitry Andric return 0;
13960b57cec5SDimitry Andric
13970b57cec5SDimitry Andric static const unsigned OpcTable[2][2][2] = {
13980b57cec5SDimitry Andric { { AArch64::SUBWrs, AArch64::SUBXrs },
13990b57cec5SDimitry Andric { AArch64::ADDWrs, AArch64::ADDXrs } },
14000b57cec5SDimitry Andric { { AArch64::SUBSWrs, AArch64::SUBSXrs },
14010b57cec5SDimitry Andric { AArch64::ADDSWrs, AArch64::ADDSXrs } }
14020b57cec5SDimitry Andric };
14030b57cec5SDimitry Andric bool Is64Bit = RetVT == MVT::i64;
14040b57cec5SDimitry Andric unsigned Opc = OpcTable[SetFlags][UseAdd][Is64Bit];
14050b57cec5SDimitry Andric const TargetRegisterClass *RC =
14060b57cec5SDimitry Andric Is64Bit ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
14070b57cec5SDimitry Andric unsigned ResultReg;
14080b57cec5SDimitry Andric if (WantResult)
14090b57cec5SDimitry Andric ResultReg = createResultReg(RC);
14100b57cec5SDimitry Andric else
14110b57cec5SDimitry Andric ResultReg = Is64Bit ? AArch64::XZR : AArch64::WZR;
14120b57cec5SDimitry Andric
14130b57cec5SDimitry Andric const MCInstrDesc &II = TII.get(Opc);
14140b57cec5SDimitry Andric LHSReg = constrainOperandRegClass(II, LHSReg, II.getNumDefs());
14150b57cec5SDimitry Andric RHSReg = constrainOperandRegClass(II, RHSReg, II.getNumDefs() + 1);
1416bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, II, ResultReg)
1417fe6060f1SDimitry Andric .addReg(LHSReg)
1418fe6060f1SDimitry Andric .addReg(RHSReg)
14190b57cec5SDimitry Andric .addImm(getShifterImm(ShiftType, ShiftImm));
14200b57cec5SDimitry Andric return ResultReg;
14210b57cec5SDimitry Andric }
14220b57cec5SDimitry Andric
14230b57cec5SDimitry Andric unsigned AArch64FastISel::emitAddSub_rx(bool UseAdd, MVT RetVT, unsigned LHSReg,
1424fe6060f1SDimitry Andric unsigned RHSReg,
14250b57cec5SDimitry Andric AArch64_AM::ShiftExtendType ExtType,
14260b57cec5SDimitry Andric uint64_t ShiftImm, bool SetFlags,
14270b57cec5SDimitry Andric bool WantResult) {
14280b57cec5SDimitry Andric assert(LHSReg && RHSReg && "Invalid register number.");
14290b57cec5SDimitry Andric assert(LHSReg != AArch64::XZR && LHSReg != AArch64::WZR &&
14300b57cec5SDimitry Andric RHSReg != AArch64::XZR && RHSReg != AArch64::WZR);
14310b57cec5SDimitry Andric
14320b57cec5SDimitry Andric if (RetVT != MVT::i32 && RetVT != MVT::i64)
14330b57cec5SDimitry Andric return 0;
14340b57cec5SDimitry Andric
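  // Only small left shifts can be folded into the extended-register form.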
14350b57cec5SDimitry Andric if (ShiftImm >= 4)
14360b57cec5SDimitry Andric return 0;
14370b57cec5SDimitry Andric
14380b57cec5SDimitry Andric static const unsigned OpcTable[2][2][2] = {
14390b57cec5SDimitry Andric { { AArch64::SUBWrx, AArch64::SUBXrx },
14400b57cec5SDimitry Andric { AArch64::ADDWrx, AArch64::ADDXrx } },
14410b57cec5SDimitry Andric { { AArch64::SUBSWrx, AArch64::SUBSXrx },
14420b57cec5SDimitry Andric { AArch64::ADDSWrx, AArch64::ADDSXrx } }
14430b57cec5SDimitry Andric };
14440b57cec5SDimitry Andric bool Is64Bit = RetVT == MVT::i64;
14450b57cec5SDimitry Andric unsigned Opc = OpcTable[SetFlags][UseAdd][Is64Bit];
14460b57cec5SDimitry Andric const TargetRegisterClass *RC = nullptr;
14470b57cec5SDimitry Andric if (SetFlags)
14480b57cec5SDimitry Andric RC = Is64Bit ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
14490b57cec5SDimitry Andric else
14500b57cec5SDimitry Andric RC = Is64Bit ? &AArch64::GPR64spRegClass : &AArch64::GPR32spRegClass;
14510b57cec5SDimitry Andric unsigned ResultReg;
14520b57cec5SDimitry Andric if (WantResult)
14530b57cec5SDimitry Andric ResultReg = createResultReg(RC);
14540b57cec5SDimitry Andric else
14550b57cec5SDimitry Andric ResultReg = Is64Bit ? AArch64::XZR : AArch64::WZR;
14560b57cec5SDimitry Andric
14570b57cec5SDimitry Andric const MCInstrDesc &II = TII.get(Opc);
14580b57cec5SDimitry Andric LHSReg = constrainOperandRegClass(II, LHSReg, II.getNumDefs());
14590b57cec5SDimitry Andric RHSReg = constrainOperandRegClass(II, RHSReg, II.getNumDefs() + 1);
1460bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, II, ResultReg)
1461fe6060f1SDimitry Andric .addReg(LHSReg)
1462fe6060f1SDimitry Andric .addReg(RHSReg)
14630b57cec5SDimitry Andric .addImm(getArithExtendImm(ExtType, ShiftImm));
14640b57cec5SDimitry Andric return ResultReg;
14650b57cec5SDimitry Andric }
14660b57cec5SDimitry Andric
14670b57cec5SDimitry Andric bool AArch64FastISel::emitCmp(const Value *LHS, const Value *RHS, bool IsZExt) {
14680b57cec5SDimitry Andric Type *Ty = LHS->getType();
14690b57cec5SDimitry Andric EVT EVT = TLI.getValueType(DL, Ty, true);
14700b57cec5SDimitry Andric if (!EVT.isSimple())
14710b57cec5SDimitry Andric return false;
14720b57cec5SDimitry Andric MVT VT = EVT.getSimpleVT();
14730b57cec5SDimitry Andric
14740b57cec5SDimitry Andric switch (VT.SimpleTy) {
14750b57cec5SDimitry Andric default:
14760b57cec5SDimitry Andric return false;
14770b57cec5SDimitry Andric case MVT::i1:
14780b57cec5SDimitry Andric case MVT::i8:
14790b57cec5SDimitry Andric case MVT::i16:
14800b57cec5SDimitry Andric case MVT::i32:
14810b57cec5SDimitry Andric case MVT::i64:
14820b57cec5SDimitry Andric return emitICmp(VT, LHS, RHS, IsZExt);
14830b57cec5SDimitry Andric case MVT::f32:
14840b57cec5SDimitry Andric case MVT::f64:
14850b57cec5SDimitry Andric return emitFCmp(VT, LHS, RHS);
14860b57cec5SDimitry Andric }
14870b57cec5SDimitry Andric }
14880b57cec5SDimitry Andric
14890b57cec5SDimitry Andric bool AArch64FastISel::emitICmp(MVT RetVT, const Value *LHS, const Value *RHS,
14900b57cec5SDimitry Andric bool IsZExt) {
14910b57cec5SDimitry Andric return emitSub(RetVT, LHS, RHS, /*SetFlags=*/true, /*WantResult=*/false,
14920b57cec5SDimitry Andric IsZExt) != 0;
14930b57cec5SDimitry Andric }
14940b57cec5SDimitry Andric
1495fe6060f1SDimitry Andric bool AArch64FastISel::emitICmp_ri(MVT RetVT, unsigned LHSReg, uint64_t Imm) {
1496fe6060f1SDimitry Andric return emitAddSub_ri(/*UseAdd=*/false, RetVT, LHSReg, Imm,
14970b57cec5SDimitry Andric /*SetFlags=*/true, /*WantResult=*/false) != 0;
14980b57cec5SDimitry Andric }
14990b57cec5SDimitry Andric
15000b57cec5SDimitry Andric bool AArch64FastISel::emitFCmp(MVT RetVT, const Value *LHS, const Value *RHS) {
15010b57cec5SDimitry Andric if (RetVT != MVT::f32 && RetVT != MVT::f64)
15020b57cec5SDimitry Andric return false;
15030b57cec5SDimitry Andric
15040b57cec5SDimitry Andric // Check to see if the 2nd operand is a constant that we can encode directly
15050b57cec5SDimitry Andric // in the compare.
15060b57cec5SDimitry Andric bool UseImm = false;
15070b57cec5SDimitry Andric if (const auto *CFP = dyn_cast<ConstantFP>(RHS))
15080b57cec5SDimitry Andric if (CFP->isZero() && !CFP->isNegative())
15090b57cec5SDimitry Andric UseImm = true;
15100b57cec5SDimitry Andric
151104eeddc0SDimitry Andric Register LHSReg = getRegForValue(LHS);
15120b57cec5SDimitry Andric if (!LHSReg)
15130b57cec5SDimitry Andric return false;
15140b57cec5SDimitry Andric
15150b57cec5SDimitry Andric if (UseImm) {
15160b57cec5SDimitry Andric unsigned Opc = (RetVT == MVT::f64) ? AArch64::FCMPDri : AArch64::FCMPSri;
1517bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc))
1518fe6060f1SDimitry Andric .addReg(LHSReg);
15190b57cec5SDimitry Andric return true;
15200b57cec5SDimitry Andric }
15210b57cec5SDimitry Andric
152204eeddc0SDimitry Andric Register RHSReg = getRegForValue(RHS);
15230b57cec5SDimitry Andric if (!RHSReg)
15240b57cec5SDimitry Andric return false;
15250b57cec5SDimitry Andric
15260b57cec5SDimitry Andric unsigned Opc = (RetVT == MVT::f64) ? AArch64::FCMPDrr : AArch64::FCMPSrr;
1527bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc))
1528fe6060f1SDimitry Andric .addReg(LHSReg)
1529fe6060f1SDimitry Andric .addReg(RHSReg);
15300b57cec5SDimitry Andric return true;
15310b57cec5SDimitry Andric }
15320b57cec5SDimitry Andric
15330b57cec5SDimitry Andric unsigned AArch64FastISel::emitAdd(MVT RetVT, const Value *LHS, const Value *RHS,
15340b57cec5SDimitry Andric bool SetFlags, bool WantResult, bool IsZExt) {
15350b57cec5SDimitry Andric return emitAddSub(/*UseAdd=*/true, RetVT, LHS, RHS, SetFlags, WantResult,
15360b57cec5SDimitry Andric IsZExt);
15370b57cec5SDimitry Andric }
15380b57cec5SDimitry Andric
15390b57cec5SDimitry Andric /// This method is a wrapper to simplify add emission.
15400b57cec5SDimitry Andric ///
15410b57cec5SDimitry Andric /// First try to emit an add with an immediate operand using emitAddSub_ri. If
15420b57cec5SDimitry Andric /// that fails, then try to materialize the immediate into a register and use
15430b57cec5SDimitry Andric /// emitAddSub_rr instead.
1544fe6060f1SDimitry Andric unsigned AArch64FastISel::emitAdd_ri_(MVT VT, unsigned Op0, int64_t Imm) {
15450b57cec5SDimitry Andric unsigned ResultReg;
15460b57cec5SDimitry Andric if (Imm < 0)
1547fe6060f1SDimitry Andric ResultReg = emitAddSub_ri(false, VT, Op0, -Imm);
15480b57cec5SDimitry Andric else
1549fe6060f1SDimitry Andric ResultReg = emitAddSub_ri(true, VT, Op0, Imm);
15500b57cec5SDimitry Andric
15510b57cec5SDimitry Andric if (ResultReg)
15520b57cec5SDimitry Andric return ResultReg;
15530b57cec5SDimitry Andric
15540b57cec5SDimitry Andric unsigned CReg = fastEmit_i(VT, VT, ISD::Constant, Imm);
15550b57cec5SDimitry Andric if (!CReg)
15560b57cec5SDimitry Andric return 0;
15570b57cec5SDimitry Andric
1558fe6060f1SDimitry Andric ResultReg = emitAddSub_rr(true, VT, Op0, CReg);
15590b57cec5SDimitry Andric return ResultReg;
15600b57cec5SDimitry Andric }
15610b57cec5SDimitry Andric
15620b57cec5SDimitry Andric unsigned AArch64FastISel::emitSub(MVT RetVT, const Value *LHS, const Value *RHS,
15630b57cec5SDimitry Andric bool SetFlags, bool WantResult, bool IsZExt) {
15640b57cec5SDimitry Andric return emitAddSub(/*UseAdd=*/false, RetVT, LHS, RHS, SetFlags, WantResult,
15650b57cec5SDimitry Andric IsZExt);
15660b57cec5SDimitry Andric }
15670b57cec5SDimitry Andric
15680b57cec5SDimitry Andric unsigned AArch64FastISel::emitSubs_rr(MVT RetVT, unsigned LHSReg,
1569fe6060f1SDimitry Andric unsigned RHSReg, bool WantResult) {
1570fe6060f1SDimitry Andric return emitAddSub_rr(/*UseAdd=*/false, RetVT, LHSReg, RHSReg,
1571fe6060f1SDimitry Andric /*SetFlags=*/true, WantResult);
15720b57cec5SDimitry Andric }
15730b57cec5SDimitry Andric
15740b57cec5SDimitry Andric unsigned AArch64FastISel::emitSubs_rs(MVT RetVT, unsigned LHSReg,
1575fe6060f1SDimitry Andric unsigned RHSReg,
15760b57cec5SDimitry Andric AArch64_AM::ShiftExtendType ShiftType,
15770b57cec5SDimitry Andric uint64_t ShiftImm, bool WantResult) {
1578fe6060f1SDimitry Andric return emitAddSub_rs(/*UseAdd=*/false, RetVT, LHSReg, RHSReg, ShiftType,
1579fe6060f1SDimitry Andric ShiftImm, /*SetFlags=*/true, WantResult);
15800b57cec5SDimitry Andric }
15810b57cec5SDimitry Andric
15820b57cec5SDimitry Andric unsigned AArch64FastISel::emitLogicalOp(unsigned ISDOpc, MVT RetVT,
15830b57cec5SDimitry Andric const Value *LHS, const Value *RHS) {
15840b57cec5SDimitry Andric // Canonicalize immediates to the RHS first.
15850b57cec5SDimitry Andric if (isa<ConstantInt>(LHS) && !isa<ConstantInt>(RHS))
15860b57cec5SDimitry Andric std::swap(LHS, RHS);
15870b57cec5SDimitry Andric
15880b57cec5SDimitry Andric // Canonicalize mul by power-of-2 to the RHS.
15890b57cec5SDimitry Andric if (LHS->hasOneUse() && isValueAvailable(LHS))
15900b57cec5SDimitry Andric if (isMulPowOf2(LHS))
15910b57cec5SDimitry Andric std::swap(LHS, RHS);
15920b57cec5SDimitry Andric
15930b57cec5SDimitry Andric // Canonicalize shift immediate to the RHS.
15940b57cec5SDimitry Andric if (LHS->hasOneUse() && isValueAvailable(LHS))
15950b57cec5SDimitry Andric if (const auto *SI = dyn_cast<ShlOperator>(LHS))
15960b57cec5SDimitry Andric if (isa<ConstantInt>(SI->getOperand(1)))
15970b57cec5SDimitry Andric std::swap(LHS, RHS);
15980b57cec5SDimitry Andric
159904eeddc0SDimitry Andric Register LHSReg = getRegForValue(LHS);
16000b57cec5SDimitry Andric if (!LHSReg)
16010b57cec5SDimitry Andric return 0;
16020b57cec5SDimitry Andric
16030b57cec5SDimitry Andric unsigned ResultReg = 0;
16040b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(RHS)) {
16050b57cec5SDimitry Andric uint64_t Imm = C->getZExtValue();
1606fe6060f1SDimitry Andric ResultReg = emitLogicalOp_ri(ISDOpc, RetVT, LHSReg, Imm);
16070b57cec5SDimitry Andric }
16080b57cec5SDimitry Andric if (ResultReg)
16090b57cec5SDimitry Andric return ResultReg;
16100b57cec5SDimitry Andric
16110b57cec5SDimitry Andric // Check if the mul can be folded into the instruction.
16120b57cec5SDimitry Andric if (RHS->hasOneUse() && isValueAvailable(RHS)) {
16130b57cec5SDimitry Andric if (isMulPowOf2(RHS)) {
16140b57cec5SDimitry Andric const Value *MulLHS = cast<MulOperator>(RHS)->getOperand(0);
16150b57cec5SDimitry Andric const Value *MulRHS = cast<MulOperator>(RHS)->getOperand(1);
16160b57cec5SDimitry Andric
16170b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(MulLHS))
16180b57cec5SDimitry Andric if (C->getValue().isPowerOf2())
16190b57cec5SDimitry Andric std::swap(MulLHS, MulRHS);
16200b57cec5SDimitry Andric
16210b57cec5SDimitry Andric assert(isa<ConstantInt>(MulRHS) && "Expected a ConstantInt.");
16220b57cec5SDimitry Andric uint64_t ShiftVal = cast<ConstantInt>(MulRHS)->getValue().logBase2();
16230b57cec5SDimitry Andric
162404eeddc0SDimitry Andric Register RHSReg = getRegForValue(MulLHS);
16250b57cec5SDimitry Andric if (!RHSReg)
16260b57cec5SDimitry Andric return 0;
1627fe6060f1SDimitry Andric ResultReg = emitLogicalOp_rs(ISDOpc, RetVT, LHSReg, RHSReg, ShiftVal);
16280b57cec5SDimitry Andric if (ResultReg)
16290b57cec5SDimitry Andric return ResultReg;
16300b57cec5SDimitry Andric }
16310b57cec5SDimitry Andric }
16320b57cec5SDimitry Andric
16330b57cec5SDimitry Andric // Check if the shift can be folded into the instruction.
16340b57cec5SDimitry Andric if (RHS->hasOneUse() && isValueAvailable(RHS)) {
16350b57cec5SDimitry Andric if (const auto *SI = dyn_cast<ShlOperator>(RHS))
16360b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(SI->getOperand(1))) {
16370b57cec5SDimitry Andric uint64_t ShiftVal = C->getZExtValue();
163804eeddc0SDimitry Andric Register RHSReg = getRegForValue(SI->getOperand(0));
16390b57cec5SDimitry Andric if (!RHSReg)
16400b57cec5SDimitry Andric return 0;
1641fe6060f1SDimitry Andric ResultReg = emitLogicalOp_rs(ISDOpc, RetVT, LHSReg, RHSReg, ShiftVal);
16420b57cec5SDimitry Andric if (ResultReg)
16430b57cec5SDimitry Andric return ResultReg;
16440b57cec5SDimitry Andric }
16450b57cec5SDimitry Andric }
16460b57cec5SDimitry Andric
164704eeddc0SDimitry Andric Register RHSReg = getRegForValue(RHS);
16480b57cec5SDimitry Andric if (!RHSReg)
16490b57cec5SDimitry Andric return 0;
16500b57cec5SDimitry Andric
16510b57cec5SDimitry Andric MVT VT = std::max(MVT::i32, RetVT.SimpleTy);
1652fe6060f1SDimitry Andric ResultReg = fastEmit_rr(VT, VT, ISDOpc, LHSReg, RHSReg);
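  // Results narrower than i32 must be masked back down to the value width.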
16530b57cec5SDimitry Andric if (RetVT >= MVT::i8 && RetVT <= MVT::i16) {
16540b57cec5SDimitry Andric uint64_t Mask = (RetVT == MVT::i8) ? 0xff : 0xffff;
1655fe6060f1SDimitry Andric ResultReg = emitAnd_ri(MVT::i32, ResultReg, Mask);
16560b57cec5SDimitry Andric }
16570b57cec5SDimitry Andric return ResultReg;
16580b57cec5SDimitry Andric }
16590b57cec5SDimitry Andric
16600b57cec5SDimitry Andric unsigned AArch64FastISel::emitLogicalOp_ri(unsigned ISDOpc, MVT RetVT,
1661fe6060f1SDimitry Andric unsigned LHSReg, uint64_t Imm) {
16620b57cec5SDimitry Andric static_assert((ISD::AND + 1 == ISD::OR) && (ISD::AND + 2 == ISD::XOR),
16630b57cec5SDimitry Andric "ISD nodes are not consecutive!");
16640b57cec5SDimitry Andric static const unsigned OpcTable[3][2] = {
16650b57cec5SDimitry Andric { AArch64::ANDWri, AArch64::ANDXri },
16660b57cec5SDimitry Andric { AArch64::ORRWri, AArch64::ORRXri },
16670b57cec5SDimitry Andric { AArch64::EORWri, AArch64::EORXri }
16680b57cec5SDimitry Andric };
16690b57cec5SDimitry Andric const TargetRegisterClass *RC;
16700b57cec5SDimitry Andric unsigned Opc;
16710b57cec5SDimitry Andric unsigned RegSize;
16720b57cec5SDimitry Andric switch (RetVT.SimpleTy) {
16730b57cec5SDimitry Andric default:
16740b57cec5SDimitry Andric return 0;
16750b57cec5SDimitry Andric case MVT::i1:
16760b57cec5SDimitry Andric case MVT::i8:
16770b57cec5SDimitry Andric case MVT::i16:
16780b57cec5SDimitry Andric case MVT::i32: {
16790b57cec5SDimitry Andric unsigned Idx = ISDOpc - ISD::AND;
16800b57cec5SDimitry Andric Opc = OpcTable[Idx][0];
16810b57cec5SDimitry Andric RC = &AArch64::GPR32spRegClass;
16820b57cec5SDimitry Andric RegSize = 32;
16830b57cec5SDimitry Andric break;
16840b57cec5SDimitry Andric }
16850b57cec5SDimitry Andric case MVT::i64:
16860b57cec5SDimitry Andric Opc = OpcTable[ISDOpc - ISD::AND][1];
16870b57cec5SDimitry Andric RC = &AArch64::GPR64spRegClass;
16880b57cec5SDimitry Andric RegSize = 64;
16890b57cec5SDimitry Andric break;
16900b57cec5SDimitry Andric }
16910b57cec5SDimitry Andric
16920b57cec5SDimitry Andric if (!AArch64_AM::isLogicalImmediate(Imm, RegSize))
16930b57cec5SDimitry Andric return 0;
16940b57cec5SDimitry Andric
169504eeddc0SDimitry Andric Register ResultReg =
1696fe6060f1SDimitry Andric fastEmitInst_ri(Opc, RC, LHSReg,
16970b57cec5SDimitry Andric AArch64_AM::encodeLogicalImmediate(Imm, RegSize));
16980b57cec5SDimitry Andric if (RetVT >= MVT::i8 && RetVT <= MVT::i16 && ISDOpc != ISD::AND) {
16990b57cec5SDimitry Andric uint64_t Mask = (RetVT == MVT::i8) ? 0xff : 0xffff;
1700fe6060f1SDimitry Andric ResultReg = emitAnd_ri(MVT::i32, ResultReg, Mask);
17010b57cec5SDimitry Andric }
17020b57cec5SDimitry Andric return ResultReg;
17030b57cec5SDimitry Andric }
17040b57cec5SDimitry Andric
17050b57cec5SDimitry Andric unsigned AArch64FastISel::emitLogicalOp_rs(unsigned ISDOpc, MVT RetVT,
1706fe6060f1SDimitry Andric unsigned LHSReg, unsigned RHSReg,
17070b57cec5SDimitry Andric uint64_t ShiftImm) {
17080b57cec5SDimitry Andric static_assert((ISD::AND + 1 == ISD::OR) && (ISD::AND + 2 == ISD::XOR),
17090b57cec5SDimitry Andric "ISD nodes are not consecutive!");
17100b57cec5SDimitry Andric static const unsigned OpcTable[3][2] = {
17110b57cec5SDimitry Andric { AArch64::ANDWrs, AArch64::ANDXrs },
17120b57cec5SDimitry Andric { AArch64::ORRWrs, AArch64::ORRXrs },
17130b57cec5SDimitry Andric { AArch64::EORWrs, AArch64::EORXrs }
17140b57cec5SDimitry Andric };
17150b57cec5SDimitry Andric
17160b57cec5SDimitry Andric // Don't deal with undefined shifts.
17170b57cec5SDimitry Andric if (ShiftImm >= RetVT.getSizeInBits())
17180b57cec5SDimitry Andric return 0;
17190b57cec5SDimitry Andric
17200b57cec5SDimitry Andric const TargetRegisterClass *RC;
17210b57cec5SDimitry Andric unsigned Opc;
17220b57cec5SDimitry Andric switch (RetVT.SimpleTy) {
17230b57cec5SDimitry Andric default:
17240b57cec5SDimitry Andric return 0;
17250b57cec5SDimitry Andric case MVT::i1:
17260b57cec5SDimitry Andric case MVT::i8:
17270b57cec5SDimitry Andric case MVT::i16:
17280b57cec5SDimitry Andric case MVT::i32:
17290b57cec5SDimitry Andric Opc = OpcTable[ISDOpc - ISD::AND][0];
17300b57cec5SDimitry Andric RC = &AArch64::GPR32RegClass;
17310b57cec5SDimitry Andric break;
17320b57cec5SDimitry Andric case MVT::i64:
17330b57cec5SDimitry Andric Opc = OpcTable[ISDOpc - ISD::AND][1];
17340b57cec5SDimitry Andric RC = &AArch64::GPR64RegClass;
17350b57cec5SDimitry Andric break;
17360b57cec5SDimitry Andric }
173704eeddc0SDimitry Andric Register ResultReg =
1738fe6060f1SDimitry Andric fastEmitInst_rri(Opc, RC, LHSReg, RHSReg,
17390b57cec5SDimitry Andric AArch64_AM::getShifterImm(AArch64_AM::LSL, ShiftImm));
17400b57cec5SDimitry Andric if (RetVT >= MVT::i8 && RetVT <= MVT::i16) {
17410b57cec5SDimitry Andric uint64_t Mask = (RetVT == MVT::i8) ? 0xff : 0xffff;
1742fe6060f1SDimitry Andric ResultReg = emitAnd_ri(MVT::i32, ResultReg, Mask);
17430b57cec5SDimitry Andric }
17440b57cec5SDimitry Andric return ResultReg;
17450b57cec5SDimitry Andric }
17460b57cec5SDimitry Andric
1747fe6060f1SDimitry Andric unsigned AArch64FastISel::emitAnd_ri(MVT RetVT, unsigned LHSReg,
17480b57cec5SDimitry Andric uint64_t Imm) {
1749fe6060f1SDimitry Andric return emitLogicalOp_ri(ISD::AND, RetVT, LHSReg, Imm);
17500b57cec5SDimitry Andric }
17510b57cec5SDimitry Andric
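/// Emit a load of type VT from the given address; the result is zero- or
/// sign-extended to RetVT as requested by WantZExt.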
17520b57cec5SDimitry Andric unsigned AArch64FastISel::emitLoad(MVT VT, MVT RetVT, Address Addr,
17530b57cec5SDimitry Andric bool WantZExt, MachineMemOperand *MMO) {
17540b57cec5SDimitry Andric if (!TLI.allowsMisalignedMemoryAccesses(VT))
17550b57cec5SDimitry Andric return 0;
17560b57cec5SDimitry Andric
17570b57cec5SDimitry Andric // Simplify this down to something we can handle.
17580b57cec5SDimitry Andric if (!simplifyAddress(Addr, VT))
17590b57cec5SDimitry Andric return 0;
17600b57cec5SDimitry Andric
17610b57cec5SDimitry Andric unsigned ScaleFactor = getImplicitScaleFactor(VT);
17620b57cec5SDimitry Andric if (!ScaleFactor)
17630b57cec5SDimitry Andric llvm_unreachable("Unexpected value type.");
17640b57cec5SDimitry Andric
17650b57cec5SDimitry Andric // Negative offsets require unscaled, 9-bit, signed immediate offsets.
17660b57cec5SDimitry Andric // Otherwise, we try using scaled, 12-bit, unsigned immediate offsets.
17670b57cec5SDimitry Andric bool UseScaled = true;
17680b57cec5SDimitry Andric if ((Addr.getOffset() < 0) || (Addr.getOffset() & (ScaleFactor - 1))) {
17690b57cec5SDimitry Andric UseScaled = false;
17700b57cec5SDimitry Andric ScaleFactor = 1;
17710b57cec5SDimitry Andric }
17720b57cec5SDimitry Andric
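  // Rows come in pairs (32-bit/64-bit destination) for each addressing form:
  // unscaled immediate, scaled immediate, register offset (X index), and
  // register offset (W index); indexed as [WantZExt][2 * Idx + IsRet64Bit].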
17730b57cec5SDimitry Andric static const unsigned GPOpcTable[2][8][4] = {
17740b57cec5SDimitry Andric // Sign-extend.
17750b57cec5SDimitry Andric { { AArch64::LDURSBWi, AArch64::LDURSHWi, AArch64::LDURWi,
17760b57cec5SDimitry Andric AArch64::LDURXi },
17770b57cec5SDimitry Andric { AArch64::LDURSBXi, AArch64::LDURSHXi, AArch64::LDURSWi,
17780b57cec5SDimitry Andric AArch64::LDURXi },
17790b57cec5SDimitry Andric { AArch64::LDRSBWui, AArch64::LDRSHWui, AArch64::LDRWui,
17800b57cec5SDimitry Andric AArch64::LDRXui },
17810b57cec5SDimitry Andric { AArch64::LDRSBXui, AArch64::LDRSHXui, AArch64::LDRSWui,
17820b57cec5SDimitry Andric AArch64::LDRXui },
17830b57cec5SDimitry Andric { AArch64::LDRSBWroX, AArch64::LDRSHWroX, AArch64::LDRWroX,
17840b57cec5SDimitry Andric AArch64::LDRXroX },
17850b57cec5SDimitry Andric { AArch64::LDRSBXroX, AArch64::LDRSHXroX, AArch64::LDRSWroX,
17860b57cec5SDimitry Andric AArch64::LDRXroX },
17870b57cec5SDimitry Andric { AArch64::LDRSBWroW, AArch64::LDRSHWroW, AArch64::LDRWroW,
17880b57cec5SDimitry Andric AArch64::LDRXroW },
17890b57cec5SDimitry Andric { AArch64::LDRSBXroW, AArch64::LDRSHXroW, AArch64::LDRSWroW,
17900b57cec5SDimitry Andric AArch64::LDRXroW }
17910b57cec5SDimitry Andric },
17920b57cec5SDimitry Andric // Zero-extend.
17930b57cec5SDimitry Andric { { AArch64::LDURBBi, AArch64::LDURHHi, AArch64::LDURWi,
17940b57cec5SDimitry Andric AArch64::LDURXi },
17950b57cec5SDimitry Andric { AArch64::LDURBBi, AArch64::LDURHHi, AArch64::LDURWi,
17960b57cec5SDimitry Andric AArch64::LDURXi },
17970b57cec5SDimitry Andric { AArch64::LDRBBui, AArch64::LDRHHui, AArch64::LDRWui,
17980b57cec5SDimitry Andric AArch64::LDRXui },
17990b57cec5SDimitry Andric { AArch64::LDRBBui, AArch64::LDRHHui, AArch64::LDRWui,
18000b57cec5SDimitry Andric AArch64::LDRXui },
18010b57cec5SDimitry Andric { AArch64::LDRBBroX, AArch64::LDRHHroX, AArch64::LDRWroX,
18020b57cec5SDimitry Andric AArch64::LDRXroX },
18030b57cec5SDimitry Andric { AArch64::LDRBBroX, AArch64::LDRHHroX, AArch64::LDRWroX,
18040b57cec5SDimitry Andric AArch64::LDRXroX },
18050b57cec5SDimitry Andric { AArch64::LDRBBroW, AArch64::LDRHHroW, AArch64::LDRWroW,
18060b57cec5SDimitry Andric AArch64::LDRXroW },
18070b57cec5SDimitry Andric { AArch64::LDRBBroW, AArch64::LDRHHroW, AArch64::LDRWroW,
18080b57cec5SDimitry Andric AArch64::LDRXroW }
18090b57cec5SDimitry Andric }
18100b57cec5SDimitry Andric };
18110b57cec5SDimitry Andric
18120b57cec5SDimitry Andric static const unsigned FPOpcTable[4][2] = {
18130b57cec5SDimitry Andric { AArch64::LDURSi, AArch64::LDURDi },
18140b57cec5SDimitry Andric { AArch64::LDRSui, AArch64::LDRDui },
18150b57cec5SDimitry Andric { AArch64::LDRSroX, AArch64::LDRDroX },
18160b57cec5SDimitry Andric { AArch64::LDRSroW, AArch64::LDRDroW }
18170b57cec5SDimitry Andric };
18180b57cec5SDimitry Andric
18190b57cec5SDimitry Andric unsigned Opc;
18200b57cec5SDimitry Andric const TargetRegisterClass *RC;
18210b57cec5SDimitry Andric bool UseRegOffset = Addr.isRegBase() && !Addr.getOffset() && Addr.getReg() &&
18220b57cec5SDimitry Andric Addr.getOffsetReg();
18230b57cec5SDimitry Andric unsigned Idx = UseRegOffset ? 2 : UseScaled ? 1 : 0;
18240b57cec5SDimitry Andric if (Addr.getExtendType() == AArch64_AM::UXTW ||
18250b57cec5SDimitry Andric Addr.getExtendType() == AArch64_AM::SXTW)
18260b57cec5SDimitry Andric Idx++;
18270b57cec5SDimitry Andric
18280b57cec5SDimitry Andric bool IsRet64Bit = RetVT == MVT::i64;
18290b57cec5SDimitry Andric switch (VT.SimpleTy) {
18300b57cec5SDimitry Andric default:
18310b57cec5SDimitry Andric llvm_unreachable("Unexpected value type.");
18320b57cec5SDimitry Andric case MVT::i1: // Intentional fall-through.
18330b57cec5SDimitry Andric case MVT::i8:
18340b57cec5SDimitry Andric Opc = GPOpcTable[WantZExt][2 * Idx + IsRet64Bit][0];
18350b57cec5SDimitry Andric RC = (IsRet64Bit && !WantZExt) ?
18360b57cec5SDimitry Andric &AArch64::GPR64RegClass: &AArch64::GPR32RegClass;
18370b57cec5SDimitry Andric break;
18380b57cec5SDimitry Andric case MVT::i16:
18390b57cec5SDimitry Andric Opc = GPOpcTable[WantZExt][2 * Idx + IsRet64Bit][1];
18400b57cec5SDimitry Andric RC = (IsRet64Bit && !WantZExt) ?
18410b57cec5SDimitry Andric &AArch64::GPR64RegClass: &AArch64::GPR32RegClass;
18420b57cec5SDimitry Andric break;
18430b57cec5SDimitry Andric case MVT::i32:
18440b57cec5SDimitry Andric Opc = GPOpcTable[WantZExt][2 * Idx + IsRet64Bit][2];
18450b57cec5SDimitry Andric RC = (IsRet64Bit && !WantZExt) ?
18460b57cec5SDimitry Andric &AArch64::GPR64RegClass: &AArch64::GPR32RegClass;
18470b57cec5SDimitry Andric break;
18480b57cec5SDimitry Andric case MVT::i64:
18490b57cec5SDimitry Andric Opc = GPOpcTable[WantZExt][2 * Idx + IsRet64Bit][3];
18500b57cec5SDimitry Andric RC = &AArch64::GPR64RegClass;
18510b57cec5SDimitry Andric break;
18520b57cec5SDimitry Andric case MVT::f32:
18530b57cec5SDimitry Andric Opc = FPOpcTable[Idx][0];
18540b57cec5SDimitry Andric RC = &AArch64::FPR32RegClass;
18550b57cec5SDimitry Andric break;
18560b57cec5SDimitry Andric case MVT::f64:
18570b57cec5SDimitry Andric Opc = FPOpcTable[Idx][1];
18580b57cec5SDimitry Andric RC = &AArch64::FPR64RegClass;
18590b57cec5SDimitry Andric break;
18600b57cec5SDimitry Andric }
18610b57cec5SDimitry Andric
18620b57cec5SDimitry Andric // Create the base instruction, then add the operands.
186304eeddc0SDimitry Andric Register ResultReg = createResultReg(RC);
1864bdd1243dSDimitry Andric MachineInstrBuilder MIB = BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
18650b57cec5SDimitry Andric TII.get(Opc), ResultReg);
18660b57cec5SDimitry Andric addLoadStoreOperands(Addr, MIB, MachineMemOperand::MOLoad, ScaleFactor, MMO);
18670b57cec5SDimitry Andric
18680b57cec5SDimitry Andric // Loading an i1 requires special handling.
18690b57cec5SDimitry Andric if (VT == MVT::i1) {
1870fe6060f1SDimitry Andric unsigned ANDReg = emitAnd_ri(MVT::i32, ResultReg, 1);
18710b57cec5SDimitry Andric assert(ANDReg && "Unexpected AND instruction emission failure.");
18720b57cec5SDimitry Andric ResultReg = ANDReg;
18730b57cec5SDimitry Andric }
18740b57cec5SDimitry Andric
18750b57cec5SDimitry Andric // For zero-extending loads to 64bit we emit a 32bit load and then convert
18760b57cec5SDimitry Andric // the 32bit reg to a 64bit reg.
18770b57cec5SDimitry Andric if (WantZExt && RetVT == MVT::i64 && VT <= MVT::i32) {
187804eeddc0SDimitry Andric Register Reg64 = createResultReg(&AArch64::GPR64RegClass);
1879bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
18800b57cec5SDimitry Andric TII.get(AArch64::SUBREG_TO_REG), Reg64)
18810b57cec5SDimitry Andric .addImm(0)
18820b57cec5SDimitry Andric .addReg(ResultReg, getKillRegState(true))
18830b57cec5SDimitry Andric .addImm(AArch64::sub_32);
18840b57cec5SDimitry Andric ResultReg = Reg64;
18850b57cec5SDimitry Andric }
18860b57cec5SDimitry Andric return ResultReg;
18870b57cec5SDimitry Andric }
18880b57cec5SDimitry Andric
18890b57cec5SDimitry Andric bool AArch64FastISel::selectAddSub(const Instruction *I) {
18900b57cec5SDimitry Andric MVT VT;
18910b57cec5SDimitry Andric if (!isTypeSupported(I->getType(), VT, /*IsVectorAllowed=*/true))
18920b57cec5SDimitry Andric return false;
18930b57cec5SDimitry Andric
18940b57cec5SDimitry Andric if (VT.isVector())
18950b57cec5SDimitry Andric return selectOperator(I, I->getOpcode());
18960b57cec5SDimitry Andric
18970b57cec5SDimitry Andric unsigned ResultReg;
18980b57cec5SDimitry Andric switch (I->getOpcode()) {
18990b57cec5SDimitry Andric default:
19000b57cec5SDimitry Andric llvm_unreachable("Unexpected instruction.");
19010b57cec5SDimitry Andric case Instruction::Add:
19020b57cec5SDimitry Andric ResultReg = emitAdd(VT, I->getOperand(0), I->getOperand(1));
19030b57cec5SDimitry Andric break;
19040b57cec5SDimitry Andric case Instruction::Sub:
19050b57cec5SDimitry Andric ResultReg = emitSub(VT, I->getOperand(0), I->getOperand(1));
19060b57cec5SDimitry Andric break;
19070b57cec5SDimitry Andric }
19080b57cec5SDimitry Andric if (!ResultReg)
19090b57cec5SDimitry Andric return false;
19100b57cec5SDimitry Andric
19110b57cec5SDimitry Andric updateValueMap(I, ResultReg);
19120b57cec5SDimitry Andric return true;
19130b57cec5SDimitry Andric }
19140b57cec5SDimitry Andric
19150b57cec5SDimitry Andric bool AArch64FastISel::selectLogicalOp(const Instruction *I) {
19160b57cec5SDimitry Andric MVT VT;
19170b57cec5SDimitry Andric if (!isTypeSupported(I->getType(), VT, /*IsVectorAllowed=*/true))
19180b57cec5SDimitry Andric return false;
19190b57cec5SDimitry Andric
19200b57cec5SDimitry Andric if (VT.isVector())
19210b57cec5SDimitry Andric return selectOperator(I, I->getOpcode());
19220b57cec5SDimitry Andric
19230b57cec5SDimitry Andric unsigned ResultReg;
19240b57cec5SDimitry Andric switch (I->getOpcode()) {
19250b57cec5SDimitry Andric default:
19260b57cec5SDimitry Andric llvm_unreachable("Unexpected instruction.");
19270b57cec5SDimitry Andric case Instruction::And:
19280b57cec5SDimitry Andric ResultReg = emitLogicalOp(ISD::AND, VT, I->getOperand(0), I->getOperand(1));
19290b57cec5SDimitry Andric break;
19300b57cec5SDimitry Andric case Instruction::Or:
19310b57cec5SDimitry Andric ResultReg = emitLogicalOp(ISD::OR, VT, I->getOperand(0), I->getOperand(1));
19320b57cec5SDimitry Andric break;
19330b57cec5SDimitry Andric case Instruction::Xor:
19340b57cec5SDimitry Andric ResultReg = emitLogicalOp(ISD::XOR, VT, I->getOperand(0), I->getOperand(1));
19350b57cec5SDimitry Andric break;
19360b57cec5SDimitry Andric }
19370b57cec5SDimitry Andric if (!ResultReg)
19380b57cec5SDimitry Andric return false;
19390b57cec5SDimitry Andric
19400b57cec5SDimitry Andric updateValueMap(I, ResultReg);
19410b57cec5SDimitry Andric return true;
19420b57cec5SDimitry Andric }
19430b57cec5SDimitry Andric
19440b57cec5SDimitry Andric bool AArch64FastISel::selectLoad(const Instruction *I) {
19450b57cec5SDimitry Andric MVT VT;
19460b57cec5SDimitry Andric // Verify we have a legal type before going any further. Currently, we handle
19470b57cec5SDimitry Andric // simple types that will directly fit in a register (i32/f32/i64/f64) or
19480b57cec5SDimitry Andric // those that can be sign or zero-extended to a basic operation (i1/i8/i16).
19490b57cec5SDimitry Andric if (!isTypeSupported(I->getType(), VT, /*IsVectorAllowed=*/true) ||
19500b57cec5SDimitry Andric cast<LoadInst>(I)->isAtomic())
19510b57cec5SDimitry Andric return false;
19520b57cec5SDimitry Andric
19530b57cec5SDimitry Andric const Value *SV = I->getOperand(0);
19540b57cec5SDimitry Andric if (TLI.supportSwiftError()) {
19550b57cec5SDimitry Andric // Swifterror values can come from either a function parameter with
19560b57cec5SDimitry Andric // swifterror attribute or an alloca with swifterror attribute.
19570b57cec5SDimitry Andric if (const Argument *Arg = dyn_cast<Argument>(SV)) {
19580b57cec5SDimitry Andric if (Arg->hasSwiftErrorAttr())
19590b57cec5SDimitry Andric return false;
19600b57cec5SDimitry Andric }
19610b57cec5SDimitry Andric
19620b57cec5SDimitry Andric if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(SV)) {
19630b57cec5SDimitry Andric if (Alloca->isSwiftError())
19640b57cec5SDimitry Andric return false;
19650b57cec5SDimitry Andric }
19660b57cec5SDimitry Andric }
19670b57cec5SDimitry Andric
19680b57cec5SDimitry Andric // See if we can handle this address.
19690b57cec5SDimitry Andric Address Addr;
19700b57cec5SDimitry Andric if (!computeAddress(I->getOperand(0), Addr, I->getType()))
19710b57cec5SDimitry Andric return false;
19720b57cec5SDimitry Andric
19730b57cec5SDimitry Andric // Fold the following sign-/zero-extend into the load instruction.
19740b57cec5SDimitry Andric bool WantZExt = true;
19750b57cec5SDimitry Andric MVT RetVT = VT;
19760b57cec5SDimitry Andric const Value *IntExtVal = nullptr;
19770b57cec5SDimitry Andric if (I->hasOneUse()) {
19780b57cec5SDimitry Andric if (const auto *ZE = dyn_cast<ZExtInst>(I->use_begin()->getUser())) {
19790b57cec5SDimitry Andric if (isTypeSupported(ZE->getType(), RetVT))
19800b57cec5SDimitry Andric IntExtVal = ZE;
19810b57cec5SDimitry Andric else
19820b57cec5SDimitry Andric RetVT = VT;
19830b57cec5SDimitry Andric } else if (const auto *SE = dyn_cast<SExtInst>(I->use_begin()->getUser())) {
19840b57cec5SDimitry Andric if (isTypeSupported(SE->getType(), RetVT))
19850b57cec5SDimitry Andric IntExtVal = SE;
19860b57cec5SDimitry Andric else
19870b57cec5SDimitry Andric RetVT = VT;
19880b57cec5SDimitry Andric WantZExt = false;
19890b57cec5SDimitry Andric }
19900b57cec5SDimitry Andric }
19910b57cec5SDimitry Andric
19920b57cec5SDimitry Andric unsigned ResultReg =
19930b57cec5SDimitry Andric emitLoad(VT, RetVT, Addr, WantZExt, createMachineMemOperandFor(I));
19940b57cec5SDimitry Andric if (!ResultReg)
19950b57cec5SDimitry Andric return false;
19960b57cec5SDimitry Andric
19970b57cec5SDimitry Andric // There are a few different cases we have to handle, because the load or the
19980b57cec5SDimitry Andric // sign-/zero-extend might not be selected by FastISel if we fall back to
19990b57cec5SDimitry Andric // SelectionDAG. There is also an ordering issue when both instructions are in
20000b57cec5SDimitry Andric // different basic blocks.
20010b57cec5SDimitry Andric // 1.) The load instruction is selected by FastISel, but the integer extend is
20020b57cec5SDimitry Andric // not. This usually happens when the integer extend is in a different
20030b57cec5SDimitry Andric // basic block and SelectionDAG took over for that basic block.
20040b57cec5SDimitry Andric // 2.) The load instruction is selected before the integer extend. This only
20050b57cec5SDimitry Andric // happens when the integer extend is in a different basic block.
20060b57cec5SDimitry Andric // 3.) The load instruction is selected by SelectionDAG and the integer extend
20070b57cec5SDimitry Andric // by FastISel. This happens if there are instructions between the load
20080b57cec5SDimitry Andric // and the integer extend that couldn't be selected by FastISel.
20090b57cec5SDimitry Andric if (IntExtVal) {
20100b57cec5SDimitry Andric // The integer extend hasn't been emitted yet. FastISel or SelectionDAG
20110b57cec5SDimitry Andric // could select it. Emit a copy to subreg if necessary. FastISel will remove
20120b57cec5SDimitry Andric // it when it selects the integer extend.
201304eeddc0SDimitry Andric Register Reg = lookUpRegForValue(IntExtVal);
20140b57cec5SDimitry Andric auto *MI = MRI.getUniqueVRegDef(Reg);
20150b57cec5SDimitry Andric if (!MI) {
20160b57cec5SDimitry Andric if (RetVT == MVT::i64 && VT <= MVT::i32) {
20170b57cec5SDimitry Andric if (WantZExt) {
20180b57cec5SDimitry Andric // Delete the last emitted instruction from emitLoad (SUBREG_TO_REG).
20190b57cec5SDimitry Andric MachineBasicBlock::iterator I(std::prev(FuncInfo.InsertPt));
20200b57cec5SDimitry Andric ResultReg = std::prev(I)->getOperand(0).getReg();
20210b57cec5SDimitry Andric removeDeadCode(I, std::next(I));
20220b57cec5SDimitry Andric } else
20230b57cec5SDimitry Andric ResultReg = fastEmitInst_extractsubreg(MVT::i32, ResultReg,
20240b57cec5SDimitry Andric AArch64::sub_32);
20250b57cec5SDimitry Andric }
20260b57cec5SDimitry Andric updateValueMap(I, ResultReg);
20270b57cec5SDimitry Andric return true;
20280b57cec5SDimitry Andric }
20290b57cec5SDimitry Andric
20300b57cec5SDimitry Andric // The integer extend has already been emitted - delete all the instructions
20310b57cec5SDimitry Andric // that have been emitted by the integer extend lowering code and use the
20320b57cec5SDimitry Andric // result from the load instruction directly.
20330b57cec5SDimitry Andric while (MI) {
20340b57cec5SDimitry Andric Reg = 0;
20350b57cec5SDimitry Andric for (auto &Opnd : MI->uses()) {
20360b57cec5SDimitry Andric if (Opnd.isReg()) {
20370b57cec5SDimitry Andric Reg = Opnd.getReg();
20380b57cec5SDimitry Andric break;
20390b57cec5SDimitry Andric }
20400b57cec5SDimitry Andric }
20410b57cec5SDimitry Andric MachineBasicBlock::iterator I(MI);
20420b57cec5SDimitry Andric removeDeadCode(I, std::next(I));
20430b57cec5SDimitry Andric MI = nullptr;
20440b57cec5SDimitry Andric if (Reg)
20450b57cec5SDimitry Andric MI = MRI.getUniqueVRegDef(Reg);
20460b57cec5SDimitry Andric }
20470b57cec5SDimitry Andric updateValueMap(IntExtVal, ResultReg);
20480b57cec5SDimitry Andric return true;
20490b57cec5SDimitry Andric }
20500b57cec5SDimitry Andric
20510b57cec5SDimitry Andric updateValueMap(I, ResultReg);
20520b57cec5SDimitry Andric return true;
20530b57cec5SDimitry Andric }
20540b57cec5SDimitry Andric
20550b57cec5SDimitry Andric bool AArch64FastISel::emitStoreRelease(MVT VT, unsigned SrcReg,
20560b57cec5SDimitry Andric unsigned AddrReg,
20570b57cec5SDimitry Andric MachineMemOperand *MMO) {
20580b57cec5SDimitry Andric unsigned Opc;
20590b57cec5SDimitry Andric switch (VT.SimpleTy) {
20600b57cec5SDimitry Andric default: return false;
20610b57cec5SDimitry Andric case MVT::i8: Opc = AArch64::STLRB; break;
20620b57cec5SDimitry Andric case MVT::i16: Opc = AArch64::STLRH; break;
20630b57cec5SDimitry Andric case MVT::i32: Opc = AArch64::STLRW; break;
20640b57cec5SDimitry Andric case MVT::i64: Opc = AArch64::STLRX; break;
20650b57cec5SDimitry Andric }
20660b57cec5SDimitry Andric
20670b57cec5SDimitry Andric const MCInstrDesc &II = TII.get(Opc);
20680b57cec5SDimitry Andric SrcReg = constrainOperandRegClass(II, SrcReg, 0);
20690b57cec5SDimitry Andric AddrReg = constrainOperandRegClass(II, AddrReg, 1);
2070bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, II)
20710b57cec5SDimitry Andric .addReg(SrcReg)
20720b57cec5SDimitry Andric .addReg(AddrReg)
20730b57cec5SDimitry Andric .addMemOperand(MMO);
20740b57cec5SDimitry Andric return true;
20750b57cec5SDimitry Andric }
20760b57cec5SDimitry Andric
20770b57cec5SDimitry Andric bool AArch64FastISel::emitStore(MVT VT, unsigned SrcReg, Address Addr,
20780b57cec5SDimitry Andric MachineMemOperand *MMO) {
20790b57cec5SDimitry Andric if (!TLI.allowsMisalignedMemoryAccesses(VT))
20800b57cec5SDimitry Andric return false;
20810b57cec5SDimitry Andric
20820b57cec5SDimitry Andric // Simplify this down to something we can handle.
20830b57cec5SDimitry Andric if (!simplifyAddress(Addr, VT))
20840b57cec5SDimitry Andric return false;
20850b57cec5SDimitry Andric
20860b57cec5SDimitry Andric unsigned ScaleFactor = getImplicitScaleFactor(VT);
20870b57cec5SDimitry Andric if (!ScaleFactor)
20880b57cec5SDimitry Andric llvm_unreachable("Unexpected value type.");
20890b57cec5SDimitry Andric
20900b57cec5SDimitry Andric // Negative offsets require unscaled, 9-bit, signed immediate offsets.
20910b57cec5SDimitry Andric // Otherwise, we try using scaled, 12-bit, unsigned immediate offsets.
20920b57cec5SDimitry Andric bool UseScaled = true;
20930b57cec5SDimitry Andric if ((Addr.getOffset() < 0) || (Addr.getOffset() & (ScaleFactor - 1))) {
20940b57cec5SDimitry Andric UseScaled = false;
20950b57cec5SDimitry Andric ScaleFactor = 1;
20960b57cec5SDimitry Andric }
20970b57cec5SDimitry Andric
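// Store opcodes indexed first by addressing mode (unscaled immediate, scaled
// immediate, 64-bit register offset, 32-bit extended register offset) and
// then by value type (i8, i16, i32, i64, f32, f64).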
20980b57cec5SDimitry Andric static const unsigned OpcTable[4][6] = {
20990b57cec5SDimitry Andric { AArch64::STURBBi, AArch64::STURHHi, AArch64::STURWi, AArch64::STURXi,
21000b57cec5SDimitry Andric AArch64::STURSi, AArch64::STURDi },
21010b57cec5SDimitry Andric { AArch64::STRBBui, AArch64::STRHHui, AArch64::STRWui, AArch64::STRXui,
21020b57cec5SDimitry Andric AArch64::STRSui, AArch64::STRDui },
21030b57cec5SDimitry Andric { AArch64::STRBBroX, AArch64::STRHHroX, AArch64::STRWroX, AArch64::STRXroX,
21040b57cec5SDimitry Andric AArch64::STRSroX, AArch64::STRDroX },
21050b57cec5SDimitry Andric { AArch64::STRBBroW, AArch64::STRHHroW, AArch64::STRWroW, AArch64::STRXroW,
21060b57cec5SDimitry Andric AArch64::STRSroW, AArch64::STRDroW }
21070b57cec5SDimitry Andric };
21080b57cec5SDimitry Andric
21090b57cec5SDimitry Andric unsigned Opc;
21100b57cec5SDimitry Andric bool VTIsi1 = false;
21110b57cec5SDimitry Andric bool UseRegOffset = Addr.isRegBase() && !Addr.getOffset() && Addr.getReg() &&
21120b57cec5SDimitry Andric Addr.getOffsetReg();
21130b57cec5SDimitry Andric unsigned Idx = UseRegOffset ? 2 : UseScaled ? 1 : 0;
21140b57cec5SDimitry Andric if (Addr.getExtendType() == AArch64_AM::UXTW ||
21150b57cec5SDimitry Andric Addr.getExtendType() == AArch64_AM::SXTW)
21160b57cec5SDimitry Andric Idx++;
21170b57cec5SDimitry Andric
21180b57cec5SDimitry Andric switch (VT.SimpleTy) {
21190b57cec5SDimitry Andric default: llvm_unreachable("Unexpected value type.");
2120bdd1243dSDimitry Andric case MVT::i1: VTIsi1 = true; [[fallthrough]];
21210b57cec5SDimitry Andric case MVT::i8: Opc = OpcTable[Idx][0]; break;
21220b57cec5SDimitry Andric case MVT::i16: Opc = OpcTable[Idx][1]; break;
21230b57cec5SDimitry Andric case MVT::i32: Opc = OpcTable[Idx][2]; break;
21240b57cec5SDimitry Andric case MVT::i64: Opc = OpcTable[Idx][3]; break;
21250b57cec5SDimitry Andric case MVT::f32: Opc = OpcTable[Idx][4]; break;
21260b57cec5SDimitry Andric case MVT::f64: Opc = OpcTable[Idx][5]; break;
21270b57cec5SDimitry Andric }
21280b57cec5SDimitry Andric
21290b57cec5SDimitry Andric // Storing an i1 requires special handling.
21300b57cec5SDimitry Andric if (VTIsi1 && SrcReg != AArch64::WZR) {
2131fe6060f1SDimitry Andric unsigned ANDReg = emitAnd_ri(MVT::i32, SrcReg, 1);
21320b57cec5SDimitry Andric assert(ANDReg && "Unexpected AND instruction emission failure.");
21330b57cec5SDimitry Andric SrcReg = ANDReg;
21340b57cec5SDimitry Andric }
21350b57cec5SDimitry Andric // Create the base instruction, then add the operands.
21360b57cec5SDimitry Andric const MCInstrDesc &II = TII.get(Opc);
21370b57cec5SDimitry Andric SrcReg = constrainOperandRegClass(II, SrcReg, II.getNumDefs());
21380b57cec5SDimitry Andric MachineInstrBuilder MIB =
2139bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, II).addReg(SrcReg);
21400b57cec5SDimitry Andric addLoadStoreOperands(Addr, MIB, MachineMemOperand::MOStore, ScaleFactor, MMO);
21410b57cec5SDimitry Andric
21420b57cec5SDimitry Andric return true;
21430b57cec5SDimitry Andric }
21440b57cec5SDimitry Andric
21450b57cec5SDimitry Andric bool AArch64FastISel::selectStore(const Instruction *I) {
21460b57cec5SDimitry Andric MVT VT;
21470b57cec5SDimitry Andric const Value *Op0 = I->getOperand(0);
21480b57cec5SDimitry Andric // Verify we have a legal type before going any further. Currently, we handle
21490b57cec5SDimitry Andric // simple types that will directly fit in a register (i32/f32/i64/f64) or
21500b57cec5SDimitry Andric // those that can be sign or zero-extended to a basic operation (i1/i8/i16).
21510b57cec5SDimitry Andric if (!isTypeSupported(Op0->getType(), VT, /*IsVectorAllowed=*/true))
21520b57cec5SDimitry Andric return false;
21530b57cec5SDimitry Andric
21540b57cec5SDimitry Andric const Value *PtrV = I->getOperand(1);
21550b57cec5SDimitry Andric if (TLI.supportSwiftError()) {
21560b57cec5SDimitry Andric // Swifterror values can come from either a function parameter with
21570b57cec5SDimitry Andric // swifterror attribute or an alloca with swifterror attribute.
21580b57cec5SDimitry Andric if (const Argument *Arg = dyn_cast<Argument>(PtrV)) {
21590b57cec5SDimitry Andric if (Arg->hasSwiftErrorAttr())
21600b57cec5SDimitry Andric return false;
21610b57cec5SDimitry Andric }
21620b57cec5SDimitry Andric
21630b57cec5SDimitry Andric if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(PtrV)) {
21640b57cec5SDimitry Andric if (Alloca->isSwiftError())
21650b57cec5SDimitry Andric return false;
21660b57cec5SDimitry Andric }
21670b57cec5SDimitry Andric }
21680b57cec5SDimitry Andric
21690b57cec5SDimitry Andric // Get the value to be stored into a register. Use the zero register directly
21700b57cec5SDimitry Andric // when possible to avoid an unnecessary copy and a wasted register.
21710b57cec5SDimitry Andric unsigned SrcReg = 0;
21720b57cec5SDimitry Andric if (const auto *CI = dyn_cast<ConstantInt>(Op0)) {
21730b57cec5SDimitry Andric if (CI->isZero())
21740b57cec5SDimitry Andric SrcReg = (VT == MVT::i64) ? AArch64::XZR : AArch64::WZR;
21750b57cec5SDimitry Andric } else if (const auto *CF = dyn_cast<ConstantFP>(Op0)) {
21760b57cec5SDimitry Andric if (CF->isZero() && !CF->isNegative()) {
21770b57cec5SDimitry Andric VT = MVT::getIntegerVT(VT.getSizeInBits());
21780b57cec5SDimitry Andric SrcReg = (VT == MVT::i64) ? AArch64::XZR : AArch64::WZR;
21790b57cec5SDimitry Andric }
21800b57cec5SDimitry Andric }
21810b57cec5SDimitry Andric
21820b57cec5SDimitry Andric if (!SrcReg)
21830b57cec5SDimitry Andric SrcReg = getRegForValue(Op0);
21840b57cec5SDimitry Andric
21850b57cec5SDimitry Andric if (!SrcReg)
21860b57cec5SDimitry Andric return false;
21870b57cec5SDimitry Andric
21880b57cec5SDimitry Andric auto *SI = cast<StoreInst>(I);
21890b57cec5SDimitry Andric
21900b57cec5SDimitry Andric // Try to emit a STLR for seq_cst/release.
21910b57cec5SDimitry Andric if (SI->isAtomic()) {
21920b57cec5SDimitry Andric AtomicOrdering Ord = SI->getOrdering();
21930b57cec5SDimitry Andric // The non-atomic instructions are sufficient for relaxed stores.
21940b57cec5SDimitry Andric if (isReleaseOrStronger(Ord)) {
21950b57cec5SDimitry Andric // The STLR addressing mode only supports a base reg; pass that directly.
219604eeddc0SDimitry Andric Register AddrReg = getRegForValue(PtrV);
21970b57cec5SDimitry Andric return emitStoreRelease(VT, SrcReg, AddrReg,
21980b57cec5SDimitry Andric createMachineMemOperandFor(I));
21990b57cec5SDimitry Andric }
22000b57cec5SDimitry Andric }
22010b57cec5SDimitry Andric
22020b57cec5SDimitry Andric // See if we can handle this address.
22030b57cec5SDimitry Andric Address Addr;
22040b57cec5SDimitry Andric if (!computeAddress(PtrV, Addr, Op0->getType()))
22050b57cec5SDimitry Andric return false;
22060b57cec5SDimitry Andric
22070b57cec5SDimitry Andric if (!emitStore(VT, SrcReg, Addr, createMachineMemOperandFor(I)))
22080b57cec5SDimitry Andric return false;
22090b57cec5SDimitry Andric return true;
22100b57cec5SDimitry Andric }
22110b57cec5SDimitry Andric
22120b57cec5SDimitry Andric static AArch64CC::CondCode getCompareCC(CmpInst::Predicate Pred) {
22130b57cec5SDimitry Andric switch (Pred) {
22140b57cec5SDimitry Andric case CmpInst::FCMP_ONE:
22150b57cec5SDimitry Andric case CmpInst::FCMP_UEQ:
22160b57cec5SDimitry Andric default:
22170b57cec5SDimitry Andric // AL is our "false" for now. The other two need more compares.
22180b57cec5SDimitry Andric return AArch64CC::AL;
22190b57cec5SDimitry Andric case CmpInst::ICMP_EQ:
22200b57cec5SDimitry Andric case CmpInst::FCMP_OEQ:
22210b57cec5SDimitry Andric return AArch64CC::EQ;
22220b57cec5SDimitry Andric case CmpInst::ICMP_SGT:
22230b57cec5SDimitry Andric case CmpInst::FCMP_OGT:
22240b57cec5SDimitry Andric return AArch64CC::GT;
22250b57cec5SDimitry Andric case CmpInst::ICMP_SGE:
22260b57cec5SDimitry Andric case CmpInst::FCMP_OGE:
22270b57cec5SDimitry Andric return AArch64CC::GE;
22280b57cec5SDimitry Andric case CmpInst::ICMP_UGT:
22290b57cec5SDimitry Andric case CmpInst::FCMP_UGT:
22300b57cec5SDimitry Andric return AArch64CC::HI;
22310b57cec5SDimitry Andric case CmpInst::FCMP_OLT:
22320b57cec5SDimitry Andric return AArch64CC::MI;
22330b57cec5SDimitry Andric case CmpInst::ICMP_ULE:
22340b57cec5SDimitry Andric case CmpInst::FCMP_OLE:
22350b57cec5SDimitry Andric return AArch64CC::LS;
22360b57cec5SDimitry Andric case CmpInst::FCMP_ORD:
22370b57cec5SDimitry Andric return AArch64CC::VC;
22380b57cec5SDimitry Andric case CmpInst::FCMP_UNO:
22390b57cec5SDimitry Andric return AArch64CC::VS;
22400b57cec5SDimitry Andric case CmpInst::FCMP_UGE:
22410b57cec5SDimitry Andric return AArch64CC::PL;
22420b57cec5SDimitry Andric case CmpInst::ICMP_SLT:
22430b57cec5SDimitry Andric case CmpInst::FCMP_ULT:
22440b57cec5SDimitry Andric return AArch64CC::LT;
22450b57cec5SDimitry Andric case CmpInst::ICMP_SLE:
22460b57cec5SDimitry Andric case CmpInst::FCMP_ULE:
22470b57cec5SDimitry Andric return AArch64CC::LE;
22480b57cec5SDimitry Andric case CmpInst::FCMP_UNE:
22490b57cec5SDimitry Andric case CmpInst::ICMP_NE:
22500b57cec5SDimitry Andric return AArch64CC::NE;
22510b57cec5SDimitry Andric case CmpInst::ICMP_UGE:
22520b57cec5SDimitry Andric return AArch64CC::HS;
22530b57cec5SDimitry Andric case CmpInst::ICMP_ULT:
22540b57cec5SDimitry Andric return AArch64CC::LO;
22550b57cec5SDimitry Andric }
22560b57cec5SDimitry Andric }
22570b57cec5SDimitry Andric
22580b57cec5SDimitry Andric /// Try to emit a combined compare-and-branch instruction.
22590b57cec5SDimitry Andric bool AArch64FastISel::emitCompareAndBranch(const BranchInst *BI) {
22600b57cec5SDimitry Andric // Speculation tracking/SLH assumes that optimized TB(N)Z/CB(N)Z instructions
22610b57cec5SDimitry Andric // will not be produced, as they are conditional branch instructions that do
22620b57cec5SDimitry Andric // not set flags.
22630b57cec5SDimitry Andric if (FuncInfo.MF->getFunction().hasFnAttribute(
22640b57cec5SDimitry Andric Attribute::SpeculativeLoadHardening))
22650b57cec5SDimitry Andric return false;
22660b57cec5SDimitry Andric
22670b57cec5SDimitry Andric assert(isa<CmpInst>(BI->getCondition()) && "Expected cmp instruction");
22680b57cec5SDimitry Andric const CmpInst *CI = cast<CmpInst>(BI->getCondition());
22690b57cec5SDimitry Andric CmpInst::Predicate Predicate = optimizeCmpPredicate(CI);
22700b57cec5SDimitry Andric
22710b57cec5SDimitry Andric const Value *LHS = CI->getOperand(0);
22720b57cec5SDimitry Andric const Value *RHS = CI->getOperand(1);
22730b57cec5SDimitry Andric
22740b57cec5SDimitry Andric MVT VT;
22750b57cec5SDimitry Andric if (!isTypeSupported(LHS->getType(), VT))
22760b57cec5SDimitry Andric return false;
22770b57cec5SDimitry Andric
22780b57cec5SDimitry Andric unsigned BW = VT.getSizeInBits();
22790b57cec5SDimitry Andric if (BW > 64)
22800b57cec5SDimitry Andric return false;
22810b57cec5SDimitry Andric
22820b57cec5SDimitry Andric MachineBasicBlock *TBB = FuncInfo.MBBMap[BI->getSuccessor(0)];
22830b57cec5SDimitry Andric MachineBasicBlock *FBB = FuncInfo.MBBMap[BI->getSuccessor(1)];
22840b57cec5SDimitry Andric
22850b57cec5SDimitry Andric // Try to take advantage of fallthrough opportunities.
22860b57cec5SDimitry Andric if (FuncInfo.MBB->isLayoutSuccessor(TBB)) {
22870b57cec5SDimitry Andric std::swap(TBB, FBB);
22880b57cec5SDimitry Andric Predicate = CmpInst::getInversePredicate(Predicate);
22890b57cec5SDimitry Andric }
22900b57cec5SDimitry Andric
22910b57cec5SDimitry Andric int TestBit = -1;
22920b57cec5SDimitry Andric bool IsCmpNE;
22930b57cec5SDimitry Andric switch (Predicate) {
22940b57cec5SDimitry Andric default:
22950b57cec5SDimitry Andric return false;
22960b57cec5SDimitry Andric case CmpInst::ICMP_EQ:
22970b57cec5SDimitry Andric case CmpInst::ICMP_NE:
22980b57cec5SDimitry Andric if (isa<Constant>(LHS) && cast<Constant>(LHS)->isNullValue())
22990b57cec5SDimitry Andric std::swap(LHS, RHS);
23000b57cec5SDimitry Andric
23010b57cec5SDimitry Andric if (!isa<Constant>(RHS) || !cast<Constant>(RHS)->isNullValue())
23020b57cec5SDimitry Andric return false;
23030b57cec5SDimitry Andric
23040b57cec5SDimitry Andric if (const auto *AI = dyn_cast<BinaryOperator>(LHS))
23050b57cec5SDimitry Andric if (AI->getOpcode() == Instruction::And && isValueAvailable(AI)) {
23060b57cec5SDimitry Andric const Value *AndLHS = AI->getOperand(0);
23070b57cec5SDimitry Andric const Value *AndRHS = AI->getOperand(1);
23080b57cec5SDimitry Andric
23090b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(AndLHS))
23100b57cec5SDimitry Andric if (C->getValue().isPowerOf2())
23110b57cec5SDimitry Andric std::swap(AndLHS, AndRHS);
23120b57cec5SDimitry Andric
23130b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(AndRHS))
23140b57cec5SDimitry Andric if (C->getValue().isPowerOf2()) {
23150b57cec5SDimitry Andric TestBit = C->getValue().logBase2();
23160b57cec5SDimitry Andric LHS = AndLHS;
23170b57cec5SDimitry Andric }
23180b57cec5SDimitry Andric }
23190b57cec5SDimitry Andric
23200b57cec5SDimitry Andric if (VT == MVT::i1)
23210b57cec5SDimitry Andric TestBit = 0;
23220b57cec5SDimitry Andric
23230b57cec5SDimitry Andric IsCmpNE = Predicate == CmpInst::ICMP_NE;
23240b57cec5SDimitry Andric break;
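// A signed compare against zero only depends on the sign bit: x < 0 is true
// iff the sign bit is set, x >= 0 iff it is clear.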
23250b57cec5SDimitry Andric case CmpInst::ICMP_SLT:
23260b57cec5SDimitry Andric case CmpInst::ICMP_SGE:
23270b57cec5SDimitry Andric if (!isa<Constant>(RHS) || !cast<Constant>(RHS)->isNullValue())
23280b57cec5SDimitry Andric return false;
23290b57cec5SDimitry Andric
23300b57cec5SDimitry Andric TestBit = BW - 1;
23310b57cec5SDimitry Andric IsCmpNE = Predicate == CmpInst::ICMP_SLT;
23320b57cec5SDimitry Andric break;
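// Likewise, x > -1 and x <= -1 only depend on the sign bit.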
23330b57cec5SDimitry Andric case CmpInst::ICMP_SGT:
23340b57cec5SDimitry Andric case CmpInst::ICMP_SLE:
23350b57cec5SDimitry Andric if (!isa<ConstantInt>(RHS))
23360b57cec5SDimitry Andric return false;
23370b57cec5SDimitry Andric
23380b57cec5SDimitry Andric if (cast<ConstantInt>(RHS)->getValue() != APInt(BW, -1, true))
23390b57cec5SDimitry Andric return false;
23400b57cec5SDimitry Andric
23410b57cec5SDimitry Andric TestBit = BW - 1;
23420b57cec5SDimitry Andric IsCmpNE = Predicate == CmpInst::ICMP_SLE;
23430b57cec5SDimitry Andric break;
23440b57cec5SDimitry Andric } // end switch
23450b57cec5SDimitry Andric
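// Branch opcodes indexed by [IsBitTest][IsCmpNE][Is64Bit]: CB(N)Z for
// compares against zero, TB(N)Z for single-bit tests.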
23460b57cec5SDimitry Andric static const unsigned OpcTable[2][2][2] = {
23470b57cec5SDimitry Andric { {AArch64::CBZW, AArch64::CBZX },
23480b57cec5SDimitry Andric {AArch64::CBNZW, AArch64::CBNZX} },
23490b57cec5SDimitry Andric { {AArch64::TBZW, AArch64::TBZX },
23500b57cec5SDimitry Andric {AArch64::TBNZW, AArch64::TBNZX} }
23510b57cec5SDimitry Andric };
23520b57cec5SDimitry Andric
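// TB(N)Z on a W register can test bits 0-31, so the 32-bit form suffices for
// low bit numbers even when the compared value is 64 bits wide.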
23530b57cec5SDimitry Andric bool IsBitTest = TestBit != -1;
23540b57cec5SDimitry Andric bool Is64Bit = BW == 64;
23550b57cec5SDimitry Andric if (TestBit < 32 && TestBit >= 0)
23560b57cec5SDimitry Andric Is64Bit = false;
23570b57cec5SDimitry Andric
23580b57cec5SDimitry Andric unsigned Opc = OpcTable[IsBitTest][IsCmpNE][Is64Bit];
23590b57cec5SDimitry Andric const MCInstrDesc &II = TII.get(Opc);
23600b57cec5SDimitry Andric
236104eeddc0SDimitry Andric Register SrcReg = getRegForValue(LHS);
23620b57cec5SDimitry Andric if (!SrcReg)
23630b57cec5SDimitry Andric return false;
23640b57cec5SDimitry Andric
23650b57cec5SDimitry Andric if (BW == 64 && !Is64Bit)
2366fe6060f1SDimitry Andric SrcReg = fastEmitInst_extractsubreg(MVT::i32, SrcReg, AArch64::sub_32);
23670b57cec5SDimitry Andric
23680b57cec5SDimitry Andric if ((BW < 32) && !IsBitTest)
23690b57cec5SDimitry Andric SrcReg = emitIntExt(VT, SrcReg, MVT::i32, /*isZExt=*/true);
23700b57cec5SDimitry Andric
23710b57cec5SDimitry Andric // Emit the combined compare and branch instruction.
23720b57cec5SDimitry Andric SrcReg = constrainOperandRegClass(II, SrcReg, II.getNumDefs());
23730b57cec5SDimitry Andric MachineInstrBuilder MIB =
2374bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc))
2375fe6060f1SDimitry Andric .addReg(SrcReg);
23760b57cec5SDimitry Andric if (IsBitTest)
23770b57cec5SDimitry Andric MIB.addImm(TestBit);
23780b57cec5SDimitry Andric MIB.addMBB(TBB);
23790b57cec5SDimitry Andric
23800b57cec5SDimitry Andric finishCondBranch(BI->getParent(), TBB, FBB);
23810b57cec5SDimitry Andric return true;
23820b57cec5SDimitry Andric }
23830b57cec5SDimitry Andric
23840b57cec5SDimitry Andric bool AArch64FastISel::selectBranch(const Instruction *I) {
23850b57cec5SDimitry Andric const BranchInst *BI = cast<BranchInst>(I);
23860b57cec5SDimitry Andric if (BI->isUnconditional()) {
23870b57cec5SDimitry Andric MachineBasicBlock *MSucc = FuncInfo.MBBMap[BI->getSuccessor(0)];
23880b57cec5SDimitry Andric fastEmitBranch(MSucc, BI->getDebugLoc());
23890b57cec5SDimitry Andric return true;
23900b57cec5SDimitry Andric }
23910b57cec5SDimitry Andric
23920b57cec5SDimitry Andric MachineBasicBlock *TBB = FuncInfo.MBBMap[BI->getSuccessor(0)];
23930b57cec5SDimitry Andric MachineBasicBlock *FBB = FuncInfo.MBBMap[BI->getSuccessor(1)];
23940b57cec5SDimitry Andric
23950b57cec5SDimitry Andric if (const CmpInst *CI = dyn_cast<CmpInst>(BI->getCondition())) {
23960b57cec5SDimitry Andric if (CI->hasOneUse() && isValueAvailable(CI)) {
23970b57cec5SDimitry Andric // Try to optimize or fold the cmp.
23980b57cec5SDimitry Andric CmpInst::Predicate Predicate = optimizeCmpPredicate(CI);
23990b57cec5SDimitry Andric switch (Predicate) {
24000b57cec5SDimitry Andric default:
24010b57cec5SDimitry Andric break;
24020b57cec5SDimitry Andric case CmpInst::FCMP_FALSE:
2403bdd1243dSDimitry Andric fastEmitBranch(FBB, MIMD.getDL());
24040b57cec5SDimitry Andric return true;
24050b57cec5SDimitry Andric case CmpInst::FCMP_TRUE:
2406bdd1243dSDimitry Andric fastEmitBranch(TBB, MIMD.getDL());
24070b57cec5SDimitry Andric return true;
24080b57cec5SDimitry Andric }
24090b57cec5SDimitry Andric
24100b57cec5SDimitry Andric // Try to emit a combined compare-and-branch first.
24110b57cec5SDimitry Andric if (emitCompareAndBranch(BI))
24120b57cec5SDimitry Andric return true;
24130b57cec5SDimitry Andric
24140b57cec5SDimitry Andric // Try to take advantage of fallthrough opportunities.
24150b57cec5SDimitry Andric if (FuncInfo.MBB->isLayoutSuccessor(TBB)) {
24160b57cec5SDimitry Andric std::swap(TBB, FBB);
24170b57cec5SDimitry Andric Predicate = CmpInst::getInversePredicate(Predicate);
24180b57cec5SDimitry Andric }
24190b57cec5SDimitry Andric
24200b57cec5SDimitry Andric // Emit the cmp.
24210b57cec5SDimitry Andric if (!emitCmp(CI->getOperand(0), CI->getOperand(1), CI->isUnsigned()))
24220b57cec5SDimitry Andric return false;
24230b57cec5SDimitry Andric
24240b57cec5SDimitry Andric // FCMP_UEQ and FCMP_ONE cannot be checked with a single branch
24250b57cec5SDimitry Andric // instruction.
24260b57cec5SDimitry Andric AArch64CC::CondCode CC = getCompareCC(Predicate);
24270b57cec5SDimitry Andric AArch64CC::CondCode ExtraCC = AArch64CC::AL;
24280b57cec5SDimitry Andric switch (Predicate) {
24290b57cec5SDimitry Andric default:
24300b57cec5SDimitry Andric break;
24310b57cec5SDimitry Andric case CmpInst::FCMP_UEQ:
24320b57cec5SDimitry Andric ExtraCC = AArch64CC::EQ;
24330b57cec5SDimitry Andric CC = AArch64CC::VS;
24340b57cec5SDimitry Andric break;
24350b57cec5SDimitry Andric case CmpInst::FCMP_ONE:
24360b57cec5SDimitry Andric ExtraCC = AArch64CC::MI;
24370b57cec5SDimitry Andric CC = AArch64CC::GT;
24380b57cec5SDimitry Andric break;
24390b57cec5SDimitry Andric }
24400b57cec5SDimitry Andric assert((CC != AArch64CC::AL) && "Unexpected condition code.");
24410b57cec5SDimitry Andric
24420b57cec5SDimitry Andric // Emit the extra branch for FCMP_UEQ and FCMP_ONE.
24430b57cec5SDimitry Andric if (ExtraCC != AArch64CC::AL) {
2444bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::Bcc))
24450b57cec5SDimitry Andric .addImm(ExtraCC)
24460b57cec5SDimitry Andric .addMBB(TBB);
24470b57cec5SDimitry Andric }
24480b57cec5SDimitry Andric
24490b57cec5SDimitry Andric // Emit the branch.
2450bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::Bcc))
24510b57cec5SDimitry Andric .addImm(CC)
24520b57cec5SDimitry Andric .addMBB(TBB);
24530b57cec5SDimitry Andric
24540b57cec5SDimitry Andric finishCondBranch(BI->getParent(), TBB, FBB);
24550b57cec5SDimitry Andric return true;
24560b57cec5SDimitry Andric }
24570b57cec5SDimitry Andric } else if (const auto *CI = dyn_cast<ConstantInt>(BI->getCondition())) {
24580b57cec5SDimitry Andric uint64_t Imm = CI->getZExtValue();
24590b57cec5SDimitry Andric MachineBasicBlock *Target = (Imm == 0) ? FBB : TBB;
2460bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::B))
24610b57cec5SDimitry Andric .addMBB(Target);
24620b57cec5SDimitry Andric
24630b57cec5SDimitry Andric // Obtain the branch probability and add the target to the successor list.
24640b57cec5SDimitry Andric if (FuncInfo.BPI) {
24650b57cec5SDimitry Andric auto BranchProbability = FuncInfo.BPI->getEdgeProbability(
24660b57cec5SDimitry Andric BI->getParent(), Target->getBasicBlock());
24670b57cec5SDimitry Andric FuncInfo.MBB->addSuccessor(Target, BranchProbability);
24680b57cec5SDimitry Andric } else
24690b57cec5SDimitry Andric FuncInfo.MBB->addSuccessorWithoutProb(Target);
24700b57cec5SDimitry Andric return true;
24710b57cec5SDimitry Andric } else {
24720b57cec5SDimitry Andric AArch64CC::CondCode CC = AArch64CC::NE;
24730b57cec5SDimitry Andric if (foldXALUIntrinsic(CC, I, BI->getCondition())) {
24740b57cec5SDimitry Andric // Artificially request the condition; otherwise the intrinsic might be
24750b57cec5SDimitry Andric // optimized away completely.
247604eeddc0SDimitry Andric Register CondReg = getRegForValue(BI->getCondition());
24770b57cec5SDimitry Andric if (!CondReg)
24780b57cec5SDimitry Andric return false;
24790b57cec5SDimitry Andric
24800b57cec5SDimitry Andric // Emit the branch.
2481bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::Bcc))
24820b57cec5SDimitry Andric .addImm(CC)
24830b57cec5SDimitry Andric .addMBB(TBB);
24840b57cec5SDimitry Andric
24850b57cec5SDimitry Andric finishCondBranch(BI->getParent(), TBB, FBB);
24860b57cec5SDimitry Andric return true;
24870b57cec5SDimitry Andric }
24880b57cec5SDimitry Andric }
24890b57cec5SDimitry Andric
249004eeddc0SDimitry Andric Register CondReg = getRegForValue(BI->getCondition());
24910b57cec5SDimitry Andric if (CondReg == 0)
24920b57cec5SDimitry Andric return false;
24930b57cec5SDimitry Andric
24940b57cec5SDimitry Andric // i1 conditions come in as i32 values; test the lowest bit with tb(n)z.
24950b57cec5SDimitry Andric unsigned Opcode = AArch64::TBNZW;
24960b57cec5SDimitry Andric if (FuncInfo.MBB->isLayoutSuccessor(TBB)) {
24970b57cec5SDimitry Andric std::swap(TBB, FBB);
24980b57cec5SDimitry Andric Opcode = AArch64::TBZW;
24990b57cec5SDimitry Andric }
25000b57cec5SDimitry Andric
25010b57cec5SDimitry Andric const MCInstrDesc &II = TII.get(Opcode);
250204eeddc0SDimitry Andric Register ConstrainedCondReg
25030b57cec5SDimitry Andric = constrainOperandRegClass(II, CondReg, II.getNumDefs());
2504bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, II)
2505fe6060f1SDimitry Andric .addReg(ConstrainedCondReg)
25060b57cec5SDimitry Andric .addImm(0)
25070b57cec5SDimitry Andric .addMBB(TBB);
25080b57cec5SDimitry Andric
25090b57cec5SDimitry Andric finishCondBranch(BI->getParent(), TBB, FBB);
25100b57cec5SDimitry Andric return true;
25110b57cec5SDimitry Andric }
25120b57cec5SDimitry Andric
25130b57cec5SDimitry Andric bool AArch64FastISel::selectIndirectBr(const Instruction *I) {
25140b57cec5SDimitry Andric const IndirectBrInst *BI = cast<IndirectBrInst>(I);
251504eeddc0SDimitry Andric Register AddrReg = getRegForValue(BI->getOperand(0));
25160b57cec5SDimitry Andric if (AddrReg == 0)
25170b57cec5SDimitry Andric return false;
25180b57cec5SDimitry Andric
2519*0fca6ea1SDimitry Andric // Authenticated indirectbr is not implemented yet.
2520*0fca6ea1SDimitry Andric if (FuncInfo.MF->getFunction().hasFnAttribute("ptrauth-indirect-gotos"))
2521*0fca6ea1SDimitry Andric return false;
2522*0fca6ea1SDimitry Andric
25230b57cec5SDimitry Andric // Emit the indirect branch.
25240b57cec5SDimitry Andric const MCInstrDesc &II = TII.get(AArch64::BR);
25250b57cec5SDimitry Andric AddrReg = constrainOperandRegClass(II, AddrReg, II.getNumDefs());
2526bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, II).addReg(AddrReg);
25270b57cec5SDimitry Andric
25280b57cec5SDimitry Andric // Make sure the CFG is up-to-date.
2529bdd1243dSDimitry Andric for (const auto *Succ : BI->successors())
25300b57cec5SDimitry Andric FuncInfo.MBB->addSuccessor(FuncInfo.MBBMap[Succ]);
25310b57cec5SDimitry Andric
25320b57cec5SDimitry Andric return true;
25330b57cec5SDimitry Andric }
25340b57cec5SDimitry Andric
25350b57cec5SDimitry Andric bool AArch64FastISel::selectCmp(const Instruction *I) {
25360b57cec5SDimitry Andric const CmpInst *CI = cast<CmpInst>(I);
25370b57cec5SDimitry Andric
25380b57cec5SDimitry Andric // Vectors of i1 are weird: bail out.
25390b57cec5SDimitry Andric if (CI->getType()->isVectorTy())
25400b57cec5SDimitry Andric return false;
25410b57cec5SDimitry Andric
25420b57cec5SDimitry Andric // Try to optimize or fold the cmp.
25430b57cec5SDimitry Andric CmpInst::Predicate Predicate = optimizeCmpPredicate(CI);
25440b57cec5SDimitry Andric unsigned ResultReg = 0;
25450b57cec5SDimitry Andric switch (Predicate) {
25460b57cec5SDimitry Andric default:
25470b57cec5SDimitry Andric break;
25480b57cec5SDimitry Andric case CmpInst::FCMP_FALSE:
25490b57cec5SDimitry Andric ResultReg = createResultReg(&AArch64::GPR32RegClass);
2550bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
25510b57cec5SDimitry Andric TII.get(TargetOpcode::COPY), ResultReg)
25520b57cec5SDimitry Andric .addReg(AArch64::WZR, getKillRegState(true));
25530b57cec5SDimitry Andric break;
25540b57cec5SDimitry Andric case CmpInst::FCMP_TRUE:
25550b57cec5SDimitry Andric ResultReg = fastEmit_i(MVT::i32, MVT::i32, ISD::Constant, 1);
25560b57cec5SDimitry Andric break;
25570b57cec5SDimitry Andric }
25580b57cec5SDimitry Andric
25590b57cec5SDimitry Andric if (ResultReg) {
25600b57cec5SDimitry Andric updateValueMap(I, ResultReg);
25610b57cec5SDimitry Andric return true;
25620b57cec5SDimitry Andric }
25630b57cec5SDimitry Andric
25640b57cec5SDimitry Andric // Emit the cmp.
25650b57cec5SDimitry Andric if (!emitCmp(CI->getOperand(0), CI->getOperand(1), CI->isUnsigned()))
25660b57cec5SDimitry Andric return false;
25670b57cec5SDimitry Andric
25680b57cec5SDimitry Andric ResultReg = createResultReg(&AArch64::GPR32RegClass);
25690b57cec5SDimitry Andric
25700b57cec5SDimitry Andric // FCMP_UEQ and FCMP_ONE cannot be checked with a single instruction. These
25710b57cec5SDimitry Andric // condition codes are inverted, because they are used by CSINC.
25720b57cec5SDimitry Andric static unsigned CondCodeTable[2][2] = {
25730b57cec5SDimitry Andric { AArch64CC::NE, AArch64CC::VC },
25740b57cec5SDimitry Andric { AArch64CC::PL, AArch64CC::LE }
25750b57cec5SDimitry Andric };
25760b57cec5SDimitry Andric unsigned *CondCodes = nullptr;
25770b57cec5SDimitry Andric switch (Predicate) {
25780b57cec5SDimitry Andric default:
25790b57cec5SDimitry Andric break;
25800b57cec5SDimitry Andric case CmpInst::FCMP_UEQ:
25810b57cec5SDimitry Andric CondCodes = &CondCodeTable[0][0];
25820b57cec5SDimitry Andric break;
25830b57cec5SDimitry Andric case CmpInst::FCMP_ONE:
25840b57cec5SDimitry Andric CondCodes = &CondCodeTable[1][0];
25850b57cec5SDimitry Andric break;
25860b57cec5SDimitry Andric }
25870b57cec5SDimitry Andric
25880b57cec5SDimitry Andric if (CondCodes) {
258904eeddc0SDimitry Andric Register TmpReg1 = createResultReg(&AArch64::GPR32RegClass);
2590bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::CSINCWr),
25910b57cec5SDimitry Andric TmpReg1)
25920b57cec5SDimitry Andric .addReg(AArch64::WZR, getKillRegState(true))
25930b57cec5SDimitry Andric .addReg(AArch64::WZR, getKillRegState(true))
25940b57cec5SDimitry Andric .addImm(CondCodes[0]);
2595bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::CSINCWr),
25960b57cec5SDimitry Andric ResultReg)
25970b57cec5SDimitry Andric .addReg(TmpReg1, getKillRegState(true))
25980b57cec5SDimitry Andric .addReg(AArch64::WZR, getKillRegState(true))
25990b57cec5SDimitry Andric .addImm(CondCodes[1]);
26000b57cec5SDimitry Andric
26010b57cec5SDimitry Andric updateValueMap(I, ResultReg);
26020b57cec5SDimitry Andric return true;
26030b57cec5SDimitry Andric }
26040b57cec5SDimitry Andric
26050b57cec5SDimitry Andric // Now set a register based on the comparison.
26060b57cec5SDimitry Andric AArch64CC::CondCode CC = getCompareCC(Predicate);
26070b57cec5SDimitry Andric assert((CC != AArch64CC::AL) && "Unexpected condition code.");
26080b57cec5SDimitry Andric AArch64CC::CondCode invertedCC = getInvertedCondCode(CC);
2609bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::CSINCWr),
26100b57cec5SDimitry Andric ResultReg)
26110b57cec5SDimitry Andric .addReg(AArch64::WZR, getKillRegState(true))
26120b57cec5SDimitry Andric .addReg(AArch64::WZR, getKillRegState(true))
26130b57cec5SDimitry Andric .addImm(invertedCC);
26140b57cec5SDimitry Andric
26150b57cec5SDimitry Andric updateValueMap(I, ResultReg);
26160b57cec5SDimitry Andric return true;
26170b57cec5SDimitry Andric }
26180b57cec5SDimitry Andric
26190b57cec5SDimitry Andric /// Optimize selects of i1 if one of the operands has a 'true' or 'false'
26200b57cec5SDimitry Andric /// value.
26210b57cec5SDimitry Andric bool AArch64FastISel::optimizeSelect(const SelectInst *SI) {
26220b57cec5SDimitry Andric if (!SI->getType()->isIntegerTy(1))
26230b57cec5SDimitry Andric return false;
26240b57cec5SDimitry Andric
26250b57cec5SDimitry Andric const Value *Src1Val, *Src2Val;
26260b57cec5SDimitry Andric unsigned Opc = 0;
26270b57cec5SDimitry Andric bool NeedExtraOp = false;
26280b57cec5SDimitry Andric if (auto *CI = dyn_cast<ConstantInt>(SI->getTrueValue())) {
26290b57cec5SDimitry Andric if (CI->isOne()) {
26300b57cec5SDimitry Andric Src1Val = SI->getCondition();
26310b57cec5SDimitry Andric Src2Val = SI->getFalseValue();
26320b57cec5SDimitry Andric Opc = AArch64::ORRWrr;
26330b57cec5SDimitry Andric } else {
26340b57cec5SDimitry Andric assert(CI->isZero());
26350b57cec5SDimitry Andric Src1Val = SI->getFalseValue();
26360b57cec5SDimitry Andric Src2Val = SI->getCondition();
26370b57cec5SDimitry Andric Opc = AArch64::BICWrr;
26380b57cec5SDimitry Andric }
26390b57cec5SDimitry Andric } else if (auto *CI = dyn_cast<ConstantInt>(SI->getFalseValue())) {
26400b57cec5SDimitry Andric if (CI->isOne()) {
26410b57cec5SDimitry Andric Src1Val = SI->getCondition();
26420b57cec5SDimitry Andric Src2Val = SI->getTrueValue();
26430b57cec5SDimitry Andric Opc = AArch64::ORRWrr;
26440b57cec5SDimitry Andric NeedExtraOp = true;
26450b57cec5SDimitry Andric } else {
26460b57cec5SDimitry Andric assert(CI->isZero());
26470b57cec5SDimitry Andric Src1Val = SI->getCondition();
26480b57cec5SDimitry Andric Src2Val = SI->getTrueValue();
26490b57cec5SDimitry Andric Opc = AArch64::ANDWrr;
26500b57cec5SDimitry Andric }
26510b57cec5SDimitry Andric }
26520b57cec5SDimitry Andric
26530b57cec5SDimitry Andric if (!Opc)
26540b57cec5SDimitry Andric return false;
26550b57cec5SDimitry Andric
265604eeddc0SDimitry Andric Register Src1Reg = getRegForValue(Src1Val);
26570b57cec5SDimitry Andric if (!Src1Reg)
26580b57cec5SDimitry Andric return false;
26590b57cec5SDimitry Andric
266004eeddc0SDimitry Andric Register Src2Reg = getRegForValue(Src2Val);
26610b57cec5SDimitry Andric if (!Src2Reg)
26620b57cec5SDimitry Andric return false;
26630b57cec5SDimitry Andric
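// 'select c, t, 1' is equivalent to '(c ^ 1) | t', so invert the condition
// before emitting the ORR.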
2664fe6060f1SDimitry Andric if (NeedExtraOp)
2665fe6060f1SDimitry Andric Src1Reg = emitLogicalOp_ri(ISD::XOR, MVT::i32, Src1Reg, 1);
2666fe6060f1SDimitry Andric
266704eeddc0SDimitry Andric Register ResultReg = fastEmitInst_rr(Opc, &AArch64::GPR32RegClass, Src1Reg,
2668fe6060f1SDimitry Andric Src2Reg);
26690b57cec5SDimitry Andric updateValueMap(SI, ResultReg);
26700b57cec5SDimitry Andric return true;
26710b57cec5SDimitry Andric }
26720b57cec5SDimitry Andric
26730b57cec5SDimitry Andric bool AArch64FastISel::selectSelect(const Instruction *I) {
26740b57cec5SDimitry Andric assert(isa<SelectInst>(I) && "Expected a select instruction.");
26750b57cec5SDimitry Andric MVT VT;
26760b57cec5SDimitry Andric if (!isTypeSupported(I->getType(), VT))
26770b57cec5SDimitry Andric return false;
26780b57cec5SDimitry Andric
26790b57cec5SDimitry Andric unsigned Opc;
26800b57cec5SDimitry Andric const TargetRegisterClass *RC;
26810b57cec5SDimitry Andric switch (VT.SimpleTy) {
26820b57cec5SDimitry Andric default:
26830b57cec5SDimitry Andric return false;
26840b57cec5SDimitry Andric case MVT::i1:
26850b57cec5SDimitry Andric case MVT::i8:
26860b57cec5SDimitry Andric case MVT::i16:
26870b57cec5SDimitry Andric case MVT::i32:
26880b57cec5SDimitry Andric Opc = AArch64::CSELWr;
26890b57cec5SDimitry Andric RC = &AArch64::GPR32RegClass;
26900b57cec5SDimitry Andric break;
26910b57cec5SDimitry Andric case MVT::i64:
26920b57cec5SDimitry Andric Opc = AArch64::CSELXr;
26930b57cec5SDimitry Andric RC = &AArch64::GPR64RegClass;
26940b57cec5SDimitry Andric break;
26950b57cec5SDimitry Andric case MVT::f32:
26960b57cec5SDimitry Andric Opc = AArch64::FCSELSrrr;
26970b57cec5SDimitry Andric RC = &AArch64::FPR32RegClass;
26980b57cec5SDimitry Andric break;
26990b57cec5SDimitry Andric case MVT::f64:
27000b57cec5SDimitry Andric Opc = AArch64::FCSELDrrr;
27010b57cec5SDimitry Andric RC = &AArch64::FPR64RegClass;
27020b57cec5SDimitry Andric break;
27030b57cec5SDimitry Andric }
27040b57cec5SDimitry Andric
27050b57cec5SDimitry Andric const SelectInst *SI = cast<SelectInst>(I);
27060b57cec5SDimitry Andric const Value *Cond = SI->getCondition();
27070b57cec5SDimitry Andric AArch64CC::CondCode CC = AArch64CC::NE;
27080b57cec5SDimitry Andric AArch64CC::CondCode ExtraCC = AArch64CC::AL;
27090b57cec5SDimitry Andric
27100b57cec5SDimitry Andric if (optimizeSelect(SI))
27110b57cec5SDimitry Andric return true;
27120b57cec5SDimitry Andric
27130b57cec5SDimitry Andric // Try to pick up the flags, so we don't have to emit another compare.
27140b57cec5SDimitry Andric if (foldXALUIntrinsic(CC, I, Cond)) {
27150b57cec5SDimitry Andric // Artificially request the condition to force emission of the XALU intrinsic.
271604eeddc0SDimitry Andric Register CondReg = getRegForValue(Cond);
27170b57cec5SDimitry Andric if (!CondReg)
27180b57cec5SDimitry Andric return false;
27190b57cec5SDimitry Andric } else if (isa<CmpInst>(Cond) && cast<CmpInst>(Cond)->hasOneUse() &&
27200b57cec5SDimitry Andric isValueAvailable(Cond)) {
27210b57cec5SDimitry Andric const auto *Cmp = cast<CmpInst>(Cond);
27220b57cec5SDimitry Andric // Try to optimize or fold the cmp.
27230b57cec5SDimitry Andric CmpInst::Predicate Predicate = optimizeCmpPredicate(Cmp);
27240b57cec5SDimitry Andric const Value *FoldSelect = nullptr;
27250b57cec5SDimitry Andric switch (Predicate) {
27260b57cec5SDimitry Andric default:
27270b57cec5SDimitry Andric break;
27280b57cec5SDimitry Andric case CmpInst::FCMP_FALSE:
27290b57cec5SDimitry Andric FoldSelect = SI->getFalseValue();
27300b57cec5SDimitry Andric break;
27310b57cec5SDimitry Andric case CmpInst::FCMP_TRUE:
27320b57cec5SDimitry Andric FoldSelect = SI->getTrueValue();
27330b57cec5SDimitry Andric break;
27340b57cec5SDimitry Andric }
27350b57cec5SDimitry Andric
27360b57cec5SDimitry Andric if (FoldSelect) {
273704eeddc0SDimitry Andric Register SrcReg = getRegForValue(FoldSelect);
27380b57cec5SDimitry Andric if (!SrcReg)
27390b57cec5SDimitry Andric return false;
27400b57cec5SDimitry Andric
27410b57cec5SDimitry Andric updateValueMap(I, SrcReg);
27420b57cec5SDimitry Andric return true;
27430b57cec5SDimitry Andric }
27440b57cec5SDimitry Andric
27450b57cec5SDimitry Andric // Emit the cmp.
27460b57cec5SDimitry Andric if (!emitCmp(Cmp->getOperand(0), Cmp->getOperand(1), Cmp->isUnsigned()))
27470b57cec5SDimitry Andric return false;
27480b57cec5SDimitry Andric
27490b57cec5SDimitry Andric // FCMP_UEQ and FCMP_ONE cannot be checked with a single select instruction.
27500b57cec5SDimitry Andric CC = getCompareCC(Predicate);
27510b57cec5SDimitry Andric switch (Predicate) {
27520b57cec5SDimitry Andric default:
27530b57cec5SDimitry Andric break;
27540b57cec5SDimitry Andric case CmpInst::FCMP_UEQ:
27550b57cec5SDimitry Andric ExtraCC = AArch64CC::EQ;
27560b57cec5SDimitry Andric CC = AArch64CC::VS;
27570b57cec5SDimitry Andric break;
27580b57cec5SDimitry Andric case CmpInst::FCMP_ONE:
27590b57cec5SDimitry Andric ExtraCC = AArch64CC::MI;
27600b57cec5SDimitry Andric CC = AArch64CC::GT;
27610b57cec5SDimitry Andric break;
27620b57cec5SDimitry Andric }
27630b57cec5SDimitry Andric assert((CC != AArch64CC::AL) && "Unexpected condition code.");
27640b57cec5SDimitry Andric } else {
276504eeddc0SDimitry Andric Register CondReg = getRegForValue(Cond);
27660b57cec5SDimitry Andric if (!CondReg)
27670b57cec5SDimitry Andric return false;
27680b57cec5SDimitry Andric
27690b57cec5SDimitry Andric const MCInstrDesc &II = TII.get(AArch64::ANDSWri);
27700b57cec5SDimitry Andric CondReg = constrainOperandRegClass(II, CondReg, 1);
27710b57cec5SDimitry Andric
27720b57cec5SDimitry Andric // Emit a TST instruction (ANDS wzr, reg, #imm).
2773bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, II,
27740b57cec5SDimitry Andric AArch64::WZR)
2775fe6060f1SDimitry Andric .addReg(CondReg)
27760b57cec5SDimitry Andric .addImm(AArch64_AM::encodeLogicalImmediate(1, 32));
27770b57cec5SDimitry Andric }
27780b57cec5SDimitry Andric
277904eeddc0SDimitry Andric Register Src1Reg = getRegForValue(SI->getTrueValue());
278004eeddc0SDimitry Andric Register Src2Reg = getRegForValue(SI->getFalseValue());
27810b57cec5SDimitry Andric
27820b57cec5SDimitry Andric if (!Src1Reg || !Src2Reg)
27830b57cec5SDimitry Andric return false;
27840b57cec5SDimitry Andric
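// FCMP_UEQ and FCMP_ONE need two conditional selects; chain them by feeding
// the result of the first CSEL in as the false operand of the second.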
2785fe6060f1SDimitry Andric if (ExtraCC != AArch64CC::AL)
2786fe6060f1SDimitry Andric Src2Reg = fastEmitInst_rri(Opc, RC, Src1Reg, Src2Reg, ExtraCC);
2787fe6060f1SDimitry Andric
278804eeddc0SDimitry Andric Register ResultReg = fastEmitInst_rri(Opc, RC, Src1Reg, Src2Reg, CC);
27890b57cec5SDimitry Andric updateValueMap(I, ResultReg);
27900b57cec5SDimitry Andric return true;
27910b57cec5SDimitry Andric }
27920b57cec5SDimitry Andric
27930b57cec5SDimitry Andric bool AArch64FastISel::selectFPExt(const Instruction *I) {
27940b57cec5SDimitry Andric Value *V = I->getOperand(0);
27950b57cec5SDimitry Andric if (!I->getType()->isDoubleTy() || !V->getType()->isFloatTy())
27960b57cec5SDimitry Andric return false;
27970b57cec5SDimitry Andric
279804eeddc0SDimitry Andric Register Op = getRegForValue(V);
27990b57cec5SDimitry Andric if (Op == 0)
28000b57cec5SDimitry Andric return false;
28010b57cec5SDimitry Andric
280204eeddc0SDimitry Andric Register ResultReg = createResultReg(&AArch64::FPR64RegClass);
2803bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::FCVTDSr),
28040b57cec5SDimitry Andric ResultReg).addReg(Op);
28050b57cec5SDimitry Andric updateValueMap(I, ResultReg);
28060b57cec5SDimitry Andric return true;
28070b57cec5SDimitry Andric }
28080b57cec5SDimitry Andric
28090b57cec5SDimitry Andric bool AArch64FastISel::selectFPTrunc(const Instruction *I) {
28100b57cec5SDimitry Andric Value *V = I->getOperand(0);
28110b57cec5SDimitry Andric if (!I->getType()->isFloatTy() || !V->getType()->isDoubleTy())
28120b57cec5SDimitry Andric return false;
28130b57cec5SDimitry Andric
281404eeddc0SDimitry Andric Register Op = getRegForValue(V);
28150b57cec5SDimitry Andric if (Op == 0)
28160b57cec5SDimitry Andric return false;
28170b57cec5SDimitry Andric
281804eeddc0SDimitry Andric Register ResultReg = createResultReg(&AArch64::FPR32RegClass);
2819bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::FCVTSDr),
28200b57cec5SDimitry Andric ResultReg).addReg(Op);
28210b57cec5SDimitry Andric updateValueMap(I, ResultReg);
28220b57cec5SDimitry Andric return true;
28230b57cec5SDimitry Andric }
28240b57cec5SDimitry Andric
28250b57cec5SDimitry Andric // FPToUI and FPToSI
28260b57cec5SDimitry Andric bool AArch64FastISel::selectFPToInt(const Instruction *I, bool Signed) {
28270b57cec5SDimitry Andric MVT DestVT;
28280b57cec5SDimitry Andric if (!isTypeLegal(I->getType(), DestVT) || DestVT.isVector())
28290b57cec5SDimitry Andric return false;
28300b57cec5SDimitry Andric
283104eeddc0SDimitry Andric Register SrcReg = getRegForValue(I->getOperand(0));
28320b57cec5SDimitry Andric if (SrcReg == 0)
28330b57cec5SDimitry Andric return false;
28340b57cec5SDimitry Andric
28350b57cec5SDimitry Andric EVT SrcVT = TLI.getValueType(DL, I->getOperand(0)->getType(), true);
2836*0fca6ea1SDimitry Andric if (SrcVT == MVT::f128 || SrcVT == MVT::f16 || SrcVT == MVT::bf16)
28370b57cec5SDimitry Andric return false;
28380b57cec5SDimitry Andric
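// Pick the FCVTZS/FCVTZU variant based on the source FP size (f32/f64) and
// the destination width (i32/i64).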
28390b57cec5SDimitry Andric unsigned Opc;
28400b57cec5SDimitry Andric if (SrcVT == MVT::f64) {
28410b57cec5SDimitry Andric if (Signed)
28420b57cec5SDimitry Andric Opc = (DestVT == MVT::i32) ? AArch64::FCVTZSUWDr : AArch64::FCVTZSUXDr;
28430b57cec5SDimitry Andric else
28440b57cec5SDimitry Andric Opc = (DestVT == MVT::i32) ? AArch64::FCVTZUUWDr : AArch64::FCVTZUUXDr;
28450b57cec5SDimitry Andric } else {
28460b57cec5SDimitry Andric if (Signed)
28470b57cec5SDimitry Andric Opc = (DestVT == MVT::i32) ? AArch64::FCVTZSUWSr : AArch64::FCVTZSUXSr;
28480b57cec5SDimitry Andric else
28490b57cec5SDimitry Andric Opc = (DestVT == MVT::i32) ? AArch64::FCVTZUUWSr : AArch64::FCVTZUUXSr;
28500b57cec5SDimitry Andric }
285104eeddc0SDimitry Andric Register ResultReg = createResultReg(
28520b57cec5SDimitry Andric DestVT == MVT::i32 ? &AArch64::GPR32RegClass : &AArch64::GPR64RegClass);
2853bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc), ResultReg)
28540b57cec5SDimitry Andric .addReg(SrcReg);
28550b57cec5SDimitry Andric updateValueMap(I, ResultReg);
28560b57cec5SDimitry Andric return true;
28570b57cec5SDimitry Andric }
28580b57cec5SDimitry Andric
28590b57cec5SDimitry Andric bool AArch64FastISel::selectIntToFP(const Instruction *I, bool Signed) {
28600b57cec5SDimitry Andric MVT DestVT;
28610b57cec5SDimitry Andric if (!isTypeLegal(I->getType(), DestVT) || DestVT.isVector())
28620b57cec5SDimitry Andric return false;
28630b57cec5SDimitry Andric // Let regular ISEL handle f16 and bf16.
2864*0fca6ea1SDimitry Andric if (DestVT == MVT::f16 || DestVT == MVT::bf16)
28650b57cec5SDimitry Andric return false;
28660b57cec5SDimitry Andric
28670b57cec5SDimitry Andric assert((DestVT == MVT::f32 || DestVT == MVT::f64) &&
28680b57cec5SDimitry Andric "Unexpected value type.");
28690b57cec5SDimitry Andric
287004eeddc0SDimitry Andric Register SrcReg = getRegForValue(I->getOperand(0));
28710b57cec5SDimitry Andric if (!SrcReg)
28720b57cec5SDimitry Andric return false;
28730b57cec5SDimitry Andric
28740b57cec5SDimitry Andric EVT SrcVT = TLI.getValueType(DL, I->getOperand(0)->getType(), true);
28750b57cec5SDimitry Andric
28760b57cec5SDimitry Andric // Handle sign-extension.
28770b57cec5SDimitry Andric if (SrcVT == MVT::i16 || SrcVT == MVT::i8 || SrcVT == MVT::i1) {
28780b57cec5SDimitry Andric SrcReg =
28790b57cec5SDimitry Andric emitIntExt(SrcVT.getSimpleVT(), SrcReg, MVT::i32, /*isZExt*/ !Signed);
28800b57cec5SDimitry Andric if (!SrcReg)
28810b57cec5SDimitry Andric return false;
28820b57cec5SDimitry Andric }
28830b57cec5SDimitry Andric
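// Pick the SCVTF/UCVTF variant based on the source width (i32/i64) and the
// destination FP type (f32/f64).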
28840b57cec5SDimitry Andric unsigned Opc;
28850b57cec5SDimitry Andric if (SrcVT == MVT::i64) {
28860b57cec5SDimitry Andric if (Signed)
28870b57cec5SDimitry Andric Opc = (DestVT == MVT::f32) ? AArch64::SCVTFUXSri : AArch64::SCVTFUXDri;
28880b57cec5SDimitry Andric else
28890b57cec5SDimitry Andric Opc = (DestVT == MVT::f32) ? AArch64::UCVTFUXSri : AArch64::UCVTFUXDri;
28900b57cec5SDimitry Andric } else {
28910b57cec5SDimitry Andric if (Signed)
28920b57cec5SDimitry Andric Opc = (DestVT == MVT::f32) ? AArch64::SCVTFUWSri : AArch64::SCVTFUWDri;
28930b57cec5SDimitry Andric else
28940b57cec5SDimitry Andric Opc = (DestVT == MVT::f32) ? AArch64::UCVTFUWSri : AArch64::UCVTFUWDri;
28950b57cec5SDimitry Andric }
28960b57cec5SDimitry Andric
289704eeddc0SDimitry Andric Register ResultReg = fastEmitInst_r(Opc, TLI.getRegClassFor(DestVT), SrcReg);
28980b57cec5SDimitry Andric updateValueMap(I, ResultReg);
28990b57cec5SDimitry Andric return true;
29000b57cec5SDimitry Andric }
29010b57cec5SDimitry Andric
29020b57cec5SDimitry Andric bool AArch64FastISel::fastLowerArguments() {
29030b57cec5SDimitry Andric if (!FuncInfo.CanLowerReturn)
29040b57cec5SDimitry Andric return false;
29050b57cec5SDimitry Andric
29060b57cec5SDimitry Andric const Function *F = FuncInfo.Fn;
29070b57cec5SDimitry Andric if (F->isVarArg())
29080b57cec5SDimitry Andric return false;
29090b57cec5SDimitry Andric
29100b57cec5SDimitry Andric CallingConv::ID CC = F->getCallingConv();
29110b57cec5SDimitry Andric if (CC != CallingConv::C && CC != CallingConv::Swift)
29120b57cec5SDimitry Andric return false;
29130b57cec5SDimitry Andric
29140b57cec5SDimitry Andric if (Subtarget->hasCustomCallingConv())
29150b57cec5SDimitry Andric return false;
29160b57cec5SDimitry Andric
29170b57cec5SDimitry Andric // Only handle simple cases of up to 8 GPR and 8 FPR arguments each.
29180b57cec5SDimitry Andric unsigned GPRCnt = 0;
29190b57cec5SDimitry Andric unsigned FPRCnt = 0;
29200b57cec5SDimitry Andric for (auto const &Arg : F->args()) {
29210b57cec5SDimitry Andric if (Arg.hasAttribute(Attribute::ByVal) ||
29220b57cec5SDimitry Andric Arg.hasAttribute(Attribute::InReg) ||
29230b57cec5SDimitry Andric Arg.hasAttribute(Attribute::StructRet) ||
29240b57cec5SDimitry Andric Arg.hasAttribute(Attribute::SwiftSelf) ||
2925fe6060f1SDimitry Andric Arg.hasAttribute(Attribute::SwiftAsync) ||
29260b57cec5SDimitry Andric Arg.hasAttribute(Attribute::SwiftError) ||
29270b57cec5SDimitry Andric Arg.hasAttribute(Attribute::Nest))
29280b57cec5SDimitry Andric return false;
29290b57cec5SDimitry Andric
29300b57cec5SDimitry Andric Type *ArgTy = Arg.getType();
29310b57cec5SDimitry Andric if (ArgTy->isStructTy() || ArgTy->isArrayTy())
29320b57cec5SDimitry Andric return false;
29330b57cec5SDimitry Andric
29340b57cec5SDimitry Andric EVT ArgVT = TLI.getValueType(DL, ArgTy);
29350b57cec5SDimitry Andric if (!ArgVT.isSimple())
29360b57cec5SDimitry Andric return false;
29370b57cec5SDimitry Andric
29380b57cec5SDimitry Andric MVT VT = ArgVT.getSimpleVT().SimpleTy;
29390b57cec5SDimitry Andric if (VT.isFloatingPoint() && !Subtarget->hasFPARMv8())
29400b57cec5SDimitry Andric return false;
29410b57cec5SDimitry Andric
29420b57cec5SDimitry Andric if (VT.isVector() &&
29430b57cec5SDimitry Andric (!Subtarget->hasNEON() || !Subtarget->isLittleEndian()))
29440b57cec5SDimitry Andric return false;
29450b57cec5SDimitry Andric
29460b57cec5SDimitry Andric if (VT >= MVT::i1 && VT <= MVT::i64)
29470b57cec5SDimitry Andric ++GPRCnt;
29480b57cec5SDimitry Andric else if ((VT >= MVT::f16 && VT <= MVT::f64) || VT.is64BitVector() ||
29490b57cec5SDimitry Andric VT.is128BitVector())
29500b57cec5SDimitry Andric ++FPRCnt;
29510b57cec5SDimitry Andric else
29520b57cec5SDimitry Andric return false;
29530b57cec5SDimitry Andric
29540b57cec5SDimitry Andric if (GPRCnt > 8 || FPRCnt > 8)
29550b57cec5SDimitry Andric return false;
29560b57cec5SDimitry Andric }
29570b57cec5SDimitry Andric
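  // Physical argument registers, one row per register class used below:
  // W/X for 32/64-bit integers, H/S/D/Q for half, single, double and
  // 128-bit vector values; eight argument registers per class.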
29580b57cec5SDimitry Andric static const MCPhysReg Registers[6][8] = {
29590b57cec5SDimitry Andric { AArch64::W0, AArch64::W1, AArch64::W2, AArch64::W3, AArch64::W4,
29600b57cec5SDimitry Andric AArch64::W5, AArch64::W6, AArch64::W7 },
29610b57cec5SDimitry Andric { AArch64::X0, AArch64::X1, AArch64::X2, AArch64::X3, AArch64::X4,
29620b57cec5SDimitry Andric AArch64::X5, AArch64::X6, AArch64::X7 },
29630b57cec5SDimitry Andric { AArch64::H0, AArch64::H1, AArch64::H2, AArch64::H3, AArch64::H4,
29640b57cec5SDimitry Andric AArch64::H5, AArch64::H6, AArch64::H7 },
29650b57cec5SDimitry Andric { AArch64::S0, AArch64::S1, AArch64::S2, AArch64::S3, AArch64::S4,
29660b57cec5SDimitry Andric AArch64::S5, AArch64::S6, AArch64::S7 },
29670b57cec5SDimitry Andric { AArch64::D0, AArch64::D1, AArch64::D2, AArch64::D3, AArch64::D4,
29680b57cec5SDimitry Andric AArch64::D5, AArch64::D6, AArch64::D7 },
29690b57cec5SDimitry Andric { AArch64::Q0, AArch64::Q1, AArch64::Q2, AArch64::Q3, AArch64::Q4,
29700b57cec5SDimitry Andric AArch64::Q5, AArch64::Q6, AArch64::Q7 }
29710b57cec5SDimitry Andric };
29720b57cec5SDimitry Andric
29730b57cec5SDimitry Andric unsigned GPRIdx = 0;
29740b57cec5SDimitry Andric unsigned FPRIdx = 0;
29750b57cec5SDimitry Andric for (auto const &Arg : F->args()) {
29760b57cec5SDimitry Andric MVT VT = TLI.getSimpleValueType(DL, Arg.getType());
29770b57cec5SDimitry Andric unsigned SrcReg;
29780b57cec5SDimitry Andric const TargetRegisterClass *RC;
29790b57cec5SDimitry Andric if (VT >= MVT::i1 && VT <= MVT::i32) {
29800b57cec5SDimitry Andric SrcReg = Registers[0][GPRIdx++];
29810b57cec5SDimitry Andric RC = &AArch64::GPR32RegClass;
29820b57cec5SDimitry Andric VT = MVT::i32;
29830b57cec5SDimitry Andric } else if (VT == MVT::i64) {
29840b57cec5SDimitry Andric SrcReg = Registers[1][GPRIdx++];
29850b57cec5SDimitry Andric RC = &AArch64::GPR64RegClass;
2986*0fca6ea1SDimitry Andric } else if (VT == MVT::f16 || VT == MVT::bf16) {
29870b57cec5SDimitry Andric SrcReg = Registers[2][FPRIdx++];
29880b57cec5SDimitry Andric RC = &AArch64::FPR16RegClass;
29890b57cec5SDimitry Andric } else if (VT == MVT::f32) {
29900b57cec5SDimitry Andric SrcReg = Registers[3][FPRIdx++];
29910b57cec5SDimitry Andric RC = &AArch64::FPR32RegClass;
29920b57cec5SDimitry Andric } else if ((VT == MVT::f64) || VT.is64BitVector()) {
29930b57cec5SDimitry Andric SrcReg = Registers[4][FPRIdx++];
29940b57cec5SDimitry Andric RC = &AArch64::FPR64RegClass;
29950b57cec5SDimitry Andric } else if (VT.is128BitVector()) {
29960b57cec5SDimitry Andric SrcReg = Registers[5][FPRIdx++];
29970b57cec5SDimitry Andric RC = &AArch64::FPR128RegClass;
29980b57cec5SDimitry Andric } else
29990b57cec5SDimitry Andric llvm_unreachable("Unexpected value type.");
30000b57cec5SDimitry Andric
300104eeddc0SDimitry Andric Register DstReg = FuncInfo.MF->addLiveIn(SrcReg, RC);
30020b57cec5SDimitry Andric // FIXME: Unfortunately it's necessary to emit a copy from the livein copy.
30030b57cec5SDimitry Andric // Without this, EmitLiveInCopies may eliminate the livein if its only
30040b57cec5SDimitry Andric // use is a bitcast (which isn't turned into an instruction).
300504eeddc0SDimitry Andric Register ResultReg = createResultReg(RC);
3006bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
30070b57cec5SDimitry Andric TII.get(TargetOpcode::COPY), ResultReg)
30080b57cec5SDimitry Andric .addReg(DstReg, getKillRegState(true));
30090b57cec5SDimitry Andric updateValueMap(&Arg, ResultReg);
30100b57cec5SDimitry Andric }
30110b57cec5SDimitry Andric return true;
30120b57cec5SDimitry Andric }
30130b57cec5SDimitry Andric
30140b57cec5SDimitry Andric bool AArch64FastISel::processCallArgs(CallLoweringInfo &CLI,
30150b57cec5SDimitry Andric SmallVectorImpl<MVT> &OutVTs,
30160b57cec5SDimitry Andric unsigned &NumBytes) {
30170b57cec5SDimitry Andric CallingConv::ID CC = CLI.CallConv;
30180b57cec5SDimitry Andric SmallVector<CCValAssign, 16> ArgLocs;
30190b57cec5SDimitry Andric CCState CCInfo(CC, false, *FuncInfo.MF, ArgLocs, *Context);
30200b57cec5SDimitry Andric CCInfo.AnalyzeCallOperands(OutVTs, CLI.OutFlags, CCAssignFnForCall(CC));
30210b57cec5SDimitry Andric
30220b57cec5SDimitry Andric // Get a count of how many bytes are to be pushed on the stack.
302306c3fb27SDimitry Andric NumBytes = CCInfo.getStackSize();
30240b57cec5SDimitry Andric
30250b57cec5SDimitry Andric // Issue CALLSEQ_START
30260b57cec5SDimitry Andric unsigned AdjStackDown = TII.getCallFrameSetupOpcode();
3027bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AdjStackDown))
30280b57cec5SDimitry Andric .addImm(NumBytes).addImm(0);
30290b57cec5SDimitry Andric
30300b57cec5SDimitry Andric // Process the args.
30310b57cec5SDimitry Andric for (CCValAssign &VA : ArgLocs) {
30320b57cec5SDimitry Andric const Value *ArgVal = CLI.OutVals[VA.getValNo()];
30330b57cec5SDimitry Andric MVT ArgVT = OutVTs[VA.getValNo()];
30340b57cec5SDimitry Andric
303504eeddc0SDimitry Andric Register ArgReg = getRegForValue(ArgVal);
30360b57cec5SDimitry Andric if (!ArgReg)
30370b57cec5SDimitry Andric return false;
30380b57cec5SDimitry Andric
30390b57cec5SDimitry Andric // Handle arg promotion: SExt, ZExt, AExt.
30400b57cec5SDimitry Andric switch (VA.getLocInfo()) {
30410b57cec5SDimitry Andric case CCValAssign::Full:
30420b57cec5SDimitry Andric break;
30430b57cec5SDimitry Andric case CCValAssign::SExt: {
30440b57cec5SDimitry Andric MVT DestVT = VA.getLocVT();
30450b57cec5SDimitry Andric MVT SrcVT = ArgVT;
30460b57cec5SDimitry Andric ArgReg = emitIntExt(SrcVT, ArgReg, DestVT, /*isZExt=*/false);
30470b57cec5SDimitry Andric if (!ArgReg)
30480b57cec5SDimitry Andric return false;
30490b57cec5SDimitry Andric break;
30500b57cec5SDimitry Andric }
30510b57cec5SDimitry Andric case CCValAssign::AExt:
30520b57cec5SDimitry Andric // Intentional fall-through.
30530b57cec5SDimitry Andric case CCValAssign::ZExt: {
30540b57cec5SDimitry Andric MVT DestVT = VA.getLocVT();
30550b57cec5SDimitry Andric MVT SrcVT = ArgVT;
30560b57cec5SDimitry Andric ArgReg = emitIntExt(SrcVT, ArgReg, DestVT, /*isZExt=*/true);
30570b57cec5SDimitry Andric if (!ArgReg)
30580b57cec5SDimitry Andric return false;
30590b57cec5SDimitry Andric break;
30600b57cec5SDimitry Andric }
30610b57cec5SDimitry Andric default:
30620b57cec5SDimitry Andric llvm_unreachable("Unknown arg promotion!");
30630b57cec5SDimitry Andric }
30640b57cec5SDimitry Andric
30650b57cec5SDimitry Andric // Now copy/store arg to correct locations.
30660b57cec5SDimitry Andric if (VA.isRegLoc() && !VA.needsCustom()) {
3067bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
30680b57cec5SDimitry Andric TII.get(TargetOpcode::COPY), VA.getLocReg()).addReg(ArgReg);
30690b57cec5SDimitry Andric CLI.OutRegs.push_back(VA.getLocReg());
30700b57cec5SDimitry Andric } else if (VA.needsCustom()) {
30710b57cec5SDimitry Andric // FIXME: Handle custom args.
30720b57cec5SDimitry Andric return false;
30730b57cec5SDimitry Andric } else {
30740b57cec5SDimitry Andric assert(VA.isMemLoc() && "Assuming store on stack.");
30750b57cec5SDimitry Andric
30760b57cec5SDimitry Andric // Don't emit stores for undef values.
30770b57cec5SDimitry Andric if (isa<UndefValue>(ArgVal))
30780b57cec5SDimitry Andric continue;
30790b57cec5SDimitry Andric
30800b57cec5SDimitry Andric // Need to store on the stack.
30810b57cec5SDimitry Andric unsigned ArgSize = (ArgVT.getSizeInBits() + 7) / 8;
30820b57cec5SDimitry Andric
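      // On big-endian targets, arguments smaller than 8 bytes are shifted
      // within their 8-byte stack slot so the value lands in the bytes the
      // callee expects to read.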
30830b57cec5SDimitry Andric unsigned BEAlign = 0;
30840b57cec5SDimitry Andric if (ArgSize < 8 && !Subtarget->isLittleEndian())
30850b57cec5SDimitry Andric BEAlign = 8 - ArgSize;
30860b57cec5SDimitry Andric
30870b57cec5SDimitry Andric Address Addr;
30880b57cec5SDimitry Andric Addr.setKind(Address::RegBase);
30890b57cec5SDimitry Andric Addr.setReg(AArch64::SP);
30900b57cec5SDimitry Andric Addr.setOffset(VA.getLocMemOffset() + BEAlign);
30910b57cec5SDimitry Andric
30925ffd83dbSDimitry Andric Align Alignment = DL.getABITypeAlign(ArgVal->getType());
30930b57cec5SDimitry Andric MachineMemOperand *MMO = FuncInfo.MF->getMachineMemOperand(
30940b57cec5SDimitry Andric MachinePointerInfo::getStack(*FuncInfo.MF, Addr.getOffset()),
30950b57cec5SDimitry Andric MachineMemOperand::MOStore, ArgVT.getStoreSize(), Alignment);
30960b57cec5SDimitry Andric
30970b57cec5SDimitry Andric if (!emitStore(ArgVT, ArgReg, Addr, MMO))
30980b57cec5SDimitry Andric return false;
30990b57cec5SDimitry Andric }
31000b57cec5SDimitry Andric }
31010b57cec5SDimitry Andric return true;
31020b57cec5SDimitry Andric }
31030b57cec5SDimitry Andric
310406c3fb27SDimitry Andric bool AArch64FastISel::finishCall(CallLoweringInfo &CLI, unsigned NumBytes) {
31050b57cec5SDimitry Andric CallingConv::ID CC = CLI.CallConv;
31060b57cec5SDimitry Andric
31070b57cec5SDimitry Andric // Issue CALLSEQ_END
31080b57cec5SDimitry Andric unsigned AdjStackUp = TII.getCallFrameDestroyOpcode();
3109bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AdjStackUp))
31100b57cec5SDimitry Andric .addImm(NumBytes).addImm(0);
31110b57cec5SDimitry Andric
311206c3fb27SDimitry Andric // Now the return values.
31130b57cec5SDimitry Andric SmallVector<CCValAssign, 16> RVLocs;
31140b57cec5SDimitry Andric CCState CCInfo(CC, false, *FuncInfo.MF, RVLocs, *Context);
311506c3fb27SDimitry Andric CCInfo.AnalyzeCallResult(CLI.Ins, CCAssignFnForCall(CC));
31160b57cec5SDimitry Andric
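  // CreateRegs returns the first of a consecutive run of virtual registers
  // covering the return type; each physical return register is copied into
  // the corresponding virtual register below.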
311706c3fb27SDimitry Andric Register ResultReg = FuncInfo.CreateRegs(CLI.RetTy);
311806c3fb27SDimitry Andric for (unsigned i = 0; i != RVLocs.size(); ++i) {
311906c3fb27SDimitry Andric CCValAssign &VA = RVLocs[i];
312006c3fb27SDimitry Andric MVT CopyVT = VA.getValVT();
312106c3fb27SDimitry Andric unsigned CopyReg = ResultReg + i;
31220b57cec5SDimitry Andric
31230b57cec5SDimitry Andric // TODO: Handle big-endian results
31240b57cec5SDimitry Andric if (CopyVT.isVector() && !Subtarget->isLittleEndian())
31250b57cec5SDimitry Andric return false;
31260b57cec5SDimitry Andric
312706c3fb27SDimitry Andric     // Copy each result out of its specified physreg.
312806c3fb27SDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(TargetOpcode::COPY),
312906c3fb27SDimitry Andric CopyReg)
313006c3fb27SDimitry Andric .addReg(VA.getLocReg());
313106c3fb27SDimitry Andric CLI.InRegs.push_back(VA.getLocReg());
313206c3fb27SDimitry Andric }
31330b57cec5SDimitry Andric
31340b57cec5SDimitry Andric CLI.ResultReg = ResultReg;
313506c3fb27SDimitry Andric CLI.NumResultRegs = RVLocs.size();
31360b57cec5SDimitry Andric
31370b57cec5SDimitry Andric return true;
31380b57cec5SDimitry Andric }
31390b57cec5SDimitry Andric
31400b57cec5SDimitry Andric bool AArch64FastISel::fastLowerCall(CallLoweringInfo &CLI) {
31410b57cec5SDimitry Andric CallingConv::ID CC = CLI.CallConv;
31420b57cec5SDimitry Andric bool IsTailCall = CLI.IsTailCall;
31430b57cec5SDimitry Andric bool IsVarArg = CLI.IsVarArg;
31440b57cec5SDimitry Andric const Value *Callee = CLI.Callee;
31450b57cec5SDimitry Andric MCSymbol *Symbol = CLI.Symbol;
31460b57cec5SDimitry Andric
31470b57cec5SDimitry Andric if (!Callee && !Symbol)
31480b57cec5SDimitry Andric return false;
31490b57cec5SDimitry Andric
31503a9a9c0cSDimitry Andric // Allow SelectionDAG isel to handle calls to functions like setjmp that need
31513a9a9c0cSDimitry Andric // a bti instruction following the call.
31523a9a9c0cSDimitry Andric if (CLI.CB && CLI.CB->hasFnAttr(Attribute::ReturnsTwice) &&
31533a9a9c0cSDimitry Andric !Subtarget->noBTIAtReturnTwice() &&
31543a9a9c0cSDimitry Andric MF->getInfo<AArch64FunctionInfo>()->branchTargetEnforcement())
31553a9a9c0cSDimitry Andric return false;
31563a9a9c0cSDimitry Andric
3157bdd1243dSDimitry Andric // Allow SelectionDAG isel to handle indirect calls with KCFI checks.
3158bdd1243dSDimitry Andric if (CLI.CB && CLI.CB->isIndirectCall() &&
3159bdd1243dSDimitry Andric CLI.CB->getOperandBundle(LLVMContext::OB_kcfi))
3160bdd1243dSDimitry Andric return false;
3161bdd1243dSDimitry Andric
31620b57cec5SDimitry Andric // Allow SelectionDAG isel to handle tail calls.
31630b57cec5SDimitry Andric if (IsTailCall)
31640b57cec5SDimitry Andric return false;
31650b57cec5SDimitry Andric
31668bcb0991SDimitry Andric // FIXME: we could and should support this, but for now correctness at -O0 is
31678bcb0991SDimitry Andric // more important.
31688bcb0991SDimitry Andric if (Subtarget->isTargetILP32())
31698bcb0991SDimitry Andric return false;
31708bcb0991SDimitry Andric
31710b57cec5SDimitry Andric CodeModel::Model CM = TM.getCodeModel();
31720b57cec5SDimitry Andric // Only support the small-addressing and large code models.
31730b57cec5SDimitry Andric if (CM != CodeModel::Large && !Subtarget->useSmallAddressing())
31740b57cec5SDimitry Andric return false;
31750b57cec5SDimitry Andric
31760b57cec5SDimitry Andric // FIXME: Add large code model support for ELF.
31770b57cec5SDimitry Andric if (CM == CodeModel::Large && !Subtarget->isTargetMachO())
31780b57cec5SDimitry Andric return false;
31790b57cec5SDimitry Andric
3180*0fca6ea1SDimitry Andric // ELF -fno-plt compiled intrinsic calls do not have the nonlazybind
3181*0fca6ea1SDimitry Andric // attribute. Check "RtLibUseGOT" instead.
3182*0fca6ea1SDimitry Andric if (MF->getFunction().getParent()->getRtLibUseGOT())
3183*0fca6ea1SDimitry Andric return false;
3184*0fca6ea1SDimitry Andric
31850b57cec5SDimitry Andric // Let SDISel handle vararg functions.
31860b57cec5SDimitry Andric if (IsVarArg)
31870b57cec5SDimitry Andric return false;
31880b57cec5SDimitry Andric
31897a6dacacSDimitry Andric if (Subtarget->isWindowsArm64EC())
31907a6dacacSDimitry Andric return false;
31917a6dacacSDimitry Andric
31920b57cec5SDimitry Andric for (auto Flag : CLI.OutFlags)
31930b57cec5SDimitry Andric if (Flag.isInReg() || Flag.isSRet() || Flag.isNest() || Flag.isByVal() ||
3194fe6060f1SDimitry Andric Flag.isSwiftSelf() || Flag.isSwiftAsync() || Flag.isSwiftError())
31950b57cec5SDimitry Andric return false;
31960b57cec5SDimitry Andric
31970b57cec5SDimitry Andric // Set up the argument vectors.
31980b57cec5SDimitry Andric SmallVector<MVT, 16> OutVTs;
31990b57cec5SDimitry Andric OutVTs.reserve(CLI.OutVals.size());
32000b57cec5SDimitry Andric
32010b57cec5SDimitry Andric for (auto *Val : CLI.OutVals) {
32020b57cec5SDimitry Andric MVT VT;
32030b57cec5SDimitry Andric if (!isTypeLegal(Val->getType(), VT) &&
32040b57cec5SDimitry Andric !(VT == MVT::i1 || VT == MVT::i8 || VT == MVT::i16))
32050b57cec5SDimitry Andric return false;
32060b57cec5SDimitry Andric
32070b57cec5SDimitry Andric // We don't handle vector parameters yet.
32080b57cec5SDimitry Andric if (VT.isVector() || VT.getSizeInBits() > 64)
32090b57cec5SDimitry Andric return false;
32100b57cec5SDimitry Andric
32110b57cec5SDimitry Andric OutVTs.push_back(VT);
32120b57cec5SDimitry Andric }
32130b57cec5SDimitry Andric
32140b57cec5SDimitry Andric Address Addr;
32150b57cec5SDimitry Andric if (Callee && !computeCallAddress(Callee, Addr))
32160b57cec5SDimitry Andric return false;
32170b57cec5SDimitry Andric
3218480093f4SDimitry Andric // The weak function target may be zero; in that case we must use indirect
3219480093f4SDimitry Andric   // addressing via a stub on Windows, as it may be out of range for a
3220480093f4SDimitry Andric // PC-relative jump.
3221480093f4SDimitry Andric if (Subtarget->isTargetWindows() && Addr.getGlobalValue() &&
3222480093f4SDimitry Andric Addr.getGlobalValue()->hasExternalWeakLinkage())
3223480093f4SDimitry Andric return false;
3224480093f4SDimitry Andric
32250b57cec5SDimitry Andric // Handle the arguments now that we've gotten them.
32260b57cec5SDimitry Andric unsigned NumBytes;
32270b57cec5SDimitry Andric if (!processCallArgs(CLI, OutVTs, NumBytes))
32280b57cec5SDimitry Andric return false;
32290b57cec5SDimitry Andric
32300b57cec5SDimitry Andric const AArch64RegisterInfo *RegInfo = Subtarget->getRegisterInfo();
32310b57cec5SDimitry Andric if (RegInfo->isAnyArgRegReserved(*MF))
32320b57cec5SDimitry Andric RegInfo->emitReservedArgRegCallError(*MF);
32330b57cec5SDimitry Andric
32340b57cec5SDimitry Andric // Issue the call.
32350b57cec5SDimitry Andric MachineInstrBuilder MIB;
32360b57cec5SDimitry Andric if (Subtarget->useSmallAddressing()) {
32375ffd83dbSDimitry Andric const MCInstrDesc &II =
32385ffd83dbSDimitry Andric TII.get(Addr.getReg() ? getBLRCallOpcode(*MF) : (unsigned)AArch64::BL);
3239bdd1243dSDimitry Andric MIB = BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, II);
32400b57cec5SDimitry Andric if (Symbol)
32410b57cec5SDimitry Andric MIB.addSym(Symbol, 0);
32420b57cec5SDimitry Andric else if (Addr.getGlobalValue())
32430b57cec5SDimitry Andric MIB.addGlobalAddress(Addr.getGlobalValue(), 0, 0);
32440b57cec5SDimitry Andric else if (Addr.getReg()) {
324504eeddc0SDimitry Andric Register Reg = constrainOperandRegClass(II, Addr.getReg(), 0);
32460b57cec5SDimitry Andric MIB.addReg(Reg);
32470b57cec5SDimitry Andric } else
32480b57cec5SDimitry Andric return false;
32490b57cec5SDimitry Andric } else {
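    // Without small addressing (the large code model path), materialize the
    // callee address into a register, loading it through the GOT for external
    // symbols, and call indirectly.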
32500b57cec5SDimitry Andric unsigned CallReg = 0;
32510b57cec5SDimitry Andric if (Symbol) {
325204eeddc0SDimitry Andric Register ADRPReg = createResultReg(&AArch64::GPR64commonRegClass);
3253bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::ADRP),
32540b57cec5SDimitry Andric ADRPReg)
32550b57cec5SDimitry Andric .addSym(Symbol, AArch64II::MO_GOT | AArch64II::MO_PAGE);
32560b57cec5SDimitry Andric
32570b57cec5SDimitry Andric CallReg = createResultReg(&AArch64::GPR64RegClass);
3258bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
32590b57cec5SDimitry Andric TII.get(AArch64::LDRXui), CallReg)
32600b57cec5SDimitry Andric .addReg(ADRPReg)
32610b57cec5SDimitry Andric .addSym(Symbol,
32620b57cec5SDimitry Andric AArch64II::MO_GOT | AArch64II::MO_PAGEOFF | AArch64II::MO_NC);
32630b57cec5SDimitry Andric } else if (Addr.getGlobalValue())
32640b57cec5SDimitry Andric CallReg = materializeGV(Addr.getGlobalValue());
32650b57cec5SDimitry Andric else if (Addr.getReg())
32660b57cec5SDimitry Andric CallReg = Addr.getReg();
32670b57cec5SDimitry Andric
32680b57cec5SDimitry Andric if (!CallReg)
32690b57cec5SDimitry Andric return false;
32700b57cec5SDimitry Andric
32715ffd83dbSDimitry Andric const MCInstrDesc &II = TII.get(getBLRCallOpcode(*MF));
32720b57cec5SDimitry Andric CallReg = constrainOperandRegClass(II, CallReg, 0);
3273bdd1243dSDimitry Andric MIB = BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, II).addReg(CallReg);
32740b57cec5SDimitry Andric }
32750b57cec5SDimitry Andric
32760b57cec5SDimitry Andric // Add implicit physical register uses to the call.
32770b57cec5SDimitry Andric for (auto Reg : CLI.OutRegs)
32780b57cec5SDimitry Andric MIB.addReg(Reg, RegState::Implicit);
32790b57cec5SDimitry Andric
32800b57cec5SDimitry Andric // Add a register mask with the call-preserved registers.
32810b57cec5SDimitry Andric // Proper defs for return values will be added by setPhysRegsDeadExcept().
32820b57cec5SDimitry Andric MIB.addRegMask(TRI.getCallPreservedMask(*FuncInfo.MF, CC));
32830b57cec5SDimitry Andric
32840b57cec5SDimitry Andric CLI.Call = MIB;
32850b57cec5SDimitry Andric
32860b57cec5SDimitry Andric // Finish off the call including any return values.
328706c3fb27SDimitry Andric return finishCall(CLI, NumBytes);
32880b57cec5SDimitry Andric }
32890b57cec5SDimitry Andric
3290bdd1243dSDimitry Andric bool AArch64FastISel::isMemCpySmall(uint64_t Len, MaybeAlign Alignment) {
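  // Inline when the length is at most four times the known alignment, or
  // under 32 bytes when no alignment is known.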
32910b57cec5SDimitry Andric if (Alignment)
3292bdd1243dSDimitry Andric return Len / Alignment->value() <= 4;
32930b57cec5SDimitry Andric else
32940b57cec5SDimitry Andric return Len < 32;
32950b57cec5SDimitry Andric }
32960b57cec5SDimitry Andric
32970b57cec5SDimitry Andric bool AArch64FastISel::tryEmitSmallMemCpy(Address Dest, Address Src,
3298bdd1243dSDimitry Andric uint64_t Len, MaybeAlign Alignment) {
32990b57cec5SDimitry Andric // Make sure we don't bloat code by inlining very large memcpy's.
33000b57cec5SDimitry Andric if (!isMemCpySmall(Len, Alignment))
33010b57cec5SDimitry Andric return false;
33020b57cec5SDimitry Andric
33030b57cec5SDimitry Andric int64_t UnscaledOffset = 0;
33040b57cec5SDimitry Andric Address OrigDest = Dest;
33050b57cec5SDimitry Andric Address OrigSrc = Src;
33060b57cec5SDimitry Andric
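  // Emit one load/store pair per chunk, choosing the widest type that the
  // remaining length (and the alignment, when known and below 8) allows.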
33070b57cec5SDimitry Andric while (Len) {
33080b57cec5SDimitry Andric MVT VT;
3309bdd1243dSDimitry Andric if (!Alignment || *Alignment >= 8) {
33100b57cec5SDimitry Andric if (Len >= 8)
33110b57cec5SDimitry Andric VT = MVT::i64;
33120b57cec5SDimitry Andric else if (Len >= 4)
33130b57cec5SDimitry Andric VT = MVT::i32;
33140b57cec5SDimitry Andric else if (Len >= 2)
33150b57cec5SDimitry Andric VT = MVT::i16;
33160b57cec5SDimitry Andric else {
33170b57cec5SDimitry Andric VT = MVT::i8;
33180b57cec5SDimitry Andric }
33190b57cec5SDimitry Andric } else {
3320bdd1243dSDimitry Andric assert(Alignment && "Alignment is set in this branch");
33210b57cec5SDimitry Andric // Bound based on alignment.
3322bdd1243dSDimitry Andric if (Len >= 4 && *Alignment == 4)
33230b57cec5SDimitry Andric VT = MVT::i32;
3324bdd1243dSDimitry Andric else if (Len >= 2 && *Alignment == 2)
33250b57cec5SDimitry Andric VT = MVT::i16;
33260b57cec5SDimitry Andric else {
33270b57cec5SDimitry Andric VT = MVT::i8;
33280b57cec5SDimitry Andric }
33290b57cec5SDimitry Andric }
33300b57cec5SDimitry Andric
33310b57cec5SDimitry Andric unsigned ResultReg = emitLoad(VT, VT, Src);
33320b57cec5SDimitry Andric if (!ResultReg)
33330b57cec5SDimitry Andric return false;
33340b57cec5SDimitry Andric
33350b57cec5SDimitry Andric if (!emitStore(VT, ResultReg, Dest))
33360b57cec5SDimitry Andric return false;
33370b57cec5SDimitry Andric
33380b57cec5SDimitry Andric int64_t Size = VT.getSizeInBits() / 8;
33390b57cec5SDimitry Andric Len -= Size;
33400b57cec5SDimitry Andric UnscaledOffset += Size;
33410b57cec5SDimitry Andric
33420b57cec5SDimitry Andric // We need to recompute the unscaled offset for each iteration.
33430b57cec5SDimitry Andric Dest.setOffset(OrigDest.getOffset() + UnscaledOffset);
33440b57cec5SDimitry Andric Src.setOffset(OrigSrc.getOffset() + UnscaledOffset);
33450b57cec5SDimitry Andric }
33460b57cec5SDimitry Andric
33470b57cec5SDimitry Andric return true;
33480b57cec5SDimitry Andric }
33490b57cec5SDimitry Andric
33500b57cec5SDimitry Andric /// Check if it is possible to fold the condition from the XALU intrinsic
33510b57cec5SDimitry Andric /// into the user. The condition code will only be updated on success.
33520b57cec5SDimitry Andric bool AArch64FastISel::foldXALUIntrinsic(AArch64CC::CondCode &CC,
33530b57cec5SDimitry Andric const Instruction *I,
33540b57cec5SDimitry Andric const Value *Cond) {
33550b57cec5SDimitry Andric if (!isa<ExtractValueInst>(Cond))
33560b57cec5SDimitry Andric return false;
33570b57cec5SDimitry Andric
33580b57cec5SDimitry Andric const auto *EV = cast<ExtractValueInst>(Cond);
33590b57cec5SDimitry Andric if (!isa<IntrinsicInst>(EV->getAggregateOperand()))
33600b57cec5SDimitry Andric return false;
33610b57cec5SDimitry Andric
33620b57cec5SDimitry Andric const auto *II = cast<IntrinsicInst>(EV->getAggregateOperand());
33630b57cec5SDimitry Andric MVT RetVT;
33640b57cec5SDimitry Andric const Function *Callee = II->getCalledFunction();
33650b57cec5SDimitry Andric Type *RetTy =
33660b57cec5SDimitry Andric cast<StructType>(Callee->getReturnType())->getTypeAtIndex(0U);
33670b57cec5SDimitry Andric if (!isTypeLegal(RetTy, RetVT))
33680b57cec5SDimitry Andric return false;
33690b57cec5SDimitry Andric
33700b57cec5SDimitry Andric if (RetVT != MVT::i32 && RetVT != MVT::i64)
33710b57cec5SDimitry Andric return false;
33720b57cec5SDimitry Andric
33730b57cec5SDimitry Andric const Value *LHS = II->getArgOperand(0);
33740b57cec5SDimitry Andric const Value *RHS = II->getArgOperand(1);
33750b57cec5SDimitry Andric
33760b57cec5SDimitry Andric // Canonicalize immediate to the RHS.
3377e8d8bef9SDimitry Andric if (isa<ConstantInt>(LHS) && !isa<ConstantInt>(RHS) && II->isCommutative())
33780b57cec5SDimitry Andric std::swap(LHS, RHS);
33790b57cec5SDimitry Andric
33800b57cec5SDimitry Andric // Simplify multiplies.
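  // Multiplying by 2 overflows exactly when adding the value to itself does,
  // so the corresponding add condition code applies.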
33810b57cec5SDimitry Andric Intrinsic::ID IID = II->getIntrinsicID();
33820b57cec5SDimitry Andric switch (IID) {
33830b57cec5SDimitry Andric default:
33840b57cec5SDimitry Andric break;
33850b57cec5SDimitry Andric case Intrinsic::smul_with_overflow:
33860b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(RHS))
33870b57cec5SDimitry Andric if (C->getValue() == 2)
33880b57cec5SDimitry Andric IID = Intrinsic::sadd_with_overflow;
33890b57cec5SDimitry Andric break;
33900b57cec5SDimitry Andric case Intrinsic::umul_with_overflow:
33910b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(RHS))
33920b57cec5SDimitry Andric if (C->getValue() == 2)
33930b57cec5SDimitry Andric IID = Intrinsic::uadd_with_overflow;
33940b57cec5SDimitry Andric break;
33950b57cec5SDimitry Andric }
33960b57cec5SDimitry Andric
33970b57cec5SDimitry Andric AArch64CC::CondCode TmpCC;
33980b57cec5SDimitry Andric switch (IID) {
33990b57cec5SDimitry Andric default:
34000b57cec5SDimitry Andric return false;
34010b57cec5SDimitry Andric case Intrinsic::sadd_with_overflow:
34020b57cec5SDimitry Andric case Intrinsic::ssub_with_overflow:
34030b57cec5SDimitry Andric TmpCC = AArch64CC::VS;
34040b57cec5SDimitry Andric break;
34050b57cec5SDimitry Andric case Intrinsic::uadd_with_overflow:
34060b57cec5SDimitry Andric TmpCC = AArch64CC::HS;
34070b57cec5SDimitry Andric break;
34080b57cec5SDimitry Andric case Intrinsic::usub_with_overflow:
34090b57cec5SDimitry Andric TmpCC = AArch64CC::LO;
34100b57cec5SDimitry Andric break;
34110b57cec5SDimitry Andric case Intrinsic::smul_with_overflow:
34120b57cec5SDimitry Andric case Intrinsic::umul_with_overflow:
34130b57cec5SDimitry Andric TmpCC = AArch64CC::NE;
34140b57cec5SDimitry Andric break;
34150b57cec5SDimitry Andric }
34160b57cec5SDimitry Andric
34170b57cec5SDimitry Andric // Check if both instructions are in the same basic block.
34180b57cec5SDimitry Andric if (!isValueAvailable(II))
34190b57cec5SDimitry Andric return false;
34200b57cec5SDimitry Andric
34210b57cec5SDimitry Andric   // Make sure nothing is in the way between the intrinsic and its user.
34220b57cec5SDimitry Andric BasicBlock::const_iterator Start(I);
34230b57cec5SDimitry Andric BasicBlock::const_iterator End(II);
34240b57cec5SDimitry Andric for (auto Itr = std::prev(Start); Itr != End; --Itr) {
34250b57cec5SDimitry Andric // We only expect extractvalue instructions between the intrinsic and the
34260b57cec5SDimitry Andric // instruction to be selected.
34270b57cec5SDimitry Andric if (!isa<ExtractValueInst>(Itr))
34280b57cec5SDimitry Andric return false;
34290b57cec5SDimitry Andric
34300b57cec5SDimitry Andric // Check that the extractvalue operand comes from the intrinsic.
34310b57cec5SDimitry Andric const auto *EVI = cast<ExtractValueInst>(Itr);
34320b57cec5SDimitry Andric if (EVI->getAggregateOperand() != II)
34330b57cec5SDimitry Andric return false;
34340b57cec5SDimitry Andric }
34350b57cec5SDimitry Andric
34360b57cec5SDimitry Andric CC = TmpCC;
34370b57cec5SDimitry Andric return true;
34380b57cec5SDimitry Andric }
34390b57cec5SDimitry Andric
34400b57cec5SDimitry Andric bool AArch64FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
34410b57cec5SDimitry Andric // FIXME: Handle more intrinsics.
34420b57cec5SDimitry Andric switch (II->getIntrinsicID()) {
34430b57cec5SDimitry Andric default: return false;
34440b57cec5SDimitry Andric case Intrinsic::frameaddress: {
34450b57cec5SDimitry Andric MachineFrameInfo &MFI = FuncInfo.MF->getFrameInfo();
34460b57cec5SDimitry Andric MFI.setFrameAddressIsTaken(true);
34470b57cec5SDimitry Andric
34480b57cec5SDimitry Andric const AArch64RegisterInfo *RegInfo = Subtarget->getRegisterInfo();
34498bcb0991SDimitry Andric Register FramePtr = RegInfo->getFrameRegister(*(FuncInfo.MF));
34508bcb0991SDimitry Andric Register SrcReg = MRI.createVirtualRegister(&AArch64::GPR64RegClass);
3451bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
34520b57cec5SDimitry Andric TII.get(TargetOpcode::COPY), SrcReg).addReg(FramePtr);
34530b57cec5SDimitry Andric // Recursively load frame address
34540b57cec5SDimitry Andric // ldr x0, [fp]
34550b57cec5SDimitry Andric // ldr x0, [x0]
34560b57cec5SDimitry Andric // ldr x0, [x0]
34570b57cec5SDimitry Andric // ...
34580b57cec5SDimitry Andric unsigned DestReg;
34590b57cec5SDimitry Andric unsigned Depth = cast<ConstantInt>(II->getOperand(0))->getZExtValue();
34600b57cec5SDimitry Andric while (Depth--) {
34610b57cec5SDimitry Andric DestReg = fastEmitInst_ri(AArch64::LDRXui, &AArch64::GPR64RegClass,
3462fe6060f1SDimitry Andric SrcReg, 0);
34630b57cec5SDimitry Andric assert(DestReg && "Unexpected LDR instruction emission failure.");
34640b57cec5SDimitry Andric SrcReg = DestReg;
34650b57cec5SDimitry Andric }
34660b57cec5SDimitry Andric
34670b57cec5SDimitry Andric updateValueMap(II, SrcReg);
34680b57cec5SDimitry Andric return true;
34690b57cec5SDimitry Andric }
34700b57cec5SDimitry Andric case Intrinsic::sponentry: {
34710b57cec5SDimitry Andric MachineFrameInfo &MFI = FuncInfo.MF->getFrameInfo();
34720b57cec5SDimitry Andric
34730b57cec5SDimitry Andric // SP = FP + Fixed Object + 16
34740b57cec5SDimitry Andric int FI = MFI.CreateFixedObject(4, 0, false);
347504eeddc0SDimitry Andric Register ResultReg = createResultReg(&AArch64::GPR64spRegClass);
3476bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
34770b57cec5SDimitry Andric TII.get(AArch64::ADDXri), ResultReg)
34780b57cec5SDimitry Andric .addFrameIndex(FI)
34790b57cec5SDimitry Andric .addImm(0)
34800b57cec5SDimitry Andric .addImm(0);
34810b57cec5SDimitry Andric
34820b57cec5SDimitry Andric updateValueMap(II, ResultReg);
34830b57cec5SDimitry Andric return true;
34840b57cec5SDimitry Andric }
34850b57cec5SDimitry Andric case Intrinsic::memcpy:
34860b57cec5SDimitry Andric case Intrinsic::memmove: {
34870b57cec5SDimitry Andric const auto *MTI = cast<MemTransferInst>(II);
34880b57cec5SDimitry Andric // Don't handle volatile.
34890b57cec5SDimitry Andric if (MTI->isVolatile())
34900b57cec5SDimitry Andric return false;
34910b57cec5SDimitry Andric
34920b57cec5SDimitry Andric // Disable inlining for memmove before calls to ComputeAddress. Otherwise,
34930b57cec5SDimitry Andric // we would emit dead code because we don't currently handle memmoves.
34940b57cec5SDimitry Andric bool IsMemCpy = (II->getIntrinsicID() == Intrinsic::memcpy);
34950b57cec5SDimitry Andric if (isa<ConstantInt>(MTI->getLength()) && IsMemCpy) {
34960b57cec5SDimitry Andric // Small memcpy's are common enough that we want to do them without a call
34970b57cec5SDimitry Andric // if possible.
34980b57cec5SDimitry Andric uint64_t Len = cast<ConstantInt>(MTI->getLength())->getZExtValue();
3499bdd1243dSDimitry Andric MaybeAlign Alignment;
3500bdd1243dSDimitry Andric if (MTI->getDestAlign() || MTI->getSourceAlign())
3501bdd1243dSDimitry Andric Alignment = std::min(MTI->getDestAlign().valueOrOne(),
3502bdd1243dSDimitry Andric MTI->getSourceAlign().valueOrOne());
35030b57cec5SDimitry Andric if (isMemCpySmall(Len, Alignment)) {
35040b57cec5SDimitry Andric Address Dest, Src;
35050b57cec5SDimitry Andric if (!computeAddress(MTI->getRawDest(), Dest) ||
35060b57cec5SDimitry Andric !computeAddress(MTI->getRawSource(), Src))
35070b57cec5SDimitry Andric return false;
35080b57cec5SDimitry Andric if (tryEmitSmallMemCpy(Dest, Src, Len, Alignment))
35090b57cec5SDimitry Andric return true;
35100b57cec5SDimitry Andric }
35110b57cec5SDimitry Andric }
35120b57cec5SDimitry Andric
35130b57cec5SDimitry Andric if (!MTI->getLength()->getType()->isIntegerTy(64))
35140b57cec5SDimitry Andric return false;
35150b57cec5SDimitry Andric
35160b57cec5SDimitry Andric if (MTI->getSourceAddressSpace() > 255 || MTI->getDestAddressSpace() > 255)
35170b57cec5SDimitry Andric // Fast instruction selection doesn't support the special
35180b57cec5SDimitry Andric // address spaces.
35190b57cec5SDimitry Andric return false;
35200b57cec5SDimitry Andric
35210b57cec5SDimitry Andric const char *IntrMemName = isa<MemCpyInst>(II) ? "memcpy" : "memmove";
3522349cc55cSDimitry Andric return lowerCallTo(II, IntrMemName, II->arg_size() - 1);
35230b57cec5SDimitry Andric }
35240b57cec5SDimitry Andric case Intrinsic::memset: {
35250b57cec5SDimitry Andric const MemSetInst *MSI = cast<MemSetInst>(II);
35260b57cec5SDimitry Andric // Don't handle volatile.
35270b57cec5SDimitry Andric if (MSI->isVolatile())
35280b57cec5SDimitry Andric return false;
35290b57cec5SDimitry Andric
35300b57cec5SDimitry Andric if (!MSI->getLength()->getType()->isIntegerTy(64))
35310b57cec5SDimitry Andric return false;
35320b57cec5SDimitry Andric
35330b57cec5SDimitry Andric if (MSI->getDestAddressSpace() > 255)
35340b57cec5SDimitry Andric // Fast instruction selection doesn't support the special
35350b57cec5SDimitry Andric // address spaces.
35360b57cec5SDimitry Andric return false;
35370b57cec5SDimitry Andric
3538349cc55cSDimitry Andric return lowerCallTo(II, "memset", II->arg_size() - 1);
35390b57cec5SDimitry Andric }
35400b57cec5SDimitry Andric case Intrinsic::sin:
35410b57cec5SDimitry Andric case Intrinsic::cos:
3542*0fca6ea1SDimitry Andric case Intrinsic::tan:
35430b57cec5SDimitry Andric case Intrinsic::pow: {
35440b57cec5SDimitry Andric MVT RetVT;
35450b57cec5SDimitry Andric if (!isTypeLegal(II->getType(), RetVT))
35460b57cec5SDimitry Andric return false;
35470b57cec5SDimitry Andric
35480b57cec5SDimitry Andric if (RetVT != MVT::f32 && RetVT != MVT::f64)
35490b57cec5SDimitry Andric return false;
35500b57cec5SDimitry Andric
3551*0fca6ea1SDimitry Andric static const RTLIB::Libcall LibCallTable[4][2] = {
35520b57cec5SDimitry Andric {RTLIB::SIN_F32, RTLIB::SIN_F64},
35530b57cec5SDimitry Andric {RTLIB::COS_F32, RTLIB::COS_F64},
3554*0fca6ea1SDimitry Andric {RTLIB::TAN_F32, RTLIB::TAN_F64},
3555*0fca6ea1SDimitry Andric {RTLIB::POW_F32, RTLIB::POW_F64}};
35560b57cec5SDimitry Andric RTLIB::Libcall LC;
35570b57cec5SDimitry Andric bool Is64Bit = RetVT == MVT::f64;
35580b57cec5SDimitry Andric switch (II->getIntrinsicID()) {
35590b57cec5SDimitry Andric default:
35600b57cec5SDimitry Andric llvm_unreachable("Unexpected intrinsic.");
35610b57cec5SDimitry Andric case Intrinsic::sin:
35620b57cec5SDimitry Andric LC = LibCallTable[0][Is64Bit];
35630b57cec5SDimitry Andric break;
35640b57cec5SDimitry Andric case Intrinsic::cos:
35650b57cec5SDimitry Andric LC = LibCallTable[1][Is64Bit];
35660b57cec5SDimitry Andric break;
3567*0fca6ea1SDimitry Andric case Intrinsic::tan:
35680b57cec5SDimitry Andric LC = LibCallTable[2][Is64Bit];
35690b57cec5SDimitry Andric break;
3570*0fca6ea1SDimitry Andric case Intrinsic::pow:
3571*0fca6ea1SDimitry Andric LC = LibCallTable[3][Is64Bit];
3572*0fca6ea1SDimitry Andric break;
35730b57cec5SDimitry Andric }
35740b57cec5SDimitry Andric
35750b57cec5SDimitry Andric ArgListTy Args;
3576349cc55cSDimitry Andric Args.reserve(II->arg_size());
35770b57cec5SDimitry Andric
35780b57cec5SDimitry Andric // Populate the argument list.
3579349cc55cSDimitry Andric for (auto &Arg : II->args()) {
35800b57cec5SDimitry Andric ArgListEntry Entry;
35810b57cec5SDimitry Andric Entry.Val = Arg;
35820b57cec5SDimitry Andric Entry.Ty = Arg->getType();
35830b57cec5SDimitry Andric Args.push_back(Entry);
35840b57cec5SDimitry Andric }
35850b57cec5SDimitry Andric
35860b57cec5SDimitry Andric CallLoweringInfo CLI;
35870b57cec5SDimitry Andric MCContext &Ctx = MF->getContext();
35880b57cec5SDimitry Andric CLI.setCallee(DL, Ctx, TLI.getLibcallCallingConv(LC), II->getType(),
35890b57cec5SDimitry Andric TLI.getLibcallName(LC), std::move(Args));
35900b57cec5SDimitry Andric if (!lowerCallTo(CLI))
35910b57cec5SDimitry Andric return false;
35920b57cec5SDimitry Andric updateValueMap(II, CLI.ResultReg);
35930b57cec5SDimitry Andric return true;
35940b57cec5SDimitry Andric }
35950b57cec5SDimitry Andric case Intrinsic::fabs: {
35960b57cec5SDimitry Andric MVT VT;
35970b57cec5SDimitry Andric if (!isTypeLegal(II->getType(), VT))
35980b57cec5SDimitry Andric return false;
35990b57cec5SDimitry Andric
36000b57cec5SDimitry Andric unsigned Opc;
36010b57cec5SDimitry Andric switch (VT.SimpleTy) {
36020b57cec5SDimitry Andric default:
36030b57cec5SDimitry Andric return false;
36040b57cec5SDimitry Andric case MVT::f32:
36050b57cec5SDimitry Andric Opc = AArch64::FABSSr;
36060b57cec5SDimitry Andric break;
36070b57cec5SDimitry Andric case MVT::f64:
36080b57cec5SDimitry Andric Opc = AArch64::FABSDr;
36090b57cec5SDimitry Andric break;
36100b57cec5SDimitry Andric }
361104eeddc0SDimitry Andric Register SrcReg = getRegForValue(II->getOperand(0));
36120b57cec5SDimitry Andric if (!SrcReg)
36130b57cec5SDimitry Andric return false;
361404eeddc0SDimitry Andric Register ResultReg = createResultReg(TLI.getRegClassFor(VT));
3615bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc), ResultReg)
3616fe6060f1SDimitry Andric .addReg(SrcReg);
36170b57cec5SDimitry Andric updateValueMap(II, ResultReg);
36180b57cec5SDimitry Andric return true;
36190b57cec5SDimitry Andric }
36200b57cec5SDimitry Andric case Intrinsic::trap:
3621bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::BRK))
36220b57cec5SDimitry Andric .addImm(1);
36230b57cec5SDimitry Andric return true;
3624e8d8bef9SDimitry Andric case Intrinsic::debugtrap:
3625bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::BRK))
36260b57cec5SDimitry Andric .addImm(0xF000);
36270b57cec5SDimitry Andric return true;
36280b57cec5SDimitry Andric
36290b57cec5SDimitry Andric case Intrinsic::sqrt: {
36300b57cec5SDimitry Andric Type *RetTy = II->getCalledFunction()->getReturnType();
36310b57cec5SDimitry Andric
36320b57cec5SDimitry Andric MVT VT;
36330b57cec5SDimitry Andric if (!isTypeLegal(RetTy, VT))
36340b57cec5SDimitry Andric return false;
36350b57cec5SDimitry Andric
363604eeddc0SDimitry Andric Register Op0Reg = getRegForValue(II->getOperand(0));
36370b57cec5SDimitry Andric if (!Op0Reg)
36380b57cec5SDimitry Andric return false;
36390b57cec5SDimitry Andric
3640fe6060f1SDimitry Andric unsigned ResultReg = fastEmit_r(VT, VT, ISD::FSQRT, Op0Reg);
36410b57cec5SDimitry Andric if (!ResultReg)
36420b57cec5SDimitry Andric return false;
36430b57cec5SDimitry Andric
36440b57cec5SDimitry Andric updateValueMap(II, ResultReg);
36450b57cec5SDimitry Andric return true;
36460b57cec5SDimitry Andric }
36470b57cec5SDimitry Andric case Intrinsic::sadd_with_overflow:
36480b57cec5SDimitry Andric case Intrinsic::uadd_with_overflow:
36490b57cec5SDimitry Andric case Intrinsic::ssub_with_overflow:
36500b57cec5SDimitry Andric case Intrinsic::usub_with_overflow:
36510b57cec5SDimitry Andric case Intrinsic::smul_with_overflow:
36520b57cec5SDimitry Andric case Intrinsic::umul_with_overflow: {
36530b57cec5SDimitry Andric // This implements the basic lowering of the xalu with overflow intrinsics.
36540b57cec5SDimitry Andric const Function *Callee = II->getCalledFunction();
36550b57cec5SDimitry Andric auto *Ty = cast<StructType>(Callee->getReturnType());
36560b57cec5SDimitry Andric Type *RetTy = Ty->getTypeAtIndex(0U);
36570b57cec5SDimitry Andric
36580b57cec5SDimitry Andric MVT VT;
36590b57cec5SDimitry Andric if (!isTypeLegal(RetTy, VT))
36600b57cec5SDimitry Andric return false;
36610b57cec5SDimitry Andric
36620b57cec5SDimitry Andric if (VT != MVT::i32 && VT != MVT::i64)
36630b57cec5SDimitry Andric return false;
36640b57cec5SDimitry Andric
36650b57cec5SDimitry Andric const Value *LHS = II->getArgOperand(0);
36660b57cec5SDimitry Andric const Value *RHS = II->getArgOperand(1);
36670b57cec5SDimitry Andric // Canonicalize immediate to the RHS.
3668e8d8bef9SDimitry Andric if (isa<ConstantInt>(LHS) && !isa<ConstantInt>(RHS) && II->isCommutative())
36690b57cec5SDimitry Andric std::swap(LHS, RHS);
36700b57cec5SDimitry Andric
36710b57cec5SDimitry Andric // Simplify multiplies.
36720b57cec5SDimitry Andric Intrinsic::ID IID = II->getIntrinsicID();
36730b57cec5SDimitry Andric switch (IID) {
36740b57cec5SDimitry Andric default:
36750b57cec5SDimitry Andric break;
36760b57cec5SDimitry Andric case Intrinsic::smul_with_overflow:
36770b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(RHS))
36780b57cec5SDimitry Andric if (C->getValue() == 2) {
36790b57cec5SDimitry Andric IID = Intrinsic::sadd_with_overflow;
36800b57cec5SDimitry Andric RHS = LHS;
36810b57cec5SDimitry Andric }
36820b57cec5SDimitry Andric break;
36830b57cec5SDimitry Andric case Intrinsic::umul_with_overflow:
36840b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(RHS))
36850b57cec5SDimitry Andric if (C->getValue() == 2) {
36860b57cec5SDimitry Andric IID = Intrinsic::uadd_with_overflow;
36870b57cec5SDimitry Andric RHS = LHS;
36880b57cec5SDimitry Andric }
36890b57cec5SDimitry Andric break;
36900b57cec5SDimitry Andric }
36910b57cec5SDimitry Andric
36920b57cec5SDimitry Andric unsigned ResultReg1 = 0, ResultReg2 = 0, MulReg = 0;
36930b57cec5SDimitry Andric AArch64CC::CondCode CC = AArch64CC::Invalid;
36940b57cec5SDimitry Andric switch (IID) {
36950b57cec5SDimitry Andric default: llvm_unreachable("Unexpected intrinsic!");
36960b57cec5SDimitry Andric case Intrinsic::sadd_with_overflow:
36970b57cec5SDimitry Andric ResultReg1 = emitAdd(VT, LHS, RHS, /*SetFlags=*/true);
36980b57cec5SDimitry Andric CC = AArch64CC::VS;
36990b57cec5SDimitry Andric break;
37000b57cec5SDimitry Andric case Intrinsic::uadd_with_overflow:
37010b57cec5SDimitry Andric ResultReg1 = emitAdd(VT, LHS, RHS, /*SetFlags=*/true);
37020b57cec5SDimitry Andric CC = AArch64CC::HS;
37030b57cec5SDimitry Andric break;
37040b57cec5SDimitry Andric case Intrinsic::ssub_with_overflow:
37050b57cec5SDimitry Andric ResultReg1 = emitSub(VT, LHS, RHS, /*SetFlags=*/true);
37060b57cec5SDimitry Andric CC = AArch64CC::VS;
37070b57cec5SDimitry Andric break;
37080b57cec5SDimitry Andric case Intrinsic::usub_with_overflow:
37090b57cec5SDimitry Andric ResultReg1 = emitSub(VT, LHS, RHS, /*SetFlags=*/true);
37100b57cec5SDimitry Andric CC = AArch64CC::LO;
37110b57cec5SDimitry Andric break;
37120b57cec5SDimitry Andric case Intrinsic::smul_with_overflow: {
37130b57cec5SDimitry Andric CC = AArch64CC::NE;
371404eeddc0SDimitry Andric Register LHSReg = getRegForValue(LHS);
37150b57cec5SDimitry Andric if (!LHSReg)
37160b57cec5SDimitry Andric return false;
37170b57cec5SDimitry Andric
371804eeddc0SDimitry Andric Register RHSReg = getRegForValue(RHS);
37190b57cec5SDimitry Andric if (!RHSReg)
37200b57cec5SDimitry Andric return false;
37210b57cec5SDimitry Andric
37220b57cec5SDimitry Andric if (VT == MVT::i32) {
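        // The 32-bit signed multiply overflowed iff the full 64-bit SMULL
        // result differs from the sign-extension of its low 32 bits; the
        // compare below sets the flags for that check.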
3723fe6060f1SDimitry Andric MulReg = emitSMULL_rr(MVT::i64, LHSReg, RHSReg);
372404eeddc0SDimitry Andric Register MulSubReg =
3725fe6060f1SDimitry Andric fastEmitInst_extractsubreg(VT, MulReg, AArch64::sub_32);
3726fe6060f1SDimitry Andric // cmp xreg, wreg, sxtw
3727fe6060f1SDimitry Andric emitAddSub_rx(/*UseAdd=*/false, MVT::i64, MulReg, MulSubReg,
3728fe6060f1SDimitry Andric AArch64_AM::SXTW, /*ShiftImm=*/0, /*SetFlags=*/true,
3729fe6060f1SDimitry Andric /*WantResult=*/false);
3730fe6060f1SDimitry Andric MulReg = MulSubReg;
37310b57cec5SDimitry Andric } else {
37320b57cec5SDimitry Andric assert(VT == MVT::i64 && "Unexpected value type.");
37330b57cec5SDimitry Andric // LHSReg and RHSReg cannot be killed by this Mul, since they are
37340b57cec5SDimitry Andric // reused in the next instruction.
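        // Signed overflow occurred iff the high half of the product (SMULH)
        // differs from the low half arithmetically shifted right by 63, i.e.
        // from its sign bits; the SUBS below compares the two.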
3735fe6060f1SDimitry Andric MulReg = emitMul_rr(VT, LHSReg, RHSReg);
3736fe6060f1SDimitry Andric unsigned SMULHReg = fastEmit_rr(VT, VT, ISD::MULHS, LHSReg, RHSReg);
3737fe6060f1SDimitry Andric emitSubs_rs(VT, SMULHReg, MulReg, AArch64_AM::ASR, 63,
3738fe6060f1SDimitry Andric /*WantResult=*/false);
37390b57cec5SDimitry Andric }
37400b57cec5SDimitry Andric break;
37410b57cec5SDimitry Andric }
37420b57cec5SDimitry Andric case Intrinsic::umul_with_overflow: {
37430b57cec5SDimitry Andric CC = AArch64CC::NE;
374404eeddc0SDimitry Andric Register LHSReg = getRegForValue(LHS);
37450b57cec5SDimitry Andric if (!LHSReg)
37460b57cec5SDimitry Andric return false;
37470b57cec5SDimitry Andric
374804eeddc0SDimitry Andric Register RHSReg = getRegForValue(RHS);
37490b57cec5SDimitry Andric if (!RHSReg)
37500b57cec5SDimitry Andric return false;
37510b57cec5SDimitry Andric
37520b57cec5SDimitry Andric if (VT == MVT::i32) {
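        // The 32-bit unsigned multiply overflowed iff any of the top 32 bits
        // of the UMULL result are set; the ANDS below tests exactly those
        // bits.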
3753fe6060f1SDimitry Andric MulReg = emitUMULL_rr(MVT::i64, LHSReg, RHSReg);
3754fe6060f1SDimitry Andric // tst xreg, #0xffffffff00000000
3755bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
3756fe6060f1SDimitry Andric TII.get(AArch64::ANDSXri), AArch64::XZR)
3757fe6060f1SDimitry Andric .addReg(MulReg)
3758fe6060f1SDimitry Andric .addImm(AArch64_AM::encodeLogicalImmediate(0xFFFFFFFF00000000, 64));
3759fe6060f1SDimitry Andric MulReg = fastEmitInst_extractsubreg(VT, MulReg, AArch64::sub_32);
37600b57cec5SDimitry Andric } else {
37610b57cec5SDimitry Andric assert(VT == MVT::i64 && "Unexpected value type.");
37620b57cec5SDimitry Andric // LHSReg and RHSReg cannot be killed by this Mul, since they are
37630b57cec5SDimitry Andric // reused in the next instruction.
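        // Unsigned overflow occurred iff the high half of the product (UMULH)
        // is non-zero; the SUBS against XZR below sets the flags accordingly.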
3764fe6060f1SDimitry Andric MulReg = emitMul_rr(VT, LHSReg, RHSReg);
3765fe6060f1SDimitry Andric unsigned UMULHReg = fastEmit_rr(VT, VT, ISD::MULHU, LHSReg, RHSReg);
3766fe6060f1SDimitry Andric emitSubs_rr(VT, AArch64::XZR, UMULHReg, /*WantResult=*/false);
37670b57cec5SDimitry Andric }
37680b57cec5SDimitry Andric break;
37690b57cec5SDimitry Andric }
37700b57cec5SDimitry Andric }
37710b57cec5SDimitry Andric
37720b57cec5SDimitry Andric if (MulReg) {
37730b57cec5SDimitry Andric ResultReg1 = createResultReg(TLI.getRegClassFor(VT));
3774bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
37750b57cec5SDimitry Andric TII.get(TargetOpcode::COPY), ResultReg1).addReg(MulReg);
37760b57cec5SDimitry Andric }
37770b57cec5SDimitry Andric
37780b57cec5SDimitry Andric if (!ResultReg1)
37790b57cec5SDimitry Andric return false;
37800b57cec5SDimitry Andric
37810b57cec5SDimitry Andric ResultReg2 = fastEmitInst_rri(AArch64::CSINCWr, &AArch64::GPR32RegClass,
3782fe6060f1SDimitry Andric AArch64::WZR, AArch64::WZR,
3783fe6060f1SDimitry Andric getInvertedCondCode(CC));
37840b57cec5SDimitry Andric (void)ResultReg2;
37850b57cec5SDimitry Andric assert((ResultReg1 + 1) == ResultReg2 &&
37860b57cec5SDimitry Andric "Nonconsecutive result registers.");
37870b57cec5SDimitry Andric updateValueMap(II, ResultReg1, 2);
37880b57cec5SDimitry Andric return true;
37890b57cec5SDimitry Andric }
379006c3fb27SDimitry Andric case Intrinsic::aarch64_crc32b:
379106c3fb27SDimitry Andric case Intrinsic::aarch64_crc32h:
379206c3fb27SDimitry Andric case Intrinsic::aarch64_crc32w:
379306c3fb27SDimitry Andric case Intrinsic::aarch64_crc32x:
379406c3fb27SDimitry Andric case Intrinsic::aarch64_crc32cb:
379506c3fb27SDimitry Andric case Intrinsic::aarch64_crc32ch:
379606c3fb27SDimitry Andric case Intrinsic::aarch64_crc32cw:
379706c3fb27SDimitry Andric case Intrinsic::aarch64_crc32cx: {
379806c3fb27SDimitry Andric if (!Subtarget->hasCRC())
379906c3fb27SDimitry Andric return false;
380006c3fb27SDimitry Andric
380106c3fb27SDimitry Andric unsigned Opc;
380206c3fb27SDimitry Andric switch (II->getIntrinsicID()) {
380306c3fb27SDimitry Andric default:
380406c3fb27SDimitry Andric llvm_unreachable("Unexpected intrinsic!");
380506c3fb27SDimitry Andric case Intrinsic::aarch64_crc32b:
380606c3fb27SDimitry Andric Opc = AArch64::CRC32Brr;
380706c3fb27SDimitry Andric break;
380806c3fb27SDimitry Andric case Intrinsic::aarch64_crc32h:
380906c3fb27SDimitry Andric Opc = AArch64::CRC32Hrr;
381006c3fb27SDimitry Andric break;
381106c3fb27SDimitry Andric case Intrinsic::aarch64_crc32w:
381206c3fb27SDimitry Andric Opc = AArch64::CRC32Wrr;
381306c3fb27SDimitry Andric break;
381406c3fb27SDimitry Andric case Intrinsic::aarch64_crc32x:
381506c3fb27SDimitry Andric Opc = AArch64::CRC32Xrr;
381606c3fb27SDimitry Andric break;
381706c3fb27SDimitry Andric case Intrinsic::aarch64_crc32cb:
381806c3fb27SDimitry Andric Opc = AArch64::CRC32CBrr;
381906c3fb27SDimitry Andric break;
382006c3fb27SDimitry Andric case Intrinsic::aarch64_crc32ch:
382106c3fb27SDimitry Andric Opc = AArch64::CRC32CHrr;
382206c3fb27SDimitry Andric break;
382306c3fb27SDimitry Andric case Intrinsic::aarch64_crc32cw:
382406c3fb27SDimitry Andric Opc = AArch64::CRC32CWrr;
382506c3fb27SDimitry Andric break;
382606c3fb27SDimitry Andric case Intrinsic::aarch64_crc32cx:
382706c3fb27SDimitry Andric Opc = AArch64::CRC32CXrr;
382806c3fb27SDimitry Andric break;
382906c3fb27SDimitry Andric }
383006c3fb27SDimitry Andric
383106c3fb27SDimitry Andric Register LHSReg = getRegForValue(II->getArgOperand(0));
383206c3fb27SDimitry Andric Register RHSReg = getRegForValue(II->getArgOperand(1));
383306c3fb27SDimitry Andric if (!LHSReg || !RHSReg)
383406c3fb27SDimitry Andric return false;
383506c3fb27SDimitry Andric
383606c3fb27SDimitry Andric Register ResultReg =
383706c3fb27SDimitry Andric fastEmitInst_rr(Opc, &AArch64::GPR32RegClass, LHSReg, RHSReg);
383806c3fb27SDimitry Andric updateValueMap(II, ResultReg);
383906c3fb27SDimitry Andric return true;
384006c3fb27SDimitry Andric }
38410b57cec5SDimitry Andric }
38420b57cec5SDimitry Andric return false;
38430b57cec5SDimitry Andric }
38440b57cec5SDimitry Andric
38450b57cec5SDimitry Andric bool AArch64FastISel::selectRet(const Instruction *I) {
38460b57cec5SDimitry Andric const ReturnInst *Ret = cast<ReturnInst>(I);
38470b57cec5SDimitry Andric const Function &F = *I->getParent()->getParent();
38480b57cec5SDimitry Andric
38490b57cec5SDimitry Andric if (!FuncInfo.CanLowerReturn)
38500b57cec5SDimitry Andric return false;
38510b57cec5SDimitry Andric
38520b57cec5SDimitry Andric if (F.isVarArg())
38530b57cec5SDimitry Andric return false;
38540b57cec5SDimitry Andric
38550b57cec5SDimitry Andric if (TLI.supportSwiftError() &&
38560b57cec5SDimitry Andric F.getAttributes().hasAttrSomewhere(Attribute::SwiftError))
38570b57cec5SDimitry Andric return false;
38580b57cec5SDimitry Andric
38590b57cec5SDimitry Andric if (TLI.supportSplitCSR(FuncInfo.MF))
38600b57cec5SDimitry Andric return false;
38610b57cec5SDimitry Andric
38620b57cec5SDimitry Andric // Build a list of return value registers.
38630b57cec5SDimitry Andric SmallVector<unsigned, 4> RetRegs;
38640b57cec5SDimitry Andric
38650b57cec5SDimitry Andric if (Ret->getNumOperands() > 0) {
38660b57cec5SDimitry Andric CallingConv::ID CC = F.getCallingConv();
38670b57cec5SDimitry Andric SmallVector<ISD::OutputArg, 4> Outs;
38680b57cec5SDimitry Andric GetReturnInfo(CC, F.getReturnType(), F.getAttributes(), Outs, TLI, DL);
38690b57cec5SDimitry Andric
38700b57cec5SDimitry Andric // Analyze operands of the call, assigning locations to each operand.
38710b57cec5SDimitry Andric SmallVector<CCValAssign, 16> ValLocs;
38720b57cec5SDimitry Andric CCState CCInfo(CC, F.isVarArg(), *FuncInfo.MF, ValLocs, I->getContext());
38735f757f3fSDimitry Andric CCInfo.AnalyzeReturn(Outs, RetCC_AArch64_AAPCS);
38740b57cec5SDimitry Andric
38750b57cec5SDimitry Andric // Only handle a single return value for now.
38760b57cec5SDimitry Andric if (ValLocs.size() != 1)
38770b57cec5SDimitry Andric return false;
38780b57cec5SDimitry Andric
38790b57cec5SDimitry Andric CCValAssign &VA = ValLocs[0];
38800b57cec5SDimitry Andric const Value *RV = Ret->getOperand(0);
38810b57cec5SDimitry Andric
38820b57cec5SDimitry Andric // Don't bother handling odd stuff for now.
38830b57cec5SDimitry Andric if ((VA.getLocInfo() != CCValAssign::Full) &&
38840b57cec5SDimitry Andric (VA.getLocInfo() != CCValAssign::BCvt))
38850b57cec5SDimitry Andric return false;
38860b57cec5SDimitry Andric
38870b57cec5SDimitry Andric // Only handle register returns for now.
38880b57cec5SDimitry Andric if (!VA.isRegLoc())
38890b57cec5SDimitry Andric return false;
38900b57cec5SDimitry Andric
389104eeddc0SDimitry Andric Register Reg = getRegForValue(RV);
38920b57cec5SDimitry Andric if (Reg == 0)
38930b57cec5SDimitry Andric return false;
38940b57cec5SDimitry Andric
38950b57cec5SDimitry Andric unsigned SrcReg = Reg + VA.getValNo();
38968bcb0991SDimitry Andric Register DestReg = VA.getLocReg();
38970b57cec5SDimitry Andric // Avoid a cross-class copy. This is very unlikely.
38980b57cec5SDimitry Andric if (!MRI.getRegClass(SrcReg)->contains(DestReg))
38990b57cec5SDimitry Andric return false;
39000b57cec5SDimitry Andric
39010b57cec5SDimitry Andric EVT RVEVT = TLI.getValueType(DL, RV->getType());
39020b57cec5SDimitry Andric if (!RVEVT.isSimple())
39030b57cec5SDimitry Andric return false;
39040b57cec5SDimitry Andric
39050b57cec5SDimitry Andric // Vectors (of > 1 lane) in big endian need tricky handling.
3906fe6060f1SDimitry Andric if (RVEVT.isVector() && RVEVT.getVectorElementCount().isVector() &&
39070b57cec5SDimitry Andric !Subtarget->isLittleEndian())
39080b57cec5SDimitry Andric return false;
39090b57cec5SDimitry Andric
39100b57cec5SDimitry Andric MVT RVVT = RVEVT.getSimpleVT();
39110b57cec5SDimitry Andric if (RVVT == MVT::f128)
39120b57cec5SDimitry Andric return false;
39130b57cec5SDimitry Andric
39140b57cec5SDimitry Andric MVT DestVT = VA.getValVT();
39150b57cec5SDimitry Andric // Special handling for extended integers.
39160b57cec5SDimitry Andric if (RVVT != DestVT) {
39170b57cec5SDimitry Andric if (RVVT != MVT::i1 && RVVT != MVT::i8 && RVVT != MVT::i16)
39180b57cec5SDimitry Andric return false;
39190b57cec5SDimitry Andric
39200b57cec5SDimitry Andric if (!Outs[0].Flags.isZExt() && !Outs[0].Flags.isSExt())
39210b57cec5SDimitry Andric return false;
39220b57cec5SDimitry Andric
39230b57cec5SDimitry Andric bool IsZExt = Outs[0].Flags.isZExt();
39240b57cec5SDimitry Andric SrcReg = emitIntExt(RVVT, SrcReg, DestVT, IsZExt);
39250b57cec5SDimitry Andric if (SrcReg == 0)
39260b57cec5SDimitry Andric return false;
39270b57cec5SDimitry Andric }
39280b57cec5SDimitry Andric
3929480093f4SDimitry Andric     // "Callee" (i.e. the value producer) zero-extends pointers at the function
3930480093f4SDimitry Andric // boundary.
3931480093f4SDimitry Andric if (Subtarget->isTargetILP32() && RV->getType()->isPointerTy())
3932fe6060f1SDimitry Andric SrcReg = emitAnd_ri(MVT::i64, SrcReg, 0xffffffff);
3933480093f4SDimitry Andric
39340b57cec5SDimitry Andric // Make the copy.
3935bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
39360b57cec5SDimitry Andric TII.get(TargetOpcode::COPY), DestReg).addReg(SrcReg);
39370b57cec5SDimitry Andric
39380b57cec5SDimitry Andric // Add register to return instruction.
39390b57cec5SDimitry Andric RetRegs.push_back(VA.getLocReg());
39400b57cec5SDimitry Andric }
39410b57cec5SDimitry Andric
3942bdd1243dSDimitry Andric MachineInstrBuilder MIB = BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
39430b57cec5SDimitry Andric TII.get(AArch64::RET_ReallyLR));
39440b57cec5SDimitry Andric for (unsigned RetReg : RetRegs)
39450b57cec5SDimitry Andric MIB.addReg(RetReg, RegState::Implicit);
39460b57cec5SDimitry Andric return true;
39470b57cec5SDimitry Andric }
39480b57cec5SDimitry Andric
39490b57cec5SDimitry Andric bool AArch64FastISel::selectTrunc(const Instruction *I) {
39500b57cec5SDimitry Andric Type *DestTy = I->getType();
39510b57cec5SDimitry Andric Value *Op = I->getOperand(0);
39520b57cec5SDimitry Andric Type *SrcTy = Op->getType();
39530b57cec5SDimitry Andric
39540b57cec5SDimitry Andric EVT SrcEVT = TLI.getValueType(DL, SrcTy, true);
39550b57cec5SDimitry Andric EVT DestEVT = TLI.getValueType(DL, DestTy, true);
39560b57cec5SDimitry Andric if (!SrcEVT.isSimple())
39570b57cec5SDimitry Andric return false;
39580b57cec5SDimitry Andric if (!DestEVT.isSimple())
39590b57cec5SDimitry Andric return false;
39600b57cec5SDimitry Andric
39610b57cec5SDimitry Andric MVT SrcVT = SrcEVT.getSimpleVT();
39620b57cec5SDimitry Andric MVT DestVT = DestEVT.getSimpleVT();
39630b57cec5SDimitry Andric
39640b57cec5SDimitry Andric if (SrcVT != MVT::i64 && SrcVT != MVT::i32 && SrcVT != MVT::i16 &&
39650b57cec5SDimitry Andric SrcVT != MVT::i8)
39660b57cec5SDimitry Andric return false;
39670b57cec5SDimitry Andric if (DestVT != MVT::i32 && DestVT != MVT::i16 && DestVT != MVT::i8 &&
39680b57cec5SDimitry Andric DestVT != MVT::i1)
39690b57cec5SDimitry Andric return false;
39700b57cec5SDimitry Andric
397104eeddc0SDimitry Andric Register SrcReg = getRegForValue(Op);
39720b57cec5SDimitry Andric if (!SrcReg)
39730b57cec5SDimitry Andric return false;
39740b57cec5SDimitry Andric
39750b57cec5SDimitry Andric // If we're truncating from i64 to a smaller non-legal type then generate an
39760b57cec5SDimitry Andric // AND. Otherwise, we know the high bits are undefined and a truncate only
39770b57cec5SDimitry Andric // generates a COPY. We cannot also mark the source register as the result
39780b57cec5SDimitry Andric // register, because this can incorrectly transfer the kill flag onto the
39790b57cec5SDimitry Andric // source register.
39800b57cec5SDimitry Andric unsigned ResultReg;
39810b57cec5SDimitry Andric if (SrcVT == MVT::i64) {
39820b57cec5SDimitry Andric uint64_t Mask = 0;
39830b57cec5SDimitry Andric switch (DestVT.SimpleTy) {
39840b57cec5SDimitry Andric default:
39850b57cec5SDimitry Andric // Trunc i64 to i32 is handled by the target-independent fast-isel.
39860b57cec5SDimitry Andric return false;
39870b57cec5SDimitry Andric case MVT::i1:
39880b57cec5SDimitry Andric Mask = 0x1;
39890b57cec5SDimitry Andric break;
39900b57cec5SDimitry Andric case MVT::i8:
39910b57cec5SDimitry Andric Mask = 0xff;
39920b57cec5SDimitry Andric break;
39930b57cec5SDimitry Andric case MVT::i16:
39940b57cec5SDimitry Andric Mask = 0xffff;
39950b57cec5SDimitry Andric break;
39960b57cec5SDimitry Andric }
39970b57cec5SDimitry Andric // Issue an extract_subreg to get the lower 32-bits.
399804eeddc0SDimitry Andric Register Reg32 = fastEmitInst_extractsubreg(MVT::i32, SrcReg,
39990b57cec5SDimitry Andric AArch64::sub_32);
40000b57cec5SDimitry Andric // Create the AND instruction which performs the actual truncation.
4001fe6060f1SDimitry Andric ResultReg = emitAnd_ri(MVT::i32, Reg32, Mask);
40020b57cec5SDimitry Andric assert(ResultReg && "Unexpected AND instruction emission failure.");
40030b57cec5SDimitry Andric } else {
40040b57cec5SDimitry Andric ResultReg = createResultReg(&AArch64::GPR32RegClass);
4005bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
40060b57cec5SDimitry Andric TII.get(TargetOpcode::COPY), ResultReg)
4007fe6060f1SDimitry Andric .addReg(SrcReg);
40080b57cec5SDimitry Andric }
40090b57cec5SDimitry Andric
40100b57cec5SDimitry Andric updateValueMap(I, ResultReg);
40110b57cec5SDimitry Andric return true;
40120b57cec5SDimitry Andric }
40130b57cec5SDimitry Andric
40140b57cec5SDimitry Andric unsigned AArch64FastISel::emiti1Ext(unsigned SrcReg, MVT DestVT, bool IsZExt) {
40150b57cec5SDimitry Andric assert((DestVT == MVT::i8 || DestVT == MVT::i16 || DestVT == MVT::i32 ||
40160b57cec5SDimitry Andric DestVT == MVT::i64) &&
40170b57cec5SDimitry Andric "Unexpected value type.");
40180b57cec5SDimitry Andric // Handle i8 and i16 as i32.
40190b57cec5SDimitry Andric if (DestVT == MVT::i8 || DestVT == MVT::i16)
40200b57cec5SDimitry Andric DestVT = MVT::i32;
40210b57cec5SDimitry Andric
40220b57cec5SDimitry Andric if (IsZExt) {
4023fe6060f1SDimitry Andric unsigned ResultReg = emitAnd_ri(MVT::i32, SrcReg, 1);
40240b57cec5SDimitry Andric assert(ResultReg && "Unexpected AND instruction emission failure.");
40250b57cec5SDimitry Andric if (DestVT == MVT::i64) {
40260b57cec5SDimitry Andric // We're zero-extending an i1 to i64. The ANDWri Wd, Ws, #1 implicitly
40270b57cec5SDimitry Andric // clears the upper 32 bits. Emit a SUBREG_TO_REG to extend from Wd to Xd.
40288bcb0991SDimitry Andric Register Reg64 = MRI.createVirtualRegister(&AArch64::GPR64RegClass);
4029bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
40300b57cec5SDimitry Andric TII.get(AArch64::SUBREG_TO_REG), Reg64)
40310b57cec5SDimitry Andric .addImm(0)
40320b57cec5SDimitry Andric .addReg(ResultReg)
40330b57cec5SDimitry Andric .addImm(AArch64::sub_32);
40340b57cec5SDimitry Andric ResultReg = Reg64;
40350b57cec5SDimitry Andric }
40360b57cec5SDimitry Andric return ResultReg;
40370b57cec5SDimitry Andric } else {
40380b57cec5SDimitry Andric if (DestVT == MVT::i64) {
40390b57cec5SDimitry Andric // FIXME: We're SExt i1 to i64.
40400b57cec5SDimitry Andric return 0;
40410b57cec5SDimitry Andric }
40420b57cec5SDimitry Andric return fastEmitInst_rii(AArch64::SBFMWri, &AArch64::GPR32RegClass, SrcReg,
4043fe6060f1SDimitry Andric 0, 0);
40440b57cec5SDimitry Andric }
40450b57cec5SDimitry Andric }
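// Illustrative sketch: zero-extending an i1 held in w0 is emitted roughly as
//   and w8, w0, #0x1            // ANDWri; also clears bits [31:1]
// followed by a SUBREG_TO_REG when the destination is i64, since the upper
// 32 bits are already zero. The signed form uses SBFMWri, i.e. the alias
// "sbfx w8, w0, #0, #1".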
40460b57cec5SDimitry Andric
4047fe6060f1SDimitry Andric unsigned AArch64FastISel::emitMul_rr(MVT RetVT, unsigned Op0, unsigned Op1) {
40480b57cec5SDimitry Andric unsigned Opc, ZReg;
40490b57cec5SDimitry Andric switch (RetVT.SimpleTy) {
40500b57cec5SDimitry Andric default: return 0;
40510b57cec5SDimitry Andric case MVT::i8:
40520b57cec5SDimitry Andric case MVT::i16:
40530b57cec5SDimitry Andric case MVT::i32:
40540b57cec5SDimitry Andric RetVT = MVT::i32;
40550b57cec5SDimitry Andric Opc = AArch64::MADDWrrr; ZReg = AArch64::WZR; break;
40560b57cec5SDimitry Andric case MVT::i64:
40570b57cec5SDimitry Andric Opc = AArch64::MADDXrrr; ZReg = AArch64::XZR; break;
40580b57cec5SDimitry Andric }
40590b57cec5SDimitry Andric
40600b57cec5SDimitry Andric const TargetRegisterClass *RC =
40610b57cec5SDimitry Andric (RetVT == MVT::i64) ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
4062fe6060f1SDimitry Andric return fastEmitInst_rrr(Opc, RC, Op0, Op1, ZReg);
40630b57cec5SDimitry Andric }
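// Illustrative sketch: a plain multiply is emitted as a multiply-add against
// the zero register, e.g. "%p = mul i32 %a, %b" becomes roughly
//   madd w0, w1, w2, wzr
// since MUL itself is only an alias of MADD.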
40640b57cec5SDimitry Andric
4065fe6060f1SDimitry Andric unsigned AArch64FastISel::emitSMULL_rr(MVT RetVT, unsigned Op0, unsigned Op1) {
40660b57cec5SDimitry Andric if (RetVT != MVT::i64)
40670b57cec5SDimitry Andric return 0;
40680b57cec5SDimitry Andric
40690b57cec5SDimitry Andric return fastEmitInst_rrr(AArch64::SMADDLrrr, &AArch64::GPR64RegClass,
4070fe6060f1SDimitry Andric Op0, Op1, AArch64::XZR);
40710b57cec5SDimitry Andric }
40720b57cec5SDimitry Andric
4073fe6060f1SDimitry Andric unsigned AArch64FastISel::emitUMULL_rr(MVT RetVT, unsigned Op0, unsigned Op1) {
40740b57cec5SDimitry Andric if (RetVT != MVT::i64)
40750b57cec5SDimitry Andric return 0;
40760b57cec5SDimitry Andric
40770b57cec5SDimitry Andric return fastEmitInst_rrr(AArch64::UMADDLrrr, &AArch64::GPR64RegClass,
4078fe6060f1SDimitry Andric Op0, Op1, AArch64::XZR);
40790b57cec5SDimitry Andric }
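// Likewise, the SMULL/UMULL helpers above use SMADDL/UMADDL with XZR as the
// accumulator; e.g. "smull x0, w1, w2" is the alias of "smaddl x0, w1, w2, xzr".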
40800b57cec5SDimitry Andric
4081fe6060f1SDimitry Andric unsigned AArch64FastISel::emitLSL_rr(MVT RetVT, unsigned Op0Reg,
4082fe6060f1SDimitry Andric unsigned Op1Reg) {
40830b57cec5SDimitry Andric unsigned Opc = 0;
40840b57cec5SDimitry Andric bool NeedTrunc = false;
40850b57cec5SDimitry Andric uint64_t Mask = 0;
40860b57cec5SDimitry Andric switch (RetVT.SimpleTy) {
40870b57cec5SDimitry Andric default: return 0;
40880b57cec5SDimitry Andric case MVT::i8: Opc = AArch64::LSLVWr; NeedTrunc = true; Mask = 0xff; break;
40890b57cec5SDimitry Andric case MVT::i16: Opc = AArch64::LSLVWr; NeedTrunc = true; Mask = 0xffff; break;
40900b57cec5SDimitry Andric case MVT::i32: Opc = AArch64::LSLVWr; break;
40910b57cec5SDimitry Andric case MVT::i64: Opc = AArch64::LSLVXr; break;
40920b57cec5SDimitry Andric }
40930b57cec5SDimitry Andric
40940b57cec5SDimitry Andric const TargetRegisterClass *RC =
40950b57cec5SDimitry Andric (RetVT == MVT::i64) ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
40960b57cec5SDimitry Andric if (NeedTrunc)
4097fe6060f1SDimitry Andric Op1Reg = emitAnd_ri(MVT::i32, Op1Reg, Mask);
4098fe6060f1SDimitry Andric
409904eeddc0SDimitry Andric Register ResultReg = fastEmitInst_rr(Opc, RC, Op0Reg, Op1Reg);
4100fe6060f1SDimitry Andric if (NeedTrunc)
4101fe6060f1SDimitry Andric ResultReg = emitAnd_ri(MVT::i32, ResultReg, Mask);
41020b57cec5SDimitry Andric return ResultReg;
41030b57cec5SDimitry Andric }
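// Illustrative sketch for the narrow cases: an i8 variable shift only keeps
// the low 8 bits of the amount and of the result, roughly
//   and  w9, w1, #0xff          // mask the shift amount
//   lslv w8, w0, w9
//   and  w8, w8, #0xff          // re-truncate the result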
41040b57cec5SDimitry Andric
41050b57cec5SDimitry Andric unsigned AArch64FastISel::emitLSL_ri(MVT RetVT, MVT SrcVT, unsigned Op0,
4106fe6060f1SDimitry Andric uint64_t Shift, bool IsZExt) {
41070b57cec5SDimitry Andric assert(RetVT.SimpleTy >= SrcVT.SimpleTy &&
41080b57cec5SDimitry Andric "Unexpected source/return type pair.");
41090b57cec5SDimitry Andric assert((SrcVT == MVT::i1 || SrcVT == MVT::i8 || SrcVT == MVT::i16 ||
41100b57cec5SDimitry Andric SrcVT == MVT::i32 || SrcVT == MVT::i64) &&
41110b57cec5SDimitry Andric "Unexpected source value type.");
41120b57cec5SDimitry Andric assert((RetVT == MVT::i8 || RetVT == MVT::i16 || RetVT == MVT::i32 ||
41130b57cec5SDimitry Andric RetVT == MVT::i64) && "Unexpected return value type.");
41140b57cec5SDimitry Andric
41150b57cec5SDimitry Andric bool Is64Bit = (RetVT == MVT::i64);
41160b57cec5SDimitry Andric unsigned RegSize = Is64Bit ? 64 : 32;
41170b57cec5SDimitry Andric unsigned DstBits = RetVT.getSizeInBits();
41180b57cec5SDimitry Andric unsigned SrcBits = SrcVT.getSizeInBits();
41190b57cec5SDimitry Andric const TargetRegisterClass *RC =
41200b57cec5SDimitry Andric Is64Bit ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
41210b57cec5SDimitry Andric
41220b57cec5SDimitry Andric // Just emit a copy for "zero" shifts.
41230b57cec5SDimitry Andric if (Shift == 0) {
41240b57cec5SDimitry Andric if (RetVT == SrcVT) {
412504eeddc0SDimitry Andric Register ResultReg = createResultReg(RC);
4126bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
41270b57cec5SDimitry Andric TII.get(TargetOpcode::COPY), ResultReg)
4128fe6060f1SDimitry Andric .addReg(Op0);
41290b57cec5SDimitry Andric return ResultReg;
41300b57cec5SDimitry Andric } else
41310b57cec5SDimitry Andric return emitIntExt(SrcVT, Op0, RetVT, IsZExt);
41320b57cec5SDimitry Andric }
41330b57cec5SDimitry Andric
41340b57cec5SDimitry Andric // Don't deal with undefined shifts.
41350b57cec5SDimitry Andric if (Shift >= DstBits)
41360b57cec5SDimitry Andric return 0;
41370b57cec5SDimitry Andric
41380b57cec5SDimitry Andric // For immediate shifts we can fold the zero-/sign-extension into the shift.
41390b57cec5SDimitry Andric // {S|U}BFM Wd, Wn, #r, #s
41400b57cec5SDimitry Andric // Wd<32+s-r,32-r> = Wn<s:0> when r > s
41410b57cec5SDimitry Andric
41420b57cec5SDimitry Andric // %1 = {s|z}ext i8 {0b1010_1010|0b0101_0101} to i16
41430b57cec5SDimitry Andric // %2 = shl i16 %1, 4
41440b57cec5SDimitry Andric // Wd<32+7-28,32-28> = Wn<7:0> <- clamp s to 7
41450b57cec5SDimitry Andric // 0b1111_1111_1111_1111__1111_1010_1010_0000 sext
41460b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0101_0101_0000 sext | zext
41470b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_1010_1010_0000 zext
41480b57cec5SDimitry Andric
41490b57cec5SDimitry Andric // %1 = {s|z}ext i8 {0b1010_1010|0b0101_0101} to i16
41500b57cec5SDimitry Andric // %2 = shl i16 %1, 8
41510b57cec5SDimitry Andric // Wd<32+7-24,32-24> = Wn<7:0>
41520b57cec5SDimitry Andric // 0b1111_1111_1111_1111__1010_1010_0000_0000 sext
41530b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0101_0101_0000_0000 sext | zext
41540b57cec5SDimitry Andric // 0b0000_0000_0000_0000__1010_1010_0000_0000 zext
41550b57cec5SDimitry Andric
41560b57cec5SDimitry Andric // %1 = {s|z}ext i8 {0b1010_1010|0b0101_0101} to i16
41570b57cec5SDimitry Andric // %2 = shl i16 %1, 12
41580b57cec5SDimitry Andric // Wd<32+3-20,32-20> = Wn<3:0>
41590b57cec5SDimitry Andric // 0b1111_1111_1111_1111__1010_0000_0000_0000 sext
41600b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0101_0000_0000_0000 sext | zext
41610b57cec5SDimitry Andric // 0b0000_0000_0000_0000__1010_0000_0000_0000 zext
41620b57cec5SDimitry Andric
41630b57cec5SDimitry Andric unsigned ImmR = RegSize - Shift;
41640b57cec5SDimitry Andric // Limit the width to the length of the source type.
41650b57cec5SDimitry Andric unsigned ImmS = std::min<unsigned>(SrcBits - 1, DstBits - 1 - Shift);
41660b57cec5SDimitry Andric static const unsigned OpcTable[2][2] = {
41670b57cec5SDimitry Andric {AArch64::SBFMWri, AArch64::SBFMXri},
41680b57cec5SDimitry Andric {AArch64::UBFMWri, AArch64::UBFMXri}
41690b57cec5SDimitry Andric };
41700b57cec5SDimitry Andric unsigned Opc = OpcTable[IsZExt][Is64Bit];
41710b57cec5SDimitry Andric if (SrcVT.SimpleTy <= MVT::i32 && RetVT == MVT::i64) {
41728bcb0991SDimitry Andric Register TmpReg = MRI.createVirtualRegister(RC);
4173bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
41740b57cec5SDimitry Andric TII.get(AArch64::SUBREG_TO_REG), TmpReg)
41750b57cec5SDimitry Andric .addImm(0)
4176fe6060f1SDimitry Andric .addReg(Op0)
41770b57cec5SDimitry Andric .addImm(AArch64::sub_32);
41780b57cec5SDimitry Andric Op0 = TmpReg;
41790b57cec5SDimitry Andric }
4180fe6060f1SDimitry Andric return fastEmitInst_rii(Opc, RC, Op0, ImmR, ImmS);
41810b57cec5SDimitry Andric }
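// Illustrative example: "%1 = zext i8 %x to i32 ; %2 = shl i32 %1, 4" folds
// into a single UBFMWri with ImmR = 32 - 4 = 28 and
// ImmS = min(8 - 1, 32 - 1 - 4) = 7, i.e. the alias "ubfiz w8, w0, #4, #8".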
41820b57cec5SDimitry Andric
4183fe6060f1SDimitry Andric unsigned AArch64FastISel::emitLSR_rr(MVT RetVT, unsigned Op0Reg,
4184fe6060f1SDimitry Andric unsigned Op1Reg) {
41850b57cec5SDimitry Andric unsigned Opc = 0;
41860b57cec5SDimitry Andric bool NeedTrunc = false;
41870b57cec5SDimitry Andric uint64_t Mask = 0;
41880b57cec5SDimitry Andric switch (RetVT.SimpleTy) {
41890b57cec5SDimitry Andric default: return 0;
41900b57cec5SDimitry Andric case MVT::i8: Opc = AArch64::LSRVWr; NeedTrunc = true; Mask = 0xff; break;
41910b57cec5SDimitry Andric case MVT::i16: Opc = AArch64::LSRVWr; NeedTrunc = true; Mask = 0xffff; break;
41920b57cec5SDimitry Andric case MVT::i32: Opc = AArch64::LSRVWr; break;
41930b57cec5SDimitry Andric case MVT::i64: Opc = AArch64::LSRVXr; break;
41940b57cec5SDimitry Andric }
41950b57cec5SDimitry Andric
41960b57cec5SDimitry Andric const TargetRegisterClass *RC =
41970b57cec5SDimitry Andric (RetVT == MVT::i64) ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
41980b57cec5SDimitry Andric if (NeedTrunc) {
4199fe6060f1SDimitry Andric Op0Reg = emitAnd_ri(MVT::i32, Op0Reg, Mask);
4200fe6060f1SDimitry Andric Op1Reg = emitAnd_ri(MVT::i32, Op1Reg, Mask);
42010b57cec5SDimitry Andric }
420204eeddc0SDimitry Andric Register ResultReg = fastEmitInst_rr(Opc, RC, Op0Reg, Op1Reg);
42030b57cec5SDimitry Andric if (NeedTrunc)
4204fe6060f1SDimitry Andric ResultReg = emitAnd_ri(MVT::i32, ResultReg, Mask);
42050b57cec5SDimitry Andric return ResultReg;
42060b57cec5SDimitry Andric }
42070b57cec5SDimitry Andric
42080b57cec5SDimitry Andric unsigned AArch64FastISel::emitLSR_ri(MVT RetVT, MVT SrcVT, unsigned Op0,
4209fe6060f1SDimitry Andric uint64_t Shift, bool IsZExt) {
42100b57cec5SDimitry Andric assert(RetVT.SimpleTy >= SrcVT.SimpleTy &&
42110b57cec5SDimitry Andric "Unexpected source/return type pair.");
42120b57cec5SDimitry Andric assert((SrcVT == MVT::i1 || SrcVT == MVT::i8 || SrcVT == MVT::i16 ||
42130b57cec5SDimitry Andric SrcVT == MVT::i32 || SrcVT == MVT::i64) &&
42140b57cec5SDimitry Andric "Unexpected source value type.");
42150b57cec5SDimitry Andric assert((RetVT == MVT::i8 || RetVT == MVT::i16 || RetVT == MVT::i32 ||
42160b57cec5SDimitry Andric RetVT == MVT::i64) && "Unexpected return value type.");
42170b57cec5SDimitry Andric
42180b57cec5SDimitry Andric bool Is64Bit = (RetVT == MVT::i64);
42190b57cec5SDimitry Andric unsigned RegSize = Is64Bit ? 64 : 32;
42200b57cec5SDimitry Andric unsigned DstBits = RetVT.getSizeInBits();
42210b57cec5SDimitry Andric unsigned SrcBits = SrcVT.getSizeInBits();
42220b57cec5SDimitry Andric const TargetRegisterClass *RC =
42230b57cec5SDimitry Andric Is64Bit ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
42240b57cec5SDimitry Andric
42250b57cec5SDimitry Andric // Just emit a copy for "zero" shifts.
42260b57cec5SDimitry Andric if (Shift == 0) {
42270b57cec5SDimitry Andric if (RetVT == SrcVT) {
422804eeddc0SDimitry Andric Register ResultReg = createResultReg(RC);
4229bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
42300b57cec5SDimitry Andric TII.get(TargetOpcode::COPY), ResultReg)
4231fe6060f1SDimitry Andric .addReg(Op0);
42320b57cec5SDimitry Andric return ResultReg;
42330b57cec5SDimitry Andric } else
42340b57cec5SDimitry Andric return emitIntExt(SrcVT, Op0, RetVT, IsZExt);
42350b57cec5SDimitry Andric }
42360b57cec5SDimitry Andric
42370b57cec5SDimitry Andric // Don't deal with undefined shifts.
42380b57cec5SDimitry Andric if (Shift >= DstBits)
42390b57cec5SDimitry Andric return 0;
42400b57cec5SDimitry Andric
42410b57cec5SDimitry Andric // For immediate shifts we can fold the zero-/sign-extension into the shift.
42420b57cec5SDimitry Andric // {S|U}BFM Wd, Wn, #r, #s
42430b57cec5SDimitry Andric // Wd<s-r:0> = Wn<s:r> when r <= s
42440b57cec5SDimitry Andric
42450b57cec5SDimitry Andric // %1 = {s|z}ext i8 {0b1010_1010|0b0101_0101} to i16
42460b57cec5SDimitry Andric // %2 = lshr i16 %1, 4
42470b57cec5SDimitry Andric // Wd<7-4:0> = Wn<7:4>
42480b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_1111_1111_1010 sext
42490b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_0101 sext | zext
42500b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_1010 zext
42510b57cec5SDimitry Andric
42520b57cec5SDimitry Andric // %1 = {s|z}ext i8 {0b1010_1010|0b0101_0101} to i16
42530b57cec5SDimitry Andric // %2 = lshr i16 %1, 8
42540b57cec5SDimitry Andric // Wd<7-7,0> = Wn<7:7>
42550b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_1111_1111 sext
42560b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_0000 sext
42570b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_0000 zext
42580b57cec5SDimitry Andric
42590b57cec5SDimitry Andric // %1 = {s|z}ext i8 {0b1010_1010|0b0101_0101} to i16
42600b57cec5SDimitry Andric // %2 = lshr i16 %1, 12
42610b57cec5SDimitry Andric // Wd<7-7,0> = Wn<7:7> <- clamp r to 7
42620b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_1111 sext
42630b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_0000 sext
42640b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_0000 zext
42650b57cec5SDimitry Andric
42660b57cec5SDimitry Andric if (Shift >= SrcBits && IsZExt)
42670b57cec5SDimitry Andric return materializeInt(ConstantInt::get(*Context, APInt(RegSize, 0)), RetVT);
42680b57cec5SDimitry Andric
42690b57cec5SDimitry Andric // It is not possible to fold a sign-extend into the LShr instruction. In this
42700b57cec5SDimitry Andric // case emit a sign-extend.
42710b57cec5SDimitry Andric if (!IsZExt) {
42720b57cec5SDimitry Andric Op0 = emitIntExt(SrcVT, Op0, RetVT, IsZExt);
42730b57cec5SDimitry Andric if (!Op0)
42740b57cec5SDimitry Andric return 0;
42750b57cec5SDimitry Andric SrcVT = RetVT;
42760b57cec5SDimitry Andric SrcBits = SrcVT.getSizeInBits();
42770b57cec5SDimitry Andric IsZExt = true;
42780b57cec5SDimitry Andric }
42790b57cec5SDimitry Andric
42800b57cec5SDimitry Andric unsigned ImmR = std::min<unsigned>(SrcBits - 1, Shift);
42810b57cec5SDimitry Andric unsigned ImmS = SrcBits - 1;
42820b57cec5SDimitry Andric static const unsigned OpcTable[2][2] = {
42830b57cec5SDimitry Andric {AArch64::SBFMWri, AArch64::SBFMXri},
42840b57cec5SDimitry Andric {AArch64::UBFMWri, AArch64::UBFMXri}
42850b57cec5SDimitry Andric };
42860b57cec5SDimitry Andric unsigned Opc = OpcTable[IsZExt][Is64Bit];
42870b57cec5SDimitry Andric if (SrcVT.SimpleTy <= MVT::i32 && RetVT == MVT::i64) {
42888bcb0991SDimitry Andric Register TmpReg = MRI.createVirtualRegister(RC);
4289bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
42900b57cec5SDimitry Andric TII.get(AArch64::SUBREG_TO_REG), TmpReg)
42910b57cec5SDimitry Andric .addImm(0)
4292fe6060f1SDimitry Andric .addReg(Op0)
42930b57cec5SDimitry Andric .addImm(AArch64::sub_32);
42940b57cec5SDimitry Andric Op0 = TmpReg;
42950b57cec5SDimitry Andric }
4296fe6060f1SDimitry Andric return fastEmitInst_rii(Opc, RC, Op0, ImmR, ImmS);
42970b57cec5SDimitry Andric }
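// Illustrative example: "%1 = zext i8 %x to i32 ; %2 = lshr i32 %1, 4" folds
// into UBFMWri with ImmR = min(8 - 1, 4) = 4 and ImmS = 8 - 1 = 7, i.e. the
// alias "ubfx w8, w0, #4, #4".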
42980b57cec5SDimitry Andric
4299fe6060f1SDimitry Andric unsigned AArch64FastISel::emitASR_rr(MVT RetVT, unsigned Op0Reg,
4300fe6060f1SDimitry Andric unsigned Op1Reg) {
43010b57cec5SDimitry Andric unsigned Opc = 0;
43020b57cec5SDimitry Andric bool NeedTrunc = false;
43030b57cec5SDimitry Andric uint64_t Mask = 0;
43040b57cec5SDimitry Andric switch (RetVT.SimpleTy) {
43050b57cec5SDimitry Andric default: return 0;
43060b57cec5SDimitry Andric case MVT::i8: Opc = AArch64::ASRVWr; NeedTrunc = true; Mask = 0xff; break;
43070b57cec5SDimitry Andric case MVT::i16: Opc = AArch64::ASRVWr; NeedTrunc = true; Mask = 0xffff; break;
43080b57cec5SDimitry Andric case MVT::i32: Opc = AArch64::ASRVWr; break;
43090b57cec5SDimitry Andric case MVT::i64: Opc = AArch64::ASRVXr; break;
43100b57cec5SDimitry Andric }
43110b57cec5SDimitry Andric
43120b57cec5SDimitry Andric const TargetRegisterClass *RC =
43130b57cec5SDimitry Andric (RetVT == MVT::i64) ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
43140b57cec5SDimitry Andric if (NeedTrunc) {
43150b57cec5SDimitry Andric Op0Reg = emitIntExt(RetVT, Op0Reg, MVT::i32, /*isZExt=*/false);
4316fe6060f1SDimitry Andric Op1Reg = emitAnd_ri(MVT::i32, Op1Reg, Mask);
43170b57cec5SDimitry Andric }
431804eeddc0SDimitry Andric Register ResultReg = fastEmitInst_rr(Opc, RC, Op0Reg, Op1Reg);
43190b57cec5SDimitry Andric if (NeedTrunc)
4320fe6060f1SDimitry Andric ResultReg = emitAnd_ri(MVT::i32, ResultReg, Mask);
43210b57cec5SDimitry Andric return ResultReg;
43220b57cec5SDimitry Andric }
43230b57cec5SDimitry Andric
43240b57cec5SDimitry Andric unsigned AArch64FastISel::emitASR_ri(MVT RetVT, MVT SrcVT, unsigned Op0,
4325fe6060f1SDimitry Andric uint64_t Shift, bool IsZExt) {
43260b57cec5SDimitry Andric assert(RetVT.SimpleTy >= SrcVT.SimpleTy &&
43270b57cec5SDimitry Andric "Unexpected source/return type pair.");
43280b57cec5SDimitry Andric assert((SrcVT == MVT::i1 || SrcVT == MVT::i8 || SrcVT == MVT::i16 ||
43290b57cec5SDimitry Andric SrcVT == MVT::i32 || SrcVT == MVT::i64) &&
43300b57cec5SDimitry Andric "Unexpected source value type.");
43310b57cec5SDimitry Andric assert((RetVT == MVT::i8 || RetVT == MVT::i16 || RetVT == MVT::i32 ||
43320b57cec5SDimitry Andric RetVT == MVT::i64) && "Unexpected return value type.");
43330b57cec5SDimitry Andric
43340b57cec5SDimitry Andric bool Is64Bit = (RetVT == MVT::i64);
43350b57cec5SDimitry Andric unsigned RegSize = Is64Bit ? 64 : 32;
43360b57cec5SDimitry Andric unsigned DstBits = RetVT.getSizeInBits();
43370b57cec5SDimitry Andric unsigned SrcBits = SrcVT.getSizeInBits();
43380b57cec5SDimitry Andric const TargetRegisterClass *RC =
43390b57cec5SDimitry Andric Is64Bit ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
43400b57cec5SDimitry Andric
43410b57cec5SDimitry Andric // Just emit a copy for "zero" shifts.
43420b57cec5SDimitry Andric if (Shift == 0) {
43430b57cec5SDimitry Andric if (RetVT == SrcVT) {
434404eeddc0SDimitry Andric Register ResultReg = createResultReg(RC);
4345bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
43460b57cec5SDimitry Andric TII.get(TargetOpcode::COPY), ResultReg)
4347fe6060f1SDimitry Andric .addReg(Op0);
43480b57cec5SDimitry Andric return ResultReg;
43490b57cec5SDimitry Andric } else
43500b57cec5SDimitry Andric return emitIntExt(SrcVT, Op0, RetVT, IsZExt);
43510b57cec5SDimitry Andric }
43520b57cec5SDimitry Andric
43530b57cec5SDimitry Andric // Don't deal with undefined shifts.
43540b57cec5SDimitry Andric if (Shift >= DstBits)
43550b57cec5SDimitry Andric return 0;
43560b57cec5SDimitry Andric
43570b57cec5SDimitry Andric // For immediate shifts we can fold the zero-/sign-extension into the shift.
43580b57cec5SDimitry Andric // {S|U}BFM Wd, Wn, #r, #s
43590b57cec5SDimitry Andric // Wd<s-r:0> = Wn<s:r> when r <= s
43600b57cec5SDimitry Andric
43610b57cec5SDimitry Andric // %1 = {s|z}ext i8 {0b1010_1010|0b0101_0101} to i16
43620b57cec5SDimitry Andric // %2 = ashr i16 %1, 4
43630b57cec5SDimitry Andric // Wd<7-4:0> = Wn<7:4>
43640b57cec5SDimitry Andric // 0b1111_1111_1111_1111__1111_1111_1111_1010 sext
43650b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_0101 sext | zext
43660b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_1010 zext
43670b57cec5SDimitry Andric
43680b57cec5SDimitry Andric // %1 = {s|z}ext i8 {0b1010_1010|0b0101_0101} to i16
43690b57cec5SDimitry Andric // %2 = ashr i16 %1, 8
43700b57cec5SDimitry Andric // Wd<7-7,0> = Wn<7:7>
43710b57cec5SDimitry Andric // 0b1111_1111_1111_1111__1111_1111_1111_1111 sext
43720b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_0000 sext
43730b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_0000 zext
43740b57cec5SDimitry Andric
43750b57cec5SDimitry Andric // %1 = {s|z}ext i8 {0b1010_1010|0b0101_0101} to i16
43760b57cec5SDimitry Andric // %2 = ashr i16 %1, 12
43770b57cec5SDimitry Andric // Wd<7-7,0> = Wn<7:7> <- clamp r to 7
43780b57cec5SDimitry Andric // 0b1111_1111_1111_1111__1111_1111_1111_1111 sext
43790b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_0000 sext
43800b57cec5SDimitry Andric // 0b0000_0000_0000_0000__0000_0000_0000_0000 zext
43810b57cec5SDimitry Andric
43820b57cec5SDimitry Andric if (Shift >= SrcBits && IsZExt)
43830b57cec5SDimitry Andric return materializeInt(ConstantInt::get(*Context, APInt(RegSize, 0)), RetVT);
43840b57cec5SDimitry Andric
43850b57cec5SDimitry Andric unsigned ImmR = std::min<unsigned>(SrcBits - 1, Shift);
43860b57cec5SDimitry Andric unsigned ImmS = SrcBits - 1;
43870b57cec5SDimitry Andric static const unsigned OpcTable[2][2] = {
43880b57cec5SDimitry Andric {AArch64::SBFMWri, AArch64::SBFMXri},
43890b57cec5SDimitry Andric {AArch64::UBFMWri, AArch64::UBFMXri}
43900b57cec5SDimitry Andric };
43910b57cec5SDimitry Andric unsigned Opc = OpcTable[IsZExt][Is64Bit];
43920b57cec5SDimitry Andric if (SrcVT.SimpleTy <= MVT::i32 && RetVT == MVT::i64) {
43938bcb0991SDimitry Andric Register TmpReg = MRI.createVirtualRegister(RC);
4394bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
43950b57cec5SDimitry Andric TII.get(AArch64::SUBREG_TO_REG), TmpReg)
43960b57cec5SDimitry Andric .addImm(0)
4397fe6060f1SDimitry Andric .addReg(Op0)
43980b57cec5SDimitry Andric .addImm(AArch64::sub_32);
43990b57cec5SDimitry Andric Op0 = TmpReg;
44000b57cec5SDimitry Andric }
4401fe6060f1SDimitry Andric return fastEmitInst_rii(Opc, RC, Op0, ImmR, ImmS);
44020b57cec5SDimitry Andric }
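// Illustrative example: "%1 = sext i8 %x to i32 ; %2 = ashr i32 %1, 4" folds
// into SBFMWri with ImmR = 4 and ImmS = 7, i.e. the alias "sbfx w8, w0, #4, #4".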
44030b57cec5SDimitry Andric
44040b57cec5SDimitry Andric unsigned AArch64FastISel::emitIntExt(MVT SrcVT, unsigned SrcReg, MVT DestVT,
44050b57cec5SDimitry Andric bool IsZExt) {
44060b57cec5SDimitry Andric assert(DestVT != MVT::i1 && "ZeroExt/SignExt an i1?");
44070b57cec5SDimitry Andric
44080b57cec5SDimitry Andric // FastISel does not have plumbing to deal with extensions where the SrcVT or
44090b57cec5SDimitry Andric // DestVT are odd things, so test to make sure that they are both types we can
44100b57cec5SDimitry Andric // handle (i1/i8/i16/i32 for SrcVT and i8/i16/i32/i64 for DestVT), otherwise
44110b57cec5SDimitry Andric // bail out to SelectionDAG.
44120b57cec5SDimitry Andric if (((DestVT != MVT::i8) && (DestVT != MVT::i16) &&
44130b57cec5SDimitry Andric (DestVT != MVT::i32) && (DestVT != MVT::i64)) ||
44140b57cec5SDimitry Andric ((SrcVT != MVT::i1) && (SrcVT != MVT::i8) &&
44150b57cec5SDimitry Andric (SrcVT != MVT::i16) && (SrcVT != MVT::i32)))
44160b57cec5SDimitry Andric return 0;
44170b57cec5SDimitry Andric
44180b57cec5SDimitry Andric unsigned Opc;
44190b57cec5SDimitry Andric unsigned Imm = 0;
44200b57cec5SDimitry Andric
44210b57cec5SDimitry Andric switch (SrcVT.SimpleTy) {
44220b57cec5SDimitry Andric default:
44230b57cec5SDimitry Andric return 0;
44240b57cec5SDimitry Andric case MVT::i1:
44250b57cec5SDimitry Andric return emiti1Ext(SrcReg, DestVT, IsZExt);
44260b57cec5SDimitry Andric case MVT::i8:
44270b57cec5SDimitry Andric if (DestVT == MVT::i64)
44280b57cec5SDimitry Andric Opc = IsZExt ? AArch64::UBFMXri : AArch64::SBFMXri;
44290b57cec5SDimitry Andric else
44300b57cec5SDimitry Andric Opc = IsZExt ? AArch64::UBFMWri : AArch64::SBFMWri;
44310b57cec5SDimitry Andric Imm = 7;
44320b57cec5SDimitry Andric break;
44330b57cec5SDimitry Andric case MVT::i16:
44340b57cec5SDimitry Andric if (DestVT == MVT::i64)
44350b57cec5SDimitry Andric Opc = IsZExt ? AArch64::UBFMXri : AArch64::SBFMXri;
44360b57cec5SDimitry Andric else
44370b57cec5SDimitry Andric Opc = IsZExt ? AArch64::UBFMWri : AArch64::SBFMWri;
44380b57cec5SDimitry Andric Imm = 15;
44390b57cec5SDimitry Andric break;
44400b57cec5SDimitry Andric case MVT::i32:
44410b57cec5SDimitry Andric assert(DestVT == MVT::i64 && "IntExt i32 to i32?!?");
44420b57cec5SDimitry Andric Opc = IsZExt ? AArch64::UBFMXri : AArch64::SBFMXri;
44430b57cec5SDimitry Andric Imm = 31;
44440b57cec5SDimitry Andric break;
44450b57cec5SDimitry Andric }
44460b57cec5SDimitry Andric
44470b57cec5SDimitry Andric // Handle i8 and i16 as i32.
44480b57cec5SDimitry Andric if (DestVT == MVT::i8 || DestVT == MVT::i16)
44490b57cec5SDimitry Andric DestVT = MVT::i32;
44500b57cec5SDimitry Andric else if (DestVT == MVT::i64) {
44518bcb0991SDimitry Andric Register Src64 = MRI.createVirtualRegister(&AArch64::GPR64RegClass);
4452bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
44530b57cec5SDimitry Andric TII.get(AArch64::SUBREG_TO_REG), Src64)
44540b57cec5SDimitry Andric .addImm(0)
44550b57cec5SDimitry Andric .addReg(SrcReg)
44560b57cec5SDimitry Andric .addImm(AArch64::sub_32);
44570b57cec5SDimitry Andric SrcReg = Src64;
44580b57cec5SDimitry Andric }
44590b57cec5SDimitry Andric
44600b57cec5SDimitry Andric const TargetRegisterClass *RC =
44610b57cec5SDimitry Andric (DestVT == MVT::i64) ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
4462fe6060f1SDimitry Andric return fastEmitInst_rii(Opc, RC, SrcReg, 0, Imm);
44630b57cec5SDimitry Andric }
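// Illustrative example: a zero-extend of i16 to i32 becomes UBFMWri with
// ImmR = 0 and ImmS = 15 (the "uxth" alias). When the destination is i64 the
// source is first widened with SUBREG_TO_REG and the 64-bit UBFMXri form is
// used instead.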
44640b57cec5SDimitry Andric
44650b57cec5SDimitry Andric static bool isZExtLoad(const MachineInstr *LI) {
44660b57cec5SDimitry Andric switch (LI->getOpcode()) {
44670b57cec5SDimitry Andric default:
44680b57cec5SDimitry Andric return false;
44690b57cec5SDimitry Andric case AArch64::LDURBBi:
44700b57cec5SDimitry Andric case AArch64::LDURHHi:
44710b57cec5SDimitry Andric case AArch64::LDURWi:
44720b57cec5SDimitry Andric case AArch64::LDRBBui:
44730b57cec5SDimitry Andric case AArch64::LDRHHui:
44740b57cec5SDimitry Andric case AArch64::LDRWui:
44750b57cec5SDimitry Andric case AArch64::LDRBBroX:
44760b57cec5SDimitry Andric case AArch64::LDRHHroX:
44770b57cec5SDimitry Andric case AArch64::LDRWroX:
44780b57cec5SDimitry Andric case AArch64::LDRBBroW:
44790b57cec5SDimitry Andric case AArch64::LDRHHroW:
44800b57cec5SDimitry Andric case AArch64::LDRWroW:
44810b57cec5SDimitry Andric return true;
44820b57cec5SDimitry Andric }
44830b57cec5SDimitry Andric }
44840b57cec5SDimitry Andric
44850b57cec5SDimitry Andric static bool isSExtLoad(const MachineInstr *LI) {
44860b57cec5SDimitry Andric switch (LI->getOpcode()) {
44870b57cec5SDimitry Andric default:
44880b57cec5SDimitry Andric return false;
44890b57cec5SDimitry Andric case AArch64::LDURSBWi:
44900b57cec5SDimitry Andric case AArch64::LDURSHWi:
44910b57cec5SDimitry Andric case AArch64::LDURSBXi:
44920b57cec5SDimitry Andric case AArch64::LDURSHXi:
44930b57cec5SDimitry Andric case AArch64::LDURSWi:
44940b57cec5SDimitry Andric case AArch64::LDRSBWui:
44950b57cec5SDimitry Andric case AArch64::LDRSHWui:
44960b57cec5SDimitry Andric case AArch64::LDRSBXui:
44970b57cec5SDimitry Andric case AArch64::LDRSHXui:
44980b57cec5SDimitry Andric case AArch64::LDRSWui:
44990b57cec5SDimitry Andric case AArch64::LDRSBWroX:
45000b57cec5SDimitry Andric case AArch64::LDRSHWroX:
45010b57cec5SDimitry Andric case AArch64::LDRSBXroX:
45020b57cec5SDimitry Andric case AArch64::LDRSHXroX:
45030b57cec5SDimitry Andric case AArch64::LDRSWroX:
45040b57cec5SDimitry Andric case AArch64::LDRSBWroW:
45050b57cec5SDimitry Andric case AArch64::LDRSHWroW:
45060b57cec5SDimitry Andric case AArch64::LDRSBXroW:
45070b57cec5SDimitry Andric case AArch64::LDRSHXroW:
45080b57cec5SDimitry Andric case AArch64::LDRSWroW:
45090b57cec5SDimitry Andric return true;
45100b57cec5SDimitry Andric }
45110b57cec5SDimitry Andric }
45120b57cec5SDimitry Andric
45130b57cec5SDimitry Andric bool AArch64FastISel::optimizeIntExtLoad(const Instruction *I, MVT RetVT,
45140b57cec5SDimitry Andric MVT SrcVT) {
45150b57cec5SDimitry Andric const auto *LI = dyn_cast<LoadInst>(I->getOperand(0));
45160b57cec5SDimitry Andric if (!LI || !LI->hasOneUse())
45170b57cec5SDimitry Andric return false;
45180b57cec5SDimitry Andric
45190b57cec5SDimitry Andric // Check if the load instruction has already been selected.
452004eeddc0SDimitry Andric Register Reg = lookUpRegForValue(LI);
45210b57cec5SDimitry Andric if (!Reg)
45220b57cec5SDimitry Andric return false;
45230b57cec5SDimitry Andric
45240b57cec5SDimitry Andric MachineInstr *MI = MRI.getUniqueVRegDef(Reg);
45250b57cec5SDimitry Andric if (!MI)
45260b57cec5SDimitry Andric return false;
45270b57cec5SDimitry Andric
45280b57cec5SDimitry Andric // Check if the correct load instruction has been emitted - SelectionDAG might
45290b57cec5SDimitry Andric // have emitted a zero-extending load, but we need a sign-extending load.
45300b57cec5SDimitry Andric bool IsZExt = isa<ZExtInst>(I);
45310b57cec5SDimitry Andric const auto *LoadMI = MI;
45320b57cec5SDimitry Andric if (LoadMI->getOpcode() == TargetOpcode::COPY &&
45330b57cec5SDimitry Andric LoadMI->getOperand(1).getSubReg() == AArch64::sub_32) {
45348bcb0991SDimitry Andric Register LoadReg = MI->getOperand(1).getReg();
45350b57cec5SDimitry Andric LoadMI = MRI.getUniqueVRegDef(LoadReg);
45360b57cec5SDimitry Andric assert(LoadMI && "Expected valid instruction");
45370b57cec5SDimitry Andric }
45380b57cec5SDimitry Andric if (!(IsZExt && isZExtLoad(LoadMI)) && !(!IsZExt && isSExtLoad(LoadMI)))
45390b57cec5SDimitry Andric return false;
45400b57cec5SDimitry Andric
45410b57cec5SDimitry Andric // Nothing to be done.
45420b57cec5SDimitry Andric if (RetVT != MVT::i64 || SrcVT > MVT::i32) {
45430b57cec5SDimitry Andric updateValueMap(I, Reg);
45440b57cec5SDimitry Andric return true;
45450b57cec5SDimitry Andric }
45460b57cec5SDimitry Andric
45470b57cec5SDimitry Andric if (IsZExt) {
454804eeddc0SDimitry Andric Register Reg64 = createResultReg(&AArch64::GPR64RegClass);
4549bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
45500b57cec5SDimitry Andric TII.get(AArch64::SUBREG_TO_REG), Reg64)
45510b57cec5SDimitry Andric .addImm(0)
45520b57cec5SDimitry Andric .addReg(Reg, getKillRegState(true))
45530b57cec5SDimitry Andric .addImm(AArch64::sub_32);
45540b57cec5SDimitry Andric Reg = Reg64;
45550b57cec5SDimitry Andric } else {
45560b57cec5SDimitry Andric assert((MI->getOpcode() == TargetOpcode::COPY &&
45570b57cec5SDimitry Andric MI->getOperand(1).getSubReg() == AArch64::sub_32) &&
45580b57cec5SDimitry Andric "Expected copy instruction");
45590b57cec5SDimitry Andric Reg = MI->getOperand(1).getReg();
45600b57cec5SDimitry Andric MachineBasicBlock::iterator I(MI);
45610b57cec5SDimitry Andric removeDeadCode(I, std::next(I));
45620b57cec5SDimitry Andric }
45630b57cec5SDimitry Andric updateValueMap(I, Reg);
45640b57cec5SDimitry Andric return true;
45650b57cec5SDimitry Andric }
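// Illustrative example: for "%v = load i32, ptr %p ; %e = zext i32 %v to i64"
// the LDRWui already zeroes the upper 32 bits, so the extend above is folded
// into a SUBREG_TO_REG of the loaded W register instead of emitting a UBFM.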
45660b57cec5SDimitry Andric
45670b57cec5SDimitry Andric bool AArch64FastISel::selectIntExt(const Instruction *I) {
45680b57cec5SDimitry Andric assert((isa<ZExtInst>(I) || isa<SExtInst>(I)) &&
45690b57cec5SDimitry Andric "Unexpected integer extend instruction.");
45700b57cec5SDimitry Andric MVT RetVT;
45710b57cec5SDimitry Andric MVT SrcVT;
45720b57cec5SDimitry Andric if (!isTypeSupported(I->getType(), RetVT))
45730b57cec5SDimitry Andric return false;
45740b57cec5SDimitry Andric
45750b57cec5SDimitry Andric if (!isTypeSupported(I->getOperand(0)->getType(), SrcVT))
45760b57cec5SDimitry Andric return false;
45770b57cec5SDimitry Andric
45780b57cec5SDimitry Andric // Try to optimize already sign-/zero-extended values from load instructions.
45790b57cec5SDimitry Andric if (optimizeIntExtLoad(I, RetVT, SrcVT))
45800b57cec5SDimitry Andric return true;
45810b57cec5SDimitry Andric
458204eeddc0SDimitry Andric Register SrcReg = getRegForValue(I->getOperand(0));
45830b57cec5SDimitry Andric if (!SrcReg)
45840b57cec5SDimitry Andric return false;
45850b57cec5SDimitry Andric
45860b57cec5SDimitry Andric // Try to optimize already sign-/zero-extended values from function arguments.
45870b57cec5SDimitry Andric bool IsZExt = isa<ZExtInst>(I);
45880b57cec5SDimitry Andric if (const auto *Arg = dyn_cast<Argument>(I->getOperand(0))) {
45890b57cec5SDimitry Andric if ((IsZExt && Arg->hasZExtAttr()) || (!IsZExt && Arg->hasSExtAttr())) {
45900b57cec5SDimitry Andric if (RetVT == MVT::i64 && SrcVT != MVT::i64) {
459104eeddc0SDimitry Andric Register ResultReg = createResultReg(&AArch64::GPR64RegClass);
4592bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD,
45930b57cec5SDimitry Andric TII.get(AArch64::SUBREG_TO_REG), ResultReg)
45940b57cec5SDimitry Andric .addImm(0)
4595fe6060f1SDimitry Andric .addReg(SrcReg)
45960b57cec5SDimitry Andric .addImm(AArch64::sub_32);
45970b57cec5SDimitry Andric SrcReg = ResultReg;
45980b57cec5SDimitry Andric }
45990b57cec5SDimitry Andric
46000b57cec5SDimitry Andric updateValueMap(I, SrcReg);
46010b57cec5SDimitry Andric return true;
46020b57cec5SDimitry Andric }
46030b57cec5SDimitry Andric }
46040b57cec5SDimitry Andric
46050b57cec5SDimitry Andric unsigned ResultReg = emitIntExt(SrcVT, SrcReg, RetVT, IsZExt);
46060b57cec5SDimitry Andric if (!ResultReg)
46070b57cec5SDimitry Andric return false;
46080b57cec5SDimitry Andric
46090b57cec5SDimitry Andric updateValueMap(I, ResultReg);
46100b57cec5SDimitry Andric return true;
46110b57cec5SDimitry Andric }
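// Illustrative example: "%e = zext i32 %arg to i64" where %arg already carries
// the zeroext attribute needs no UBFM; the W register is simply wrapped in a
// SUBREG_TO_REG to form the X register.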
46120b57cec5SDimitry Andric
46130b57cec5SDimitry Andric bool AArch64FastISel::selectRem(const Instruction *I, unsigned ISDOpcode) {
46140b57cec5SDimitry Andric EVT DestEVT = TLI.getValueType(DL, I->getType(), true);
46150b57cec5SDimitry Andric if (!DestEVT.isSimple())
46160b57cec5SDimitry Andric return false;
46170b57cec5SDimitry Andric
46180b57cec5SDimitry Andric MVT DestVT = DestEVT.getSimpleVT();
46190b57cec5SDimitry Andric if (DestVT != MVT::i64 && DestVT != MVT::i32)
46200b57cec5SDimitry Andric return false;
46210b57cec5SDimitry Andric
46220b57cec5SDimitry Andric unsigned DivOpc;
46230b57cec5SDimitry Andric bool Is64bit = (DestVT == MVT::i64);
46240b57cec5SDimitry Andric switch (ISDOpcode) {
46250b57cec5SDimitry Andric default:
46260b57cec5SDimitry Andric return false;
46270b57cec5SDimitry Andric case ISD::SREM:
46280b57cec5SDimitry Andric DivOpc = Is64bit ? AArch64::SDIVXr : AArch64::SDIVWr;
46290b57cec5SDimitry Andric break;
46300b57cec5SDimitry Andric case ISD::UREM:
46310b57cec5SDimitry Andric DivOpc = Is64bit ? AArch64::UDIVXr : AArch64::UDIVWr;
46320b57cec5SDimitry Andric break;
46330b57cec5SDimitry Andric }
46340b57cec5SDimitry Andric unsigned MSubOpc = Is64bit ? AArch64::MSUBXrrr : AArch64::MSUBWrrr;
463504eeddc0SDimitry Andric Register Src0Reg = getRegForValue(I->getOperand(0));
46360b57cec5SDimitry Andric if (!Src0Reg)
46370b57cec5SDimitry Andric return false;
46380b57cec5SDimitry Andric
463904eeddc0SDimitry Andric Register Src1Reg = getRegForValue(I->getOperand(1));
46400b57cec5SDimitry Andric if (!Src1Reg)
46410b57cec5SDimitry Andric return false;
46420b57cec5SDimitry Andric
46430b57cec5SDimitry Andric const TargetRegisterClass *RC =
46440b57cec5SDimitry Andric (DestVT == MVT::i64) ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
464504eeddc0SDimitry Andric Register QuotReg = fastEmitInst_rr(DivOpc, RC, Src0Reg, Src1Reg);
46460b57cec5SDimitry Andric assert(QuotReg && "Unexpected DIV instruction emission failure.");
46470b57cec5SDimitry Andric // The remainder is computed as numerator - (quotient * denominator) using the
46480b57cec5SDimitry Andric // MSUB instruction.
464904eeddc0SDimitry Andric Register ResultReg = fastEmitInst_rrr(MSubOpc, RC, QuotReg, Src1Reg, Src0Reg);
46500b57cec5SDimitry Andric updateValueMap(I, ResultReg);
46510b57cec5SDimitry Andric return true;
46520b57cec5SDimitry Andric }
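// Illustrative sketch: "%r = srem i32 %a, %b" is selected roughly as
//   sdiv w8, w0, w1
//   msub w0, w8, w1, w0         // r = a - (a / b) * b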
46530b57cec5SDimitry Andric
46540b57cec5SDimitry Andric bool AArch64FastISel::selectMul(const Instruction *I) {
46550b57cec5SDimitry Andric MVT VT;
46560b57cec5SDimitry Andric if (!isTypeSupported(I->getType(), VT, /*IsVectorAllowed=*/true))
46570b57cec5SDimitry Andric return false;
46580b57cec5SDimitry Andric
46590b57cec5SDimitry Andric if (VT.isVector())
46600b57cec5SDimitry Andric return selectBinaryOp(I, ISD::MUL);
46610b57cec5SDimitry Andric
46620b57cec5SDimitry Andric const Value *Src0 = I->getOperand(0);
46630b57cec5SDimitry Andric const Value *Src1 = I->getOperand(1);
46640b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(Src0))
46650b57cec5SDimitry Andric if (C->getValue().isPowerOf2())
46660b57cec5SDimitry Andric std::swap(Src0, Src1);
46670b57cec5SDimitry Andric
46680b57cec5SDimitry Andric // Try to simplify to a shift instruction.
46690b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(Src1))
46700b57cec5SDimitry Andric if (C->getValue().isPowerOf2()) {
46710b57cec5SDimitry Andric uint64_t ShiftVal = C->getValue().logBase2();
46720b57cec5SDimitry Andric MVT SrcVT = VT;
46730b57cec5SDimitry Andric bool IsZExt = true;
46740b57cec5SDimitry Andric if (const auto *ZExt = dyn_cast<ZExtInst>(Src0)) {
46750b57cec5SDimitry Andric if (!isIntExtFree(ZExt)) {
46760b57cec5SDimitry Andric MVT VT;
46770b57cec5SDimitry Andric if (isValueAvailable(ZExt) && isTypeSupported(ZExt->getSrcTy(), VT)) {
46780b57cec5SDimitry Andric SrcVT = VT;
46790b57cec5SDimitry Andric IsZExt = true;
46800b57cec5SDimitry Andric Src0 = ZExt->getOperand(0);
46810b57cec5SDimitry Andric }
46820b57cec5SDimitry Andric }
46830b57cec5SDimitry Andric } else if (const auto *SExt = dyn_cast<SExtInst>(Src0)) {
46840b57cec5SDimitry Andric if (!isIntExtFree(SExt)) {
46850b57cec5SDimitry Andric MVT VT;
46860b57cec5SDimitry Andric if (isValueAvailable(SExt) && isTypeSupported(SExt->getSrcTy(), VT)) {
46870b57cec5SDimitry Andric SrcVT = VT;
46880b57cec5SDimitry Andric IsZExt = false;
46890b57cec5SDimitry Andric Src0 = SExt->getOperand(0);
46900b57cec5SDimitry Andric }
46910b57cec5SDimitry Andric }
46920b57cec5SDimitry Andric }
46930b57cec5SDimitry Andric
469404eeddc0SDimitry Andric Register Src0Reg = getRegForValue(Src0);
46950b57cec5SDimitry Andric if (!Src0Reg)
46960b57cec5SDimitry Andric return false;
46970b57cec5SDimitry Andric
46980b57cec5SDimitry Andric unsigned ResultReg =
4699fe6060f1SDimitry Andric emitLSL_ri(VT, SrcVT, Src0Reg, ShiftVal, IsZExt);
47000b57cec5SDimitry Andric
47010b57cec5SDimitry Andric if (ResultReg) {
47020b57cec5SDimitry Andric updateValueMap(I, ResultReg);
47030b57cec5SDimitry Andric return true;
47040b57cec5SDimitry Andric }
47050b57cec5SDimitry Andric }
47060b57cec5SDimitry Andric
470704eeddc0SDimitry Andric Register Src0Reg = getRegForValue(I->getOperand(0));
47080b57cec5SDimitry Andric if (!Src0Reg)
47090b57cec5SDimitry Andric return false;
47100b57cec5SDimitry Andric
471104eeddc0SDimitry Andric Register Src1Reg = getRegForValue(I->getOperand(1));
47120b57cec5SDimitry Andric if (!Src1Reg)
47130b57cec5SDimitry Andric return false;
47140b57cec5SDimitry Andric
4715fe6060f1SDimitry Andric unsigned ResultReg = emitMul_rr(VT, Src0Reg, Src1Reg);
47160b57cec5SDimitry Andric
47170b57cec5SDimitry Andric if (!ResultReg)
47180b57cec5SDimitry Andric return false;
47190b57cec5SDimitry Andric
47200b57cec5SDimitry Andric updateValueMap(I, ResultReg);
47210b57cec5SDimitry Andric return true;
47220b57cec5SDimitry Andric }
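// Illustrative example: "%p = mul i32 %x, 8" is strength-reduced above to a
// left shift (emitLSL_ri with Shift = 3), rather than a MADD against WZR.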
47230b57cec5SDimitry Andric
47240b57cec5SDimitry Andric bool AArch64FastISel::selectShift(const Instruction *I) {
47250b57cec5SDimitry Andric MVT RetVT;
47260b57cec5SDimitry Andric if (!isTypeSupported(I->getType(), RetVT, /*IsVectorAllowed=*/true))
47270b57cec5SDimitry Andric return false;
47280b57cec5SDimitry Andric
47290b57cec5SDimitry Andric if (RetVT.isVector())
47300b57cec5SDimitry Andric return selectOperator(I, I->getOpcode());
47310b57cec5SDimitry Andric
47320b57cec5SDimitry Andric if (const auto *C = dyn_cast<ConstantInt>(I->getOperand(1))) {
47330b57cec5SDimitry Andric unsigned ResultReg = 0;
47340b57cec5SDimitry Andric uint64_t ShiftVal = C->getZExtValue();
47350b57cec5SDimitry Andric MVT SrcVT = RetVT;
47360b57cec5SDimitry Andric bool IsZExt = I->getOpcode() != Instruction::AShr;
47370b57cec5SDimitry Andric const Value *Op0 = I->getOperand(0);
47380b57cec5SDimitry Andric if (const auto *ZExt = dyn_cast<ZExtInst>(Op0)) {
47390b57cec5SDimitry Andric if (!isIntExtFree(ZExt)) {
47400b57cec5SDimitry Andric MVT TmpVT;
47410b57cec5SDimitry Andric if (isValueAvailable(ZExt) && isTypeSupported(ZExt->getSrcTy(), TmpVT)) {
47420b57cec5SDimitry Andric SrcVT = TmpVT;
47430b57cec5SDimitry Andric IsZExt = true;
47440b57cec5SDimitry Andric Op0 = ZExt->getOperand(0);
47450b57cec5SDimitry Andric }
47460b57cec5SDimitry Andric }
47470b57cec5SDimitry Andric } else if (const auto *SExt = dyn_cast<SExtInst>(Op0)) {
47480b57cec5SDimitry Andric if (!isIntExtFree(SExt)) {
47490b57cec5SDimitry Andric MVT TmpVT;
47500b57cec5SDimitry Andric if (isValueAvailable(SExt) && isTypeSupported(SExt->getSrcTy(), TmpVT)) {
47510b57cec5SDimitry Andric SrcVT = TmpVT;
47520b57cec5SDimitry Andric IsZExt = false;
47530b57cec5SDimitry Andric Op0 = SExt->getOperand(0);
47540b57cec5SDimitry Andric }
47550b57cec5SDimitry Andric }
47560b57cec5SDimitry Andric }
47570b57cec5SDimitry Andric
475804eeddc0SDimitry Andric Register Op0Reg = getRegForValue(Op0);
47590b57cec5SDimitry Andric if (!Op0Reg)
47600b57cec5SDimitry Andric return false;
47610b57cec5SDimitry Andric
47620b57cec5SDimitry Andric switch (I->getOpcode()) {
47630b57cec5SDimitry Andric default: llvm_unreachable("Unexpected instruction.");
47640b57cec5SDimitry Andric case Instruction::Shl:
4765fe6060f1SDimitry Andric ResultReg = emitLSL_ri(RetVT, SrcVT, Op0Reg, ShiftVal, IsZExt);
47660b57cec5SDimitry Andric break;
47670b57cec5SDimitry Andric case Instruction::AShr:
4768fe6060f1SDimitry Andric ResultReg = emitASR_ri(RetVT, SrcVT, Op0Reg, ShiftVal, IsZExt);
47690b57cec5SDimitry Andric break;
47700b57cec5SDimitry Andric case Instruction::LShr:
4771fe6060f1SDimitry Andric ResultReg = emitLSR_ri(RetVT, SrcVT, Op0Reg, ShiftVal, IsZExt);
47720b57cec5SDimitry Andric break;
47730b57cec5SDimitry Andric }
47740b57cec5SDimitry Andric if (!ResultReg)
47750b57cec5SDimitry Andric return false;
47760b57cec5SDimitry Andric
47770b57cec5SDimitry Andric updateValueMap(I, ResultReg);
47780b57cec5SDimitry Andric return true;
47790b57cec5SDimitry Andric }
47800b57cec5SDimitry Andric
478104eeddc0SDimitry Andric Register Op0Reg = getRegForValue(I->getOperand(0));
47820b57cec5SDimitry Andric if (!Op0Reg)
47830b57cec5SDimitry Andric return false;
47840b57cec5SDimitry Andric
478504eeddc0SDimitry Andric Register Op1Reg = getRegForValue(I->getOperand(1));
47860b57cec5SDimitry Andric if (!Op1Reg)
47870b57cec5SDimitry Andric return false;
47880b57cec5SDimitry Andric
47890b57cec5SDimitry Andric unsigned ResultReg = 0;
47900b57cec5SDimitry Andric switch (I->getOpcode()) {
47910b57cec5SDimitry Andric default: llvm_unreachable("Unexpected instruction.");
47920b57cec5SDimitry Andric case Instruction::Shl:
4793fe6060f1SDimitry Andric ResultReg = emitLSL_rr(RetVT, Op0Reg, Op1Reg);
47940b57cec5SDimitry Andric break;
47950b57cec5SDimitry Andric case Instruction::AShr:
4796fe6060f1SDimitry Andric ResultReg = emitASR_rr(RetVT, Op0Reg, Op1Reg);
47970b57cec5SDimitry Andric break;
47980b57cec5SDimitry Andric case Instruction::LShr:
4799fe6060f1SDimitry Andric ResultReg = emitLSR_rr(RetVT, Op0Reg, Op1Reg);
48000b57cec5SDimitry Andric break;
48010b57cec5SDimitry Andric }
48020b57cec5SDimitry Andric
48030b57cec5SDimitry Andric if (!ResultReg)
48040b57cec5SDimitry Andric return false;
48050b57cec5SDimitry Andric
48060b57cec5SDimitry Andric updateValueMap(I, ResultReg);
48070b57cec5SDimitry Andric return true;
48080b57cec5SDimitry Andric }
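// Illustrative example: "shl i32 %x, %n" with a non-constant %n takes the
// register path above and emits LSLVWr, while a constant %n goes through the
// immediate path and becomes a single UBFM-based shift.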
48090b57cec5SDimitry Andric
48100b57cec5SDimitry Andric bool AArch64FastISel::selectBitCast(const Instruction *I) {
48110b57cec5SDimitry Andric MVT RetVT, SrcVT;
48120b57cec5SDimitry Andric
48130b57cec5SDimitry Andric if (!isTypeLegal(I->getOperand(0)->getType(), SrcVT))
48140b57cec5SDimitry Andric return false;
48150b57cec5SDimitry Andric if (!isTypeLegal(I->getType(), RetVT))
48160b57cec5SDimitry Andric return false;
48170b57cec5SDimitry Andric
48180b57cec5SDimitry Andric unsigned Opc;
48190b57cec5SDimitry Andric if (RetVT == MVT::f32 && SrcVT == MVT::i32)
48200b57cec5SDimitry Andric Opc = AArch64::FMOVWSr;
48210b57cec5SDimitry Andric else if (RetVT == MVT::f64 && SrcVT == MVT::i64)
48220b57cec5SDimitry Andric Opc = AArch64::FMOVXDr;
48230b57cec5SDimitry Andric else if (RetVT == MVT::i32 && SrcVT == MVT::f32)
48240b57cec5SDimitry Andric Opc = AArch64::FMOVSWr;
48250b57cec5SDimitry Andric else if (RetVT == MVT::i64 && SrcVT == MVT::f64)
48260b57cec5SDimitry Andric Opc = AArch64::FMOVDXr;
48270b57cec5SDimitry Andric else
48280b57cec5SDimitry Andric return false;
48290b57cec5SDimitry Andric
48300b57cec5SDimitry Andric const TargetRegisterClass *RC = nullptr;
48310b57cec5SDimitry Andric switch (RetVT.SimpleTy) {
48320b57cec5SDimitry Andric default: llvm_unreachable("Unexpected value type.");
48330b57cec5SDimitry Andric case MVT::i32: RC = &AArch64::GPR32RegClass; break;
48340b57cec5SDimitry Andric case MVT::i64: RC = &AArch64::GPR64RegClass; break;
48350b57cec5SDimitry Andric case MVT::f32: RC = &AArch64::FPR32RegClass; break;
48360b57cec5SDimitry Andric case MVT::f64: RC = &AArch64::FPR64RegClass; break;
48370b57cec5SDimitry Andric }
483804eeddc0SDimitry Andric Register Op0Reg = getRegForValue(I->getOperand(0));
48390b57cec5SDimitry Andric if (!Op0Reg)
48400b57cec5SDimitry Andric return false;
48410b57cec5SDimitry Andric
484204eeddc0SDimitry Andric Register ResultReg = fastEmitInst_r(Opc, RC, Op0Reg);
48430b57cec5SDimitry Andric if (!ResultReg)
48440b57cec5SDimitry Andric return false;
48450b57cec5SDimitry Andric
48460b57cec5SDimitry Andric updateValueMap(I, ResultReg);
48470b57cec5SDimitry Andric return true;
48480b57cec5SDimitry Andric }
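// Illustrative example: "%f = bitcast i32 %x to float" becomes a single
// "fmov s0, w0" (FMOVWSr); the other direction/width combinations map to
// FMOVSWr, FMOVXDr and FMOVDXr in the same way.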
48490b57cec5SDimitry Andric
48500b57cec5SDimitry Andric bool AArch64FastISel::selectFRem(const Instruction *I) {
48510b57cec5SDimitry Andric MVT RetVT;
48520b57cec5SDimitry Andric if (!isTypeLegal(I->getType(), RetVT))
48530b57cec5SDimitry Andric return false;
48540b57cec5SDimitry Andric
48550b57cec5SDimitry Andric RTLIB::Libcall LC;
48560b57cec5SDimitry Andric switch (RetVT.SimpleTy) {
48570b57cec5SDimitry Andric default:
48580b57cec5SDimitry Andric return false;
48590b57cec5SDimitry Andric case MVT::f32:
48600b57cec5SDimitry Andric LC = RTLIB::REM_F32;
48610b57cec5SDimitry Andric break;
48620b57cec5SDimitry Andric case MVT::f64:
48630b57cec5SDimitry Andric LC = RTLIB::REM_F64;
48640b57cec5SDimitry Andric break;
48650b57cec5SDimitry Andric }
48660b57cec5SDimitry Andric
48670b57cec5SDimitry Andric ArgListTy Args;
48680b57cec5SDimitry Andric Args.reserve(I->getNumOperands());
48690b57cec5SDimitry Andric
48700b57cec5SDimitry Andric // Populate the argument list.
48710b57cec5SDimitry Andric for (auto &Arg : I->operands()) {
48720b57cec5SDimitry Andric ArgListEntry Entry;
48730b57cec5SDimitry Andric Entry.Val = Arg;
48740b57cec5SDimitry Andric Entry.Ty = Arg->getType();
48750b57cec5SDimitry Andric Args.push_back(Entry);
48760b57cec5SDimitry Andric }
48770b57cec5SDimitry Andric
48780b57cec5SDimitry Andric CallLoweringInfo CLI;
48790b57cec5SDimitry Andric MCContext &Ctx = MF->getContext();
48800b57cec5SDimitry Andric CLI.setCallee(DL, Ctx, TLI.getLibcallCallingConv(LC), I->getType(),
48810b57cec5SDimitry Andric TLI.getLibcallName(LC), std::move(Args));
48820b57cec5SDimitry Andric if (!lowerCallTo(CLI))
48830b57cec5SDimitry Andric return false;
48840b57cec5SDimitry Andric updateValueMap(I, CLI.ResultReg);
48850b57cec5SDimitry Andric return true;
48860b57cec5SDimitry Andric }
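// Illustrative note: there is no AArch64 instruction for frem, so
// "%r = frem float %a, %b" is lowered above as a call to the REM_F32 libcall
// (normally fmodf); the f64 case uses REM_F64 (fmod).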
48870b57cec5SDimitry Andric
48880b57cec5SDimitry Andric bool AArch64FastISel::selectSDiv(const Instruction *I) {
48890b57cec5SDimitry Andric MVT VT;
48900b57cec5SDimitry Andric if (!isTypeLegal(I->getType(), VT))
48910b57cec5SDimitry Andric return false;
48920b57cec5SDimitry Andric
48930b57cec5SDimitry Andric if (!isa<ConstantInt>(I->getOperand(1)))
48940b57cec5SDimitry Andric return selectBinaryOp(I, ISD::SDIV);
48950b57cec5SDimitry Andric
48960b57cec5SDimitry Andric const APInt &C = cast<ConstantInt>(I->getOperand(1))->getValue();
48970b57cec5SDimitry Andric if ((VT != MVT::i32 && VT != MVT::i64) || !C ||
4898349cc55cSDimitry Andric !(C.isPowerOf2() || C.isNegatedPowerOf2()))
48990b57cec5SDimitry Andric return selectBinaryOp(I, ISD::SDIV);
49000b57cec5SDimitry Andric
490106c3fb27SDimitry Andric unsigned Lg2 = C.countr_zero();
490204eeddc0SDimitry Andric Register Src0Reg = getRegForValue(I->getOperand(0));
49030b57cec5SDimitry Andric if (!Src0Reg)
49040b57cec5SDimitry Andric return false;
49050b57cec5SDimitry Andric
49060b57cec5SDimitry Andric if (cast<BinaryOperator>(I)->isExact()) {
4907fe6060f1SDimitry Andric unsigned ResultReg = emitASR_ri(VT, VT, Src0Reg, Lg2);
49080b57cec5SDimitry Andric if (!ResultReg)
49090b57cec5SDimitry Andric return false;
49100b57cec5SDimitry Andric updateValueMap(I, ResultReg);
49110b57cec5SDimitry Andric return true;
49120b57cec5SDimitry Andric }
49130b57cec5SDimitry Andric
49140b57cec5SDimitry Andric int64_t Pow2MinusOne = (1ULL << Lg2) - 1;
4915fe6060f1SDimitry Andric unsigned AddReg = emitAdd_ri_(VT, Src0Reg, Pow2MinusOne);
49160b57cec5SDimitry Andric if (!AddReg)
49170b57cec5SDimitry Andric return false;
49180b57cec5SDimitry Andric
49190b57cec5SDimitry Andric // (Src0 < 0) ? Pow2 - 1 : 0;
4920fe6060f1SDimitry Andric if (!emitICmp_ri(VT, Src0Reg, 0))
49210b57cec5SDimitry Andric return false;
49220b57cec5SDimitry Andric
49230b57cec5SDimitry Andric unsigned SelectOpc;
49240b57cec5SDimitry Andric const TargetRegisterClass *RC;
49250b57cec5SDimitry Andric if (VT == MVT::i64) {
49260b57cec5SDimitry Andric SelectOpc = AArch64::CSELXr;
49270b57cec5SDimitry Andric RC = &AArch64::GPR64RegClass;
49280b57cec5SDimitry Andric } else {
49290b57cec5SDimitry Andric SelectOpc = AArch64::CSELWr;
49300b57cec5SDimitry Andric RC = &AArch64::GPR32RegClass;
49310b57cec5SDimitry Andric }
493204eeddc0SDimitry Andric Register SelectReg = fastEmitInst_rri(SelectOpc, RC, AddReg, Src0Reg,
4933fe6060f1SDimitry Andric AArch64CC::LT);
49340b57cec5SDimitry Andric if (!SelectReg)
49350b57cec5SDimitry Andric return false;
49360b57cec5SDimitry Andric
49370b57cec5SDimitry Andric // Divide by Pow2 --> ashr. If we're dividing by a negative value, we must also
49380b57cec5SDimitry Andric // negate the result.
49390b57cec5SDimitry Andric unsigned ZeroReg = (VT == MVT::i64) ? AArch64::XZR : AArch64::WZR;
49400b57cec5SDimitry Andric unsigned ResultReg;
49410b57cec5SDimitry Andric if (C.isNegative())
4942fe6060f1SDimitry Andric ResultReg = emitAddSub_rs(/*UseAdd=*/false, VT, ZeroReg, SelectReg,
4943fe6060f1SDimitry Andric AArch64_AM::ASR, Lg2);
49440b57cec5SDimitry Andric else
4945fe6060f1SDimitry Andric ResultReg = emitASR_ri(VT, VT, SelectReg, Lg2);
49460b57cec5SDimitry Andric
49470b57cec5SDimitry Andric if (!ResultReg)
49480b57cec5SDimitry Andric return false;
49490b57cec5SDimitry Andric
49500b57cec5SDimitry Andric updateValueMap(I, ResultReg);
49510b57cec5SDimitry Andric return true;
49520b57cec5SDimitry Andric }
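// Illustrative sketch: a non-exact "%q = sdiv i32 %x, 4" is selected roughly as
//   add  w8, w0, #3             // x + (4 - 1)
//   cmp  w0, #0
//   csel w8, w8, w0, lt         // use the biased value only for negative x
//   asr  w0, w8, #2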
49530b57cec5SDimitry Andric
49540b57cec5SDimitry Andric /// This is mostly a copy of the existing FastISel getRegForGEPIndex code. We
49550b57cec5SDimitry Andric /// have to duplicate it for AArch64, because otherwise we would fail during the
49560b57cec5SDimitry Andric /// sign-extend emission.
4957fe6060f1SDimitry Andric unsigned AArch64FastISel::getRegForGEPIndex(const Value *Idx) {
495804eeddc0SDimitry Andric Register IdxN = getRegForValue(Idx);
49590b57cec5SDimitry Andric if (IdxN == 0)
49600b57cec5SDimitry Andric // Unhandled operand. Halt "fast" selection and bail.
4961fe6060f1SDimitry Andric return 0;
49620b57cec5SDimitry Andric
49630b57cec5SDimitry Andric // If the index is smaller or larger than intptr_t, truncate or extend it.
49640b57cec5SDimitry Andric MVT PtrVT = TLI.getPointerTy(DL);
49650b57cec5SDimitry Andric EVT IdxVT = EVT::getEVT(Idx->getType(), /*HandleUnknown=*/false);
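// GEP indices are signed, so narrow indices are sign-extended (not
// zero-extended) to the pointer width.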
49660b57cec5SDimitry Andric if (IdxVT.bitsLT(PtrVT)) {
49670b57cec5SDimitry Andric IdxN = emitIntExt(IdxVT.getSimpleVT(), IdxN, PtrVT, /*isZExt=*/false);
49680b57cec5SDimitry Andric } else if (IdxVT.bitsGT(PtrVT))
49690b57cec5SDimitry Andric llvm_unreachable("AArch64 FastISel doesn't support types larger than i64");
4970fe6060f1SDimitry Andric return IdxN;
49710b57cec5SDimitry Andric }
49720b57cec5SDimitry Andric
49730b57cec5SDimitry Andric /// This is mostly a copy of the existing FastISel GEP code, but we have to
49740b57cec5SDimitry Andric /// duplicate it for AArch64, because otherwise we would bail out even for
49750b57cec5SDimitry Andric /// simple cases. This is because the standard fastEmit functions don't cover
49760b57cec5SDimitry Andric /// MUL at all and ADD is lowered very inefficiently.
49770b57cec5SDimitry Andric bool AArch64FastISel::selectGetElementPtr(const Instruction *I) {
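// GEPs are not handled for ILP32 here; returning false falls back to
// SelectionDAG.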
4978480093f4SDimitry Andric if (Subtarget->isTargetILP32())
4979480093f4SDimitry Andric return false;
4980480093f4SDimitry Andric
498104eeddc0SDimitry Andric Register N = getRegForValue(I->getOperand(0));
49820b57cec5SDimitry Andric if (!N)
49830b57cec5SDimitry Andric return false;
49840b57cec5SDimitry Andric
49850b57cec5SDimitry Andric // Keep a running tab of the total offset to coalesce multiple N = N + Offset
49860b57cec5SDimitry Andric // into a single N = N + TotalOffset.
49870b57cec5SDimitry Andric uint64_t TotalOffs = 0;
49880b57cec5SDimitry Andric MVT VT = TLI.getPointerTy(DL);
49890b57cec5SDimitry Andric for (gep_type_iterator GTI = gep_type_begin(I), E = gep_type_end(I);
49900b57cec5SDimitry Andric GTI != E; ++GTI) {
49910b57cec5SDimitry Andric const Value *Idx = GTI.getOperand();
49920b57cec5SDimitry Andric if (auto *StTy = GTI.getStructTypeOrNull()) {
49930b57cec5SDimitry Andric unsigned Field = cast<ConstantInt>(Idx)->getZExtValue();
49940b57cec5SDimitry Andric // N = N + Offset
49950b57cec5SDimitry Andric if (Field)
49960b57cec5SDimitry Andric TotalOffs += DL.getStructLayout(StTy)->getElementOffset(Field);
49970b57cec5SDimitry Andric } else {
49980b57cec5SDimitry Andric // If this is a constant subscript, handle it quickly.
49990b57cec5SDimitry Andric if (const auto *CI = dyn_cast<ConstantInt>(Idx)) {
50000b57cec5SDimitry Andric if (CI->isZero())
50010b57cec5SDimitry Andric continue;
50020b57cec5SDimitry Andric // N = N + Offset
50031db9f3b2SDimitry Andric TotalOffs += GTI.getSequentialElementStride(DL) *
50041db9f3b2SDimitry Andric cast<ConstantInt>(CI)->getSExtValue();
50050b57cec5SDimitry Andric continue;
50060b57cec5SDimitry Andric }
50070b57cec5SDimitry Andric if (TotalOffs) {
5008fe6060f1SDimitry Andric N = emitAdd_ri_(VT, N, TotalOffs);
50090b57cec5SDimitry Andric if (!N)
50100b57cec5SDimitry Andric return false;
50110b57cec5SDimitry Andric TotalOffs = 0;
50120b57cec5SDimitry Andric }
50130b57cec5SDimitry Andric
50140b57cec5SDimitry Andric // N = N + Idx * ElementSize;
50151db9f3b2SDimitry Andric uint64_t ElementSize = GTI.getSequentialElementStride(DL);
5016fe6060f1SDimitry Andric unsigned IdxN = getRegForGEPIndex(Idx);
50170b57cec5SDimitry Andric if (!IdxN)
50180b57cec5SDimitry Andric return false;
50190b57cec5SDimitry Andric
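// Scale the index by the element size. The generic fastEmit path has no
// MUL pattern, so materialize the constant and multiply explicitly.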
50200b57cec5SDimitry Andric if (ElementSize != 1) {
50210b57cec5SDimitry Andric unsigned C = fastEmit_i(VT, VT, ISD::Constant, ElementSize);
50220b57cec5SDimitry Andric if (!C)
50230b57cec5SDimitry Andric return false;
5024fe6060f1SDimitry Andric IdxN = emitMul_rr(VT, IdxN, C);
50250b57cec5SDimitry Andric if (!IdxN)
50260b57cec5SDimitry Andric return false;
50270b57cec5SDimitry Andric }
5028fe6060f1SDimitry Andric N = fastEmit_rr(VT, VT, ISD::ADD, N, IdxN);
50290b57cec5SDimitry Andric if (!N)
50300b57cec5SDimitry Andric return false;
50310b57cec5SDimitry Andric }
50320b57cec5SDimitry Andric }
50330b57cec5SDimitry Andric if (TotalOffs) {
5034fe6060f1SDimitry Andric N = emitAdd_ri_(VT, N, TotalOffs);
50350b57cec5SDimitry Andric if (!N)
50360b57cec5SDimitry Andric return false;
50370b57cec5SDimitry Andric }
50380b57cec5SDimitry Andric updateValueMap(I, N);
50390b57cec5SDimitry Andric return true;
50400b57cec5SDimitry Andric }
50410b57cec5SDimitry Andric
50420b57cec5SDimitry Andric bool AArch64FastISel::selectAtomicCmpXchg(const AtomicCmpXchgInst *I) {
50435f757f3fSDimitry Andric assert(TM.getOptLevel() == CodeGenOptLevel::None &&
50440b57cec5SDimitry Andric "cmpxchg survived AtomicExpand at optlevel > -O0");
50450b57cec5SDimitry Andric
50460b57cec5SDimitry Andric auto *RetPairTy = cast<StructType>(I->getType());
50470b57cec5SDimitry Andric Type *RetTy = RetPairTy->getTypeAtIndex(0U);
50480b57cec5SDimitry Andric assert(RetPairTy->getTypeAtIndex(1U)->isIntegerTy(1) &&
50490b57cec5SDimitry Andric "cmpxchg has a non-i1 status result");
50500b57cec5SDimitry Andric
50510b57cec5SDimitry Andric MVT VT;
50520b57cec5SDimitry Andric if (!isTypeLegal(RetTy, VT))
50530b57cec5SDimitry Andric return false;
50540b57cec5SDimitry Andric
50550b57cec5SDimitry Andric const TargetRegisterClass *ResRC;
50560b57cec5SDimitry Andric unsigned Opc, CmpOpc;
50570b57cec5SDimitry Andric // This only supports i32/i64, because i8/i16 aren't legal, and the generic
50580b57cec5SDimitry Andric // extractvalue selection doesn't support that.
50590b57cec5SDimitry Andric if (VT == MVT::i32) {
50600b57cec5SDimitry Andric Opc = AArch64::CMP_SWAP_32;
50610b57cec5SDimitry Andric CmpOpc = AArch64::SUBSWrs;
50620b57cec5SDimitry Andric ResRC = &AArch64::GPR32RegClass;
50630b57cec5SDimitry Andric } else if (VT == MVT::i64) {
50640b57cec5SDimitry Andric Opc = AArch64::CMP_SWAP_64;
50650b57cec5SDimitry Andric CmpOpc = AArch64::SUBSXrs;
50660b57cec5SDimitry Andric ResRC = &AArch64::GPR64RegClass;
50670b57cec5SDimitry Andric } else {
50680b57cec5SDimitry Andric return false;
50690b57cec5SDimitry Andric }
50700b57cec5SDimitry Andric
50710b57cec5SDimitry Andric const MCInstrDesc &II = TII.get(Opc);
50720b57cec5SDimitry Andric
507304eeddc0SDimitry Andric const Register AddrReg = constrainOperandRegClass(
50740b57cec5SDimitry Andric II, getRegForValue(I->getPointerOperand()), II.getNumDefs());
507504eeddc0SDimitry Andric const Register DesiredReg = constrainOperandRegClass(
50760b57cec5SDimitry Andric II, getRegForValue(I->getCompareOperand()), II.getNumDefs() + 1);
507704eeddc0SDimitry Andric const Register NewReg = constrainOperandRegClass(
50780b57cec5SDimitry Andric II, getRegForValue(I->getNewValOperand()), II.getNumDefs() + 2);
50790b57cec5SDimitry Andric
508004eeddc0SDimitry Andric const Register ResultReg1 = createResultReg(ResRC);
508104eeddc0SDimitry Andric const Register ResultReg2 = createResultReg(&AArch64::GPR32RegClass);
508204eeddc0SDimitry Andric const Register ScratchReg = createResultReg(&AArch64::GPR32RegClass);
50830b57cec5SDimitry Andric
50840b57cec5SDimitry Andric // FIXME: MachineMemOperand doesn't support cmpxchg yet.
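// Emit the CMP_SWAP pseudo; it is expanded after instruction selection into
// the actual compare-and-swap sequence (e.g. an exclusive load/store loop).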
5085bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, II)
50860b57cec5SDimitry Andric .addDef(ResultReg1)
50870b57cec5SDimitry Andric .addDef(ScratchReg)
50880b57cec5SDimitry Andric .addUse(AddrReg)
50890b57cec5SDimitry Andric .addUse(DesiredReg)
50900b57cec5SDimitry Andric .addUse(NewReg);
50910b57cec5SDimitry Andric
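// Compare the value returned by the swap with the expected value to set the
// flags for the success result.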
5092bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(CmpOpc))
50930b57cec5SDimitry Andric .addDef(VT == MVT::i32 ? AArch64::WZR : AArch64::XZR)
50940b57cec5SDimitry Andric .addUse(ResultReg1)
50950b57cec5SDimitry Andric .addUse(DesiredReg)
50960b57cec5SDimitry Andric .addImm(0);
50970b57cec5SDimitry Andric
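// CSINC with both sources WZR yields 0 on NE (failure) and 1 on EQ
// (success), giving the i1 status result.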
5098bdd1243dSDimitry Andric BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(AArch64::CSINCWr))
50990b57cec5SDimitry Andric .addDef(ResultReg2)
51000b57cec5SDimitry Andric .addUse(AArch64::WZR)
51010b57cec5SDimitry Andric .addUse(AArch64::WZR)
51020b57cec5SDimitry Andric .addImm(AArch64CC::NE);
51030b57cec5SDimitry Andric
51040b57cec5SDimitry Andric assert((ResultReg1 + 1) == ResultReg2 && "Nonconsecutive result registers.");
51050b57cec5SDimitry Andric updateValueMap(I, ResultReg1, 2);
51060b57cec5SDimitry Andric return true;
51070b57cec5SDimitry Andric }
51080b57cec5SDimitry Andric
51090b57cec5SDimitry Andric bool AArch64FastISel::fastSelectInstruction(const Instruction *I) {
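// Let the target reject instructions that must go through SelectionDAG.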
5110bdd1243dSDimitry Andric if (TLI.fallBackToDAGISel(*I))
5111bdd1243dSDimitry Andric return false;
51120b57cec5SDimitry Andric switch (I->getOpcode()) {
51130b57cec5SDimitry Andric default:
51140b57cec5SDimitry Andric break;
51150b57cec5SDimitry Andric case Instruction::Add:
51160b57cec5SDimitry Andric case Instruction::Sub:
51170b57cec5SDimitry Andric return selectAddSub(I);
51180b57cec5SDimitry Andric case Instruction::Mul:
51190b57cec5SDimitry Andric return selectMul(I);
51200b57cec5SDimitry Andric case Instruction::SDiv:
51210b57cec5SDimitry Andric return selectSDiv(I);
51220b57cec5SDimitry Andric case Instruction::SRem:
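// Try the generic lowering first; otherwise use the manual SREM expansion.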
51230b57cec5SDimitry Andric if (!selectBinaryOp(I, ISD::SREM))
51240b57cec5SDimitry Andric return selectRem(I, ISD::SREM);
51250b57cec5SDimitry Andric return true;
51260b57cec5SDimitry Andric case Instruction::URem:
51270b57cec5SDimitry Andric if (!selectBinaryOp(I, ISD::UREM))
51280b57cec5SDimitry Andric return selectRem(I, ISD::UREM);
51290b57cec5SDimitry Andric return true;
51300b57cec5SDimitry Andric case Instruction::Shl:
51310b57cec5SDimitry Andric case Instruction::LShr:
51320b57cec5SDimitry Andric case Instruction::AShr:
51330b57cec5SDimitry Andric return selectShift(I);
51340b57cec5SDimitry Andric case Instruction::And:
51350b57cec5SDimitry Andric case Instruction::Or:
51360b57cec5SDimitry Andric case Instruction::Xor:
51370b57cec5SDimitry Andric return selectLogicalOp(I);
51380b57cec5SDimitry Andric case Instruction::Br:
51390b57cec5SDimitry Andric return selectBranch(I);
51400b57cec5SDimitry Andric case Instruction::IndirectBr:
51410b57cec5SDimitry Andric return selectIndirectBr(I);
51420b57cec5SDimitry Andric case Instruction::BitCast:
51430b57cec5SDimitry Andric if (!FastISel::selectBitCast(I))
51440b57cec5SDimitry Andric return selectBitCast(I);
51450b57cec5SDimitry Andric return true;
51460b57cec5SDimitry Andric case Instruction::FPToSI:
51470b57cec5SDimitry Andric if (!selectCast(I, ISD::FP_TO_SINT))
51480b57cec5SDimitry Andric return selectFPToInt(I, /*Signed=*/true);
51490b57cec5SDimitry Andric return true;
51500b57cec5SDimitry Andric case Instruction::FPToUI:
51510b57cec5SDimitry Andric return selectFPToInt(I, /*Signed=*/false);
51520b57cec5SDimitry Andric case Instruction::ZExt:
51530b57cec5SDimitry Andric case Instruction::SExt:
51540b57cec5SDimitry Andric return selectIntExt(I);
51550b57cec5SDimitry Andric case Instruction::Trunc:
51560b57cec5SDimitry Andric if (!selectCast(I, ISD::TRUNCATE))
51570b57cec5SDimitry Andric return selectTrunc(I);
51580b57cec5SDimitry Andric return true;
51590b57cec5SDimitry Andric case Instruction::FPExt:
51600b57cec5SDimitry Andric return selectFPExt(I);
51610b57cec5SDimitry Andric case Instruction::FPTrunc:
51620b57cec5SDimitry Andric return selectFPTrunc(I);
51630b57cec5SDimitry Andric case Instruction::SIToFP:
51640b57cec5SDimitry Andric if (!selectCast(I, ISD::SINT_TO_FP))
51650b57cec5SDimitry Andric return selectIntToFP(I, /*Signed=*/true);
51660b57cec5SDimitry Andric return true;
51670b57cec5SDimitry Andric case Instruction::UIToFP:
51680b57cec5SDimitry Andric return selectIntToFP(I, /*Signed=*/false);
51690b57cec5SDimitry Andric case Instruction::Load:
51700b57cec5SDimitry Andric return selectLoad(I);
51710b57cec5SDimitry Andric case Instruction::Store:
51720b57cec5SDimitry Andric return selectStore(I);
51730b57cec5SDimitry Andric case Instruction::FCmp:
51740b57cec5SDimitry Andric case Instruction::ICmp:
51750b57cec5SDimitry Andric return selectCmp(I);
51760b57cec5SDimitry Andric case Instruction::Select:
51770b57cec5SDimitry Andric return selectSelect(I);
51780b57cec5SDimitry Andric case Instruction::Ret:
51790b57cec5SDimitry Andric return selectRet(I);
51800b57cec5SDimitry Andric case Instruction::FRem:
51810b57cec5SDimitry Andric return selectFRem(I);
51820b57cec5SDimitry Andric case Instruction::GetElementPtr:
51830b57cec5SDimitry Andric return selectGetElementPtr(I);
51840b57cec5SDimitry Andric case Instruction::AtomicCmpXchg:
51850b57cec5SDimitry Andric return selectAtomicCmpXchg(cast<AtomicCmpXchgInst>(I));
51860b57cec5SDimitry Andric }
51870b57cec5SDimitry Andric
51880b57cec5SDimitry Andric // Fall back to target-independent instruction selection.
51890b57cec5SDimitry Andric return selectOperator(I, I->getOpcode());
51900b57cec5SDimitry Andric }
51910b57cec5SDimitry Andric
51920b57cec5SDimitry Andric FastISel *AArch64::createFastISel(FunctionLoweringInfo &FuncInfo,
51930b57cec5SDimitry Andric const TargetLibraryInfo *LibInfo) {
5194bdd1243dSDimitry Andric
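// FastISel does not support functions that carry SME state (ZA/ZT0) or a
// streaming(-compatible) interface or body; use SelectionDAG instead.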
5195bdd1243dSDimitry Andric SMEAttrs CallerAttrs(*FuncInfo.Fn);
5196*0fca6ea1SDimitry Andric if (CallerAttrs.hasZAState() || CallerAttrs.hasZT0State() ||
5197*0fca6ea1SDimitry Andric CallerAttrs.hasStreamingInterfaceOrBody() ||
51985f757f3fSDimitry Andric CallerAttrs.hasStreamingCompatibleInterface())
5199bdd1243dSDimitry Andric return nullptr;
52000b57cec5SDimitry Andric return new AArch64FastISel(FuncInfo, LibInfo);
52010b57cec5SDimitry Andric }