//===- RISCVTargetTransformInfo.h - RISC-V specific TTI ---------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines a TargetTransformInfo::Concept conforming object specific
/// to the RISC-V target machine. It uses the target's detailed information to
/// provide more precise answers to certain TTI queries, while letting the
/// target independent and default TTI implementations handle the rest.
///
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TARGET_RISCV_RISCVTARGETTRANSFORMINFO_H
#define LLVM_LIB_TARGET_RISCV_RISCVTARGETTRANSFORMINFO_H

#include "RISCVSubtarget.h"
#include "RISCVTargetMachine.h"
#include "llvm/Analysis/IVDescriptors.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/CodeGen/BasicTTIImpl.h"
#include "llvm/IR/Function.h"
#include <optional>

namespace llvm {

class RISCVTTIImpl : public BasicTTIImplBase<RISCVTTIImpl> {
  using BaseT = BasicTTIImplBase<RISCVTTIImpl>;
  using TTI = TargetTransformInfo;

  friend BaseT;

  const RISCVSubtarget *ST;
  const RISCVTargetLowering *TLI;

  const RISCVSubtarget *getST() const { return ST; }
  const RISCVTargetLowering *getTLI() const { return TLI; }

  /// This function returns an estimate for VL to be used in VL based terms
  /// of the cost model. For fixed-length vectors, this is simply the
  /// vector length. For scalable vectors, we return results consistent
  /// with getVScaleForTuning under the assumption that clients are also
  /// using that when comparing costs between scalar and vector representation.
  /// This does unfortunately mean that we can both undershoot and overshoot
  /// the true cost significantly if getVScaleForTuning is wildly off for the
  /// actual target hardware.
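  /// For example (assuming getVScaleForTuning() reports 2 for the tuned
  /// target), a fixed-length <8 x i32> yields an estimate of 8, while a
  /// scalable <vscale x 4 x i32> yields an estimate consistent with that
  /// tuning value, i.e. 4 * 2 = 8.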
  unsigned getEstimatedVLFor(VectorType *Ty);

  InstructionCost getRISCVInstructionCost(ArrayRef<unsigned> OpCodes, MVT VT,
                                          TTI::TargetCostKind CostKind);

  /// Return the cost of accessing a constant pool entry of the specified
  /// type.
  InstructionCost getConstantPoolLoadCost(Type *Ty,
                                          TTI::TargetCostKind CostKind);

public:
  explicit RISCVTTIImpl(const RISCVTargetMachine *TM, const Function &F)
      : BaseT(TM, F.getDataLayout()), ST(TM->getSubtargetImpl(F)),
        TLI(ST->getTargetLowering()) {}

  bool areInlineCompatible(const Function *Caller,
                           const Function *Callee) const;

  /// Return the cost of materializing an immediate for a value operand of
  /// a store instruction.
  InstructionCost getStoreImmCost(Type *VecTy, TTI::OperandValueInfo OpInfo,
                                  TTI::TargetCostKind CostKind);

  InstructionCost getIntImmCost(const APInt &Imm, Type *Ty,
                                TTI::TargetCostKind CostKind);
  InstructionCost getIntImmCostInst(unsigned Opcode, unsigned Idx,
                                    const APInt &Imm, Type *Ty,
                                    TTI::TargetCostKind CostKind,
                                    Instruction *Inst = nullptr);
  InstructionCost getIntImmCostIntrin(Intrinsic::ID IID, unsigned Idx,
                                      const APInt &Imm, Type *Ty,
                                      TTI::TargetCostKind CostKind);

  /// \name EVL Support for predicated vectorization.
  /// Whether the target supports the %evl parameter of a VP intrinsic
  /// efficiently in hardware, for the given opcode and type/alignment (see
  /// LLVM Language Reference - "Vector Predication Intrinsics",
  /// https://llvm.org/docs/LangRef.html#vector-predication-intrinsics and
  /// "IR-level VP intrinsics",
  /// https://llvm.org/docs/Proposals/VectorPredication.html#ir-level-vp-intrinsics).
  /// \param Opcode the opcode of the instruction checked for predicated
  /// version support.
  /// \param DataType the type of the instruction with the \p Opcode checked
  /// for predication support.
  /// \param Alignment the alignment for memory access operations checked for
  /// predicated version support.
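  /// As a sketch of what this hook gates, a VP intrinsic such as
  /// \code
  ///   %r = call <vscale x 4 x i32> @llvm.vp.add.nxv4i32(
  ///            <vscale x 4 x i32> %a, <vscale x 4 x i32> %b,
  ///            <vscale x 4 x i1> %m, i32 %evl)
  /// \endcode
  /// carries an explicit %evl operand; returning true tells clients that the
  /// target can honor that operand directly (on RISC-V, via the vl register)
  /// rather than emulating it through the mask.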
  bool hasActiveVectorLength(unsigned Opcode, Type *DataType,
                             Align Alignment) const;

  TargetTransformInfo::PopcntSupportKind getPopcntSupport(unsigned TyWidth);

  bool shouldExpandReduction(const IntrinsicInst *II) const;
  bool supportsScalableVectors() const { return ST->hasVInstructions(); }
  bool enableOrderedReductions() const { return true; }
  bool enableScalableVectorization() const { return ST->hasVInstructions(); }
  TailFoldingStyle
  getPreferredTailFoldingStyle(bool IVUpdateMayOverflow) const {
    return ST->hasVInstructions() ? TailFoldingStyle::Data
                                  : TailFoldingStyle::DataWithoutLaneMask;
  }
  std::optional<unsigned> getMaxVScale() const;
  std::optional<unsigned> getVScaleForTuning() const;

  TypeSize getRegisterBitWidth(TargetTransformInfo::RegisterKind K) const;

  unsigned getRegUsageForType(Type *Ty);

  unsigned getMaximumVF(unsigned ElemWidth, unsigned Opcode) const;

  bool preferEpilogueVectorization() const {
    // Epilogue vectorization is usually unprofitable - tail folding or
    // a smaller VF would have been better. This is a blunt hammer - we
    // should re-examine this once vectorization is better tuned.
    return false;
  }

  InstructionCost getMaskedMemoryOpCost(unsigned Opcode, Type *Src,
                                        Align Alignment, unsigned AddressSpace,
                                        TTI::TargetCostKind CostKind);

  InstructionCost getPointersChainCost(ArrayRef<const Value *> Ptrs,
                                       const Value *Base,
                                       const TTI::PointersChainInfo &Info,
                                       Type *AccessTy,
                                       TTI::TargetCostKind CostKind);

  void getUnrollingPreferences(Loop *L, ScalarEvolution &SE,
                               TTI::UnrollingPreferences &UP,
                               OptimizationRemarkEmitter *ORE);

  void getPeelingPreferences(Loop *L, ScalarEvolution &SE,
                             TTI::PeelingPreferences &PP);

  unsigned getMinVectorRegisterBitWidth() const {
    return ST->useRVVForFixedLengthVectors() ? 16 : 0;
  }
  InstructionCost getShuffleCost(TTI::ShuffleKind Kind, VectorType *Tp,
                                 ArrayRef<int> Mask,
                                 TTI::TargetCostKind CostKind, int Index,
                                 VectorType *SubTp,
                                 ArrayRef<const Value *> Args = std::nullopt,
                                 const Instruction *CxtI = nullptr);

  InstructionCost getIntrinsicInstrCost(const IntrinsicCostAttributes &ICA,
                                        TTI::TargetCostKind CostKind);

  InstructionCost getInterleavedMemoryOpCost(
      unsigned Opcode, Type *VecTy, unsigned Factor, ArrayRef<unsigned> Indices,
      Align Alignment, unsigned AddressSpace, TTI::TargetCostKind CostKind,
      bool UseMaskForCond = false, bool UseMaskForGaps = false);

  InstructionCost getGatherScatterOpCost(unsigned Opcode, Type *DataTy,
                                         const Value *Ptr, bool VariableMask,
                                         Align Alignment,
                                         TTI::TargetCostKind CostKind,
                                         const Instruction *I);

  InstructionCost getStridedMemoryOpCost(unsigned Opcode, Type *DataTy,
                                         const Value *Ptr, bool VariableMask,
                                         Align Alignment,
                                         TTI::TargetCostKind CostKind,
                                         const Instruction *I);

  InstructionCost getCastInstrCost(unsigned Opcode, Type *Dst, Type *Src,
                                   TTI::CastContextHint CCH,
                                   TTI::TargetCostKind CostKind,
                                   const Instruction *I = nullptr);

  InstructionCost getMinMaxReductionCost(Intrinsic::ID IID, VectorType *Ty,
                                         FastMathFlags FMF,
                                         TTI::TargetCostKind CostKind);

  InstructionCost getArithmeticReductionCost(unsigned Opcode, VectorType *Ty,
                                             std::optional<FastMathFlags> FMF,
                                             TTI::TargetCostKind CostKind);

  InstructionCost getExtendedReductionCost(unsigned Opcode, bool IsUnsigned,
                                           Type *ResTy, VectorType *ValTy,
                                           FastMathFlags FMF,
                                           TTI::TargetCostKind CostKind);

  InstructionCost
  getMemoryOpCost(unsigned Opcode, Type *Src, MaybeAlign Alignment,
                  unsigned AddressSpace, TTI::TargetCostKind CostKind,
                  TTI::OperandValueInfo OpdInfo = {TTI::OK_AnyValue, TTI::OP_None},
                  const Instruction *I = nullptr);

  InstructionCost getCmpSelInstrCost(unsigned Opcode, Type *ValTy, Type *CondTy,
                                     CmpInst::Predicate VecPred,
                                     TTI::TargetCostKind CostKind,
                                     const Instruction *I = nullptr);
  InstructionCost
  getCFInstrCost(unsigned Opcode, TTI::TargetCostKind CostKind,
                 const Instruction *I = nullptr);

  using BaseT::getVectorInstrCost;
  InstructionCost getVectorInstrCost(unsigned Opcode, Type *Val,
                                     TTI::TargetCostKind CostKind,
                                     unsigned Index, Value *Op0, Value *Op1);

  InstructionCost getArithmeticInstrCost(
      unsigned Opcode, Type *Ty, TTI::TargetCostKind CostKind,
      TTI::OperandValueInfo Op1Info = {TTI::OK_AnyValue, TTI::OP_None},
      TTI::OperandValueInfo Op2Info = {TTI::OK_AnyValue, TTI::OP_None},
      ArrayRef<const Value *> Args = std::nullopt,
      const Instruction *CxtI = nullptr);

  bool isElementTypeLegalForScalableVector(Type *Ty) const {
    return TLI->isLegalElementTypeForRVV(TLI->getValueType(DL, Ty));
  }

  bool isLegalMaskedLoadStore(Type *DataType, Align Alignment) {
    if (!ST->hasVInstructions())
      return false;

    EVT DataTypeVT = TLI->getValueType(DL, DataType);

    // Only support fixed vectors if we know the minimum vector size.
    if (DataTypeVT.isFixedLengthVector() && !ST->useRVVForFixedLengthVectors())
      return false;

    EVT ElemType = DataTypeVT.getScalarType();
    if (!ST->enableUnalignedVectorMem() && Alignment < ElemType.getStoreSize())
      return false;

    return TLI->isLegalElementTypeForRVV(ElemType);
  }
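  // Illustration of the check above (assuming i16 is a legal RVV element type
  // for the subtarget): a masked load of <8 x i16> with Align(1) is rejected
  // unless unaligned vector memory accesses are enabled, because 1 is less
  // than the 2-byte element store size; with Align(2) or greater it is
  // accepted whenever RVV is used for fixed-length vectors.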
  bool isLegalMaskedLoad(Type *DataType, Align Alignment) {
    return isLegalMaskedLoadStore(DataType, Alignment);
  }
  bool isLegalMaskedStore(Type *DataType, Align Alignment) {
    return isLegalMaskedLoadStore(DataType, Alignment);
  }

  bool isLegalMaskedGatherScatter(Type *DataType, Align Alignment) {
    if (!ST->hasVInstructions())
      return false;

    EVT DataTypeVT = TLI->getValueType(DL, DataType);

    // Only support fixed vectors if we know the minimum vector size.
    if (DataTypeVT.isFixedLengthVector() && !ST->useRVVForFixedLengthVectors())
      return false;

    EVT ElemType = DataTypeVT.getScalarType();
    if (!ST->enableUnalignedVectorMem() && Alignment < ElemType.getStoreSize())
      return false;

    return TLI->isLegalElementTypeForRVV(ElemType);
  }

  bool isLegalMaskedGather(Type *DataType, Align Alignment) {
    return isLegalMaskedGatherScatter(DataType, Alignment);
  }
  bool isLegalMaskedScatter(Type *DataType, Align Alignment) {
    return isLegalMaskedGatherScatter(DataType, Alignment);
  }

  bool forceScalarizeMaskedGather(VectorType *VTy, Align Alignment) {
    // Scalarize masked gather for RV64 if EEW=64 indices aren't supported.
    return ST->is64Bit() && !ST->hasVInstructionsI64();
  }

  bool forceScalarizeMaskedScatter(VectorType *VTy, Align Alignment) {
    // Scalarize masked scatter for RV64 if EEW=64 indices aren't supported.
    return ST->is64Bit() && !ST->hasVInstructionsI64();
  }

  bool isLegalStridedLoadStore(Type *DataType, Align Alignment) {
    EVT DataTypeVT = TLI->getValueType(DL, DataType);
    return TLI->isLegalStridedLoadStore(DataTypeVT, Alignment);
  }

  bool isLegalMaskedCompressStore(Type *DataTy, Align Alignment);

  bool isVScaleKnownToBeAPowerOfTwo() const {
    return TLI->isVScaleKnownToBeAPowerOfTwo();
  }

  /// \returns How the target needs this vector-predicated operation to be
  /// transformed.
  TargetTransformInfo::VPLegalization
  getVPLegalizationStrategy(const VPIntrinsic &PI) const {
    using VPLegalization = TargetTransformInfo::VPLegalization;
    // RVV has no vector reduction-multiply instruction, so vp.reduce.mul is
    // only kept legal for i1 element types; that case, and any target without
    // vector instructions, is handed to the generic VP expansion instead.
    if (!ST->hasVInstructions() ||
        (PI.getIntrinsicID() == Intrinsic::vp_reduce_mul &&
         cast<VectorType>(PI.getArgOperand(1)->getType())
                 ->getElementType()
                 ->getIntegerBitWidth() != 1))
      return VPLegalization(VPLegalization::Discard, VPLegalization::Convert);
    return VPLegalization(VPLegalization::Legal, VPLegalization::Legal);
  }

  bool isLegalToVectorizeReduction(const RecurrenceDescriptor &RdxDesc,
                                   ElementCount VF) const {
    if (!VF.isScalable())
      return true;

    Type *Ty = RdxDesc.getRecurrenceType();
    if (!TLI->isLegalElementTypeForRVV(TLI->getValueType(DL, Ty)))
      return false;

    switch (RdxDesc.getRecurrenceKind()) {
    case RecurKind::Add:
    case RecurKind::FAdd:
    case RecurKind::And:
    case RecurKind::Or:
    case RecurKind::Xor:
    case RecurKind::SMin:
    case RecurKind::SMax:
    case RecurKind::UMin:
    case RecurKind::UMax:
    case RecurKind::FMin:
    case RecurKind::FMax:
    case RecurKind::FMulAdd:
    case RecurKind::IAnyOf:
    case RecurKind::FAnyOf:
      return true;
    default:
      return false;
    }
  }

  unsigned getMaxInterleaveFactor(ElementCount VF) {
    // Don't interleave if the loop has been vectorized with scalable vectors.
    if (VF.isScalable())
      return 1;
    // If the loop will not be vectorized, don't interleave it either; let the
    // regular unroller handle it.
    return VF.isScalar() ? 1 : ST->getMaxInterleaveFactor();
  }
  bool enableInterleavedAccessVectorization() { return true; }

  enum RISCVRegisterClass { GPRRC, FPRRC, VRRC };
  unsigned getNumberOfRegisters(unsigned ClassID) const {
    switch (ClassID) {
    case RISCVRegisterClass::GPRRC:
      // 31 = 32 GPR - x0 (zero register)
      // FIXME: Should we exclude fixed registers like SP, TP or GP?
      return 31;
    case RISCVRegisterClass::FPRRC:
      if (ST->hasStdExtF())
        return 32;
      return 0;
    case RISCVRegisterClass::VRRC:
      // Although there are 32 vector registers, v0 is special in that it is
      // the only register that can be used to hold a mask.
      // FIXME: Should we conservatively return 31 as the number of usable
      // vector registers?
      return ST->hasVInstructions() ? 32 : 0;
    }
    llvm_unreachable("unknown register class");
  }

  unsigned getRegisterClassForType(bool Vector, Type *Ty = nullptr) const {
    if (Vector)
      return RISCVRegisterClass::VRRC;
    if (!Ty)
      return RISCVRegisterClass::GPRRC;

    Type *ScalarTy = Ty->getScalarType();
    if ((ScalarTy->isHalfTy() && ST->hasStdExtZfhmin()) ||
        (ScalarTy->isFloatTy() && ST->hasStdExtF()) ||
        (ScalarTy->isDoubleTy() && ST->hasStdExtD())) {
      return RISCVRegisterClass::FPRRC;
    }

    return RISCVRegisterClass::GPRRC;
  }

  const char *getRegisterClassName(unsigned ClassID) const {
    switch (ClassID) {
    case RISCVRegisterClass::GPRRC:
      return "RISCV::GPRRC";
    case RISCVRegisterClass::FPRRC:
      return "RISCV::FPRRC";
    case RISCVRegisterClass::VRRC:
      return "RISCV::VRRC";
    }
    llvm_unreachable("unknown register class");
  }

  bool isLSRCostLess(const TargetTransformInfo::LSRCost &C1,
                     const TargetTransformInfo::LSRCost &C2);

  bool shouldFoldTerminatingConditionAfterLSR() const { return true; }

  std::optional<unsigned> getMinPageSize() const { return 4096; }
};

} // end namespace llvm

#endif // LLVM_LIB_TARGET_RISCV_RISCVTARGETTRANSFORMINFO_H