Lines Matching refs:Subtarget

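(A short sketch of the recurring Subtarget usage pattern these hits share follows the listing.)
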
50   const X86Subtarget *Subtarget;  member in __anonec6fb4fc0111::X86FastISel
56 Subtarget = &funcInfo.MF->getSubtarget<X86Subtarget>(); in X86FastISel()
130 return Subtarget->getInstrInfo(); in getInstrInfo()
150 return (VT == MVT::f64 && Subtarget->hasSSE2()) || in isScalarFPTypeInSSEReg()
151 (VT == MVT::f32 && Subtarget->hasSSE1()) || VT == MVT::f16; in isScalarFPTypeInSSEReg()
299 if (VT == MVT::f64 && !Subtarget->hasSSE2()) in isTypeLegal()
301 if (VT == MVT::f32 && !Subtarget->hasSSE1()) in isTypeLegal()
319 bool HasSSE1 = Subtarget->hasSSE1(); in X86FastEmitLoad()
320 bool HasSSE2 = Subtarget->hasSSE2(); in X86FastEmitLoad()
321 bool HasSSE41 = Subtarget->hasSSE41(); in X86FastEmitLoad()
322 bool HasAVX = Subtarget->hasAVX(); in X86FastEmitLoad()
323 bool HasAVX2 = Subtarget->hasAVX2(); in X86FastEmitLoad()
324 bool HasAVX512 = Subtarget->hasAVX512(); in X86FastEmitLoad()
325 bool HasVLX = Subtarget->hasVLX(); in X86FastEmitLoad()
481 bool HasSSE1 = Subtarget->hasSSE1(); in X86FastEmitStore()
482 bool HasSSE2 = Subtarget->hasSSE2(); in X86FastEmitStore()
483 bool HasSSE4A = Subtarget->hasSSE4A(); in X86FastEmitStore()
484 bool HasAVX = Subtarget->hasAVX(); in X86FastEmitStore()
485 bool HasAVX512 = Subtarget->hasAVX512(); in X86FastEmitStore()
486 bool HasVLX = Subtarget->hasVLX(); in X86FastEmitStore()
733 if (!Subtarget->isPICStyleRIPRel() || in handleConstantAddresses()
739 unsigned char GVFlags = Subtarget->classifyGlobalReference(GV); in handleConstantAddresses()
750 if (Subtarget->isPICStyleRIPRel()) { in handleConstantAddresses()
786 if (Subtarget->isPICStyleRIPRel() || GVFlags == X86II::MO_GOTPCREL || in handleConstantAddresses()
811 if (!AM.GV || !Subtarget->isPICStyleRIPRel()) { in handleConstantAddresses()
938 (!AM.GV || !Subtarget->isPICStyleRIPRel()) && in X86SelectAddress()
1059 if (Subtarget->isPICStyleRIPRel() && in X86SelectCallAddress()
1074 if (Subtarget->isPICStyleRIPRel()) { in X86SelectCallAddress()
1080 AM.GVOpFlags = Subtarget->classifyLocalReference(nullptr); in X86SelectCallAddress()
1087 if (!AM.GV || !Subtarget->isPICStyleRIPRel()) { in X86SelectCallAddress()
1092 if (Reg && Subtarget->isTarget64BitILP32()) { in X86SelectCallAddress()
1294 unsigned RetReg = Subtarget->isTarget64BitLP64() ? X86::RAX : X86::EAX; in X86SelectRet()
1304 TII.get(Subtarget->is64Bit() ? X86::RETI64 : X86::RETI32)) in X86SelectRet()
1308 TII.get(Subtarget->is64Bit() ? X86::RET64 : X86::RET32)); in X86SelectRet()
1358 static unsigned X86ChooseCmpOpcode(EVT VT, const X86Subtarget *Subtarget) { in X86ChooseCmpOpcode() argument
1359 bool HasAVX512 = Subtarget->hasAVX512(); in X86ChooseCmpOpcode()
1360 bool HasAVX = Subtarget->hasAVX(); in X86ChooseCmpOpcode()
1361 bool HasSSE1 = Subtarget->hasSSE1(); in X86ChooseCmpOpcode()
1362 bool HasSSE2 = Subtarget->hasSSE2(); in X86ChooseCmpOpcode()
1424 unsigned CompareOpc = X86ChooseCmpOpcode(VT, Subtarget); in X86FastEmitCompare()
1930 if (!Subtarget->is64Bit()) in X86SelectDivRem()
1997 OpEntry.DivRemResultReg == X86::AH && Subtarget->is64Bit()) { in X86SelectDivRem()
2026 if (!Subtarget->canUseCMOV()) in X86FastEmitCMoveSelect()
2137 const TargetRegisterInfo &TRI = *Subtarget->getRegisterInfo(); in X86FastEmitCMoveSelect()
2139 Subtarget->hasNDD()); in X86FastEmitCMoveSelect()
2159 !((Subtarget->hasSSE1() && RetVT == MVT::f32) || in X86FastEmitSSESelect()
2160 (Subtarget->hasSSE2() && RetVT == MVT::f64))) in X86FastEmitSSESelect()
2179 if (CC > 7 && !Subtarget->hasAVX()) in X86FastEmitSSESelect()
2198 if (Subtarget->hasAVX512()) { in X86FastEmitSSESelect()
2225 } else if (Subtarget->hasAVX()) { in X86FastEmitSSESelect()
2282 Opc = Subtarget->hasAVX512() ? X86::CMOV_FR16X : X86::CMOV_FR16; break; in X86FastEmitPseudoSelect()
2284 Opc = Subtarget->hasAVX512() ? X86::CMOV_FR32X : X86::CMOV_FR32; break; in X86FastEmitPseudoSelect()
2286 Opc = Subtarget->hasAVX512() ? X86::CMOV_FR64X : X86::CMOV_FR64; break; in X86FastEmitPseudoSelect()
2397 bool HasAVX512 = Subtarget->hasAVX512(); in X86SelectIntToFP()
2398 if (!Subtarget->hasAVX() || (!IsSigned && !HasAVX512)) in X86SelectIntToFP()
2459 bool HasAVX = Subtarget->hasAVX(); in X86SelectFPExtOrFPTrunc()
2487 if (Subtarget->hasSSE2() && I->getType()->isDoubleTy() && in X86SelectFPExt()
2489 bool HasAVX512 = Subtarget->hasAVX512(); in X86SelectFPExt()
2493 : Subtarget->hasAVX() ? X86::VCVTSS2SDrr : X86::CVTSS2SDrr; in X86SelectFPExt()
2501 if (Subtarget->hasSSE2() && I->getType()->isFloatTy() && in X86SelectFPTrunc()
2503 bool HasAVX512 = Subtarget->hasAVX512(); in X86SelectFPTrunc()
2507 : Subtarget->hasAVX() ? X86::VCVTSD2SSrr : X86::CVTSD2SSrr; in X86SelectFPTrunc()
2546 return Len <= (Subtarget->is64Bit() ? 32 : 16); in IsMemcpySmall()
2556 bool i64Legal = Subtarget->is64Bit(); in TryEmitSmallMemcpy()
2591 if (Subtarget->useSoftFloat() || !Subtarget->hasF16C()) in fastLowerIntrinsicCall()
2619 unsigned Opc = Subtarget->hasVLX() ? X86::VCVTPS2PHZ128rr in fastLowerIntrinsicCall()
2624 Opc = Subtarget->hasAVX512() ? X86::VMOVPDI2DIZrr in fastLowerIntrinsicCall()
2642 unsigned Opc = Subtarget->hasVLX() ? X86::VCVTPH2PSZ128rr in fastLowerIntrinsicCall()
2682 const X86RegisterInfo *RegInfo = Subtarget->getRegisterInfo(); in fastLowerIntrinsicCall()
2731 unsigned SizeWidth = Subtarget->is64Bit() ? 64 : 32; in fastLowerIntrinsicCall()
2746 unsigned SizeWidth = Subtarget->is64Bit() ? 64 : 32; in fastLowerIntrinsicCall()
2790 if (!Subtarget->hasSSE1()) in fastLowerIntrinsicCall()
2807 unsigned AVXLevel = Subtarget->hasAVX512() ? 2 : in fastLowerIntrinsicCall()
2808 Subtarget->hasAVX() ? 1 : in fastLowerIntrinsicCall()
2971 if (!Subtarget->hasSSE1()) in fastLowerIntrinsicCall()
2977 if (!Subtarget->hasSSE2()) in fastLowerIntrinsicCall()
2996 unsigned AVXLevel = Subtarget->hasAVX512() ? 2 : in fastLowerIntrinsicCall()
2997 Subtarget->hasAVX() ? 1 : in fastLowerIntrinsicCall()
3036 if (!Subtarget->hasCRC32()) in fastLowerIntrinsicCall()
3051 #define GET_EGPR_IF_ENABLED(OPC) Subtarget->hasEGPR() ? OPC##_EVEX : OPC in fastLowerIntrinsicCall()
3101 if (Subtarget->isCallingConvWin64(CC)) in fastLowerArguments()
3104 if (!Subtarget->is64Bit()) in fastLowerArguments()
3107 if (Subtarget->useSoftFloat()) in fastLowerArguments()
3137 if (!Subtarget->hasSSE1()) in fastLowerArguments()
3187 static unsigned computeBytesPoppedByCalleeForSRet(const X86Subtarget *Subtarget, in computeBytesPoppedByCalleeForSRet() argument
3190 if (Subtarget->is64Bit()) in computeBytesPoppedByCalleeForSRet()
3192 if (Subtarget->getTargetTriple().isOSMSVCRT()) in computeBytesPoppedByCalleeForSRet()
3201 CB->paramHasAttr(0, Attribute::InReg) || Subtarget->isTargetMCU()) in computeBytesPoppedByCalleeForSRet()
3220 bool Is64Bit = Subtarget->is64Bit(); in fastLowerCall()
3221 bool IsWin64 = Subtarget->isCallingConvWin64(CC); in fastLowerCall()
3241 if (Subtarget->useIndirectThunkCalls()) in fastLowerCall()
3352 const X86RegisterInfo *RegInfo = Subtarget->getRegisterInfo(); in fastLowerCall()
3476 if (Subtarget->isPICStyleGOT()) { in fastLowerCall()
3497 assert((Subtarget->hasSSE1() || !NumXMMRegs) in fastLowerCall()
3529 unsigned char OpFlags = Subtarget->classifyGlobalFunctionReference(GV); in fastLowerCall()
3561 if (Subtarget->isPICStyleGOT()) in fastLowerCall()
3573 X86::isCalleePop(CC, Subtarget->is64Bit(), IsVarArg, in fastLowerCall()
3576 : computeBytesPoppedByCalleeForSRet(Subtarget, CC, CLI.CB); in fastLowerCall()
3597 ((Is64Bit || Ins[i].Flags.isInReg()) && !Subtarget->hasSSE1())) { in fastLowerCall()
3693 if (!Subtarget->hasSSE2()) in fastSelectInstruction()
3786 bool HasSSE1 = Subtarget->hasSSE1(); in X86MaterializeFP()
3787 bool HasSSE2 = Subtarget->hasSSE2(); in X86MaterializeFP()
3788 bool HasAVX = Subtarget->hasAVX(); in X86MaterializeFP()
3789 bool HasAVX512 = Subtarget->hasAVX512(); in X86MaterializeFP()
3814 unsigned char OpFlag = Subtarget->classifyLocalReference(nullptr); in X86MaterializeFP()
3819 else if (Subtarget->is64Bit() && TM.getCodeModel() != CodeModel::Large) in X86MaterializeFP()
3827 if (Subtarget->is64Bit() && CM == CodeModel::Large) { in X86MaterializeFP()
3876 ? (Subtarget->isTarget64BitILP32() ? X86::LEA64_32r : X86::LEA32r) in X86MaterializeGV()
3906 if (!Subtarget->hasSSE1()) in fastMaterializeConstant()
3910 if (!Subtarget->hasSSE2()) in fastMaterializeConstant()
3946 ? (Subtarget->isTarget64BitILP32() ? X86::LEA64_32r : X86::LEA32r) in fastMaterializeAlloca()
3961 bool HasSSE1 = Subtarget->hasSSE1(); in fastMaterializeFloatZero()
3962 bool HasSSE2 = Subtarget->hasSSE2(); in fastMaterializeFloatZero()
3963 bool HasAVX512 = Subtarget->hasAVX512(); in fastMaterializeFloatZero()