Searched refs:hasAVX (Results 1 – 19 of 19) sorted by relevance
54 def UseSSE1 : Predicate<"Subtarget->hasSSE1() && !Subtarget->hasAVX()">;
56 def UseSSE2 : Predicate<"Subtarget->hasSSE2() && !Subtarget->hasAVX()">;
58 def UseSSE3 : Predicate<"Subtarget->hasSSE3() && !Subtarget->hasAVX()">;
60 def UseSSSE3 : Predicate<"Subtarget->hasSSSE3() && !Subtarget->hasAVX()">;
63 def UseSSE41 : Predicate<"Subtarget->hasSSE41() && !Subtarget->hasAVX()">;
65 def UseSSE42 : Predicate<"Subtarget->hasSSE42() && !Subtarget->hasAVX()">;
67 def NoAVX : Predicate<"!Subtarget->hasAVX()">;
68 def HasAVX : Predicate<"Subtarget->hasAVX()">;
70 def HasAVX1Only : Predicate<"Subtarget->hasAVX() && !Subtarget->hasAVX2()">;
75 def UseAVX : Predicate<"Subtarget->hasAVX() && !Subtarget->hasAVX512()">;
287 if (!ST.hasAVX() || !ST.insertVZEROUPPER()) in runOnMachineFunction()
322 bool HasAVX = Subtarget->hasAVX(); in X86FastEmitLoad()
484 bool HasAVX = Subtarget->hasAVX(); in X86FastEmitStore()
1360 bool HasAVX = Subtarget->hasAVX(); in X86ChooseCmpOpcode()
2179 if (CC > 7 && !Subtarget->hasAVX()) in X86FastEmitSSESelect()
2225 } else if (Subtarget->hasAVX()) { in X86FastEmitSSESelect()
2398 if (!Subtarget->hasAVX() || (!IsSigned && !HasAVX512)) in X86SelectIntToFP()
2459 bool HasAVX = Subtarget->hasAVX(); in X86SelectFPExtOrFPTrunc()
2493 : Subtarget->hasAVX() ? X86::VCVTSS2SDrr : X86::CVTSS2SDrr; in X86SelectFPExt()
2507 : Subtarget->hasAVX() ? X86::VCVTSD2SSrr : X86::CVTSD2SSrr; in X86SelectFPTrunc()
2808 Subtarget->hasAVX() ? 1 : in fastLowerIntrinsicCall()
[all …]
199 bool hasAVX() const { return X86SSELevel >= AVX; } in hasAVX() function
288 bool HasAVX = Subtarget.hasAVX(); in getCalleeSavedRegs()
425 bool HasAVX = Subtarget.hasAVX(); in getCallPreservedMask()
665 if (ST.hasAVX()) in getNumSupportedRegs()
211 if (ST->hasAVX() && PreferVectorWidth >= 256) in getRegisterBitWidth()
240 if (ST->hasAVX()) in getMaxInterleaveFactor()
530 if (Op2Info.isUniform() && Op2Info.isConstant() && ST->hasAVX() && in getArithmeticInstrCost()
648 if (Op2Info.isConstant() && ST->hasAVX()) in getArithmeticInstrCost()
800 if (ST->hasAVX() && Op2Info.isUniform() && in getArithmeticInstrCost()
1072 ((VT == MVT::v16i16 || VT == MVT::v8i32) && ST->hasAVX())) in getArithmeticInstrCost()
1288 if (ST->hasAVX()) in getArithmeticInstrCost()
1558 (ST->hasAVX() && LT.second.getScalarSizeInBits() >= 32))) in getShuffleCost()
2036 if (ST->hasAVX()) in getShuffleCost()
3031 if (ST->hasAVX()) { in getCastInstrCost()
[all …]
154 CCIfSubtarget<"hasAVX()", CCAssignToReg<RC.YMM>>>,
230 CCIfSubtarget<"hasAVX()", CCAssignToReg<RC.YMM>>>,
602 CCIfSubtarget<"hasAVX()",
726 CCIfSubtarget<"hasAVX()",
801 CCIfSubtarget<"hasAVX()",
821 CCIfSubtarget<"hasAVX()",
398 unsigned StoreOpc = ST.hasAVX() ? X86::VMOVUPSmr : X86::MOVUPSmr; in runOnMachineFunction()
182 unsigned StoreOpc = ST->hasAVX() ? X86::VMOVUPSmr : X86::MOVUPSmr; in InitializeTileConfigStackSpace()
140 if (!Subtarget.hasAVX() || (Factor != 4 && Factor != 3)) in isSupported()
972 N->getOpcode() == ISD::ADD && Subtarget->hasAVX() && in PreprocessISelDAG()
6203 Subtarget->hasAVX() ? X86::VPCMPISTRMrri : X86::PCMPISTRMrri; in Select()
6205 Subtarget->hasAVX() ? X86::VPCMPISTRMrmi : X86::PCMPISTRMrmi; in Select()
6211 Subtarget->hasAVX() ? X86::VPCMPISTRIrri : X86::PCMPISTRIrri; in Select()
6213 Subtarget->hasAVX() ? X86::VPCMPISTRIrmi : X86::PCMPISTRIrmi; in Select()
6242 Subtarget->hasAVX() ? X86::VPCMPESTRMrri : X86::PCMPESTRMrri; in Select()
6244 Subtarget->hasAVX() ? X86::VPCMPESTRMrmi : X86::PCMPESTRMrmi; in Select()
6251 Subtarget->hasAVX() ? X86::VPCMPESTRIrri : X86::PCMPESTRIrri; in Select()
6253 Subtarget->hasAVX() ? X86::VPCMPESTRIrmi : X86::PCMPESTRIrmi; in Select()
293 if (Op.size() >= 32 && Subtarget.hasAVX() && in getOptimalMemOpType()
398 if (!!(Flags & MachineMemOperand::MOStore) && Subtarget.hasAVX()) in allowsMemoryAccess()
1623 else if (Subtarget.hasAVX()) in forwardMustTailParameters()
552 if (Subtarget.is64Bit() && Subtarget.hasAVX()) { in X86TargetLowering()
1413 if (!Subtarget.useSoftFloat() && Subtarget.hasAVX()) { in X86TargetLowering()
2649 if (!Subtarget.hasAVX() && !Subtarget.hasSSEUnalignedMem() && in mayFoldLoad()
2662 assert(Subtarget.hasAVX() && "Expected AVX for broadcast from memory"); in mayFoldLoadIntoBroadcastFromMem()
3151 return !IsFPSetCC || !Subtarget.isTarget64BitLP64() || !Subtarget.hasAVX(); in reduceSelectOfFPConstantLoads()
7108 if (ZeroMask.isZero() && isPowerOf2_32(NumElems) && Subtarget.hasAVX() && in EltsFromConsecutiveLoads()
7285 if (!Subtarget.hasAVX()) in lowerBuildVectorAsBroadcast()
8248 ((VT == MVT::v8f32 || VT == MVT::v4f64) && Subtarget.hasAVX()) || in LowerToHorizontalOp()
8257 if (!Subtarget.hasAVX() || !VT.is256BitVector()) in LowerToHorizontalOp()
8548 if (Subtarget.hasAVX()) { in createVariablePermute()
[all …]
761 unsigned MOVOpc = STI->hasAVX() ? X86::VMOVAPSmr : X86::MOVAPSmr; in expandVastartSaveXmmRegs()
4160 bool HasAVX = Subtarget.hasAVX(); in CopyToFromAsymmetricReg()
4243 bool HasAVX = Subtarget.hasAVX(); in copyPhysReg()
4343 : STI.hasAVX() ? X86::VMOVSSrm in getLoadStoreOpcodeForFP16()
4347 : STI.hasAVX() ? X86::VMOVSSmr in getLoadStoreOpcodeForFP16()
4355 bool HasAVX = STI.hasAVX(); in getLoadStoreRegOpcode()
6078 bool HasAVX = Subtarget.hasAVX(); in expandPostRAPseudo()
7042 unsigned Opc = Subtarget.hasAVX() ? X86::VXORPSrr : X86::XORPSrr; in breakPartialRegDependency()
10649 if (!ST.hasAVX()) in buildClearRegister()
454 bool HasAVX = STI.hasAVX(); in getLoadStoreOp()
1252 bool HasAVX = STI.hasAVX(); in selectExtract()
1385 bool HasAVX = STI.hasAVX(); in selectInsert()
39 bool HasAVX = Subtarget.hasAVX(); in X86LegalizerInfo()