Searched refs:NumLaneElts (Results 1 – 6 of 6) sorted by relevance
92   const unsigned NumLaneElts = 2;  in DecodeMOVDDUPMask() local
94   for (unsigned l = 0; l < NumElts; l += NumLaneElts)  in DecodeMOVDDUPMask()
95   for (unsigned i = 0; i < NumLaneElts; ++i)  in DecodeMOVDDUPMask()
101  const unsigned NumLaneElts = 16;  in DecodePSLLDQMask() local
103  for (unsigned l = 0; l < NumElts; l += NumLaneElts)  in DecodePSLLDQMask()
104  for (unsigned i = 0; i < NumLaneElts; ++i) {  in DecodePSLLDQMask()
113  const unsigned NumLaneElts = 16;  in DecodePSRLDQMask() local
115  for (unsigned l = 0; l < NumElts; l += NumLaneElts)  in DecodePSRLDQMask()
116  for (unsigned i = 0; i < NumLaneElts; ++i) {  in DecodePSRLDQMask()
119  if (Base >= NumLaneElts) M = SM_SentinelZero;  in DecodePSRLDQMask()
[all …]
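
All three decoders above follow the same pattern: walk the vector one 128-bit lane at a time and push lane-relative indices (or a zero sentinel) into the shuffle mask. Below is a minimal standalone sketch of that pattern for the MOVDDUP case only; it is not the LLVM code itself, it uses std::vector in place of SmallVectorImpl, and decodeMOVDDUPMaskSketch is an invented name.

#include <cstdio>
#include <vector>

// Sketch only: per-lane shuffle-mask decoding in the style of the decoders
// above, using std::vector instead of LLVM's SmallVectorImpl.  MOVDDUP holds
// two doubles per 128-bit lane and duplicates the low one.
static void decodeMOVDDUPMaskSketch(unsigned NumElts, std::vector<int> &Mask) {
  const unsigned NumLaneElts = 2;                 // 2 x f64 per 128-bit lane
  for (unsigned l = 0; l < NumElts; l += NumLaneElts)
    for (unsigned i = 0; i < NumLaneElts; ++i)
      Mask.push_back(l);                          // both outputs read the lane's low element
}

int main() {
  std::vector<int> Mask;
  decodeMOVDDUPMaskSketch(/*v4f64*/ 4, Mask);
  for (int M : Mask)
    std::printf("%d ", M);                        // prints: 0 0 2 2
  std::printf("\n");
  return 0;
}
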
478  unsigned NumLaneElts = NumElts / NumLanes;  in DecodePALIGNRMask() local
480  Imm = AlignDirection ? Imm : (NumLaneElts - Imm);  in DecodePALIGNRMask()
483  for (unsigned l = 0; l != NumElts; l += NumLaneElts) {  in DecodePALIGNRMask()
484  for (unsigned i = 0; i != NumLaneElts; ++i) {  in DecodePALIGNRMask()
488  if (Base >= NumLaneElts)  in DecodePALIGNRMask()
489  Base = Unary ? Base % NumLaneElts : Base + NumElts - NumLaneElts;  in DecodePALIGNRMask()
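
As a simplified illustration of the binary, right-aligning case of this decoder: PALIGNR concatenates the matching 128-bit lanes of its two sources and shifts right by Imm bytes, so an index that runs past the end of a lane wraps into the second operand (encoded as an index offset by NumElts). The sketch below omits the AlignDirection/Unary handling shown above and uses an invented helper name.

#include <cstdio>
#include <vector>

// Sketch only: binary right-aligning PALIGNR-style decode.  Element i of a
// lane reads byte i + Imm of the concatenated lane pair; once that runs past
// the lane it comes from the second operand, encoded as an index >= NumElts.
static void decodePALIGNRSketch(unsigned NumElts, unsigned NumLanes,
                                unsigned Imm, std::vector<int> &Mask) {
  unsigned NumLaneElts = NumElts / NumLanes;
  for (unsigned l = 0; l != NumElts; l += NumLaneElts)
    for (unsigned i = 0; i != NumLaneElts; ++i) {
      unsigned Base = i + Imm;
      if (Base >= NumLaneElts)                    // wrapped into the other source
        Base = Base + NumElts - NumLaneElts;
      Mask.push_back(Base + l);
    }
}

int main() {
  std::vector<int> Mask;
  decodePALIGNRSketch(/*v16i8*/ 16, /*NumLanes*/ 1, /*Imm*/ 4, Mask);
  for (int M : Mask)
    std::printf("%d ", M);                        // prints: 4 5 ... 15 16 17 18 19
  std::printf("\n");
  return 0;
}
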
2038  unsigned NumLaneElts = IsPD ? 2 : 4;  in simplifyX86vpermilvar() local
2066  Index += APInt(32, (I / NumLaneElts) * NumLaneElts);  in simplifyX86vpermilvar()
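
The addition at line 2066 rebases a variable VPERMILPS/PD selector into the 128-bit lane of the element being produced. Below is a small standalone sketch of that arithmetic for the PS case only; it is not the InstCombine code itself, and effectiveVpermilpsIndex is an invented name.

#include <cstdio>

// Sketch only (PS case): a VPERMILPS control value can only select within its
// own 128-bit lane, so the selector is reduced to a lane-relative index and
// then rebased to the start of the lane that output element I lives in.
static unsigned effectiveVpermilpsIndex(unsigned I, unsigned Control) {
  const unsigned NumLaneElts = 4;                 // four f32 elements per 128-bit lane
  unsigned Index = Control % NumLaneElts;         // lane-relative selection
  return Index + (I / NumLaneElts) * NumLaneElts; // rebase into element I's lane
}

int main() {
  // v8f32: output element 5 (second lane) with control 2 reads input element 6.
  std::printf("%u\n", effectiveVpermilpsIndex(5, 2));   // prints: 6
  return 0;
}
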
10851  int NumLaneElts = NumElts / NumLanes;  in lowerShuffleAsUNPCKAndPermute() local
10852  int NumHalfLaneElts = NumLaneElts / 2;  in lowerShuffleAsUNPCKAndPermute()
10875  for (int Lane = 0; Lane != NumElts; Lane += NumLaneElts) {  in lowerShuffleAsUNPCKAndPermute()
10876  int Lo = Lane, Mid = Lane + NumHalfLaneElts, Hi = Lane + NumLaneElts;  in lowerShuffleAsUNPCKAndPermute()
10905  NumLaneElts * (NormM / NumLaneElts) + (2 * (NormM % NumHalfLaneElts));  in lowerShuffleAsUNPCKAndPermute()
15070  int NumLaneElts = 128 / VT.getScalarSizeInBits();  in lowerShuffleAsLanePermuteAndRepeatedMask() local
15071  SmallVector<int, 16> RepeatMask(NumLaneElts, -1);  in lowerShuffleAsLanePermuteAndRepeatedMask()
15078  SmallVector<int, 16> InLaneMask(NumLaneElts, -1);  in lowerShuffleAsLanePermuteAndRepeatedMask()
15079  for (int i = 0; i != NumLaneElts; ++i) {  in lowerShuffleAsLanePermuteAndRepeatedMask()
15080  int M = Mask[(Lane * NumLaneElts) + i];  in lowerShuffleAsLanePermuteAndRepeatedMask()
[all …]
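
Both lowering routines only apply when the shuffle can be described by a mask that repeats in every 128-bit lane (possibly after moving or unpacking whole lanes), which is what RepeatMask and InLaneMask accumulate above. The sketch below is a deliberately simplified version of such a repetition test: it ignores the two-operand and cross-lane bookkeeping of the real code, checks only that every lane applies the same lane-relative pattern, and uses an invented helper name.

#include <cstddef>
#include <vector>

// Sketch only: simplified "does this mask repeat per 128-bit lane?" test.
// Negative entries are treated as undef and match anything.
static bool isRepeatedLaneMaskSketch(const std::vector<int> &Mask,
                                     unsigned NumLaneElts,
                                     std::vector<int> &RepeatMask) {
  RepeatMask.assign(NumLaneElts, -1);
  for (std::size_t i = 0, e = Mask.size(); i != e; ++i) {
    int M = Mask[i];
    if (M < 0)
      continue;                                   // undef matches anything
    int LaneRelM = M % static_cast<int>(NumLaneElts);
    int &R = RepeatMask[i % NumLaneElts];
    if (R < 0)
      R = LaneRelM;                               // first definition of this slot
    else if (R != LaneRelM)
      return false;                               // lanes disagree -> not repeated
  }
  return true;
}

For a v8f32 mask {1,0,3,2, 5,4,7,6} this sketch returns true with RepeatMask = {1,0,3,2}; for {1,0,3,2, 4,5,6,7} it returns false, since the second lane does not follow the first lane's pattern.
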
3129  unsigned NumLaneElts = 128 / CI->getType()->getScalarSizeInBits();  in upgradeX86IntrinsicCall() local
3130  unsigned HalfLaneElts = NumLaneElts / 2;  in upgradeX86IntrinsicCall()
3135  Idxs[i] = i - (i % NumLaneElts);  in upgradeX86IntrinsicCall()
3137  if ((i % NumLaneElts) >= HalfLaneElts)  in upgradeX86IntrinsicCall()
3153  unsigned NumLaneElts = 128 / CI->getType()->getScalarSizeInBits();  in upgradeX86IntrinsicCall() local
3160  for (unsigned l = 0; l != NumElts; l += NumLaneElts)  in upgradeX86IntrinsicCall()
3161  for (unsigned i = 0; i != NumLaneElts; i += 2) {  in upgradeX86IntrinsicCall()
3175  int NumLaneElts = 128 / CI->getType()->getScalarSizeInBits();  in upgradeX86IntrinsicCall() local
3178  for (int l = 0; l != NumElts; l += NumLaneElts)  in upgradeX86IntrinsicCall()
3179  for (int i = 0; i != NumLaneElts; ++i)  in upgradeX86IntrinsicCall()
[all …]
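
All three excerpts build constant index lists lane by lane so that a legacy shuffle intrinsic can be rewritten as a plain shufflevector. The sketch below shows that overall shape; it is not the AutoUpgrade code itself, emitLaneWiseShuffleSketch is an invented name, and the per-lane unpack-low interleave is an assumed example formula, not necessarily the one any of these handlers uses.

#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/IRBuilder.h"

using namespace llvm;

// Sketch only: compute a constant index list one 128-bit lane at a time,
// then emit a shufflevector of the two operands.  The index formula here is
// a per-lane unpack-low interleave of Op0 and Op1, chosen as one concrete
// example; real handlers pick a different formula for each intrinsic.
static Value *emitLaneWiseShuffleSketch(IRBuilder<> &Builder, Value *Op0,
                                        Value *Op1) {
  auto *VT = cast<FixedVectorType>(Op0->getType());
  unsigned NumElts = VT->getNumElements();
  unsigned NumLaneElts = 128 / VT->getScalarSizeInBits();

  SmallVector<int, 16> Idxs(NumElts);
  for (unsigned l = 0; l != NumElts; l += NumLaneElts)
    for (unsigned i = 0; i != NumLaneElts; ++i)
      Idxs[i + l] = l + (i / 2) + NumElts * (i % 2);  // even -> Op0, odd -> Op1

  return Builder.CreateShuffleVector(Op0, Op1, Idxs);
}

For a pair of v4f32 operands this produces the index list {0, 4, 1, 5}, i.e. an unpcklps-style interleave; on v8f32 the same formula yields {0, 8, 1, 9, 4, 12, 5, 13}, repeating the pattern in each 128-bit lane.
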
15261  unsigned NumLaneElts = NumElts / NumLanes;  in EmitX86BuiltinExpr() local
15267  for (unsigned l = 0; l != NumElts; l += NumLaneElts) {  in EmitX86BuiltinExpr()
15268  for (unsigned i = 0; i != NumLaneElts; ++i) {  in EmitX86BuiltinExpr()
15269  Indices[i + l] = (Imm % NumLaneElts) + l;  in EmitX86BuiltinExpr()
15270  Imm /= NumLaneElts;  in EmitX86BuiltinExpr()
15287  unsigned NumLaneElts = NumElts / NumLanes;  in EmitX86BuiltinExpr() local
15293  for (unsigned l = 0; l != NumElts; l += NumLaneElts) {  in EmitX86BuiltinExpr()
15294  for (unsigned i = 0; i != NumLaneElts; ++i) {  in EmitX86BuiltinExpr()
15295  unsigned Index = Imm % NumLaneElts;  in EmitX86BuiltinExpr()
15296  Imm /= NumLaneElts;  in EmitX86BuiltinExpr()
[all …]
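
Both excerpts decode an 8-bit shuffle immediate into per-lane indices: each 2-bit field of the immediate selects one lane-relative element, and the same selectors are applied to every 128-bit lane. The standalone sketch below restarts the immediate for each lane rather than carrying it across lanes as the snippet above does, but the per-lane repetition it illustrates is the same idea; decodePSHUFImmSketch is an invented name.

#include <cstdio>
#include <vector>

// Sketch only: decode a PSHUFD-style immediate into per-lane shuffle indices.
// Each 2-bit field of Imm selects one lane-relative dword, and every 128-bit
// lane applies the same four selectors.
static void decodePSHUFImmSketch(unsigned NumElts, unsigned NumLaneElts,
                                 unsigned Imm, std::vector<int> &Indices) {
  Indices.assign(NumElts, 0);
  for (unsigned l = 0; l != NumElts; l += NumLaneElts) {
    unsigned I = Imm;                             // restart the selectors for each lane
    for (unsigned i = 0; i != NumLaneElts; ++i) {
      Indices[i + l] = (I % NumLaneElts) + l;     // lane-relative pick, rebased to lane l
      I /= NumLaneElts;                           // move on to the next field
    }
  }
}

int main() {
  std::vector<int> Indices;
  decodePSHUFImmSketch(/*v8i32*/ 8, /*NumLaneElts*/ 4, /*Imm*/ 0x1B, Indices);
  for (int Idx : Indices)
    std::printf("%d ", Idx);                      // prints: 3 2 1 0 7 6 5 4
  std::printf("\n");
  return 0;
}
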