//===-- X86InstrFragmentsSIMD.td - x86 SIMD ISA ------------*- tablegen -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file provides pattern fragments useful for SIMD instructions.
//
//===----------------------------------------------------------------------===//

//===----------------------------------------------------------------------===//
// MMX specific DAG Nodes.
//===----------------------------------------------------------------------===//

// Low word of MMX to GPR.
def MMX_X86movd2w : SDNode<"X86ISD::MMX_MOVD2W", SDTypeProfile<1, 1,
                            [SDTCisVT<0, i32>, SDTCisVT<1, x86mmx>]>>;
// GPR to low word of MMX.
def MMX_X86movw2d : SDNode<"X86ISD::MMX_MOVW2D", SDTypeProfile<1, 1,
                            [SDTCisVT<0, x86mmx>, SDTCisVT<1, i32>]>>;

//===----------------------------------------------------------------------===//
// MMX Pattern Fragments
//===----------------------------------------------------------------------===//

def load_mmx : PatFrag<(ops node:$ptr), (x86mmx (load node:$ptr))>;

//===----------------------------------------------------------------------===//
// SSE specific DAG Nodes.
//===----------------------------------------------------------------------===//

def SDTX86VFCMP : SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
                                       SDTCisSameAs<0, 1>, SDTCisSameAs<1, 2>,
                                       SDTCisVT<3, i8>]>;

def X86fmin   : SDNode<"X86ISD::FMIN",  SDTFPBinOp>;
def X86fmax   : SDNode<"X86ISD::FMAX",  SDTFPBinOp>;
def X86fmins  : SDNode<"X86ISD::FMINS", SDTFPBinOp>;
def X86fmaxs  : SDNode<"X86ISD::FMAXS", SDTFPBinOp>;

// Commutative and Associative FMIN and FMAX.
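// (The plain FMIN/FMAX nodes above follow x86 MINPS/MAXPS semantics, where the
// result depends on operand order for NaN and signed-zero inputs, so they are
// not commutative; these *C variants are used when the operand order is known
// not to matter.)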
def X86fminc  : SDNode<"X86ISD::FMINC", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;
def X86fmaxc  : SDNode<"X86ISD::FMAXC", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;

def X86fand   : SDNode<"X86ISD::FAND", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;
def X86for    : SDNode<"X86ISD::FOR", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;
def X86fxor   : SDNode<"X86ISD::FXOR", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;
def X86fandn  : SDNode<"X86ISD::FANDN", SDTFPBinOp>;
def X86frsqrt : SDNode<"X86ISD::FRSQRT", SDTFPUnaryOp>;
def X86frcp   : SDNode<"X86ISD::FRCP", SDTFPUnaryOp>;
def X86fhadd  : SDNode<"X86ISD::FHADD", SDTFPBinOp>;
def X86fhsub  : SDNode<"X86ISD::FHSUB", SDTFPBinOp>;
def X86hadd   : SDNode<"X86ISD::HADD", SDTIntBinOp>;
def X86hsub   : SDNode<"X86ISD::HSUB", SDTIntBinOp>;
def X86comi   : SDNode<"X86ISD::COMI", SDTX86FCmp>;
def X86ucomi  : SDNode<"X86ISD::UCOMI", SDTX86FCmp>;

def SDTX86Cmps : SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisSameAs<0, 1>,
                                      SDTCisSameAs<1, 2>, SDTCisVT<3, i8>]>;
def X86cmps : SDNode<"X86ISD::FSETCC", SDTX86Cmps>;

def X86pshufb : SDNode<"X86ISD::PSHUFB",
                 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i8>, SDTCisSameAs<0,1>,
                                      SDTCisSameAs<0,2>]>>;
def X86psadbw : SDNode<"X86ISD::PSADBW",
                  SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
                                       SDTCVecEltisVT<1, i8>,
                                       SDTCisSameSizeAs<0,1>,
                                       SDTCisSameAs<1,2>]>, [SDNPCommutative]>;
def X86dbpsadbw : SDNode<"X86ISD::DBPSADBW",
                  SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i16>,
                                       SDTCVecEltisVT<1, i8>,
                                       SDTCisSameSizeAs<0,1>,
                                       SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>>;
def X86andnp : SDNode<"X86ISD::ANDNP",
                 SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                      SDTCisSameAs<0,2>]>>;
def X86multishift : SDNode<"X86ISD::MULTISHIFT",
                    SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                         SDTCisSameAs<1,2>]>>;
def X86pextrb : SDNode<"X86ISD::PEXTRB",
                 SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisVT<1, v16i8>,
                                      SDTCisVT<2, i8>]>>;
def X86pextrw : SDNode<"X86ISD::PEXTRW",
                 SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisVT<1, v8i16>,
                                      SDTCisVT<2, i8>]>>;
def X86pinsrb : SDNode<"X86ISD::PINSRB",
                 SDTypeProfile<1, 3, [SDTCisVT<0, v16i8>, SDTCisSameAs<0,1>,
                                      SDTCisVT<2, i32>, SDTCisVT<3, i8>]>>;
def X86pinsrw : SDNode<"X86ISD::PINSRW",
                 SDTypeProfile<1, 3, [SDTCisVT<0, v8i16>, SDTCisSameAs<0,1>,
                                      SDTCisVT<2, i32>, SDTCisVT<3, i8>]>>;
def X86insertps : SDNode<"X86ISD::INSERTPS",
                 SDTypeProfile<1, 3, [SDTCisVT<0, v4f32>, SDTCisSameAs<0,1>,
                                      SDTCisVT<2, v4f32>, SDTCisVT<3, i8>]>>;
def X86vzmovl : SDNode<"X86ISD::VZEXT_MOVL",
                 SDTypeProfile<1, 1, [SDTCisSameAs<0,1>]>>;

def X86vzld : SDNode<"X86ISD::VZEXT_LOAD", SDTLoad,
                     [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
def X86vextractst : SDNode<"X86ISD::VEXTRACT_STORE", SDTStore,
                           [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
def X86VBroadcastld : SDNode<"X86ISD::VBROADCAST_LOAD", SDTLoad,
                             [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
def X86SubVBroadcastld : SDNode<"X86ISD::SUBV_BROADCAST_LOAD", SDTLoad,
                                [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;

def SDTVtrunc  : SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                      SDTCisInt<0>, SDTCisInt<1>,
                                      SDTCisOpSmallerThanOp<0, 1>]>;
def SDTVmtrunc : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
                                      SDTCisInt<0>, SDTCisInt<1>,
                                      SDTCisOpSmallerThanOp<0, 1>,
                                      SDTCisSameAs<0, 2>,
                                      SDTCVecEltisVT<3, i1>,
                                      SDTCisSameNumEltsAs<1, 3>]>;

def X86vtrunc    : SDNode<"X86ISD::VTRUNC",    SDTVtrunc>;
def X86vtruncs   : SDNode<"X86ISD::VTRUNCS",   SDTVtrunc>;
def X86vtruncus  : SDNode<"X86ISD::VTRUNCUS",  SDTVtrunc>;
def X86vmtrunc   : SDNode<"X86ISD::VMTRUNC",   SDTVmtrunc>;
def X86vmtruncs  : SDNode<"X86ISD::VMTRUNCS",  SDTVmtrunc>;
def X86vmtruncus : SDNode<"X86ISD::VMTRUNCUS", SDTVmtrunc>;
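
// Convention used throughout the rest of this file: X86strict_* nodes are the
// chained (constrained-FP) forms of their unchained counterparts, and the
// X86any_* PatFrags match either form so that a single instruction pattern can
// cover both.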

def X86vfpext : SDNode<"X86ISD::VFPEXT",
                       SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
                                            SDTCisFP<1>, SDTCisVec<1>]>>;

def X86strict_vfpext : SDNode<"X86ISD::STRICT_VFPEXT",
                              SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
                                                   SDTCisFP<1>, SDTCisVec<1>]>,
                              [SDNPHasChain]>;

def X86any_vfpext : PatFrags<(ops node:$src),
                             [(X86strict_vfpext node:$src),
                              (X86vfpext node:$src)]>;

def X86vfpround: SDNode<"X86ISD::VFPROUND",
                        SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
                                             SDTCisFP<1>, SDTCisVec<1>,
                                             SDTCisOpSmallerThanOp<0, 1>]>>;

def X86strict_vfpround: SDNode<"X86ISD::STRICT_VFPROUND",
                               SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
                                                    SDTCisFP<1>, SDTCisVec<1>,
                                                    SDTCisOpSmallerThanOp<0, 1>]>,
                               [SDNPHasChain]>;

def X86any_vfpround : PatFrags<(ops node:$src),
                               [(X86strict_vfpround node:$src),
                                (X86vfpround node:$src)]>;

def X86frounds : SDNode<"X86ISD::VFPROUNDS",
                        SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
                                             SDTCisSameAs<0, 1>,
                                             SDTCisFP<2>, SDTCisVec<2>,
                                             SDTCisSameSizeAs<0, 2>]>>;

def X86froundsRnd: SDNode<"X86ISD::VFPROUNDS_RND",
                          SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
                                               SDTCisSameAs<0, 1>,
                                               SDTCisFP<2>, SDTCisVec<2>,
                                               SDTCisSameSizeAs<0, 2>,
                                               SDTCisVT<3, i32>]>>;

def X86fpexts : SDNode<"X86ISD::VFPEXTS",
                       SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
                                            SDTCisSameAs<0, 1>,
                                            SDTCisFP<2>, SDTCisVec<2>,
                                            SDTCisSameSizeAs<0, 2>]>>;
def X86fpextsSAE : SDNode<"X86ISD::VFPEXTS_SAE",
                          SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
                                               SDTCisSameAs<0, 1>,
                                               SDTCisFP<2>, SDTCisVec<2>,
                                               SDTCisSameSizeAs<0, 2>]>>;

def X86vmfpround: SDNode<"X86ISD::VMFPROUND",
                         SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
                                              SDTCisFP<1>, SDTCisVec<1>,
                                              SDTCisSameAs<0, 2>,
                                              SDTCVecEltisVT<3, i1>,
                                              SDTCisSameNumEltsAs<1, 3>]>>;

def X86vshiftimm : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                        SDTCisVT<2, i8>, SDTCisInt<0>]>;

def X86vshldq : SDNode<"X86ISD::VSHLDQ", X86vshiftimm>;
def X86vshrdq : SDNode<"X86ISD::VSRLDQ", X86vshiftimm>;
def X86pcmpeq : SDNode<"X86ISD::PCMPEQ", SDTIntBinOp, [SDNPCommutative]>;
def X86pcmpgt : SDNode<"X86ISD::PCMPGT", SDTIntBinOp>;

def X86cmpp : SDNode<"X86ISD::CMPP", SDTX86VFCMP>;
def X86strict_cmpp : SDNode<"X86ISD::STRICT_CMPP", SDTX86VFCMP, [SDNPHasChain]>;
def X86any_cmpp : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                           [(X86strict_cmpp node:$src1, node:$src2, node:$src3),
                            (X86cmpp node:$src1, node:$src2, node:$src3)]>;

def X86CmpMaskCC :
      SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCVecEltisVT<0, i1>,
                           SDTCisVec<1>, SDTCisSameAs<2, 1>,
                           SDTCisSameNumEltsAs<0, 1>, SDTCisVT<3, i8>]>;
def X86MaskCmpMaskCC :
      SDTypeProfile<1, 4, [SDTCisVec<0>, SDTCVecEltisVT<0, i1>,
                           SDTCisVec<1>, SDTCisSameAs<2, 1>,
                           SDTCisSameNumEltsAs<0, 1>, SDTCisVT<3, i8>,
                           SDTCisSameAs<4, 0>]>;
def X86CmpMaskCCScalar :
      SDTypeProfile<1, 3, [SDTCisInt<0>, SDTCisFP<1>, SDTCisSameAs<1, 2>,
                           SDTCisVT<3, i8>]>;
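
// X86CmpMaskCC produces a vXi1 mask with one result bit per compared element;
// X86MaskCmpMaskCC additionally takes a vXi1 operand, used by the masked
// compare intrinsic forms (CMPMM/CMPMM_SAE below); X86CmpMaskCCScalar is the
// scalar variant used by FSETCCM.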

def X86cmpm     : SDNode<"X86ISD::CMPM",  X86CmpMaskCC>;
def X86cmpmm    : SDNode<"X86ISD::CMPMM", X86MaskCmpMaskCC>;
def X86strict_cmpm : SDNode<"X86ISD::STRICT_CMPM", X86CmpMaskCC, [SDNPHasChain]>;
def X86any_cmpm : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                           [(X86strict_cmpm node:$src1, node:$src2, node:$src3),
                            (X86cmpm node:$src1, node:$src2, node:$src3)]>;
def X86cmpmmSAE : SDNode<"X86ISD::CMPMM_SAE", X86MaskCmpMaskCC>;
def X86cmpms    : SDNode<"X86ISD::FSETCCM", X86CmpMaskCCScalar>;
def X86cmpmsSAE : SDNode<"X86ISD::FSETCCM_SAE", X86CmpMaskCCScalar>;

def X86phminpos: SDNode<"X86ISD::PHMINPOS",
                 SDTypeProfile<1, 1, [SDTCisVT<0, v8i16>, SDTCisVT<1, v8i16>]>>;

def X86vshiftuniform : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                            SDTCisVec<2>, SDTCisInt<0>,
                                            SDTCisInt<2>]>;

def X86vshl : SDNode<"X86ISD::VSHL", X86vshiftuniform>;
def X86vsrl : SDNode<"X86ISD::VSRL", X86vshiftuniform>;
def X86vsra : SDNode<"X86ISD::VSRA", X86vshiftuniform>;

def X86vshiftvariable : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                             SDTCisSameAs<0,2>, SDTCisInt<0>]>;

def X86vshlv : SDNode<"X86ISD::VSHLV", X86vshiftvariable>;
def X86vsrlv : SDNode<"X86ISD::VSRLV", X86vshiftvariable>;
def X86vsrav : SDNode<"X86ISD::VSRAV", X86vshiftvariable>;

def X86vshli : SDNode<"X86ISD::VSHLI", X86vshiftimm>;
def X86vsrli : SDNode<"X86ISD::VSRLI", X86vshiftimm>;
def X86vsrai : SDNode<"X86ISD::VSRAI", X86vshiftimm>;

def X86kshiftl : SDNode<"X86ISD::KSHIFTL",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
                                             SDTCisSameAs<0, 1>,
                                             SDTCisVT<2, i8>]>>;
def X86kshiftr : SDNode<"X86ISD::KSHIFTR",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
                                             SDTCisSameAs<0, 1>,
                                             SDTCisVT<2, i8>]>>;

def X86kadd : SDNode<"X86ISD::KADD", SDTIntBinOp, [SDNPCommutative]>;

def X86vrotli : SDNode<"X86ISD::VROTLI", X86vshiftimm>;
def X86vrotri : SDNode<"X86ISD::VROTRI", X86vshiftimm>;

def X86vpshl : SDNode<"X86ISD::VPSHL", X86vshiftvariable>;
def X86vpsha : SDNode<"X86ISD::VPSHA", X86vshiftvariable>;

def X86vpcom : SDNode<"X86ISD::VPCOM",
                      SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                           SDTCisSameAs<0,2>,
                                           SDTCisVT<3, i8>, SDTCisInt<0>]>>;
def X86vpcomu : SDNode<"X86ISD::VPCOMU",
                       SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>,
                                            SDTCisVT<3, i8>, SDTCisInt<0>]>>;
def X86vpermil2 : SDNode<"X86ISD::VPERMIL2",
                         SDTypeProfile<1, 4, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                              SDTCisSameAs<0,2>,
                                              SDTCisFP<0>, SDTCisInt<3>,
                                              SDTCisSameNumEltsAs<0, 3>,
                                              SDTCisSameSizeAs<0,3>,
                                              SDTCisVT<4, i8>]>>;
def X86vpperm : SDNode<"X86ISD::VPPERM",
                 SDTypeProfile<1, 3, [SDTCisVT<0, v16i8>, SDTCisSameAs<0,1>,
                                      SDTCisSameAs<0,2>, SDTCisSameAs<0, 3>]>>;

def SDTX86CmpPTest : SDTypeProfile<1, 2, [SDTCisVT<0, i32>,
                                          SDTCisVec<1>,
                                          SDTCisSameAs<2, 1>]>;

def X86mulhrs  : SDNode<"X86ISD::MULHRS", SDTIntBinOp, [SDNPCommutative]>;
def X86ptest   : SDNode<"X86ISD::PTEST", SDTX86CmpPTest>;
def X86testp   : SDNode<"X86ISD::TESTP", SDTX86CmpPTest>;
def X86kortest : SDNode<"X86ISD::KORTEST", SDTX86CmpPTest>;
def X86ktest   : SDNode<"X86ISD::KTEST", SDTX86CmpPTest>;

def X86movmsk : SDNode<"X86ISD::MOVMSK",
                       SDTypeProfile<1, 1, [SDTCisVT<0, i32>, SDTCisVec<1>]>>;

def X86selects : SDNode<"X86ISD::SELECTS",
                        SDTypeProfile<1, 3, [SDTCisVT<1, v1i1>,
                                             SDTCisSameAs<0, 2>,
                                             SDTCisSameAs<2, 3>]>>;
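
// PMULUDQ/PMULDQ multiply the low 32 bits of each 64-bit element (zero- and
// sign-extended, respectively), producing full 64-bit products.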

def X86pmuludq : SDNode<"X86ISD::PMULUDQ",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
                                             SDTCisSameAs<0,1>,
                                             SDTCisSameAs<1,2>]>,
                        [SDNPCommutative]>;
def X86pmuldq  : SDNode<"X86ISD::PMULDQ",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
                                             SDTCisSameAs<0,1>,
                                             SDTCisSameAs<1,2>]>,
                        [SDNPCommutative]>;

def X86extrqi : SDNode<"X86ISD::EXTRQI",
                  SDTypeProfile<1, 3, [SDTCisVT<0, v2i64>, SDTCisSameAs<0,1>,
                                       SDTCisVT<2, i8>, SDTCisVT<3, i8>]>>;
def X86insertqi : SDNode<"X86ISD::INSERTQI",
                    SDTypeProfile<1, 4, [SDTCisVT<0, v2i64>, SDTCisSameAs<0,1>,
                                         SDTCisSameAs<1,2>, SDTCisVT<3, i8>,
                                         SDTCisVT<4, i8>]>>;

// Specific shuffle nodes - At some point ISD::VECTOR_SHUFFLE will always get
// translated into one of the target nodes below during lowering.
// Note: this is a work in progress...
def SDTShuff1Op : SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisSameAs<0,1>]>;
def SDTShuff2Op : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                       SDTCisSameAs<0,2>]>;
def SDTShuff2OpFP : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisFP<0>,
                                         SDTCisSameAs<0,1>, SDTCisSameAs<0,2>]>;

def SDTShuff2OpM : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                        SDTCisFP<0>, SDTCisInt<2>,
                                        SDTCisSameNumEltsAs<0,2>,
                                        SDTCisSameSizeAs<0,2>]>;
def SDTShuff2OpI : SDTypeProfile<1, 2, [SDTCisVec<0>,
                                        SDTCisSameAs<0,1>, SDTCisVT<2, i8>]>;
def SDTShuff3OpI : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                        SDTCisSameAs<0,2>, SDTCisVT<3, i8>]>;
def SDTFPBinOpImm: SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
                                        SDTCisSameAs<0,1>,
                                        SDTCisSameAs<0,2>,
                                        SDTCisVT<3, i32>]>;
def SDTFPTernaryOpImm: SDTypeProfile<1, 4, [SDTCisFP<0>, SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>,
                                            SDTCisInt<3>,
                                            SDTCisSameSizeAs<0, 3>,
                                            SDTCisSameNumEltsAs<0, 3>,
                                            SDTCisVT<4, i32>]>;
def SDTFPUnaryOpImm: SDTypeProfile<1, 2, [SDTCisFP<0>,
                                          SDTCisSameAs<0,1>,
                                          SDTCisVT<2, i32>]>;

def SDTVBroadcast  : SDTypeProfile<1, 1, [SDTCisVec<0>]>;
def SDTVBroadcastm : SDTypeProfile<1, 1, [SDTCisVec<0>,
                                          SDTCisInt<0>, SDTCisInt<1>]>;

def SDTBlend : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                    SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>;

def SDTTernlog : SDTypeProfile<1, 4, [SDTCisInt<0>, SDTCisVec<0>,
                                      SDTCisSameAs<0,1>, SDTCisSameAs<0,2>,
                                      SDTCisSameAs<0,3>, SDTCisVT<4, i8>]>;
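
// The *Round profiles below add a trailing i32 operand that carries an
// explicit (AVX-512 embedded) rounding mode; nodes suffixed _SAE elsewhere in
// this file are the corresponding suppress-all-exceptions forms.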

def SDTFPBinOpRound : SDTypeProfile<1, 3, [  // fadd_round, fmul_round, etc.
  SDTCisSameAs<0, 1>, SDTCisSameAs<0, 2>, SDTCisFP<0>, SDTCisVT<3, i32>]>;

def SDTFPUnaryOpRound : SDTypeProfile<1, 2, [  // fsqrt_round, fgetexp_round, etc.
  SDTCisSameAs<0, 1>, SDTCisFP<0>, SDTCisVT<2, i32>]>;

def SDTFmaRound : SDTypeProfile<1, 4, [SDTCisSameAs<0,1>,
                                       SDTCisSameAs<1,2>, SDTCisSameAs<1,3>,
                                       SDTCisFP<0>, SDTCisVT<4, i32>]>;

def X86PAlignr : SDNode<"X86ISD::PALIGNR",
                        SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i8>,
                                             SDTCisSameAs<0,1>,
                                             SDTCisSameAs<0,2>,
                                             SDTCisVT<3, i8>]>>;
def X86VAlign : SDNode<"X86ISD::VALIGN", SDTShuff3OpI>;

def X86VShld : SDNode<"X86ISD::VSHLD", SDTShuff3OpI>;
def X86VShrd : SDNode<"X86ISD::VSHRD", SDTShuff3OpI>;
def X86VShldv : SDNode<"X86ISD::VSHLDV",
                       SDTypeProfile<1, 3, [SDTCisVec<0>,
                                            SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>,
                                            SDTCisSameAs<0,3>]>>;
def X86VShrdv : SDNode<"X86ISD::VSHRDV",
                       SDTypeProfile<1, 3, [SDTCisVec<0>,
                                            SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>,
                                            SDTCisSameAs<0,3>]>>;

def X86Conflict : SDNode<"X86ISD::CONFLICT", SDTIntUnaryOp>;

def X86PShufd  : SDNode<"X86ISD::PSHUFD", SDTShuff2OpI>;
def X86PShufhw : SDNode<"X86ISD::PSHUFHW", SDTShuff2OpI>;
def X86PShuflw : SDNode<"X86ISD::PSHUFLW", SDTShuff2OpI>;

def X86Shufp   : SDNode<"X86ISD::SHUFP", SDTShuff3OpI>;
def X86Shuf128 : SDNode<"X86ISD::SHUF128", SDTShuff3OpI>;

def X86Movddup  : SDNode<"X86ISD::MOVDDUP", SDTShuff1Op>;
def X86Movshdup : SDNode<"X86ISD::MOVSHDUP", SDTShuff1Op>;
def X86Movsldup : SDNode<"X86ISD::MOVSLDUP", SDTShuff1Op>;

def X86Movsd : SDNode<"X86ISD::MOVSD",
                      SDTypeProfile<1, 2, [SDTCisVT<0, v2f64>,
                                           SDTCisVT<1, v2f64>,
                                           SDTCisVT<2, v2f64>]>>;
def X86Movss : SDNode<"X86ISD::MOVSS",
                      SDTypeProfile<1, 2, [SDTCisVT<0, v4f32>,
                                           SDTCisVT<1, v4f32>,
                                           SDTCisVT<2, v4f32>]>>;

def X86Movsh : SDNode<"X86ISD::MOVSH",
                      SDTypeProfile<1, 2, [SDTCisVT<0, v8f16>,
                                           SDTCisVT<1, v8f16>,
                                           SDTCisVT<2, v8f16>]>>;

def X86Movlhps : SDNode<"X86ISD::MOVLHPS",
                        SDTypeProfile<1, 2, [SDTCisVT<0, v4f32>,
                                             SDTCisVT<1, v4f32>,
                                             SDTCisVT<2, v4f32>]>>;
def X86Movhlps : SDNode<"X86ISD::MOVHLPS",
                        SDTypeProfile<1, 2, [SDTCisVT<0, v4f32>,
                                             SDTCisVT<1, v4f32>,
                                             SDTCisVT<2, v4f32>]>>;

def SDTPack : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisInt<0>,
                                   SDTCisVec<1>, SDTCisInt<1>,
                                   SDTCisSameSizeAs<0,1>,
                                   SDTCisSameAs<1,2>,
                                   SDTCisOpSmallerThanOp<0, 1>]>;
def X86Packss : SDNode<"X86ISD::PACKSS", SDTPack>;
def X86Packus : SDNode<"X86ISD::PACKUS", SDTPack>;

def X86Unpckl : SDNode<"X86ISD::UNPCKL", SDTShuff2Op>;
def X86Unpckh : SDNode<"X86ISD::UNPCKH", SDTShuff2Op>;

def X86vpmaddubsw : SDNode<"X86ISD::VPMADDUBSW",
                           SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i16>,
                                                SDTCVecEltisVT<1, i8>,
                                                SDTCisSameSizeAs<0,1>,
                                                SDTCisSameAs<1,2>]>>;
def X86vpmaddwd   : SDNode<"X86ISD::VPMADDWD",
                           SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i32>,
                                                SDTCVecEltisVT<1, i16>,
                                                SDTCisSameSizeAs<0,1>,
                                                SDTCisSameAs<1,2>]>,
                           [SDNPCommutative]>;

def X86VPermilpv : SDNode<"X86ISD::VPERMILPV", SDTShuff2OpM>;
def X86VPermilpi : SDNode<"X86ISD::VPERMILPI", SDTShuff2OpI>;
def X86VPermv    : SDNode<"X86ISD::VPERMV",
                          SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisInt<1>,
                                               SDTCisSameNumEltsAs<0,1>,
                                               SDTCisSameSizeAs<0,1>,
                                               SDTCisSameAs<0,2>]>>;
def X86VPermi    : SDNode<"X86ISD::VPERMI", SDTShuff2OpI>;
def X86VPermt2   : SDNode<"X86ISD::VPERMV3",
                          SDTypeProfile<1, 3, [SDTCisVec<0>,
                                               SDTCisSameAs<0,1>, SDTCisInt<2>,
                                               SDTCisVec<2>, SDTCisSameNumEltsAs<0, 2>,
                                               SDTCisSameSizeAs<0,2>,
                                               SDTCisSameAs<0,3>]>, []>;
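
// VPTERNLOG evaluates an arbitrary three-input boolean function bitwise; the
// trailing i8 immediate in SDTTernlog is the truth table selecting that
// function.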

def X86vpternlog : SDNode<"X86ISD::VPTERNLOG", SDTTernlog>;

def X86VPerm2x128 : SDNode<"X86ISD::VPERM2X128", SDTShuff3OpI>;

def X86VFixupimm     : SDNode<"X86ISD::VFIXUPIMM",      SDTFPTernaryOpImm>;
def X86VFixupimmSAE  : SDNode<"X86ISD::VFIXUPIMM_SAE",  SDTFPTernaryOpImm>;
def X86VFixupimms    : SDNode<"X86ISD::VFIXUPIMMS",     SDTFPTernaryOpImm>;
def X86VFixupimmSAEs : SDNode<"X86ISD::VFIXUPIMMS_SAE", SDTFPTernaryOpImm>;
def X86VRange        : SDNode<"X86ISD::VRANGE",         SDTFPBinOpImm>;
def X86VRangeSAE     : SDNode<"X86ISD::VRANGE_SAE",     SDTFPBinOpImm>;
def X86VReduce       : SDNode<"X86ISD::VREDUCE",        SDTFPUnaryOpImm>;
def X86VReduceSAE    : SDNode<"X86ISD::VREDUCE_SAE",    SDTFPUnaryOpImm>;
def X86VRndScale     : SDNode<"X86ISD::VRNDSCALE",      SDTFPUnaryOpImm>;
def X86strict_VRndScale : SDNode<"X86ISD::STRICT_VRNDSCALE", SDTFPUnaryOpImm,
                                 [SDNPHasChain]>;
def X86any_VRndScale : PatFrags<(ops node:$src1, node:$src2),
                                [(X86strict_VRndScale node:$src1, node:$src2),
                                 (X86VRndScale node:$src1, node:$src2)]>;

def X86VRndScaleSAE: SDNode<"X86ISD::VRNDSCALE_SAE", SDTFPUnaryOpImm>;
def X86VGetMant    : SDNode<"X86ISD::VGETMANT",      SDTFPUnaryOpImm>;
def X86VGetMantSAE : SDNode<"X86ISD::VGETMANT_SAE",  SDTFPUnaryOpImm>;
def X86Vfpclass  : SDNode<"X86ISD::VFPCLASS",
                     SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
                                          SDTCisFP<1>,
                                          SDTCisSameNumEltsAs<0,1>,
                                          SDTCisVT<2, i32>]>, []>;
def X86Vfpclasss : SDNode<"X86ISD::VFPCLASSS",
                     SDTypeProfile<1, 2, [SDTCisVT<0, v1i1>,
                                          SDTCisFP<1>, SDTCisVT<2, i32>]>,[]>;

def X86VBroadcast  : SDNode<"X86ISD::VBROADCAST", SDTVBroadcast>;
def X86VBroadcastm : SDNode<"X86ISD::VBROADCASTM", SDTVBroadcastm>;

def X86Blendi : SDNode<"X86ISD::BLENDI", SDTBlend>;
def X86Blendv : SDNode<"X86ISD::BLENDV",
                       SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisInt<1>,
                                            SDTCisSameAs<0, 2>,
                                            SDTCisSameAs<2, 3>,
                                            SDTCisSameNumEltsAs<0, 1>,
                                            SDTCisSameSizeAs<0, 1>]>>;

def X86Addsub : SDNode<"X86ISD::ADDSUB", SDTFPBinOp>;

def X86faddRnd   : SDNode<"X86ISD::FADD_RND", SDTFPBinOpRound>;
def X86fadds     : SDNode<"X86ISD::FADDS", SDTFPBinOp>;
def X86faddRnds  : SDNode<"X86ISD::FADDS_RND", SDTFPBinOpRound>;
def X86fsubRnd   : SDNode<"X86ISD::FSUB_RND", SDTFPBinOpRound>;
def X86fsubs     : SDNode<"X86ISD::FSUBS", SDTFPBinOp>;
def X86fsubRnds  : SDNode<"X86ISD::FSUBS_RND", SDTFPBinOpRound>;
def X86fmulRnd   : SDNode<"X86ISD::FMUL_RND", SDTFPBinOpRound>;
def X86fmuls     : SDNode<"X86ISD::FMULS", SDTFPBinOp>;
def X86fmulRnds  : SDNode<"X86ISD::FMULS_RND", SDTFPBinOpRound>;
def X86fdivRnd   : SDNode<"X86ISD::FDIV_RND", SDTFPBinOpRound>;
def X86fdivs     : SDNode<"X86ISD::FDIVS", SDTFPBinOp>;
def X86fdivRnds  : SDNode<"X86ISD::FDIVS_RND", SDTFPBinOpRound>;
def X86fmaxSAE   : SDNode<"X86ISD::FMAX_SAE", SDTFPBinOp>;
def X86fmaxSAEs  : SDNode<"X86ISD::FMAXS_SAE", SDTFPBinOp>;
def X86fminSAE   : SDNode<"X86ISD::FMIN_SAE", SDTFPBinOp>;
def X86fminSAEs  : SDNode<"X86ISD::FMINS_SAE", SDTFPBinOp>;
def X86scalef    : SDNode<"X86ISD::SCALEF", SDTFPBinOp>;
def X86scalefRnd : SDNode<"X86ISD::SCALEF_RND", SDTFPBinOpRound>;
def X86scalefs   : SDNode<"X86ISD::SCALEFS", SDTFPBinOp>;
def X86scalefsRnd: SDNode<"X86ISD::SCALEFS_RND", SDTFPBinOpRound>;
def X86fsqrtRnd    : SDNode<"X86ISD::FSQRT_RND", SDTFPUnaryOpRound>;
def X86fsqrts      : SDNode<"X86ISD::FSQRTS", SDTFPBinOp>;
def X86fsqrtRnds   : SDNode<"X86ISD::FSQRTS_RND", SDTFPBinOpRound>;
def X86fgetexp     : SDNode<"X86ISD::FGETEXP", SDTFPUnaryOp>;
def X86fgetexpSAE  : SDNode<"X86ISD::FGETEXP_SAE", SDTFPUnaryOp>;
def X86fgetexps    : SDNode<"X86ISD::FGETEXPS", SDTFPBinOp>;
def X86fgetexpSAEs : SDNode<"X86ISD::FGETEXPS_SAE", SDTFPBinOp>;
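
// FMA variants: FNMADD is -(a*b)+c, FMSUB is (a*b)-c, and FNMSUB is -(a*b)-c.
// Each has a strict (chained) form plus an X86any_* wrapper matching either.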

def X86Fnmadd : SDNode<"X86ISD::FNMADD", SDTFPTernaryOp, [SDNPCommutative]>;
def X86strict_Fnmadd : SDNode<"X86ISD::STRICT_FNMADD", SDTFPTernaryOp, [SDNPCommutative, SDNPHasChain]>;
def X86any_Fnmadd : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                             [(X86strict_Fnmadd node:$src1, node:$src2, node:$src3),
                              (X86Fnmadd node:$src1, node:$src2, node:$src3)]>;
def X86Fmsub : SDNode<"X86ISD::FMSUB", SDTFPTernaryOp, [SDNPCommutative]>;
def X86strict_Fmsub : SDNode<"X86ISD::STRICT_FMSUB", SDTFPTernaryOp, [SDNPCommutative, SDNPHasChain]>;
def X86any_Fmsub : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                            [(X86strict_Fmsub node:$src1, node:$src2, node:$src3),
                             (X86Fmsub node:$src1, node:$src2, node:$src3)]>;
def X86Fnmsub : SDNode<"X86ISD::FNMSUB", SDTFPTernaryOp, [SDNPCommutative]>;
def X86strict_Fnmsub : SDNode<"X86ISD::STRICT_FNMSUB", SDTFPTernaryOp, [SDNPCommutative, SDNPHasChain]>;
def X86any_Fnmsub : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                             [(X86strict_Fnmsub node:$src1, node:$src2, node:$src3),
                              (X86Fnmsub node:$src1, node:$src2, node:$src3)]>;
def X86Fmaddsub : SDNode<"X86ISD::FMADDSUB", SDTFPTernaryOp, [SDNPCommutative]>;
def X86Fmsubadd : SDNode<"X86ISD::FMSUBADD", SDTFPTernaryOp, [SDNPCommutative]>;

def X86FmaddRnd    : SDNode<"X86ISD::FMADD_RND",    SDTFmaRound, [SDNPCommutative]>;
def X86FnmaddRnd   : SDNode<"X86ISD::FNMADD_RND",   SDTFmaRound, [SDNPCommutative]>;
def X86FmsubRnd    : SDNode<"X86ISD::FMSUB_RND",    SDTFmaRound, [SDNPCommutative]>;
def X86FnmsubRnd   : SDNode<"X86ISD::FNMSUB_RND",   SDTFmaRound, [SDNPCommutative]>;
def X86FmaddsubRnd : SDNode<"X86ISD::FMADDSUB_RND", SDTFmaRound, [SDNPCommutative]>;
def X86FmsubaddRnd : SDNode<"X86ISD::FMSUBADD_RND", SDTFmaRound, [SDNPCommutative]>;

def X86vp2intersect : SDNode<"X86ISD::VP2INTERSECT",
                             SDTypeProfile<1, 2, [SDTCisVT<0, untyped>,
                                                  SDTCisVec<1>, SDTCisSameAs<1, 2>]>>;

def SDTIFma : SDTypeProfile<1, 3, [SDTCisInt<0>, SDTCisSameAs<0,1>,
                                   SDTCisSameAs<1,2>, SDTCisSameAs<1,3>]>;
def x86vpmadd52l : SDNode<"X86ISD::VPMADD52L", SDTIFma, [SDNPCommutative]>;
def x86vpmadd52h : SDNode<"X86ISD::VPMADD52H", SDTIFma, [SDNPCommutative]>;
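
// The VFMADDC/VFMULC families below model the AVX512-FP16 complex arithmetic
// operations on pairs of fp16 values; the VFC* nodes are the conjugate
// variants, which is presumably why they are not marked commutative.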

def x86vfmaddc     : SDNode<"X86ISD::VFMADDC", SDTFPTernaryOp, [SDNPCommutative]>;
def x86vfmaddcRnd  : SDNode<"X86ISD::VFMADDC_RND", SDTFmaRound, [SDNPCommutative]>;
def x86vfcmaddc    : SDNode<"X86ISD::VFCMADDC", SDTFPTernaryOp>;
def x86vfcmaddcRnd : SDNode<"X86ISD::VFCMADDC_RND", SDTFmaRound>;
def x86vfmulc      : SDNode<"X86ISD::VFMULC", SDTFPBinOp, [SDNPCommutative]>;
def x86vfmulcRnd   : SDNode<"X86ISD::VFMULC_RND", SDTFPBinOpRound, [SDNPCommutative]>;
def x86vfcmulc     : SDNode<"X86ISD::VFCMULC", SDTFPBinOp>;
def x86vfcmulcRnd  : SDNode<"X86ISD::VFCMULC_RND", SDTFPBinOpRound>;

def x86vfmaddcSh     : SDNode<"X86ISD::VFMADDCSH", SDTFPTernaryOp, [SDNPCommutative]>;
def x86vfcmaddcSh    : SDNode<"X86ISD::VFCMADDCSH", SDTFPTernaryOp>;
def x86vfmulcSh      : SDNode<"X86ISD::VFMULCSH", SDTFPBinOp, [SDNPCommutative]>;
def x86vfcmulcSh     : SDNode<"X86ISD::VFCMULCSH", SDTFPBinOp>;
def x86vfmaddcShRnd  : SDNode<"X86ISD::VFMADDCSH_RND", SDTFmaRound, [SDNPCommutative]>;
def x86vfcmaddcShRnd : SDNode<"X86ISD::VFCMADDCSH_RND", SDTFmaRound>;
def x86vfmulcShRnd   : SDNode<"X86ISD::VFMULCSH_RND", SDTFPBinOpRound,
                              [SDNPCommutative]>;
def x86vfcmulcShRnd  : SDNode<"X86ISD::VFCMULCSH_RND", SDTFPBinOpRound>;

def X86rsqrt14 : SDNode<"X86ISD::RSQRT14", SDTFPUnaryOp>;
def X86rcp14   : SDNode<"X86ISD::RCP14",   SDTFPUnaryOp>;

// VNNI
def SDTVnni : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                   SDTCisSameAs<1,2>, SDTCisSameAs<1,3>]>;
def X86Vpdpbusd  : SDNode<"X86ISD::VPDPBUSD",  SDTVnni>;
def X86Vpdpbusds : SDNode<"X86ISD::VPDPBUSDS", SDTVnni>;
def X86Vpdpwssd  : SDNode<"X86ISD::VPDPWSSD",  SDTVnni>;
def X86Vpdpwssds : SDNode<"X86ISD::VPDPWSSDS", SDTVnni>;

def X86rsqrt28   : SDNode<"X86ISD::RSQRT28",     SDTFPUnaryOp>;
def X86rsqrt28SAE: SDNode<"X86ISD::RSQRT28_SAE", SDTFPUnaryOp>;
def X86rcp28     : SDNode<"X86ISD::RCP28",       SDTFPUnaryOp>;
def X86rcp28SAE  : SDNode<"X86ISD::RCP28_SAE",   SDTFPUnaryOp>;
def X86exp2      : SDNode<"X86ISD::EXP2",        SDTFPUnaryOp>;
def X86exp2SAE   : SDNode<"X86ISD::EXP2_SAE",    SDTFPUnaryOp>;

def X86rsqrt14s    : SDNode<"X86ISD::RSQRT14S",     SDTFPBinOp>;
def X86rcp14s      : SDNode<"X86ISD::RCP14S",       SDTFPBinOp>;
def X86rsqrt28s    : SDNode<"X86ISD::RSQRT28S",     SDTFPBinOp>;
def X86rsqrt28SAEs : SDNode<"X86ISD::RSQRT28S_SAE", SDTFPBinOp>;
def X86rcp28s      : SDNode<"X86ISD::RCP28S",       SDTFPBinOp>;
def X86rcp28SAEs   : SDNode<"X86ISD::RCP28S_SAE",   SDTFPBinOp>;
def X86Ranges       : SDNode<"X86ISD::VRANGES",        SDTFPBinOpImm>;
def X86RndScales    : SDNode<"X86ISD::VRNDSCALES",     SDTFPBinOpImm>;
def X86Reduces      : SDNode<"X86ISD::VREDUCES",       SDTFPBinOpImm>;
def X86GetMants     : SDNode<"X86ISD::VGETMANTS",      SDTFPBinOpImm>;
def X86RangesSAE    : SDNode<"X86ISD::VRANGES_SAE",    SDTFPBinOpImm>;
def X86RndScalesSAE : SDNode<"X86ISD::VRNDSCALES_SAE", SDTFPBinOpImm>;
def X86ReducesSAE   : SDNode<"X86ISD::VREDUCES_SAE",   SDTFPBinOpImm>;
def X86GetMantsSAE  : SDNode<"X86ISD::VGETMANTS_SAE",  SDTFPBinOpImm>;

def X86compress: SDNode<"X86ISD::COMPRESS", SDTypeProfile<1, 3,
                        [SDTCisSameAs<0, 1>, SDTCisVec<1>,
                         SDTCisSameAs<0, 2>, SDTCVecEltisVT<3, i1>,
                         SDTCisSameNumEltsAs<0, 3>]>, []>;
def X86expand  : SDNode<"X86ISD::EXPAND", SDTypeProfile<1, 3,
                        [SDTCisSameAs<0, 1>, SDTCisVec<1>,
                         SDTCisSameAs<0, 2>, SDTCVecEltisVT<3, i1>,
                         SDTCisSameNumEltsAs<0, 3>]>, []>;

// vpshufbitqmb
def X86Vpshufbitqmb : SDNode<"X86ISD::VPSHUFBITQMB",
                             SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                                  SDTCisSameAs<1,2>,
                                                  SDTCVecEltisVT<0,i1>,
                                                  SDTCisSameNumEltsAs<0,1>]>>;

def SDTintToFP: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisFP<0>,
                                     SDTCisSameAs<0,1>, SDTCisInt<2>]>;
def SDTintToFPRound: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisFP<0>,
                                          SDTCisSameAs<0,1>, SDTCisInt<2>,
                                          SDTCisVT<3, i32>]>;

def SDTFloatToInt: SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                        SDTCisInt<0>, SDTCisFP<1>]>;
def SDTFloatToIntRnd: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                           SDTCisInt<0>, SDTCisFP<1>,
                                           SDTCisVT<2, i32>]>;
def SDTSFloatToInt: SDTypeProfile<1, 1, [SDTCisInt<0>, SDTCisFP<1>,
                                         SDTCisVec<1>]>;
def SDTSFloatToIntRnd: SDTypeProfile<1, 2, [SDTCisInt<0>, SDTCisFP<1>,
                                            SDTCisVec<1>, SDTCisVT<2, i32>]>;

def SDTVintToFP: SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                      SDTCisFP<0>, SDTCisInt<1>]>;
def SDTVintToFPRound: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                           SDTCisFP<0>, SDTCisInt<1>,
                                           SDTCisVT<2, i32>]>;
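
// In the conversion nodes below, the CVTT* forms truncate toward zero while
// the CVT* forms round using the current (or an explicitly supplied) rounding
// mode; the *S2* nodes are scalar and the *P2* nodes are packed.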

// Scalar
def X86SintToFp    : SDNode<"X86ISD::SCALAR_SINT_TO_FP",     SDTintToFP>;
def X86SintToFpRnd : SDNode<"X86ISD::SCALAR_SINT_TO_FP_RND", SDTintToFPRound>;
def X86UintToFp    : SDNode<"X86ISD::SCALAR_UINT_TO_FP",     SDTintToFP>;
def X86UintToFpRnd : SDNode<"X86ISD::SCALAR_UINT_TO_FP_RND", SDTintToFPRound>;

def X86cvtts2Int     : SDNode<"X86ISD::CVTTS2SI",     SDTSFloatToInt>;
def X86cvtts2UInt    : SDNode<"X86ISD::CVTTS2UI",     SDTSFloatToInt>;
def X86cvtts2IntSAE  : SDNode<"X86ISD::CVTTS2SI_SAE", SDTSFloatToInt>;
def X86cvtts2UIntSAE : SDNode<"X86ISD::CVTTS2UI_SAE", SDTSFloatToInt>;

def X86cvts2si     : SDNode<"X86ISD::CVTS2SI", SDTSFloatToInt>;
def X86cvts2usi    : SDNode<"X86ISD::CVTS2UI", SDTSFloatToInt>;
def X86cvts2siRnd  : SDNode<"X86ISD::CVTS2SI_RND", SDTSFloatToIntRnd>;
def X86cvts2usiRnd : SDNode<"X86ISD::CVTS2UI_RND", SDTSFloatToIntRnd>;

// Vector with rounding mode

// cvtt fp-to-int stuff
def X86cvttp2siSAE : SDNode<"X86ISD::CVTTP2SI_SAE", SDTFloatToInt>;
def X86cvttp2uiSAE : SDNode<"X86ISD::CVTTP2UI_SAE", SDTFloatToInt>;

def X86VSintToFpRnd : SDNode<"X86ISD::SINT_TO_FP_RND", SDTVintToFPRound>;
def X86VUintToFpRnd : SDNode<"X86ISD::UINT_TO_FP_RND", SDTVintToFPRound>;

// cvt fp-to-int stuff
def X86cvtp2IntRnd  : SDNode<"X86ISD::CVTP2SI_RND", SDTFloatToIntRnd>;
def X86cvtp2UIntRnd : SDNode<"X86ISD::CVTP2UI_RND", SDTFloatToIntRnd>;

// Vector without rounding mode

// cvtt fp-to-int stuff
def X86cvttp2si : SDNode<"X86ISD::CVTTP2SI", SDTFloatToInt>;
def X86cvttp2ui : SDNode<"X86ISD::CVTTP2UI", SDTFloatToInt>;
def X86strict_cvttp2si : SDNode<"X86ISD::STRICT_CVTTP2SI", SDTFloatToInt, [SDNPHasChain]>;
def X86strict_cvttp2ui : SDNode<"X86ISD::STRICT_CVTTP2UI", SDTFloatToInt, [SDNPHasChain]>;
def X86any_cvttp2si : PatFrags<(ops node:$src),
                               [(X86strict_cvttp2si node:$src),
                                (X86cvttp2si node:$src)]>;
def X86any_cvttp2ui : PatFrags<(ops node:$src),
                               [(X86strict_cvttp2ui node:$src),
                                (X86cvttp2ui node:$src)]>;

// cvt int-to-fp stuff
def X86VSintToFP : SDNode<"X86ISD::CVTSI2P", SDTVintToFP>;
def X86VUintToFP : SDNode<"X86ISD::CVTUI2P", SDTVintToFP>;
def X86strict_VSintToFP : SDNode<"X86ISD::STRICT_CVTSI2P", SDTVintToFP, [SDNPHasChain]>;
def X86strict_VUintToFP : SDNode<"X86ISD::STRICT_CVTUI2P", SDTVintToFP, [SDNPHasChain]>;
def X86any_VSintToFP : PatFrags<(ops node:$src),
                                [(X86strict_VSintToFP node:$src),
                                 (X86VSintToFP node:$src)]>;
def X86any_VUintToFP : PatFrags<(ops node:$src),
                                [(X86strict_VUintToFP node:$src),
                                 (X86VUintToFP node:$src)]>;

// cvt fp-to-int stuff
def X86cvtp2Int  : SDNode<"X86ISD::CVTP2SI", SDTFloatToInt>;
def X86cvtp2UInt : SDNode<"X86ISD::CVTP2UI", SDTFloatToInt>;

// Masked versions of above
def SDTMVintToFP: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
                                       SDTCisFP<0>, SDTCisInt<1>,
                                       SDTCisSameAs<0, 2>,
                                       SDTCVecEltisVT<3, i1>,
                                       SDTCisSameNumEltsAs<1, 3>]>;
def SDTMFloatToInt: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
                                         SDTCisInt<0>, SDTCisFP<1>,
                                         SDTCisSameSizeAs<0, 1>,
                                         SDTCisSameAs<0, 2>,
                                         SDTCVecEltisVT<3, i1>,
                                         SDTCisSameNumEltsAs<1, 3>]>;

def X86VMSintToFP : SDNode<"X86ISD::MCVTSI2P", SDTMVintToFP>;
def X86VMUintToFP : SDNode<"X86ISD::MCVTUI2P", SDTMVintToFP>;

def X86mcvtp2Int  : SDNode<"X86ISD::MCVTP2SI",  SDTMFloatToInt>;
def X86mcvtp2UInt : SDNode<"X86ISD::MCVTP2UI",  SDTMFloatToInt>;
def X86mcvttp2si  : SDNode<"X86ISD::MCVTTP2SI", SDTMFloatToInt>;
def X86mcvttp2ui  : SDNode<"X86ISD::MCVTTP2UI", SDTMFloatToInt>;
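
// Half-precision conversions: CVTPH2PS widens f16 values (stored as i16 vector
// elements) to f32, and CVTPS2PH narrows f32 to f16, taking an i32 immediate
// with the rounding control.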

def SDTcvtph2ps : SDTypeProfile<1, 1, [SDTCVecEltisVT<0, f32>,
                                       SDTCVecEltisVT<1, i16>]>;
def X86cvtph2ps : SDNode<"X86ISD::CVTPH2PS", SDTcvtph2ps>;
def X86strict_cvtph2ps : SDNode<"X86ISD::STRICT_CVTPH2PS", SDTcvtph2ps,
                                [SDNPHasChain]>;
def X86any_cvtph2ps : PatFrags<(ops node:$src),
                               [(X86strict_cvtph2ps node:$src),
                                (X86cvtph2ps node:$src)]>;

def X86cvtph2psSAE : SDNode<"X86ISD::CVTPH2PS_SAE", SDTcvtph2ps>;

def SDTcvtps2ph : SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i16>,
                                       SDTCVecEltisVT<1, f32>,
                                       SDTCisVT<2, i32>]>;
def X86cvtps2ph : SDNode<"X86ISD::CVTPS2PH", SDTcvtps2ph>;
def X86strict_cvtps2ph : SDNode<"X86ISD::STRICT_CVTPS2PH", SDTcvtps2ph,
                                [SDNPHasChain]>;
def X86any_cvtps2ph : PatFrags<(ops node:$src1, node:$src2),
                               [(X86strict_cvtps2ph node:$src1, node:$src2),
                                (X86cvtps2ph node:$src1, node:$src2)]>;

def X86cvtps2phSAE : SDNode<"X86ISD::CVTPS2PH_SAE", SDTcvtps2ph>;

def SDTmcvtps2ph : SDTypeProfile<1, 4, [SDTCVecEltisVT<0, i16>,
                                        SDTCVecEltisVT<1, f32>,
                                        SDTCisVT<2, i32>,
                                        SDTCisSameAs<0, 3>,
                                        SDTCVecEltisVT<4, i1>,
                                        SDTCisSameNumEltsAs<1, 4>]>;
def X86mcvtps2ph    : SDNode<"X86ISD::MCVTPS2PH", SDTmcvtps2ph>;
def X86mcvtps2phSAE : SDNode<"X86ISD::MCVTPS2PH_SAE", SDTmcvtps2ph>;

def X86vfpextSAE : SDNode<"X86ISD::VFPEXT_SAE",
                        SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
                                             SDTCisFP<1>, SDTCisVec<1>,
                                             SDTCisOpSmallerThanOp<1, 0>]>>;
def X86vfproundRnd: SDNode<"X86ISD::VFPROUND_RND",
                        SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
                                             SDTCisFP<1>, SDTCisVec<1>,
                                             SDTCisOpSmallerThanOp<0, 1>,
                                             SDTCisVT<2, i32>]>>;

// cvt fp to bfloat16
def X86cvtne2ps2bf16 : SDNode<"X86ISD::CVTNE2PS2BF16",
                       SDTypeProfile<1, 2, [SDTCVecEltisVT<0, bf16>,
                                            SDTCVecEltisVT<1, f32>,
                                            SDTCisSameSizeAs<0,1>,
                                            SDTCisSameAs<1,2>]>>;
def X86mcvtneps2bf16 : SDNode<"X86ISD::MCVTNEPS2BF16",
                       SDTypeProfile<1, 3, [SDTCVecEltisVT<0, bf16>,
                                            SDTCVecEltisVT<1, f32>,
                                            SDTCisSameAs<0, 2>,
                                            SDTCVecEltisVT<3, i1>,
                                            SDTCisSameNumEltsAs<1, 3>]>>;
def X86cvtneps2bf16 : SDNode<"X86ISD::CVTNEPS2BF16",
                       SDTypeProfile<1, 1, [SDTCVecEltisVT<0, bf16>,
                                            SDTCVecEltisVT<1, f32>]>>;
def X86dpbf16ps : SDNode<"X86ISD::DPBF16PS",
                  SDTypeProfile<1, 3, [SDTCVecEltisVT<0, f32>,
                                       SDTCisSameAs<0,1>,
                                       SDTCVecEltisVT<2, bf16>,
                                       SDTCisSameAs<2,3>]>>;

// Galois field arithmetic
def X86GF2P8affineinvqb : SDNode<"X86ISD::GF2P8AFFINEINVQB", SDTBlend>;
def X86GF2P8affineqb    : SDNode<"X86ISD::GF2P8AFFINEQB", SDTBlend>;
def X86GF2P8mulb        : SDNode<"X86ISD::GF2P8MULB", SDTIntBinOp>;

def SDTX86MaskedStore: SDTypeProfile<0, 3, [ // masked store
  SDTCisVec<0>, SDTCisPtrTy<1>, SDTCisVec<2>, SDTCisSameNumEltsAs<0, 2>
]>;

def X86vpdpbssd  : SDNode<"X86ISD::VPDPBSSD",  SDTVnni>;
def X86vpdpbssds : SDNode<"X86ISD::VPDPBSSDS", SDTVnni>;
def X86vpdpbsud  : SDNode<"X86ISD::VPDPBSUD",  SDTVnni>;
def X86vpdpbsuds : SDNode<"X86ISD::VPDPBSUDS", SDTVnni>;
def X86vpdpbuud  : SDNode<"X86ISD::VPDPBUUD",  SDTVnni>;
def X86vpdpbuuds : SDNode<"X86ISD::VPDPBUUDS", SDTVnni>;

//===----------------------------------------------------------------------===//
// SSE pattern fragments
//===----------------------------------------------------------------------===//

// 128-bit load pattern fragments
def loadv8f16  : PatFrag<(ops node:$ptr), (v8f16  (load node:$ptr))>;
def loadv8bf16 : PatFrag<(ops node:$ptr), (v8bf16 (load node:$ptr))>;
def loadv4f32  : PatFrag<(ops node:$ptr), (v4f32  (load node:$ptr))>;
def loadv2f64  : PatFrag<(ops node:$ptr), (v2f64  (load node:$ptr))>;
def loadv2i64  : PatFrag<(ops node:$ptr), (v2i64  (load node:$ptr))>;
def loadv4i32  : PatFrag<(ops node:$ptr), (v4i32  (load node:$ptr))>;
def loadv8i16  : PatFrag<(ops node:$ptr), (v8i16  (load node:$ptr))>;
def loadv16i8  : PatFrag<(ops node:$ptr), (v16i8  (load node:$ptr))>;

// 256-bit load pattern fragments
def loadv16f16  : PatFrag<(ops node:$ptr), (v16f16  (load node:$ptr))>;
def loadv16bf16 : PatFrag<(ops node:$ptr), (v16bf16 (load node:$ptr))>;
def loadv8f32   : PatFrag<(ops node:$ptr), (v8f32   (load node:$ptr))>;
def loadv4f64   : PatFrag<(ops node:$ptr), (v4f64   (load node:$ptr))>;
def loadv4i64   : PatFrag<(ops node:$ptr), (v4i64   (load node:$ptr))>;
def loadv8i32   : PatFrag<(ops node:$ptr), (v8i32   (load node:$ptr))>;
def loadv16i16  : PatFrag<(ops node:$ptr), (v16i16  (load node:$ptr))>;
def loadv32i8   : PatFrag<(ops node:$ptr), (v32i8   (load node:$ptr))>;

// 512-bit load pattern fragments
def loadv32f16  : PatFrag<(ops node:$ptr), (v32f16  (load node:$ptr))>;
def loadv32bf16 : PatFrag<(ops node:$ptr), (v32bf16 (load node:$ptr))>;
def loadv16f32  : PatFrag<(ops node:$ptr), (v16f32  (load node:$ptr))>;
def loadv8f64   : PatFrag<(ops node:$ptr), (v8f64   (load node:$ptr))>;
def loadv8i64   : PatFrag<(ops node:$ptr), (v8i64   (load node:$ptr))>;
def loadv16i32  : PatFrag<(ops node:$ptr), (v16i32  (load node:$ptr))>;
def loadv32i16  : PatFrag<(ops node:$ptr), (v32i16  (load node:$ptr))>;
def loadv64i8   : PatFrag<(ops node:$ptr), (v64i8   (load node:$ptr))>;

// 128-/256-/512-bit extload pattern fragments
def extloadv2f32  : PatFrag<(ops node:$ptr), (extloadvf32 node:$ptr)>;
def extloadv4f32  : PatFrag<(ops node:$ptr), (extloadvf32 node:$ptr)>;
def extloadv8f32  : PatFrag<(ops node:$ptr), (extloadvf32 node:$ptr)>;
def extloadv2f16  : PatFrag<(ops node:$ptr), (extloadvf16 node:$ptr)>;
def extloadv4f16  : PatFrag<(ops node:$ptr), (extloadvf16 node:$ptr)>;
def extloadv8f16  : PatFrag<(ops node:$ptr), (extloadvf16 node:$ptr)>;
def extloadv16f16 : PatFrag<(ops node:$ptr), (extloadvf16 node:$ptr)>;
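
// The fragments below provide three flavors of vector memory access: plain
// (possibly unaligned) 'load'/'store', 'alignedload'/'alignedstore' which
// require the access to be at least as aligned as the value's size, and
// 'memop', which also accepts unaligned loads when the subtarget reports fast
// unaligned SSE memory access.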

// Like 'store', but always requires vector size alignment.
def alignedstore : PatFrag<(ops node:$val, node:$ptr),
                           (store node:$val, node:$ptr), [{
  auto *St = cast<StoreSDNode>(N);
  return St->getAlign() >= St->getMemoryVT().getStoreSize();
}]>;

// Like 'load', but always requires vector size alignment.
def alignedload : PatFrag<(ops node:$ptr), (load node:$ptr), [{
  auto *Ld = cast<LoadSDNode>(N);
  return Ld->getAlign() >= Ld->getMemoryVT().getStoreSize();
}]>;

// 128-bit aligned load pattern fragments
// NOTE: all 128-bit integer vector loads are promoted to v2i64
def alignedloadv8f16  : PatFrag<(ops node:$ptr),
                                (v8f16 (alignedload node:$ptr))>;
def alignedloadv8bf16 : PatFrag<(ops node:$ptr),
                                (v8bf16 (alignedload node:$ptr))>;
def alignedloadv4f32  : PatFrag<(ops node:$ptr),
                                (v4f32 (alignedload node:$ptr))>;
def alignedloadv2f64  : PatFrag<(ops node:$ptr),
                                (v2f64 (alignedload node:$ptr))>;
def alignedloadv2i64  : PatFrag<(ops node:$ptr),
                                (v2i64 (alignedload node:$ptr))>;
def alignedloadv4i32  : PatFrag<(ops node:$ptr),
                                (v4i32 (alignedload node:$ptr))>;
def alignedloadv8i16  : PatFrag<(ops node:$ptr),
                                (v8i16 (alignedload node:$ptr))>;
def alignedloadv16i8  : PatFrag<(ops node:$ptr),
                                (v16i8 (alignedload node:$ptr))>;

// 256-bit aligned load pattern fragments
// NOTE: all 256-bit integer vector loads are promoted to v4i64
def alignedloadv16f16  : PatFrag<(ops node:$ptr),
                                 (v16f16 (alignedload node:$ptr))>;
def alignedloadv16bf16 : PatFrag<(ops node:$ptr),
                                 (v16bf16 (alignedload node:$ptr))>;
def alignedloadv8f32   : PatFrag<(ops node:$ptr),
                                 (v8f32  (alignedload node:$ptr))>;
def alignedloadv4f64   : PatFrag<(ops node:$ptr),
                                 (v4f64  (alignedload node:$ptr))>;
def alignedloadv4i64   : PatFrag<(ops node:$ptr),
                                 (v4i64  (alignedload node:$ptr))>;
def alignedloadv8i32   : PatFrag<(ops node:$ptr),
                                 (v8i32  (alignedload node:$ptr))>;
def alignedloadv16i16  : PatFrag<(ops node:$ptr),
                                 (v16i16 (alignedload node:$ptr))>;
def alignedloadv32i8   : PatFrag<(ops node:$ptr),
                                 (v32i8  (alignedload node:$ptr))>;

// 512-bit aligned load pattern fragments
def alignedloadv32f16  : PatFrag<(ops node:$ptr),
                                 (v32f16 (alignedload node:$ptr))>;
def alignedloadv32bf16 : PatFrag<(ops node:$ptr),
                                 (v32bf16 (alignedload node:$ptr))>;
def alignedloadv16f32  : PatFrag<(ops node:$ptr),
                                 (v16f32 (alignedload node:$ptr))>;
def alignedloadv8f64   : PatFrag<(ops node:$ptr),
                                 (v8f64  (alignedload node:$ptr))>;
def alignedloadv8i64   : PatFrag<(ops node:$ptr),
                                 (v8i64  (alignedload node:$ptr))>;
def alignedloadv16i32  : PatFrag<(ops node:$ptr),
                                 (v16i32 (alignedload node:$ptr))>;
def alignedloadv32i16  : PatFrag<(ops node:$ptr),
                                 (v32i16 (alignedload node:$ptr))>;
def alignedloadv64i8   : PatFrag<(ops node:$ptr),
                                 (v64i8  (alignedload node:$ptr))>;

// Like 'load', but uses special alignment checks suitable for use in
// memory operands in most SSE instructions, which are required to
// be naturally aligned on some targets but not on others. If the subtarget
// allows unaligned accesses, match any load, though this may require
// setting a feature bit in the processor (on startup, for example).
// Opteron 10h and later implement such a feature.
def memop : PatFrag<(ops node:$ptr), (load node:$ptr), [{
  auto *Ld = cast<LoadSDNode>(N);
  return Subtarget->hasSSEUnalignedMem() ||
         Ld->getAlign() >= Ld->getMemoryVT().getStoreSize();
}]>;

// 128-bit memop pattern fragments
// NOTE: all 128-bit integer vector loads are promoted to v2i64
def memopv4f32 : PatFrag<(ops node:$ptr), (v4f32 (memop node:$ptr))>;
def memopv2f64 : PatFrag<(ops node:$ptr), (v2f64 (memop node:$ptr))>;
def memopv2i64 : PatFrag<(ops node:$ptr), (v2i64 (memop node:$ptr))>;
def memopv4i32 : PatFrag<(ops node:$ptr), (v4i32 (memop node:$ptr))>;
def memopv8i16 : PatFrag<(ops node:$ptr), (v8i16 (memop node:$ptr))>;
def memopv16i8 : PatFrag<(ops node:$ptr), (v16i8 (memop node:$ptr))>;

// 128-bit bitconvert pattern fragments
def bc_v4f32 : PatFrag<(ops node:$in), (v4f32 (bitconvert node:$in))>;
def bc_v2f64 : PatFrag<(ops node:$in), (v2f64 (bitconvert node:$in))>;
def bc_v16i8 : PatFrag<(ops node:$in), (v16i8 (bitconvert node:$in))>;
def bc_v8i16 : PatFrag<(ops node:$in), (v8i16 (bitconvert node:$in))>;
def bc_v4i32 : PatFrag<(ops node:$in), (v4i32 (bitconvert node:$in))>;
def bc_v2i64 : PatFrag<(ops node:$in), (v2i64 (bitconvert node:$in))>;

// 256-bit bitconvert pattern fragments
def bc_v32i8  : PatFrag<(ops node:$in), (v32i8  (bitconvert node:$in))>;
def bc_v16i16 : PatFrag<(ops node:$in), (v16i16 (bitconvert node:$in))>;
def bc_v8i32  : PatFrag<(ops node:$in), (v8i32  (bitconvert node:$in))>;
def bc_v4i64  : PatFrag<(ops node:$in), (v4i64  (bitconvert node:$in))>;
def bc_v8f32  : PatFrag<(ops node:$in), (v8f32  (bitconvert node:$in))>;
def bc_v4f64  : PatFrag<(ops node:$in), (v4f64  (bitconvert node:$in))>;

// 512-bit bitconvert pattern fragments
def bc_v64i8  : PatFrag<(ops node:$in), (v64i8  (bitconvert node:$in))>;
def bc_v32i16 : PatFrag<(ops node:$in), (v32i16 (bitconvert node:$in))>;
def bc_v16i32 : PatFrag<(ops node:$in), (v16i32 (bitconvert node:$in))>;
def bc_v8i64  : PatFrag<(ops node:$in), (v8i64  (bitconvert node:$in))>;
def bc_v8f64  : PatFrag<(ops node:$in), (v8f64  (bitconvert node:$in))>;
def bc_v16f32 : PatFrag<(ops node:$in), (v16f32 (bitconvert node:$in))>;
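
// The size-suffixed fragments below refine the memory-intrinsic nodes defined
// earlier (X86vzld, X86vextractst, X86VBroadcastld, X86SubVBroadcastld) by the
// width of the access (the suffix is in bits), checked against the memory VT's
// store size rather than the result VT.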

def X86vzload16 : PatFrag<(ops node:$src),
                          (X86vzld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 2;
}]>;

def X86vzload32 : PatFrag<(ops node:$src),
                          (X86vzld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 4;
}]>;

def X86vzload64 : PatFrag<(ops node:$src),
                          (X86vzld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 8;
}]>;

def X86vextractstore64 : PatFrag<(ops node:$val, node:$ptr),
                                 (X86vextractst node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 8;
}]>;

def X86VBroadcastld8 : PatFrag<(ops node:$src),
                               (X86VBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 1;
}]>;

def X86VBroadcastld16 : PatFrag<(ops node:$src),
                                (X86VBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 2;
}]>;

def X86VBroadcastld32 : PatFrag<(ops node:$src),
                                (X86VBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 4;
}]>;

def X86VBroadcastld64 : PatFrag<(ops node:$src),
                                (X86VBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 8;
}]>;

def X86SubVBroadcastld128 : PatFrag<(ops node:$src),
                                    (X86SubVBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 16;
}]>;

def X86SubVBroadcastld256 : PatFrag<(ops node:$src),
                                    (X86SubVBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 32;
}]>;

// Scalar SSE intrinsic fragments to match several different types of loads.
// Used by scalar SSE intrinsic instructions which have 128 bit types, but
// only load a single element.
// FIXME: We should add more canonicalizing in DAGCombine. Particularly removing
// the simple_load case.
def sse_load_f16 : PatFrags<(ops node:$ptr),
                            [(v8f16 (simple_load node:$ptr)),
                             (v8f16 (X86vzload16 node:$ptr)),
                             (v8f16 (scalar_to_vector (loadf16 node:$ptr)))]>;
def sse_load_f32 : PatFrags<(ops node:$ptr),
                            [(v4f32 (simple_load node:$ptr)),
                             (v4f32 (X86vzload32 node:$ptr)),
                             (v4f32 (scalar_to_vector (loadf32 node:$ptr)))]>;
def sse_load_f64 : PatFrags<(ops node:$ptr),
                            [(v2f64 (simple_load node:$ptr)),
                             (v2f64 (X86vzload64 node:$ptr)),
                             (v2f64 (scalar_to_vector (loadf64 node:$ptr)))]>;

def fp16imm0 : PatLeaf<(f16 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

def fp32imm0 : PatLeaf<(f32 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

def fp64imm0 : PatLeaf<(f64 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

def fp128imm0 : PatLeaf<(f128 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

// EXTRACT_get_vextract128_imm xform function: convert extract_subvector index
// to VEXTRACTF128/VEXTRACTI128 imm.
def EXTRACT_get_vextract128_imm : SDNodeXForm<extract_subvector, [{
  return getExtractVEXTRACTImmediate(N, 128, SDLoc(N));
}]>;

// INSERT_get_vinsert128_imm xform function: convert insert_subvector index to
// VINSERTF128/VINSERTI128 imm.
def INSERT_get_vinsert128_imm : SDNodeXForm<insert_subvector, [{
  return getInsertVINSERTImmediate(N, 128, SDLoc(N));
}]>;

// INSERT_get_vperm2x128_imm xform function: convert insert_subvector index to
// commuted VPERM2F128/VPERM2I128 imm.
def INSERT_get_vperm2x128_commutedimm : SDNodeXForm<insert_subvector, [{
  return getPermuteVINSERTCommutedImmediate(N, 128, SDLoc(N));
}]>;

// EXTRACT_get_vextract256_imm xform function: convert extract_subvector index
// to VEXTRACTF64x4 imm.
def EXTRACT_get_vextract256_imm : SDNodeXForm<extract_subvector, [{
  return getExtractVEXTRACTImmediate(N, 256, SDLoc(N));
}]>;

// INSERT_get_vinsert256_imm xform function: convert insert_subvector index to
// VINSERTF64x4 imm.
def INSERT_get_vinsert256_imm : SDNodeXForm<insert_subvector, [{
  return getInsertVINSERTImmediate(N, 256, SDLoc(N));
}]>;
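
// For example, extracting the upper 128-bit half of a v8f32 (subvector index
// 4) maps to VEXTRACTF128 immediate 1; index-0 extractions are instead handled
// as subregister copies, which is why the fragments below reject a zero index.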

def vextract128_extract : PatFrag<(ops node:$bigvec, node:$index),
                                  (extract_subvector node:$bigvec,
                                                     node:$index), [{
  // Index 0 can be handled via extract_subreg.
  return !isNullConstant(N->getOperand(1));
}], EXTRACT_get_vextract128_imm>;

def vinsert128_insert : PatFrag<(ops node:$bigvec, node:$smallvec,
                                     node:$index),
                                (insert_subvector node:$bigvec, node:$smallvec,
                                                  node:$index), [{}],
                                INSERT_get_vinsert128_imm>;

def vextract256_extract : PatFrag<(ops node:$bigvec, node:$index),
                                  (extract_subvector node:$bigvec,
                                                     node:$index), [{
  // Index 0 can be handled via extract_subreg.
  return !isNullConstant(N->getOperand(1));
}], EXTRACT_get_vextract256_imm>;

def vinsert256_insert : PatFrag<(ops node:$bigvec, node:$smallvec,
                                     node:$index),
                                (insert_subvector node:$bigvec, node:$smallvec,
                                                  node:$index), [{}],
                                INSERT_get_vinsert256_imm>;

def masked_load : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                          (masked_ld node:$src1, undef, node:$src2, node:$src3), [{
  return !cast<MaskedLoadSDNode>(N)->isExpandingLoad() &&
         cast<MaskedLoadSDNode>(N)->getExtensionType() == ISD::NON_EXTLOAD &&
         cast<MaskedLoadSDNode>(N)->isUnindexed();
}]>;

def masked_load_aligned : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                  (masked_load node:$src1, node:$src2, node:$src3), [{
  // Use the node type to determine the size the alignment needs to match.
  // We can't use memory VT because type widening changes the node VT, but
  // not the memory VT.
  auto *Ld = cast<MaskedLoadSDNode>(N);
  return Ld->getAlign() >= Ld->getValueType(0).getStoreSize();
}]>;

def X86mExpandingLoad : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                (masked_ld node:$src1, undef, node:$src2, node:$src3), [{
  return cast<MaskedLoadSDNode>(N)->isExpandingLoad() &&
         cast<MaskedLoadSDNode>(N)->isUnindexed();
}]>;

// Masked store fragments.
// X86mstore can't be implemented in core DAG files because some targets
// do not support vector types (llvm-tblgen will fail).
def masked_store : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                           (masked_st node:$src1, node:$src2, undef, node:$src3), [{
  return !cast<MaskedStoreSDNode>(N)->isTruncatingStore() &&
         !cast<MaskedStoreSDNode>(N)->isCompressingStore() &&
         cast<MaskedStoreSDNode>(N)->isUnindexed();
}]>;

def masked_store_aligned : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                   (masked_store node:$src1, node:$src2, node:$src3), [{
  // Use the node type to determine the size the alignment needs to match.
  // We can't use memory VT because type widening changes the node VT, but
  // not the memory VT.
  auto *St = cast<MaskedStoreSDNode>(N);
  return St->getAlign() >= St->getOperand(1).getValueType().getStoreSize();
}]>;

def X86mCompressingStore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                   (masked_st node:$src1, node:$src2, undef, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->isCompressingStore() &&
         cast<MaskedStoreSDNode>(N)->isUnindexed();
}]>;

// Masked truncstore fragments.
// X86mtruncstore can't be implemented in core DAG files because some targets
// do not support vector types (llvm-tblgen will fail).
def X86mtruncstore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                             (masked_st node:$src1, node:$src2, undef, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->isTruncatingStore() &&
         cast<MaskedStoreSDNode>(N)->isUnindexed();
}]>;
def masked_truncstorevi8 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;
def masked_truncstorevi16 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;
def masked_truncstorevi32 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

def X86TruncSStore : SDNode<"X86ISD::VTRUNCSTORES", SDTStore,
                            [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def X86TruncUSStore : SDNode<"X86ISD::VTRUNCSTOREUS", SDTStore,
                             [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def X86MTruncSStore : SDNode<"X86ISD::VMTRUNCSTORES", SDTX86MaskedStore,
                             [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def X86MTruncUSStore : SDNode<"X86ISD::VMTRUNCSTOREUS", SDTX86MaskedStore,
                              [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def truncstore_s_vi8 : PatFrag<(ops node:$val, node:$ptr),
                               (X86TruncSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def truncstore_us_vi8 : PatFrag<(ops node:$val, node:$ptr),
                                (X86TruncUSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def truncstore_s_vi16 : PatFrag<(ops node:$val, node:$ptr),
                                (X86TruncSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def truncstore_us_vi16 : PatFrag<(ops node:$val, node:$ptr),
                                 (X86TruncUSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def truncstore_s_vi32 : PatFrag<(ops node:$val, node:$ptr),
                                (X86TruncSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

def truncstore_us_vi32 : PatFrag<(ops node:$val, node:$ptr),
                                 (X86TruncUSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

def masked_truncstore_s_vi8 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                      (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def masked_truncstore_us_vi8 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                       (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def masked_truncstore_s_vi16 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                       (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def masked_truncstore_us_vi16 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                        (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def masked_truncstore_s_vi32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                       (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

def masked_truncstore_us_vi32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                        (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

def X86Vfpclasss_su : PatFrag<(ops node:$src1, node:$src2),
                              (X86Vfpclasss node:$src1, node:$src2), [{
  return N->hasOneUse();
}]>;

def X86Vfpclass_su : PatFrag<(ops node:$src1, node:$src2),
                             (X86Vfpclass node:$src1, node:$src2), [{
  return N->hasOneUse();
}]>;

// These nodes use 'vnot' instead of 'not' to support vectors.
def vandn : PatFrag<(ops node:$i0, node:$i1), (and (vnot node:$i0), node:$i1)>;
def vxnor : PatFrag<(ops node:$i0, node:$i1), (vnot (xor node:$i0, node:$i1))>;

// Used for matching masked operations. Ensures the operation part only has a
// single use.
def vselect_mask : PatFrag<(ops node:$mask, node:$src1, node:$src2),
                           (vselect node:$mask, node:$src1, node:$src2), [{
  return isProfitableToFormMaskedOp(N);
}]>;

def X86selects_mask : PatFrag<(ops node:$mask, node:$src1, node:$src2),
                              (X86selects node:$mask, node:$src1, node:$src2), [{
  return isProfitableToFormMaskedOp(N);
}]>;

def X86cmpms_su : PatFrag<(ops node:$src1, node:$src2, node:$cc),
                          (X86cmpms node:$src1, node:$src2, node:$cc), [{
  return N->hasOneUse();
}]>;
def X86cmpmsSAE_su : PatFrag<(ops node:$src1, node:$src2, node:$cc),
                             (X86cmpmsSAE node:$src1, node:$src2, node:$cc), [{
  return N->hasOneUse();
}]>;

// PatFrags that contain a select and a truncate op. They take operands in the
// same order as X86vmtrunc, X86vmtruncs, X86vmtruncus. This allows us to pass
// either to the multiclasses.
def select_trunc : PatFrag<(ops node:$src, node:$src0, node:$mask),
                           (vselect_mask node:$mask,
                                         (trunc node:$src), node:$src0)>;
def select_truncs : PatFrag<(ops node:$src, node:$src0, node:$mask),
                            (vselect_mask node:$mask,
                                          (X86vtruncs node:$src), node:$src0)>;
def select_truncus : PatFrag<(ops node:$src, node:$src0, node:$mask),
                             (vselect_mask node:$mask,
                                           (X86vtruncus node:$src), node:$src0)>;

def X86Vpshufbitqmb_su : PatFrag<(ops node:$src1, node:$src2),
                                 (X86Vpshufbitqmb node:$src1, node:$src2), [{
  return N->hasOneUse();
}]>;

// This fragment treats setcc as commutable to help match loads in both
// operands for PCMPEQ.
def X86setcc_commute : SDNode<"ISD::SETCC", SDTSetCC, [SDNPCommutative]>;
def X86pcmpgtm : PatFrag<(ops node:$src1, node:$src2),
                         (setcc node:$src1, node:$src2, SETGT)>;

def X86pcmpm_imm : SDNodeXForm<setcc, [{
  ISD::CondCode CC = cast<CondCodeSDNode>(N->getOperand(2))->get();
  uint8_t SSECC = X86::getVPCMPImmForCond(CC);
  return getI8Imm(SSECC, SDLoc(N));
}]>;

// Swapped operand version of the above.
def X86pcmpm_imm_commute : SDNodeXForm<setcc, [{
  ISD::CondCode CC = cast<CondCodeSDNode>(N->getOperand(2))->get();
  uint8_t SSECC = X86::getVPCMPImmForCond(CC);
  SSECC = X86::getSwappedVPCMPImm(SSECC);
  return getI8Imm(SSECC, SDLoc(N));
}]>;

def X86pcmpm : PatFrag<(ops node:$src1, node:$src2, node:$cc),
                       (setcc node:$src1, node:$src2, node:$cc), [{
  ISD::CondCode CC = cast<CondCodeSDNode>(N->getOperand(2))->get();
  return !ISD::isUnsignedIntSetCC(CC);
}], X86pcmpm_imm>;

def X86pcmpm_su : PatFrag<(ops node:$src1, node:$src2, node:$cc),
                          (setcc node:$src1, node:$src2, node:$cc), [{
  ISD::CondCode CC = cast<CondCodeSDNode>(N->getOperand(2))->get();
  return N->hasOneUse() && !ISD::isUnsignedIntSetCC(CC);
}], X86pcmpm_imm>;

def X86pcmpum : PatFrag<(ops node:$src1, node:$src2, node:$cc),
                        (setcc node:$src1, node:$src2, node:$cc), [{
  ISD::CondCode CC = cast<CondCodeSDNode>(N->getOperand(2))->get();
  return ISD::isUnsignedIntSetCC(CC);
}], X86pcmpm_imm>;

def X86pcmpum_su : PatFrag<(ops node:$src1, node:$src2, node:$cc),
                           (setcc node:$src1, node:$src2, node:$cc), [{
  ISD::CondCode CC = cast<CondCodeSDNode>(N->getOperand(2))->get();
  return N->hasOneUse() && ISD::isUnsignedIntSetCC(CC);
}], X86pcmpm_imm>;

def X86cmpm_su : PatFrag<(ops node:$src1, node:$src2, node:$cc),
                         (X86cmpm node:$src1, node:$src2, node:$cc), [{
  return N->hasOneUse();
}]>;

def X86cmpm_imm_commute : SDNodeXForm<timm, [{
  uint8_t Imm = X86::getSwappedVCMPImm(N->getZExtValue() & 0x1f);
  return getI8Imm(Imm, SDLoc(N));
}]>;

def X86vpmaddwd_su : PatFrag<(ops node:$lhs, node:$rhs),
                             (X86vpmaddwd node:$lhs, node:$rhs), [{
  return N->hasOneUse();
}]>;