//===- IntrinsicsAARCH64.td - Defines AARCH64 intrinsics ---*- tablegen -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines all of the AARCH64-specific intrinsics.
//
//===----------------------------------------------------------------------===//

let TargetPrefix = "aarch64" in {

// Load-exclusive: the loaded value is always widened to i64; the pointer
// operand is polymorphic so one intrinsic covers all access widths.
def int_aarch64_ldxr : Intrinsic<[llvm_i64_ty], [llvm_anyptr_ty],
                                 [IntrNoFree, IntrWillReturn]>;
def int_aarch64_ldaxr : Intrinsic<[llvm_i64_ty], [llvm_anyptr_ty],
                                  [IntrNoFree, IntrWillReturn]>;
// Store-exclusive: takes the (i64-widened) value and pointer, returns an i32
// status result.
def int_aarch64_stxr : Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_anyptr_ty],
                                 [IntrNoFree, IntrWillReturn]>;
def int_aarch64_stlxr : Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_anyptr_ty],
                                  [IntrNoFree, IntrWillReturn]>;

// Paired exclusive accesses: the pair is modelled as two i64 results/operands.
def int_aarch64_ldxp : Intrinsic<[llvm_i64_ty, llvm_i64_ty], [llvm_ptr_ty],
                                 [IntrNoFree, IntrWillReturn]>;
def int_aarch64_ldaxp : Intrinsic<[llvm_i64_ty, llvm_i64_ty], [llvm_ptr_ty],
                                  [IntrNoFree, IntrWillReturn]>;
def int_aarch64_stxp : Intrinsic<[llvm_i32_ty],
                                 [llvm_i64_ty, llvm_i64_ty, llvm_ptr_ty],
                                 [IntrNoFree, IntrWillReturn]>;
def int_aarch64_stlxp : Intrinsic<[llvm_i32_ty],
                                  [llvm_i64_ty, llvm_i64_ty, llvm_ptr_ty],
                                  [IntrNoFree, IntrWillReturn]>;

// No operands, no results; presumably lowers to CLREX — see ISel patterns.
def int_aarch64_clrex : Intrinsic<[]>;

// Scalar integer division on any integer type (result/operands share type 0).
def int_aarch64_sdiv : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMMatchType<0>,
                                LLVMMatchType<0>], [IntrNoMem]>;
def int_aarch64_udiv : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMMatchType<0>,
                                LLVMMatchType<0>], [IntrNoMem]>;

// double -> i32 conversion (name suggests the v8.3 FJCVTZS instruction).
def int_aarch64_fjcvtzs : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;

// Count Leading Sign bits; cls64 counts on an i64 input but still yields i32.
def int_aarch64_cls: DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem]>;
def int_aarch64_cls64: DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem]>;

// Scalar v8.5-A FRINT32/FRINT64 rounding; same-type in/out on any float type.
def int_aarch64_frint32z
    : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
                            [ IntrNoMem ]>;
def int_aarch64_frint64z
    : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
                            [ IntrNoMem ]>;
def int_aarch64_frint32x
    : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
                            [ IntrNoMem ]>;
def int_aarch64_frint64x
    : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
                            [ IntrNoMem ]>;

//===----------------------------------------------------------------------===//
// HINT

// The i32 operand selects the hint number.
def int_aarch64_hint : DefaultAttrsIntrinsic<[], [llvm_i32_ty]>;

//===----------------------------------------------------------------------===//
// Data Barrier Instructions

// The i32 operand is the barrier option; exposed as both the GCC-style and
// MSVC-style builtins.
def int_aarch64_dmb : GCCBuiltin<"__builtin_arm_dmb">, MSBuiltin<"__dmb">,
                      Intrinsic<[], [llvm_i32_ty], [IntrNoFree, IntrWillReturn]>;
def int_aarch64_dsb : GCCBuiltin<"__builtin_arm_dsb">, MSBuiltin<"__dsb">,
                      Intrinsic<[], [llvm_i32_ty], [IntrNoFree, IntrWillReturn]>;
def int_aarch64_isb : GCCBuiltin<"__builtin_arm_isb">, MSBuiltin<"__isb">,
                      Intrinsic<[], [llvm_i32_ty], [IntrNoFree, IntrWillReturn]>;

// A space-consuming intrinsic primarily for testing block and jump table
// placements. The first argument is the number of bytes this "instruction"
// takes up, the second and return value are essentially chains, used to force
// ordering during ISel.
def int_aarch64_space : DefaultAttrsIntrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i64_ty], []>;

}

//===----------------------------------------------------------------------===//
// Advanced SIMD (NEON)

let TargetPrefix = "aarch64" in {  // All intrinsics start with "llvm.aarch64.".
  // Scalar FP binary op: result and both operands share one overloaded type.
  class AdvSIMD_2Scalar_Float_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
                [IntrNoMem]>;

  // FP -> integer conversion with an instruction-defined rounding mode.
  class AdvSIMD_FPToIntRounding_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty], [IntrNoMem]>;

  // One-operand ops where the result type matches the operand type.
  class AdvSIMD_1IntArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMMatchType<0>], [IntrNoMem]>;
  class AdvSIMD_1FloatArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMMatchType<0>], [IntrNoMem]>;
  // Result and operand are independently overloaded vector types.
  class AdvSIMD_1VectorArg_Expand_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
  // "Long": the operand is the truncated form of the (wider) result type.
  class AdvSIMD_1VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMTruncatedType<0>], [IntrNoMem]>;
  class AdvSIMD_1IntArg_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyint_ty], [IntrNoMem]>;
  // "Narrow": the operand is the extended form of the (narrower) result type.
  class AdvSIMD_1VectorArg_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMExtendedType<0>], [IntrNoMem]>;
  // Across-lane reductions: vector in, scalar out.
  class AdvSIMD_1VectorArg_Int_Across_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyvector_ty], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Float_Across_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [llvm_anyvector_ty], [IntrNoMem]>;

  class AdvSIMD_2IntArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_2FloatArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
                [IntrNoMem]>;
  // Comparisons: the result type (e.g. an integer mask) is overloaded
  // independently of the compared operand type.
  class AdvSIMD_2VectorArg_Compare_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty, LLVMMatchType<1>],
                [IntrNoMem]>;
  class AdvSIMD_2Arg_FloatCompare_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty, LLVMMatchType<1>],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMTruncatedType<0>, LLVMTruncatedType<0>],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Wide_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, LLVMTruncatedType<0>],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMExtendedType<0>, LLVMExtendedType<0>],
                [IntrNoMem]>;
  // Narrowing op with an i32 scalar operand (e.g. a shift amount).
  class AdvSIMD_2Arg_Scalar_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty],
                [LLVMExtendedType<0>, llvm_i32_ty],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Scalar_Expand_BySize_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_anyvector_ty],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Scalar_Wide_BySize_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMTruncatedType<0>],
                [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Scalar_Wide_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMTruncatedType<0>, llvm_i32_ty],
                [IntrNoMem]>;
  // "Tied": the first operand is the low half of the result vector.
  class AdvSIMD_2VectorArg_Tied_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMHalfElementsVectorType<0>, llvm_anyvector_ty],
                [IntrNoMem]>;
  // By-lane op: trailing i32 selects the lane of the second operand.
  class AdvSIMD_2VectorArg_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty],
                [LLVMMatchType<0>, llvm_anyint_ty, llvm_i32_ty],
                [IntrNoMem]>;

  class AdvSIMD_3VectorArg_Intrinsic
      : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
               [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
               [IntrNoMem]>;
  class AdvSIMD_3VectorArg_Scalar_Intrinsic
      : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
               [LLVMMatchType<0>, LLVMMatchType<0>, llvm_i32_ty],
               [IntrNoMem]>;
  class AdvSIMD_3VectorArg_Tied_Narrow_Intrinsic
      : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
               [LLVMHalfElementsVectorType<0>, llvm_anyvector_ty,
                LLVMMatchType<1>], [IntrNoMem]>;
  class AdvSIMD_3VectorArg_Scalar_Tied_Narrow_Intrinsic
      : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
               [LLVMHalfElementsVectorType<0>, llvm_anyvector_ty, llvm_i32_ty],
               [IntrNoMem]>;
  // Fixed-point <-> FP conversions; the i32 operand presumably selects the
  // fixed-point (fractional-bit) position — verify against the ISel patterns.
  class AdvSIMD_CvtFxToFP_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [llvm_anyint_ty, llvm_i32_ty],
                [IntrNoMem]>;
  class AdvSIMD_CvtFPToFx_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty, llvm_i32_ty],
                [IntrNoMem]>;

  class AdvSIMD_1Arg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_any_ty], [LLVMMatchType<0>], [IntrNoMem]>;

  // Accumulating three-operand forms: (acc, lhs, rhs) -> acc type.
  class AdvSIMD_Dot_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
                [IntrNoMem]>;

  class AdvSIMD_FP16FML_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
                [IntrNoMem]>;

  class AdvSIMD_MatMul_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
                [IntrNoMem]>;

  class AdvSIMD_FML_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
                [IntrNoMem]>;

  class AdvSIMD_BF16FML_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4f32_ty],
                [llvm_v4f32_ty, llvm_v8bf16_ty, llvm_v8bf16_ty],
                [IntrNoMem]>;
}

// Arithmetic ops

// Every def in this group inherits IntrNoMem from the let (fmulx overrides it
// below to add Commutative).
let TargetPrefix = "aarch64", IntrProperties = [IntrNoMem] in {
  // Vector Add Across Lanes
  def int_aarch64_neon_saddv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_uaddv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_faddv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;

  // Vector Long Add Across Lanes
  def int_aarch64_neon_saddlv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_uaddlv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;

  // Vector Halving Add
  def int_aarch64_neon_shadd : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_uhadd : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Rounding Halving Add
  def int_aarch64_neon_srhadd : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_urhadd : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Saturating Add
  def int_aarch64_neon_sqadd : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_suqadd : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_usqadd : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_uqadd : AdvSIMD_2IntArg_Intrinsic;

  // Vector Add High-Half
  // FIXME: this is a legacy intrinsic for aarch64_simd.h. Remove it when that
  // header is no longer supported.
  def int_aarch64_neon_addhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;

  // Vector Rounding Add High-Half
  def int_aarch64_neon_raddhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;

  // Vector Saturating Doubling Multiply High
  def int_aarch64_neon_sqdmulh : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_sqdmulh_lane : AdvSIMD_2VectorArg_Lane_Intrinsic;
  def int_aarch64_neon_sqdmulh_laneq : AdvSIMD_2VectorArg_Lane_Intrinsic;

  // Vector Saturating Rounding Doubling Multiply High
  def int_aarch64_neon_sqrdmulh : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_sqrdmulh_lane : AdvSIMD_2VectorArg_Lane_Intrinsic;
  def int_aarch64_neon_sqrdmulh_laneq : AdvSIMD_2VectorArg_Lane_Intrinsic;

  // Vector Polynomial Multiply
  def int_aarch64_neon_pmul : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Long Multiply
  def int_aarch64_neon_smull : AdvSIMD_2VectorArg_Long_Intrinsic;
  def int_aarch64_neon_umull : AdvSIMD_2VectorArg_Long_Intrinsic;
  def int_aarch64_neon_pmull : AdvSIMD_2VectorArg_Long_Intrinsic;

  // 64-bit polynomial multiply really returns an i128, which is not legal. Fake
  // it with a v16i8.
  def int_aarch64_neon_pmull64 :
        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;

  // Vector Extending Multiply
  def int_aarch64_neon_fmulx : AdvSIMD_2FloatArg_Intrinsic {
    // fmulx is commutative; override the group-wide property list to say so.
    let IntrProperties = [IntrNoMem, Commutative];
  }

  // Vector Saturating Doubling Long Multiply
  def int_aarch64_neon_sqdmull : AdvSIMD_2VectorArg_Long_Intrinsic;
  def int_aarch64_neon_sqdmulls_scalar
    : DefaultAttrsIntrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;

  // Vector Halving Subtract
  def int_aarch64_neon_shsub : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_uhsub : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Saturating Subtract
  def int_aarch64_neon_sqsub : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_uqsub : AdvSIMD_2IntArg_Intrinsic;

  // Vector Subtract High-Half
  // FIXME: this is a legacy intrinsic for aarch64_simd.h. Remove it when that
  // header is no longer supported.
  def int_aarch64_neon_subhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;

  // Vector Rounding Subtract High-Half
  def int_aarch64_neon_rsubhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;

  // Vector Compare Absolute Greater-than-or-equal
  def int_aarch64_neon_facge : AdvSIMD_2Arg_FloatCompare_Intrinsic;

  // Vector Compare Absolute Greater-than
  def int_aarch64_neon_facgt : AdvSIMD_2Arg_FloatCompare_Intrinsic;

  // Vector Absolute Difference
  def int_aarch64_neon_sabd : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_uabd : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fabd : AdvSIMD_2VectorArg_Intrinsic;

  // Scalar Absolute Difference
  def int_aarch64_sisd_fabd : AdvSIMD_2Scalar_Float_Intrinsic;

  // Vector Max
  def int_aarch64_neon_smax : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_umax : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fmax : AdvSIMD_2FloatArg_Intrinsic;
  def int_aarch64_neon_fmaxnmp : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Max Across Lanes
  def int_aarch64_neon_smaxv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_umaxv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_fmaxv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;
  def int_aarch64_neon_fmaxnmv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;

  // Vector Min
  def int_aarch64_neon_smin : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_umin : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fmin : AdvSIMD_2FloatArg_Intrinsic;
  def int_aarch64_neon_fminnmp : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Min/Max Number
  def int_aarch64_neon_fminnm : AdvSIMD_2FloatArg_Intrinsic;
  def int_aarch64_neon_fmaxnm : AdvSIMD_2FloatArg_Intrinsic;

  // Vector Min Across Lanes
  def int_aarch64_neon_sminv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_uminv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_fminv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;
  def int_aarch64_neon_fminnmv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;

  // Pairwise Add
  def int_aarch64_neon_addp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_faddp : AdvSIMD_2VectorArg_Intrinsic;

  // Long Pairwise Add
  // FIXME: In theory, we shouldn't need intrinsics for saddlp or
  // uaddlp, but tblgen's type inference currently can't handle the
  // pattern fragments this ends up generating.
  def int_aarch64_neon_saddlp : AdvSIMD_1VectorArg_Expand_Intrinsic;
  def int_aarch64_neon_uaddlp : AdvSIMD_1VectorArg_Expand_Intrinsic;

  // Folding Maximum
  def int_aarch64_neon_smaxp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_umaxp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fmaxp : AdvSIMD_2VectorArg_Intrinsic;

  // Folding Minimum
  def int_aarch64_neon_sminp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_uminp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fminp : AdvSIMD_2VectorArg_Intrinsic;

  // Reciprocal Estimate/Step
  def int_aarch64_neon_frecps : AdvSIMD_2FloatArg_Intrinsic;
  def int_aarch64_neon_frsqrts : AdvSIMD_2FloatArg_Intrinsic;

  // Reciprocal Exponent
  def int_aarch64_neon_frecpx : AdvSIMD_1FloatArg_Intrinsic;

  // Vector Saturating Shift Left
  def int_aarch64_neon_sqshl : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_uqshl : AdvSIMD_2IntArg_Intrinsic;

  // Vector Rounding Shift Left
  def int_aarch64_neon_srshl : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_urshl : AdvSIMD_2IntArg_Intrinsic;

  // Vector Saturating Rounding Shift Left
  def int_aarch64_neon_sqrshl : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_uqrshl : AdvSIMD_2IntArg_Intrinsic;

  // Vector Signed->Unsigned Shift Left by Constant
  def int_aarch64_neon_sqshlu : AdvSIMD_2IntArg_Intrinsic;

  // Vector Signed->Unsigned Narrowing Saturating Shift Right by Constant
  def int_aarch64_neon_sqshrun : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Signed->Unsigned Rounding Narrowing Saturating Shift Right by Const
  def int_aarch64_neon_sqrshrun : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Narrowing Shift Right by Constant
  def int_aarch64_neon_sqshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
  def int_aarch64_neon_uqshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Rounding Narrowing Shift Right by Constant
  def int_aarch64_neon_rshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Rounding Narrowing Saturating Shift Right by Constant
  def int_aarch64_neon_sqrshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
  def int_aarch64_neon_uqrshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Shift Left
  def int_aarch64_neon_sshl : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_ushl : AdvSIMD_2IntArg_Intrinsic;

  // Vector Widening Shift Left by Constant
  def int_aarch64_neon_shll : AdvSIMD_2VectorArg_Scalar_Wide_BySize_Intrinsic;
  def int_aarch64_neon_sshll : AdvSIMD_2VectorArg_Scalar_Wide_Intrinsic;
  def int_aarch64_neon_ushll : AdvSIMD_2VectorArg_Scalar_Wide_Intrinsic;

  // Vector Shift Right by Constant and Insert
  def int_aarch64_neon_vsri : AdvSIMD_3VectorArg_Scalar_Intrinsic;

  // Vector Shift Left by Constant and Insert
  def int_aarch64_neon_vsli : AdvSIMD_3VectorArg_Scalar_Intrinsic;

  // Vector Saturating Narrow
  // The scalar_* forms take/return scalar integers rather than vectors.
  def int_aarch64_neon_scalar_sqxtn: AdvSIMD_1IntArg_Narrow_Intrinsic;
  def int_aarch64_neon_scalar_uqxtn : AdvSIMD_1IntArg_Narrow_Intrinsic;
  def int_aarch64_neon_sqxtn : AdvSIMD_1VectorArg_Narrow_Intrinsic;
  def int_aarch64_neon_uqxtn : AdvSIMD_1VectorArg_Narrow_Intrinsic;

  // Vector Saturating Extract and Unsigned Narrow
  def int_aarch64_neon_scalar_sqxtun : AdvSIMD_1IntArg_Narrow_Intrinsic;
  def int_aarch64_neon_sqxtun : AdvSIMD_1VectorArg_Narrow_Intrinsic;

  // Vector Absolute Value
  def int_aarch64_neon_abs : AdvSIMD_1Arg_Intrinsic;

  // Vector Saturating Absolute Value
  def int_aarch64_neon_sqabs : AdvSIMD_1IntArg_Intrinsic;

  // Vector Saturating Negation
  def int_aarch64_neon_sqneg : AdvSIMD_1IntArg_Intrinsic;

  // Vector Count Leading Sign Bits
  def int_aarch64_neon_cls : AdvSIMD_1VectorArg_Intrinsic;

  // Vector Reciprocal Estimate
  def int_aarch64_neon_urecpe : AdvSIMD_1VectorArg_Intrinsic;
  def int_aarch64_neon_frecpe : AdvSIMD_1FloatArg_Intrinsic;

  // Vector Square Root Estimate
  def int_aarch64_neon_ursqrte : AdvSIMD_1VectorArg_Intrinsic;
  def int_aarch64_neon_frsqrte : AdvSIMD_1FloatArg_Intrinsic;

  // Vector Conversions Between Half-Precision and Single-Precision.
  // Half-precision values travel as v4i16 bit patterns here.
  def int_aarch64_neon_vcvtfp2hf
    : DefaultAttrsIntrinsic<[llvm_v4i16_ty], [llvm_v4f32_ty], [IntrNoMem]>;
  def int_aarch64_neon_vcvthf2fp
    : DefaultAttrsIntrinsic<[llvm_v4f32_ty], [llvm_v4i16_ty], [IntrNoMem]>;

  // Vector Conversions Between Floating-point and Fixed-point.
  def int_aarch64_neon_vcvtfp2fxs : AdvSIMD_CvtFPToFx_Intrinsic;
  def int_aarch64_neon_vcvtfp2fxu : AdvSIMD_CvtFPToFx_Intrinsic;
  def int_aarch64_neon_vcvtfxs2fp : AdvSIMD_CvtFxToFP_Intrinsic;
  def int_aarch64_neon_vcvtfxu2fp : AdvSIMD_CvtFxToFP_Intrinsic;

  // Vector FP->Int Conversions
  def int_aarch64_neon_fcvtas : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtau : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtms : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtmu : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtns : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtnu : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtps : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtpu : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtzs : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtzu : AdvSIMD_FPToIntRounding_Intrinsic;

  // v8.5-A Vector FP Rounding
  def int_aarch64_neon_frint32x : AdvSIMD_1FloatArg_Intrinsic;
  def int_aarch64_neon_frint32z : AdvSIMD_1FloatArg_Intrinsic;
  def int_aarch64_neon_frint64x : AdvSIMD_1FloatArg_Intrinsic;
  def int_aarch64_neon_frint64z : AdvSIMD_1FloatArg_Intrinsic;

  // Scalar FP->Int conversions
  // (no entries at present)

  // Vector FP Inexact Narrowing
  def int_aarch64_neon_fcvtxn : AdvSIMD_1VectorArg_Expand_Intrinsic;

  // Scalar FP Inexact Narrowing
  def int_aarch64_sisd_fcvtxn : DefaultAttrsIntrinsic<[llvm_float_ty], [llvm_double_ty],
                                          [IntrNoMem]>;

  // v8.2-A Dot Product
  def int_aarch64_neon_udot : AdvSIMD_Dot_Intrinsic;
  def int_aarch64_neon_sdot : AdvSIMD_Dot_Intrinsic;

  // v8.6-A Matrix Multiply Intrinsics
  def int_aarch64_neon_ummla : AdvSIMD_MatMul_Intrinsic;
  def int_aarch64_neon_smmla : AdvSIMD_MatMul_Intrinsic;
  def int_aarch64_neon_usmmla : AdvSIMD_MatMul_Intrinsic;
  def int_aarch64_neon_usdot : AdvSIMD_Dot_Intrinsic;
  def int_aarch64_neon_bfdot : AdvSIMD_Dot_Intrinsic;
  def int_aarch64_neon_bfmmla
    : DefaultAttrsIntrinsic<[llvm_v4f32_ty],
                [llvm_v4f32_ty, llvm_v8bf16_ty, llvm_v8bf16_ty],
                [IntrNoMem]>;
  def int_aarch64_neon_bfmlalb : AdvSIMD_BF16FML_Intrinsic;
  def int_aarch64_neon_bfmlalt : AdvSIMD_BF16FML_Intrinsic;


  // v8.6-A Bfloat Intrinsics
  def int_aarch64_neon_bfcvt
    : DefaultAttrsIntrinsic<[llvm_bfloat_ty], [llvm_float_ty], [IntrNoMem]>;
  def int_aarch64_neon_bfcvtn
    : DefaultAttrsIntrinsic<[llvm_v8bf16_ty], [llvm_v4f32_ty], [IntrNoMem]>;
  def int_aarch64_neon_bfcvtn2
    : DefaultAttrsIntrinsic<[llvm_v8bf16_ty],
                [llvm_v8bf16_ty, llvm_v4f32_ty],
                [IntrNoMem]>;

  // v8.2-A FP16 Fused Multiply-Add Long
  def int_aarch64_neon_fmlal : AdvSIMD_FP16FML_Intrinsic;
  def int_aarch64_neon_fmlsl : AdvSIMD_FP16FML_Intrinsic;
  def int_aarch64_neon_fmlal2 : AdvSIMD_FP16FML_Intrinsic;
  def int_aarch64_neon_fmlsl2 : AdvSIMD_FP16FML_Intrinsic;

  // v8.3-A Floating-point complex add
  def int_aarch64_neon_vcadd_rot90 : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_vcadd_rot270 : AdvSIMD_2VectorArg_Intrinsic;

  // v8.3-A Floating-point complex multiply-accumulate, one def per rotation.
  def int_aarch64_neon_vcmla_rot0 : AdvSIMD_3VectorArg_Intrinsic;
  def int_aarch64_neon_vcmla_rot90 : AdvSIMD_3VectorArg_Intrinsic;
  def int_aarch64_neon_vcmla_rot180 : AdvSIMD_3VectorArg_Intrinsic;
  def int_aarch64_neon_vcmla_rot270 : AdvSIMD_3VectorArg_Intrinsic;
}

let TargetPrefix = "aarch64" in { // All intrinsics start with "llvm.aarch64.".
  // Operands are (vector, i64 index, vector, i64 index); given the user
  // below, presumably (dst, dst lane, src, src lane) — verify against ISel.
  class AdvSIMD_2Vector2Index_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_anyvector_ty, llvm_i64_ty, LLVMMatchType<0>, llvm_i64_ty],
                [IntrNoMem]>;
}

// Vector element to element moves
def int_aarch64_neon_vcopy_lane: AdvSIMD_2Vector2Index_Intrinsic;

let TargetPrefix = "aarch64" in { // All intrinsics start with "llvm.aarch64.".
  // Multi-vector structured load/store helper classes. Loads are
  // [IntrReadMem, IntrArgMemOnly]; stores are [IntrArgMemOnly] with the
  // pointer operand marked NoCapture. Lane variants take an i64 lane index
  // and, for loads, tie the input vectors to the results.
  class AdvSIMD_1Vec_Load_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_1Vec_Store_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, llvm_i64_ty, llvm_anyptr_ty],
                [IntrArgMemOnly, NoCapture<ArgIndex<2>>]>;

  class AdvSIMD_2Vec_Load_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, llvm_anyvector_ty],
                [LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_2Vec_Load_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>],
                [LLVMMatchType<0>, llvm_anyvector_ty,
                 llvm_i64_ty, llvm_anyptr_ty],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_2Vec_Store_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                     LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrArgMemOnly, NoCapture<ArgIndex<2>>]>;
  class AdvSIMD_2Vec_Store_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                 llvm_i64_ty, llvm_anyptr_ty],
                [IntrArgMemOnly, NoCapture<ArgIndex<3>>]>;

  class AdvSIMD_3Vec_Load_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>, llvm_anyvector_ty],
                [LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_3Vec_Load_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
                [LLVMMatchType<0>, LLVMMatchType<0>, llvm_anyvector_ty,
                 llvm_i64_ty, llvm_anyptr_ty],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_3Vec_Store_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                 LLVMMatchType<0>, LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrArgMemOnly, NoCapture<ArgIndex<3>>]>;
  class AdvSIMD_3Vec_Store_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty,
                 LLVMMatchType<0>, LLVMMatchType<0>,
                 llvm_i64_ty, llvm_anyptr_ty],
                [IntrArgMemOnly, NoCapture<ArgIndex<4>>]>;

  class AdvSIMD_4Vec_Load_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>,
                 LLVMMatchType<0>, llvm_anyvector_ty],
                [LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_4Vec_Load_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>,
                 LLVMMatchType<0>, LLVMMatchType<0>],
                [LLVMMatchType<0>, LLVMMatchType<0>,
                 LLVMMatchType<0>, llvm_anyvector_ty,
                 llvm_i64_ty, llvm_anyptr_ty],
                [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_4Vec_Store_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                 LLVMMatchType<0>, LLVMMatchType<0>,
                 LLVMAnyPointerType<LLVMMatchType<0>>],
                [IntrArgMemOnly, NoCapture<ArgIndex<4>>]>;
  class AdvSIMD_4Vec_Store_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                 LLVMMatchType<0>, LLVMMatchType<0>,
                 llvm_i64_ty, llvm_anyptr_ty],
                [IntrArgMemOnly, NoCapture<ArgIndex<5>>]>;
}

// Memory ops

def int_aarch64_neon_ld1x2 : AdvSIMD_2Vec_Load_Intrinsic;
def int_aarch64_neon_ld1x3 : AdvSIMD_3Vec_Load_Intrinsic;
def int_aarch64_neon_ld1x4 : AdvSIMD_4Vec_Load_Intrinsic;

def int_aarch64_neon_st1x2 : AdvSIMD_2Vec_Store_Intrinsic;
def int_aarch64_neon_st1x3 : AdvSIMD_3Vec_Store_Intrinsic;
def int_aarch64_neon_st1x4 : AdvSIMD_4Vec_Store_Intrinsic;

def int_aarch64_neon_ld2 : AdvSIMD_2Vec_Load_Intrinsic;
def int_aarch64_neon_ld3 : AdvSIMD_3Vec_Load_Intrinsic;
def int_aarch64_neon_ld4 : AdvSIMD_4Vec_Load_Intrinsic;

def int_aarch64_neon_ld2lane : AdvSIMD_2Vec_Load_Lane_Intrinsic;
def int_aarch64_neon_ld3lane : AdvSIMD_3Vec_Load_Lane_Intrinsic;
def int_aarch64_neon_ld4lane : AdvSIMD_4Vec_Load_Lane_Intrinsic;

def int_aarch64_neon_ld2r : AdvSIMD_2Vec_Load_Intrinsic;
def int_aarch64_neon_ld3r : AdvSIMD_3Vec_Load_Intrinsic;
def int_aarch64_neon_ld4r : AdvSIMD_4Vec_Load_Intrinsic;

def int_aarch64_neon_st2 : AdvSIMD_2Vec_Store_Intrinsic;
def int_aarch64_neon_st3 : AdvSIMD_3Vec_Store_Intrinsic;
def int_aarch64_neon_st4 : AdvSIMD_4Vec_Store_Intrinsic;

def int_aarch64_neon_st2lane : AdvSIMD_2Vec_Store_Lane_Intrinsic;
def int_aarch64_neon_st3lane : AdvSIMD_3Vec_Store_Lane_Intrinsic;
def int_aarch64_neon_st4lane : AdvSIMD_4Vec_Store_Lane_Intrinsic;

let TargetPrefix = "aarch64" in { // All intrinsics start with "llvm.aarch64.".
  // Table lookup (tbl): 1-4 v16i8 table registers plus an index vector.
  class AdvSIMD_Tbl1_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_v16i8_ty, LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_Tbl2_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_v16i8_ty, llvm_v16i8_ty, LLVMMatchType<0>], [IntrNoMem]>;
  class AdvSIMD_Tbl3_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty,
                 LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_Tbl4_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  // Table lookup extension (tbx): like tbl but with an extra leading operand
  // of the result type.
  class AdvSIMD_Tbx1_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_v16i8_ty, LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_Tbx2_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_v16i8_ty, llvm_v16i8_ty,
                 LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_Tbx3_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_v16i8_ty, llvm_v16i8_ty,
                 llvm_v16i8_ty, LLVMMatchType<0>],
                [IntrNoMem]>;
  class AdvSIMD_Tbx4_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_v16i8_ty, llvm_v16i8_ty,
                 llvm_v16i8_ty, llvm_v16i8_ty, LLVMMatchType<0>],
                [IntrNoMem]>;
}
def int_aarch64_neon_tbl1 : AdvSIMD_Tbl1_Intrinsic;
def int_aarch64_neon_tbl2 : AdvSIMD_Tbl2_Intrinsic;
def int_aarch64_neon_tbl3 : AdvSIMD_Tbl3_Intrinsic;
def int_aarch64_neon_tbl4 : AdvSIMD_Tbl4_Intrinsic;

def int_aarch64_neon_tbx1 : AdvSIMD_Tbx1_Intrinsic;
def int_aarch64_neon_tbx2 : AdvSIMD_Tbx2_Intrinsic;
def int_aarch64_neon_tbx3 : AdvSIMD_Tbx3_Intrinsic;
def int_aarch64_neon_tbx4 : AdvSIMD_Tbx4_Intrinsic;

let TargetPrefix = "aarch64" in {
  // System-register style accessors: IntrNoMem (no IR-visible memory) but
  // IntrHasSideEffects so they are not CSE'd or dead-code eliminated.
  class FPCR_Get_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i64_ty], [], [IntrNoMem, IntrHasSideEffects]>;
  class FPCR_Set_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_i64_ty], [IntrNoMem, IntrHasSideEffects]>;
  // Returns the random value plus an i1 success flag.
  class RNDR_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i64_ty, llvm_i1_ty], [], [IntrNoMem, IntrHasSideEffects]>;
}

// FPCR
def int_aarch64_get_fpcr : FPCR_Get_Intrinsic;
def int_aarch64_set_fpcr : FPCR_Set_Intrinsic;

// Armv8.5-A Random number generation intrinsics
def int_aarch64_rndr : RNDR_Intrinsic;
def int_aarch64_rndrrs : RNDR_Intrinsic;

let TargetPrefix = "aarch64" in {
  class Crypto_AES_DataKey_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;

  class Crypto_AES_Data_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;

  // SHA intrinsic taking 5 words of the hash (v4i32, i32) and 4 of the schedule
  // (v4i32).
  class Crypto_SHA_5Hash4Schedule_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty, llvm_v4i32_ty],
                [IntrNoMem]>;

  // SHA intrinsic taking a single word of the hash (i32).
719 class Crypto_SHA_1Hash_Intrinsic 720 : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem]>; 721 722 // SHA intrinsic taking 8 words of the schedule 723 class Crypto_SHA_8Schedule_Intrinsic 724 : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>; 725 726 // SHA intrinsic taking 12 words of the schedule 727 class Crypto_SHA_12Schedule_Intrinsic 728 : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty], 729 [IntrNoMem]>; 730 731 // SHA intrinsic taking 8 words of the hash and 4 of the schedule. 732 class Crypto_SHA_8Hash4Schedule_Intrinsic 733 : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty], 734 [IntrNoMem]>; 735 736 // SHA512 intrinsic taking 2 arguments 737 class Crypto_SHA512_2Arg_Intrinsic 738 : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>; 739 740 // SHA512 intrinsic taking 3 Arguments 741 class Crypto_SHA512_3Arg_Intrinsic 742 : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty], 743 [IntrNoMem]>; 744 745 // SHA3 Intrinsics taking 3 arguments 746 class Crypto_SHA3_3Arg_Intrinsic 747 : DefaultAttrsIntrinsic<[llvm_anyvector_ty], 748 [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>], 749 [IntrNoMem]>; 750 751 // SHA3 Intrinsic taking 2 arguments 752 class Crypto_SHA3_2Arg_Intrinsic 753 : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], 754 [IntrNoMem]>; 755 756 // SHA3 Intrinsic taking 3 Arguments 1 immediate 757 class Crypto_SHA3_2ArgImm_Intrinsic 758 : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i64_ty], 759 [IntrNoMem, ImmArg<ArgIndex<2>>]>; 760 761 class Crypto_SM3_3Vector_Intrinsic 762 : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty], 763 [IntrNoMem]>; 764 765 class Crypto_SM3_3VectorIndexed_Intrinsic 766 : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty, 
llvm_i64_ty],
                            [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  // Use DefaultAttrsIntrinsic for consistency with the other IntrNoMem crypto
  // classes above (picks up the same default attributes).
  class Crypto_SM4_2Vector_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
}

// AES
def int_aarch64_crypto_aese   : Crypto_AES_DataKey_Intrinsic;
def int_aarch64_crypto_aesd   : Crypto_AES_DataKey_Intrinsic;
def int_aarch64_crypto_aesmc  : Crypto_AES_Data_Intrinsic;
def int_aarch64_crypto_aesimc : Crypto_AES_Data_Intrinsic;

// SHA1
def int_aarch64_crypto_sha1c : Crypto_SHA_5Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha1p : Crypto_SHA_5Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha1m : Crypto_SHA_5Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha1h : Crypto_SHA_1Hash_Intrinsic;

def int_aarch64_crypto_sha1su0 : Crypto_SHA_12Schedule_Intrinsic;
def int_aarch64_crypto_sha1su1 : Crypto_SHA_8Schedule_Intrinsic;

// SHA256
def int_aarch64_crypto_sha256h   : Crypto_SHA_8Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha256h2  : Crypto_SHA_8Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha256su0 : Crypto_SHA_8Schedule_Intrinsic;
def int_aarch64_crypto_sha256su1 : Crypto_SHA_12Schedule_Intrinsic;

// SHA3
def int_aarch64_crypto_eor3s : Crypto_SHA3_3Arg_Intrinsic;
def int_aarch64_crypto_eor3u : Crypto_SHA3_3Arg_Intrinsic;
def int_aarch64_crypto_bcaxs : Crypto_SHA3_3Arg_Intrinsic;
def int_aarch64_crypto_bcaxu : Crypto_SHA3_3Arg_Intrinsic;
def int_aarch64_crypto_rax1  : Crypto_SHA3_2Arg_Intrinsic;
def int_aarch64_crypto_xar   : Crypto_SHA3_2ArgImm_Intrinsic;

// SHA512
def int_aarch64_crypto_sha512h   : Crypto_SHA512_3Arg_Intrinsic;
def int_aarch64_crypto_sha512h2  : Crypto_SHA512_3Arg_Intrinsic;
def int_aarch64_crypto_sha512su0 : Crypto_SHA512_2Arg_Intrinsic;
def int_aarch64_crypto_sha512su1 : Crypto_SHA512_3Arg_Intrinsic;

// SM3 & SM4
def int_aarch64_crypto_sm3partw1 : Crypto_SM3_3Vector_Intrinsic;
def int_aarch64_crypto_sm3partw2 :
Crypto_SM3_3Vector_Intrinsic;
def int_aarch64_crypto_sm3ss1  : Crypto_SM3_3Vector_Intrinsic;
def int_aarch64_crypto_sm3tt1a : Crypto_SM3_3VectorIndexed_Intrinsic;
def int_aarch64_crypto_sm3tt1b : Crypto_SM3_3VectorIndexed_Intrinsic;
def int_aarch64_crypto_sm3tt2a : Crypto_SM3_3VectorIndexed_Intrinsic;
def int_aarch64_crypto_sm3tt2b : Crypto_SM3_3VectorIndexed_Intrinsic;
def int_aarch64_crypto_sm4e    : Crypto_SM4_2Vector_Intrinsic;
def int_aarch64_crypto_sm4ekey : Crypto_SM4_2Vector_Intrinsic;

//===----------------------------------------------------------------------===//
// CRC32

// b/h/w/x variants consume 8/16/32/64 bits of data; the "c" forms use the
// Castagnoli polynomial (CRC32C). All take and return a 32-bit accumulator.
let TargetPrefix = "aarch64" in {

def int_aarch64_crc32b  : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
    [IntrNoMem]>;
def int_aarch64_crc32cb : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
    [IntrNoMem]>;
def int_aarch64_crc32h  : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
    [IntrNoMem]>;
def int_aarch64_crc32ch : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
    [IntrNoMem]>;
def int_aarch64_crc32w  : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
    [IntrNoMem]>;
def int_aarch64_crc32cw : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
    [IntrNoMem]>;
def int_aarch64_crc32x  : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i64_ty],
    [IntrNoMem]>;
def int_aarch64_crc32cx : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i64_ty],
    [IntrNoMem]>;
}

//===----------------------------------------------------------------------===//
// Memory Tagging Extensions (MTE) Intrinsics
let TargetPrefix = "aarch64" in {
def int_aarch64_irg  : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_ptr_ty, llvm_i64_ty],
    [IntrNoMem, IntrHasSideEffects]>;
def int_aarch64_addg : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_ptr_ty, llvm_i64_ty],
    [IntrNoMem]>;
def int_aarch64_gmi  :
DefaultAttrsIntrinsic<[llvm_i64_ty], [llvm_ptr_ty, llvm_i64_ty],
    [IntrNoMem]>;
def int_aarch64_ldg  : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_ptr_ty, llvm_ptr_ty],
    [IntrReadMem]>;
def int_aarch64_stg  : DefaultAttrsIntrinsic<[], [llvm_ptr_ty, llvm_ptr_ty],
    [IntrWriteMem]>;
def int_aarch64_subp : DefaultAttrsIntrinsic<[llvm_i64_ty], [llvm_ptr_ty, llvm_ptr_ty],
    [IntrNoMem]>;

// The following are codegen-only intrinsics for stack instrumentation.

// Generate a randomly tagged stack base pointer.
def int_aarch64_irg_sp : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_i64_ty],
    [IntrNoMem, IntrHasSideEffects]>;

// Transfer pointer tag with offset.
// ptr1 = tagp(ptr0, baseptr, tag_offset) returns a pointer where
// * address is the address in ptr0
// * tag is a function of (tag in baseptr, tag_offset).
// ** Beware, this is not the same function as implemented by the ADDG instruction!
// Backend optimizations may change tag_offset; the only guarantee is that calls
// to tagp with the same pair of (baseptr, tag_offset) will produce pointers
// with the same tag value, assuming the set of excluded tags has not changed.
// Address bits in baseptr and tag bits in ptr0 are ignored.
// When offset between ptr0 and baseptr is a compile time constant, this can be emitted as
// ADDG ptr1, baseptr, (ptr0 - baseptr), tag_offset
// It is intended that ptr0 is an alloca address, and baseptr is the direct output of llvm.aarch64.irg.sp.
def int_aarch64_tagp : DefaultAttrsIntrinsic<[llvm_anyptr_ty], [LLVMMatchType<0>, llvm_ptr_ty, llvm_i64_ty],
    [IntrNoMem, ImmArg<ArgIndex<2>>]>;

// Update allocation tags for the memory range to match the tag in the pointer argument.
def int_aarch64_settag : DefaultAttrsIntrinsic<[], [llvm_ptr_ty, llvm_i64_ty],
    [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<0>>, WriteOnly<ArgIndex<0>>]>;

// Update allocation tags for the memory range to match the tag in the pointer argument,
// and set memory contents to zero.
def int_aarch64_settag_zero : DefaultAttrsIntrinsic<[], [llvm_ptr_ty, llvm_i64_ty],
    [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<0>>, WriteOnly<ArgIndex<0>>]>;

// Update allocation tags for 16-aligned, 16-sized memory region, and store a pair 8-byte values.
def int_aarch64_stgp : DefaultAttrsIntrinsic<[], [llvm_ptr_ty, llvm_i64_ty, llvm_i64_ty],
    [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<0>>, WriteOnly<ArgIndex<0>>]>;
}

// Transactional Memory Extension (TME) Intrinsics
let TargetPrefix = "aarch64" in {
def int_aarch64_tstart : GCCBuiltin<"__builtin_arm_tstart">,
                         Intrinsic<[llvm_i64_ty], [], [IntrWillReturn]>;

def int_aarch64_tcommit : GCCBuiltin<"__builtin_arm_tcommit">, Intrinsic<[], [], [IntrWillReturn]>;

def int_aarch64_tcancel : GCCBuiltin<"__builtin_arm_tcancel">,
                          Intrinsic<[], [llvm_i64_ty], [IntrWillReturn, ImmArg<ArgIndex<0>>]>;

def int_aarch64_ttest : GCCBuiltin<"__builtin_arm_ttest">,
                        Intrinsic<[llvm_i64_ty], [],
                                  [IntrNoMem, IntrHasSideEffects, IntrWillReturn]>;

// Armv8.7-A load/store 64-byte intrinsics
// data512 is eight i64s, i.e. the 64 bytes moved by a single LD64B/ST64B.
defvar data512 = !listsplat(llvm_i64_ty, 8);
def int_aarch64_ld64b: Intrinsic<data512, [llvm_ptr_ty]>;
def int_aarch64_st64b: Intrinsic<[], !listconcat([llvm_ptr_ty], data512)>;
def int_aarch64_st64bv: Intrinsic<[llvm_i64_ty], !listconcat([llvm_ptr_ty], data512)>;
def int_aarch64_st64bv0: Intrinsic<[llvm_i64_ty], !listconcat([llvm_ptr_ty], data512)>;

}

// Scalable-vector types used by the SVE intrinsics below.
def llvm_nxv2i1_ty  : LLVMType<nxv2i1>;
def llvm_nxv4i1_ty  : LLVMType<nxv4i1>;
def llvm_nxv8i1_ty  : LLVMType<nxv8i1>;
def llvm_nxv16i1_ty : LLVMType<nxv16i1>;
def llvm_nxv16i8_ty
: LLVMType<nxv16i8>;
def llvm_nxv4i32_ty  : LLVMType<nxv4i32>;
def llvm_nxv2i64_ty  : LLVMType<nxv2i64>;
def llvm_nxv8f16_ty  : LLVMType<nxv8f16>;
def llvm_nxv8bf16_ty : LLVMType<nxv8bf16>;
def llvm_nxv4f32_ty  : LLVMType<nxv4f32>;
def llvm_nxv2f64_ty  : LLVMType<nxv2f64>;

let TargetPrefix = "aarch64" in {  // All intrinsics start with "llvm.aarch64.".

  class AdvSIMD_SVE_Create_2Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [llvm_anyvector_ty, LLVMMatchType<1>],
                            [IntrReadMem]>;

  class AdvSIMD_SVE_Create_3Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [llvm_anyvector_ty, LLVMMatchType<1>, LLVMMatchType<1>],
                            [IntrReadMem]>;

  class AdvSIMD_SVE_Create_4Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [llvm_anyvector_ty, LLVMMatchType<1>, LLVMMatchType<1>,
                             LLVMMatchType<1>],
                            [IntrReadMem]>;

  // Tuple, immediate element index, replacement vector.
  class AdvSIMD_SVE_Set_Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, llvm_i32_ty, llvm_anyvector_ty],
                            [IntrReadMem, ImmArg<ArgIndex<1>>]>;

  class AdvSIMD_SVE_Get_Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty, llvm_i32_ty],
                            [IntrReadMem, IntrArgMemOnly, ImmArg<ArgIndex<1>>]>;

  class AdvSIMD_ManyVec_PredLoad_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty, LLVMPointerToElt<0>],
                            [IntrReadMem, IntrArgMemOnly]>;

  // Predicated loads: governing predicate followed by a pointer to the
  // element type; result is 1-4 vectors of the same type.
  class AdvSIMD_1Vec_PredLoad_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMPointerToElt<0>],
                            [IntrReadMem, IntrArgMemOnly]>;

  class AdvSIMD_2Vec_PredLoad_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty, LLVMMatchType<0>],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMPointerToElt<0>],
                            [IntrReadMem, IntrArgMemOnly]>;

  class AdvSIMD_3Vec_PredLoad_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
LLVMMatchType<0>],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMPointerToElt<0>],
                            [IntrReadMem, IntrArgMemOnly]>;

  class AdvSIMD_4Vec_PredLoad_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>,
                             LLVMMatchType<0>],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMPointerToElt<0>],
                            [IntrReadMem, IntrArgMemOnly]>;

  // WriteFFR variant: IntrInaccessibleMemOrArgMemOnly models the FFR update,
  // which is state outside LLVM-visible memory.
  class AdvSIMD_1Vec_PredLoad_WriteFFR_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMPointerToElt<0>],
                            [IntrInaccessibleMemOrArgMemOnly]>;

  // Predicated stores: 1-4 data vectors, governing predicate, pointer to the
  // element type (marked NoCapture).
  class AdvSIMD_1Vec_PredStore_Intrinsic
    : DefaultAttrsIntrinsic<[],
                            [llvm_anyvector_ty,
                             LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMPointerToElt<0>],
                            [IntrArgMemOnly, NoCapture<ArgIndex<2>>]>;

  class AdvSIMD_2Vec_PredStore_Intrinsic
    : DefaultAttrsIntrinsic<[],
                            [llvm_anyvector_ty, LLVMMatchType<0>,
                             LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, LLVMPointerToElt<0>],
                            [IntrArgMemOnly, NoCapture<ArgIndex<3>>]>;

  class AdvSIMD_3Vec_PredStore_Intrinsic
    : DefaultAttrsIntrinsic<[],
                            [llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>,
                             LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, LLVMPointerToElt<0>],
                            [IntrArgMemOnly, NoCapture<ArgIndex<4>>]>;

  class AdvSIMD_4Vec_PredStore_Intrinsic
    : DefaultAttrsIntrinsic<[],
                            [llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>,
                             LLVMMatchType<0>,
                             LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, LLVMPointerToElt<0>],
                            [IntrArgMemOnly, NoCapture<ArgIndex<5>>]>;

  class AdvSIMD_SVE_Index_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMVectorElementType<0>,
                             LLVMVectorElementType<0>],
                            [IntrNoMem]>;

  class AdvSIMD_Merged1VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMMatchType<0>],
[IntrNoMem]>;

  // Two same-typed vectors plus an immediate index (e.g. a lane number).
  class AdvSIMD_2VectorArgIndexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMMatchType<0>,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  class AdvSIMD_3VectorArgIndexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMMatchType<0>,
                             LLVMMatchType<0>,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  // Governing predicate followed by 1-3 same-typed vector operands.
  class AdvSIMD_Pred1VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMMatchType<0>],
                            [IntrNoMem]>;

  class AdvSIMD_Pred2VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMMatchType<0>,
                             LLVMMatchType<0>],
                            [IntrNoMem]>;

  class AdvSIMD_Pred3VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMMatchType<0>,
                             LLVMMatchType<0>,
                             LLVMMatchType<0>],
                            [IntrNoMem]>;

  // Comparisons return a predicate vector of the same element count.
  class AdvSIMD_SVE_Compare_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             llvm_anyvector_ty,
                             LLVMMatchType<0>],
                            [IntrNoMem]>;

  // Wide form: second operand is always nxv2i64.
  class AdvSIMD_SVE_CompareWide_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             llvm_anyvector_ty,
                             llvm_nxv2i64_ty],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_Saturating_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>],
                            [IntrNoMem]>;

  // Both i32 operands are immediates (pattern and multiplier).
  class AdvSIMD_SVE_SaturatingWithPattern_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             llvm_i32_ty,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<1>>,
ImmArg<ArgIndex<2>>]>;

  // Scalar (type parameter T) variants of the saturating classes above.
  class AdvSIMD_SVE_Saturating_N_Intrinsic<LLVMType T>
    : DefaultAttrsIntrinsic<[T],
                            [T, llvm_anyvector_ty],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<LLVMType T>
    : DefaultAttrsIntrinsic<[T],
                            [T, llvm_i32_ty, llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;

  class AdvSIMD_SVE_CNT_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMVectorOfBitcastsToInt<0>],
                            [LLVMVectorOfBitcastsToInt<0>,
                             LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             llvm_anyvector_ty],
                            [IntrNoMem]>;

  // Reduction with an explicit scalar initial value.
  class AdvSIMD_SVE_ReduceWithInit_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMVectorElementType<0>],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMVectorElementType<0>,
                             llvm_anyvector_ty],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_ShiftByImm_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMMatchType<0>,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  // Shift amount is always an nxv2i64 vector.
  class AdvSIMD_SVE_ShiftWide_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMMatchType<0>,
                             llvm_nxv2i64_ty],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_Unpack_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMSubdivide2VectorType<0>],
                            [IntrNoMem]>;

  // Trailing i32 immediate selects the rotation.
  class AdvSIMD_SVE_CADD_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMMatchType<0>,
                             LLVMMatchType<0>,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  class AdvSIMD_SVE_CMLA_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMMatchType<0>,
                             LLVMMatchType<0>,
                             LLVMMatchType<0>,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<4>>]>;

  class
AdvSIMD_SVE_CMLA_LANE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMMatchType<0>,
                             LLVMMatchType<0>,
                             llvm_i32_ty,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>]>;

  // Merging broadcast: passthru, predicate, scalar element.
  class AdvSIMD_SVE_DUP_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMVectorElementType<0>],
                            [IntrNoMem]>;

  // Unpredicated broadcast of a scalar element.
  class AdvSIMD_SVE_DUP_Unpred_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMVectorElementType<0>],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_DUPQ_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             llvm_i64_ty],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_EXPA_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMVectorOfBitcastsToInt<0>],
                            [IntrNoMem]>;

  // Conversions: result/passthru type and source type vary independently.
  class AdvSIMD_SVE_FCVT_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             llvm_anyvector_ty],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_FCVTZS_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMVectorOfBitcastsToInt<0>,
                             LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             llvm_anyvector_ty],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_INSR_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMVectorElementType<0>],
                            [IntrNoMem]>;

  // The i32 operand is an immediate pattern specifier.
  class AdvSIMD_SVE_PTRUE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<0>>]>;

  class AdvSIMD_SVE_PUNPKHI_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMHalfElementsVectorType<0>],
                            [llvm_anyvector_ty],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_SCALE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMMatchType<0>,
LLVMVectorOfBitcastsToInt<0>],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_SCVTF_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             llvm_anyvector_ty],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_TSMUL_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMVectorOfBitcastsToInt<0>],
                            [IntrNoMem]>;

  // Element-count queries: immediate i32 pattern in, i64 count out.
  class AdvSIMD_SVE_CNTB_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i64_ty],
                            [llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<0>>]>;

  class AdvSIMD_SVE_CNTP_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i64_ty],
                            [llvm_anyvector_ty, LLVMMatchType<0>],
                            [IntrNoMem]>;

  // Dot products accumulate quarter-width element products into the result type.
  class AdvSIMD_SVE_DOT_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMSubdivide4VectorType<0>,
                             LLVMSubdivide4VectorType<0>],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_DOT_Indexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMSubdivide4VectorType<0>,
                             LLVMSubdivide4VectorType<0>,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  // Predicate test producing a single i1.
  class AdvSIMD_SVE_PTEST_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i1_ty],
                            [llvm_anyvector_ty,
                             LLVMMatchType<0>],
                            [IntrNoMem]>;

  class AdvSIMD_SVE_TBL_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMVectorOfBitcastsToInt<0>],
                            [IntrNoMem]>;

  class AdvSIMD_SVE2_TBX_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMMatchType<0>,
                             LLVMVectorOfBitcastsToInt<0>],
                            [IntrNoMem]>;

  // SVE2 "Long" classes: operands are half-width relative to the result.
  class SVE2_1VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMSubdivide2VectorType<0>,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<1>>]>;

  class SVE2_2VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
[LLVMSubdivide2VectorType<0>,
                             LLVMSubdivide2VectorType<0>],
                            [IntrNoMem]>;

  class SVE2_2VectorArgIndexed_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMSubdivide2VectorType<0>,
                             LLVMSubdivide2VectorType<0>,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  // "Wide": first operand matches the result, second is half-width.
  class SVE2_2VectorArg_Wide_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMSubdivide2VectorType<0>],
                            [IntrNoMem]>;

  class SVE2_2VectorArg_Pred_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                             LLVMMatchType<0>,
                             LLVMSubdivide2VectorType<0>],
                            [IntrNoMem]>;

  class SVE2_3VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMSubdivide2VectorType<0>,
                             LLVMSubdivide2VectorType<0>],
                            [IntrNoMem]>;

  class SVE2_3VectorArgIndexed_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMSubdivide2VectorType<0>,
                             LLVMSubdivide2VectorType<0>,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  // "Narrowing": result is half-width relative to the (any)vector operand.
  class SVE2_1VectorArg_Narrowing_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMSubdivide2VectorType<0>],
                            [llvm_anyvector_ty],
                            [IntrNoMem]>;

  class SVE2_Merged1VectorArg_Narrowing_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMSubdivide2VectorType<0>],
                            [LLVMSubdivide2VectorType<0>,
                             llvm_anyvector_ty],
                            [IntrNoMem]>;

  class SVE2_2VectorArg_Narrowing_Intrinsic
    : DefaultAttrsIntrinsic<
        [LLVMSubdivide2VectorType<0>],
        [llvm_anyvector_ty, LLVMMatchType<0>],
        [IntrNoMem]>;

  class SVE2_Merged2VectorArg_Narrowing_Intrinsic
    : DefaultAttrsIntrinsic<
        [LLVMSubdivide2VectorType<0>],
        [LLVMSubdivide2VectorType<0>, llvm_anyvector_ty, LLVMMatchType<0>],
        [IntrNoMem]>;

  class SVE2_1VectorArg_Imm_Narrowing_Intrinsic
    :
DefaultAttrsIntrinsic<[LLVMSubdivide2VectorType<0>],
                            [llvm_anyvector_ty, llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<1>>]>;

  class SVE2_2VectorArg_Imm_Narrowing_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMSubdivide2VectorType<0>],
                            [LLVMSubdivide2VectorType<0>, llvm_anyvector_ty,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  // NOTE(review): no explicit memory attributes here, so the default
  // (conservative) behavior applies — confirm this is intended.
  class SVE2_CONFLICT_DETECT_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMAnyPointerType<llvm_any_ty>,
                             LLVMMatchType<1>]>;

  class SVE2_3VectorArg_Indexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMSubdivide2VectorType<0>,
                             LLVMSubdivide2VectorType<0>,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  class AdvSIMD_SVE_CDOT_LANE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>,
                             LLVMSubdivide4VectorType<0>,
                             LLVMSubdivide4VectorType<0>,
                             llvm_i32_ty,
                             llvm_i32_ty],
                            [IntrNoMem, ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>]>;

  // NOTE: There is no relationship between these intrinsics beyond an attempt
  // to reuse currently identical class definitions.
  class AdvSIMD_SVE_LOGB_Intrinsic  : AdvSIMD_SVE_CNT_Intrinsic;
  class AdvSIMD_SVE2_CADD_Intrinsic : AdvSIMD_2VectorArgIndexed_Intrinsic;
  class AdvSIMD_SVE2_CMLA_Intrinsic : AdvSIMD_3VectorArgIndexed_Intrinsic;

  // This class of intrinsics are not intended to be useful within LLVM IR but
  // are instead here to support some of the more rigid parts of the ACLE.
  class Builtin_SVCVT<LLVMType OUT, LLVMType PRED, LLVMType IN>
    : DefaultAttrsIntrinsic<[OUT], [OUT, PRED, IN], [IntrNoMem]>;
}

//===----------------------------------------------------------------------===//
// SVE

let TargetPrefix = "aarch64" in {  // All intrinsics start with "llvm.aarch64.".
// Predicated reduction to a scalar of the element type.
class AdvSIMD_SVE_Reduce_Intrinsic
  : DefaultAttrsIntrinsic<[LLVMVectorElementType<0>],
                          [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                           llvm_anyvector_ty],
                          [IntrNoMem]>;

// Add reduction returning a 64-bit scalar regardless of element type.
class AdvSIMD_SVE_SADDV_Reduce_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_i64_ty],
                          [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                           llvm_anyvector_ty],
                          [IntrNoMem]>;

class AdvSIMD_SVE_WHILE_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                          [llvm_anyint_ty, LLVMMatchType<1>],
                          [IntrNoMem]>;

// Gather loads: predicate, scalar base pointer, vector of offsets.
// WriteFFR variants use IntrInaccessibleMemOrArgMemOnly to model the FFR update.
class AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                          [
                            LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                            LLVMPointerToElt<0>,
                            LLVMScalarOrSameVectorWidth<0, llvm_i64_ty>
                          ],
                          [IntrReadMem, IntrArgMemOnly]>;

class AdvSIMD_GatherLoad_SV_64b_Offsets_WriteFFR_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                          [
                            LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                            LLVMPointerToElt<0>,
                            LLVMScalarOrSameVectorWidth<0, llvm_i64_ty>
                          ],
                          [IntrInaccessibleMemOrArgMemOnly]>;

class AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                          [
                            LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                            LLVMPointerToElt<0>,
                            LLVMScalarOrSameVectorWidth<0, llvm_i32_ty>
                          ],
                          [IntrReadMem, IntrArgMemOnly]>;

class AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                          [
                            LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                            LLVMPointerToElt<0>,
                            LLVMScalarOrSameVectorWidth<0, llvm_i32_ty>
                          ],
                          [IntrInaccessibleMemOrArgMemOnly]>;

// Gather: vector of base addresses plus a scalar offset.
class AdvSIMD_GatherLoad_VS_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                          [
                            LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                            llvm_anyvector_ty,
                            llvm_i64_ty
                          ],
                          [IntrReadMem]>;

class AdvSIMD_GatherLoad_VS_WriteFFR_Intrinsic
  :
DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                          [
                            LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                            llvm_anyvector_ty,
                            llvm_i64_ty
                          ],
                          [IntrInaccessibleMemOrArgMemOnly]>;

// Scatter stores mirror the gather-load classes, with the data vector first.
class AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic
  : DefaultAttrsIntrinsic<[],
                          [
                            llvm_anyvector_ty,
                            LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                            LLVMPointerToElt<0>,
                            LLVMScalarOrSameVectorWidth<0, llvm_i64_ty>
                          ],
                          [IntrWriteMem, IntrArgMemOnly]>;

class AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic
  : DefaultAttrsIntrinsic<[],
                          [
                            llvm_anyvector_ty,
                            LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                            LLVMPointerToElt<0>,
                            LLVMScalarOrSameVectorWidth<0, llvm_i32_ty>
                          ],
                          [IntrWriteMem, IntrArgMemOnly]>;

class AdvSIMD_ScatterStore_VS_Intrinsic
  : DefaultAttrsIntrinsic<[],
                          [
                            llvm_anyvector_ty,
                            LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                            llvm_anyvector_ty, llvm_i64_ty
                          ],
                          [IntrWriteMem]>;


class SVE_gather_prf_SV
  : DefaultAttrsIntrinsic<[],
                          [
                            LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, // Predicate
                            llvm_ptr_ty,                                // Base address
                            llvm_anyvector_ty,                          // Offsets
                            llvm_i32_ty                                 // Prfop
                          ],
                          [IntrInaccessibleMemOrArgMemOnly, NoCapture<ArgIndex<1>>, ImmArg<ArgIndex<3>>]>;

class SVE_gather_prf_VS
  : DefaultAttrsIntrinsic<[],
                          [
                            LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, // Predicate
                            llvm_anyvector_ty,                          // Base addresses
                            llvm_i64_ty,                                // Scalar offset
                            llvm_i32_ty                                 // Prfop
                          ],
                          [IntrInaccessibleMemOrArgMemOnly, ImmArg<ArgIndex<3>>]>;

// Accumulator plus two quarter-width-element operand vectors.
class SVE_MatMul_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                          [LLVMMatchType<0>, LLVMSubdivide4VectorType<0>, LLVMSubdivide4VectorType<0>],
                          [IntrNoMem]>;

// f32 accumulator with two bf16 operand vectors.
class SVE_4Vec_BF16
  : DefaultAttrsIntrinsic<[llvm_nxv4f32_ty],
                          [llvm_nxv4f32_ty, llvm_nxv8bf16_ty, llvm_nxv8bf16_ty],
                          [IntrNoMem]>;

// f32 accumulator, two bf16 operands, immediate lane index.
class SVE_4Vec_BF16_Indexed
  : DefaultAttrsIntrinsic<[llvm_nxv4f32_ty],
                          [llvm_nxv4f32_ty, llvm_nxv8bf16_ty, llvm_nxv8bf16_ty, llvm_i64_ty],
                          [IntrNoMem, ImmArg<ArgIndex<3>>]>;

//
// Vector tuple creation intrinsics (ACLE)
//

def int_aarch64_sve_tuple_create2 : AdvSIMD_SVE_Create_2Vector_Tuple;
def int_aarch64_sve_tuple_create3 : AdvSIMD_SVE_Create_3Vector_Tuple;
def int_aarch64_sve_tuple_create4 : AdvSIMD_SVE_Create_4Vector_Tuple;

//
// Vector tuple insertion/extraction intrinsics (ACLE)
//

def int_aarch64_sve_tuple_get : AdvSIMD_SVE_Get_Vector_Tuple;
def int_aarch64_sve_tuple_set : AdvSIMD_SVE_Set_Vector_Tuple;

//
// Loads
//

def int_aarch64_sve_ld1 : AdvSIMD_1Vec_PredLoad_Intrinsic;

def int_aarch64_sve_ld2 : AdvSIMD_ManyVec_PredLoad_Intrinsic;
def int_aarch64_sve_ld3 : AdvSIMD_ManyVec_PredLoad_Intrinsic;
def int_aarch64_sve_ld4 : AdvSIMD_ManyVec_PredLoad_Intrinsic;

// _sret variants return the loaded vectors as multiple results instead of a tuple.
def int_aarch64_sve_ld2_sret : AdvSIMD_2Vec_PredLoad_Intrinsic;
def int_aarch64_sve_ld3_sret : AdvSIMD_3Vec_PredLoad_Intrinsic;
def int_aarch64_sve_ld4_sret : AdvSIMD_4Vec_PredLoad_Intrinsic;

def int_aarch64_sve_ldnt1 : AdvSIMD_1Vec_PredLoad_Intrinsic;
def int_aarch64_sve_ldnf1 : AdvSIMD_1Vec_PredLoad_WriteFFR_Intrinsic;
def int_aarch64_sve_ldff1 : AdvSIMD_1Vec_PredLoad_WriteFFR_Intrinsic;

def int_aarch64_sve_ld1rq : AdvSIMD_1Vec_PredLoad_Intrinsic;
def int_aarch64_sve_ld1ro : AdvSIMD_1Vec_PredLoad_Intrinsic;

//
// Stores
//

def int_aarch64_sve_st1 : AdvSIMD_1Vec_PredStore_Intrinsic;
def int_aarch64_sve_st2 : AdvSIMD_2Vec_PredStore_Intrinsic;
def int_aarch64_sve_st3 : AdvSIMD_3Vec_PredStore_Intrinsic;
def int_aarch64_sve_st4 : AdvSIMD_4Vec_PredStore_Intrinsic;

def int_aarch64_sve_stnt1 : AdvSIMD_1Vec_PredStore_Intrinsic;

//
// Prefetches
//

def
int_aarch64_sve_prf
  : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, llvm_ptr_ty, llvm_i32_ty],
                          [IntrArgMemOnly, ImmArg<ArgIndex<2>>]>;

// Scalar + 32-bit scaled offset vector, zero extend, packed and
// unpacked.
def int_aarch64_sve_prfb_gather_uxtw_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfh_gather_uxtw_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfw_gather_uxtw_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfd_gather_uxtw_index : SVE_gather_prf_SV;

// Scalar + 32-bit scaled offset vector, sign extend, packed and
// unpacked.
def int_aarch64_sve_prfb_gather_sxtw_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfw_gather_sxtw_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfh_gather_sxtw_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfd_gather_sxtw_index : SVE_gather_prf_SV;

// Scalar + 64-bit scaled offset vector.
def int_aarch64_sve_prfb_gather_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfh_gather_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfw_gather_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfd_gather_index : SVE_gather_prf_SV;

// Vector + scalar.
def int_aarch64_sve_prfb_gather_scalar_offset : SVE_gather_prf_VS;
def int_aarch64_sve_prfh_gather_scalar_offset : SVE_gather_prf_VS;
def int_aarch64_sve_prfw_gather_scalar_offset : SVE_gather_prf_VS;
def int_aarch64_sve_prfd_gather_scalar_offset : SVE_gather_prf_VS;

//
// Scalar to vector operations
//

def int_aarch64_sve_dup   : AdvSIMD_SVE_DUP_Intrinsic;
def int_aarch64_sve_dup_x : AdvSIMD_SVE_DUP_Unpred_Intrinsic;

def int_aarch64_sve_index : AdvSIMD_SVE_Index_Intrinsic;

//
// Address calculation
//

def int_aarch64_sve_adrb : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_adrh : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_adrw : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_adrd : AdvSIMD_2VectorArg_Intrinsic;

//
// Integer arithmetic
//

def int_aarch64_sve_add  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sub  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_subr : AdvSIMD_Pred2VectorArg_Intrinsic;

def int_aarch64_sve_pmul : AdvSIMD_2VectorArg_Intrinsic;

def int_aarch64_sve_mul      : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_mul_lane : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_smulh    : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_umulh    : AdvSIMD_Pred2VectorArg_Intrinsic;

def int_aarch64_sve_sdiv  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_udiv  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sdivr : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_udivr : AdvSIMD_Pred2VectorArg_Intrinsic;

def int_aarch64_sve_smax : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_umax : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_smin : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_umin : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sabd :
AdvSIMD_Pred2VectorArg_Intrinsic; 1656def int_aarch64_sve_uabd : AdvSIMD_Pred2VectorArg_Intrinsic; 1657 1658def int_aarch64_sve_mad : AdvSIMD_Pred3VectorArg_Intrinsic; 1659def int_aarch64_sve_msb : AdvSIMD_Pred3VectorArg_Intrinsic; 1660def int_aarch64_sve_mla : AdvSIMD_Pred3VectorArg_Intrinsic; 1661def int_aarch64_sve_mla_lane : AdvSIMD_3VectorArgIndexed_Intrinsic; 1662def int_aarch64_sve_mls : AdvSIMD_Pred3VectorArg_Intrinsic; 1663def int_aarch64_sve_mls_lane : AdvSIMD_3VectorArgIndexed_Intrinsic; 1664 1665def int_aarch64_sve_saddv : AdvSIMD_SVE_SADDV_Reduce_Intrinsic; 1666def int_aarch64_sve_uaddv : AdvSIMD_SVE_SADDV_Reduce_Intrinsic; 1667 1668def int_aarch64_sve_smaxv : AdvSIMD_SVE_Reduce_Intrinsic; 1669def int_aarch64_sve_umaxv : AdvSIMD_SVE_Reduce_Intrinsic; 1670def int_aarch64_sve_sminv : AdvSIMD_SVE_Reduce_Intrinsic; 1671def int_aarch64_sve_uminv : AdvSIMD_SVE_Reduce_Intrinsic; 1672 1673def int_aarch64_sve_orv : AdvSIMD_SVE_Reduce_Intrinsic; 1674def int_aarch64_sve_eorv : AdvSIMD_SVE_Reduce_Intrinsic; 1675def int_aarch64_sve_andv : AdvSIMD_SVE_Reduce_Intrinsic; 1676 1677def int_aarch64_sve_abs : AdvSIMD_Merged1VectorArg_Intrinsic; 1678def int_aarch64_sve_neg : AdvSIMD_Merged1VectorArg_Intrinsic; 1679 1680def int_aarch64_sve_sdot : AdvSIMD_SVE_DOT_Intrinsic; 1681def int_aarch64_sve_sdot_lane : AdvSIMD_SVE_DOT_Indexed_Intrinsic; 1682 1683def int_aarch64_sve_udot : AdvSIMD_SVE_DOT_Intrinsic; 1684def int_aarch64_sve_udot_lane : AdvSIMD_SVE_DOT_Indexed_Intrinsic; 1685 1686def int_aarch64_sve_sqadd_x : AdvSIMD_2VectorArg_Intrinsic; 1687def int_aarch64_sve_sqsub_x : AdvSIMD_2VectorArg_Intrinsic; 1688def int_aarch64_sve_uqadd_x : AdvSIMD_2VectorArg_Intrinsic; 1689def int_aarch64_sve_uqsub_x : AdvSIMD_2VectorArg_Intrinsic; 1690 1691// Shifts 1692 1693def int_aarch64_sve_asr : AdvSIMD_Pred2VectorArg_Intrinsic; 1694def int_aarch64_sve_asr_wide : AdvSIMD_SVE_ShiftWide_Intrinsic; 1695def int_aarch64_sve_asrd : AdvSIMD_SVE_ShiftByImm_Intrinsic; 1696def 
int_aarch64_sve_insr : AdvSIMD_SVE_INSR_Intrinsic; 1697def int_aarch64_sve_lsl : AdvSIMD_Pred2VectorArg_Intrinsic; 1698def int_aarch64_sve_lsl_wide : AdvSIMD_SVE_ShiftWide_Intrinsic; 1699def int_aarch64_sve_lsr : AdvSIMD_Pred2VectorArg_Intrinsic; 1700def int_aarch64_sve_lsr_wide : AdvSIMD_SVE_ShiftWide_Intrinsic; 1701 1702// 1703// Integer comparisons 1704// 1705 1706def int_aarch64_sve_cmpeq : AdvSIMD_SVE_Compare_Intrinsic; 1707def int_aarch64_sve_cmpge : AdvSIMD_SVE_Compare_Intrinsic; 1708def int_aarch64_sve_cmpgt : AdvSIMD_SVE_Compare_Intrinsic; 1709def int_aarch64_sve_cmphi : AdvSIMD_SVE_Compare_Intrinsic; 1710def int_aarch64_sve_cmphs : AdvSIMD_SVE_Compare_Intrinsic; 1711def int_aarch64_sve_cmpne : AdvSIMD_SVE_Compare_Intrinsic; 1712 1713def int_aarch64_sve_cmpeq_wide : AdvSIMD_SVE_CompareWide_Intrinsic; 1714def int_aarch64_sve_cmpge_wide : AdvSIMD_SVE_CompareWide_Intrinsic; 1715def int_aarch64_sve_cmpgt_wide : AdvSIMD_SVE_CompareWide_Intrinsic; 1716def int_aarch64_sve_cmphi_wide : AdvSIMD_SVE_CompareWide_Intrinsic; 1717def int_aarch64_sve_cmphs_wide : AdvSIMD_SVE_CompareWide_Intrinsic; 1718def int_aarch64_sve_cmple_wide : AdvSIMD_SVE_CompareWide_Intrinsic; 1719def int_aarch64_sve_cmplo_wide : AdvSIMD_SVE_CompareWide_Intrinsic; 1720def int_aarch64_sve_cmpls_wide : AdvSIMD_SVE_CompareWide_Intrinsic; 1721def int_aarch64_sve_cmplt_wide : AdvSIMD_SVE_CompareWide_Intrinsic; 1722def int_aarch64_sve_cmpne_wide : AdvSIMD_SVE_CompareWide_Intrinsic; 1723 1724// 1725// Counting bits 1726// 1727 1728def int_aarch64_sve_cls : AdvSIMD_Merged1VectorArg_Intrinsic; 1729def int_aarch64_sve_clz : AdvSIMD_Merged1VectorArg_Intrinsic; 1730def int_aarch64_sve_cnt : AdvSIMD_SVE_CNT_Intrinsic; 1731 1732// 1733// Counting elements 1734// 1735 1736def int_aarch64_sve_cntb : AdvSIMD_SVE_CNTB_Intrinsic; 1737def int_aarch64_sve_cnth : AdvSIMD_SVE_CNTB_Intrinsic; 1738def int_aarch64_sve_cntw : AdvSIMD_SVE_CNTB_Intrinsic; 1739def int_aarch64_sve_cntd : AdvSIMD_SVE_CNTB_Intrinsic; 1740 
def int_aarch64_sve_cntp : AdvSIMD_SVE_CNTP_Intrinsic;

//
// FFR manipulation
//

def int_aarch64_sve_rdffr : GCCBuiltin<"__builtin_sve_svrdffr">,
    DefaultAttrsIntrinsic<[llvm_nxv16i1_ty], [],
                          [IntrReadMem, IntrInaccessibleMemOnly]>;
def int_aarch64_sve_rdffr_z : GCCBuiltin<"__builtin_sve_svrdffr_z">,
    DefaultAttrsIntrinsic<[llvm_nxv16i1_ty], [llvm_nxv16i1_ty],
                          [IntrReadMem, IntrInaccessibleMemOnly]>;
def int_aarch64_sve_setffr : GCCBuiltin<"__builtin_sve_svsetffr">,
    DefaultAttrsIntrinsic<[], [],
                          [IntrWriteMem, IntrInaccessibleMemOnly]>;
def int_aarch64_sve_wrffr : GCCBuiltin<"__builtin_sve_svwrffr">,
    DefaultAttrsIntrinsic<[], [llvm_nxv16i1_ty],
                          [IntrWriteMem, IntrInaccessibleMemOnly]>;

//
// Saturating scalar arithmetic
//

def int_aarch64_sve_sqdech : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_sqdecw : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_sqdecd : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_sqdecp : AdvSIMD_SVE_Saturating_Intrinsic;

def int_aarch64_sve_sqdecb_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_sqdecb_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_sqdech_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_sqdech_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_sqdecw_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_sqdecw_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_sqdecd_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_sqdecd_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_sqdecp_n32 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_sqdecp_n64 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i64_ty>;

def int_aarch64_sve_sqinch : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_sqincw : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_sqincd : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_sqincp : AdvSIMD_SVE_Saturating_Intrinsic;

def int_aarch64_sve_sqincb_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_sqincb_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_sqinch_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_sqinch_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_sqincw_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_sqincw_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_sqincd_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_sqincd_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_sqincp_n32 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_sqincp_n64 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i64_ty>;

def int_aarch64_sve_uqdech : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_uqdecw : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_uqdecd : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_uqdecp : AdvSIMD_SVE_Saturating_Intrinsic;

def int_aarch64_sve_uqdecb_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqdecb_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_uqdech_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqdech_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_uqdecw_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqdecw_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_uqdecd_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqdecd_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_uqdecp_n32 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqdecp_n64 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i64_ty>;

def int_aarch64_sve_uqinch : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_uqincw : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_uqincd : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_uqincp : AdvSIMD_SVE_Saturating_Intrinsic;

def int_aarch64_sve_uqincb_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqincb_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_uqinch_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqinch_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_uqincw_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqincw_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_uqincd_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqincd_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_uqincp_n32 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqincp_n64 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i64_ty>;

//
// Reversal
//

def int_aarch64_sve_rbit : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_revb : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_revh : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_revw : AdvSIMD_Merged1VectorArg_Intrinsic;

//
// Permutations and selection
//

def int_aarch64_sve_clasta : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_clasta_n : AdvSIMD_SVE_ReduceWithInit_Intrinsic;
def int_aarch64_sve_clastb : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_clastb_n : AdvSIMD_SVE_ReduceWithInit_Intrinsic;
def int_aarch64_sve_compact : AdvSIMD_Pred1VectorArg_Intrinsic;
def int_aarch64_sve_dupq_lane : AdvSIMD_SVE_DUPQ_Intrinsic;
def int_aarch64_sve_ext : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_sel : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_lasta : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_lastb : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_rev : AdvSIMD_1VectorArg_Intrinsic;
def int_aarch64_sve_splice : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sunpkhi : AdvSIMD_SVE_Unpack_Intrinsic;
def int_aarch64_sve_sunpklo : AdvSIMD_SVE_Unpack_Intrinsic;
def int_aarch64_sve_tbl : AdvSIMD_SVE_TBL_Intrinsic;
def int_aarch64_sve_trn1 : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_trn2 : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_trn1q : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_trn2q : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_uunpkhi : AdvSIMD_SVE_Unpack_Intrinsic;
def int_aarch64_sve_uunpklo : AdvSIMD_SVE_Unpack_Intrinsic;
def int_aarch64_sve_uzp1 : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_uzp2 : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_uzp1q : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_uzp2q : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_zip1 : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_zip2 : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_zip1q : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_zip2q : AdvSIMD_2VectorArg_Intrinsic;

//
// Logical operations
//

def int_aarch64_sve_and : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_bic : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_cnot : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_eor : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_not : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_orr : AdvSIMD_Pred2VectorArg_Intrinsic;

//
// Conversion
//

def int_aarch64_sve_sxtb : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_sxth : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_sxtw : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_uxtb : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_uxth : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_uxtw : AdvSIMD_Merged1VectorArg_Intrinsic;

//
// While comparisons
//

def int_aarch64_sve_whilele : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilelo : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilels : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilelt : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilege : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilegt : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilehs : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilehi : AdvSIMD_SVE_WHILE_Intrinsic;

//
// Floating-point arithmetic
//

def int_aarch64_sve_fabd : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fabs : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_fadd : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fcadd : AdvSIMD_SVE_CADD_Intrinsic;
def int_aarch64_sve_fcmla : AdvSIMD_SVE_CMLA_Intrinsic;
def int_aarch64_sve_fcmla_lane : AdvSIMD_SVE_CMLA_LANE_Intrinsic;
def int_aarch64_sve_fdiv : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fdivr : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fexpa_x : AdvSIMD_SVE_EXPA_Intrinsic;
def int_aarch64_sve_fmad : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fmax : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fmaxnm : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fmin : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fminnm : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fmla : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fmla_lane : AdvSIMD_3VectorArgIndexed_Intrinsic;
def int_aarch64_sve_fmls : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fmls_lane : AdvSIMD_3VectorArgIndexed_Intrinsic;
def int_aarch64_sve_fmsb : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fmul : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fmulx : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fneg : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_fmul_lane : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_fnmad : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fnmla : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fnmls : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fnmsb : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_frecpe_x : AdvSIMD_1VectorArg_Intrinsic;
def int_aarch64_sve_frecps_x : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_frecpx : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frinta : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frinti : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frintm : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frintn : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frintp : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frintx : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frintz : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frsqrte_x : AdvSIMD_1VectorArg_Intrinsic;
def int_aarch64_sve_frsqrts_x : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_fscale : AdvSIMD_SVE_SCALE_Intrinsic;
def int_aarch64_sve_fsqrt : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_fsub : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fsubr : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_ftmad_x : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_ftsmul_x : AdvSIMD_SVE_TSMUL_Intrinsic;
def int_aarch64_sve_ftssel_x : AdvSIMD_SVE_TSMUL_Intrinsic;

//
// Floating-point reductions
//

def int_aarch64_sve_fadda : AdvSIMD_SVE_ReduceWithInit_Intrinsic;
def int_aarch64_sve_faddv : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_fmaxv : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_fmaxnmv : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_fminv : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_fminnmv : AdvSIMD_SVE_Reduce_Intrinsic;

//
// Floating-point conversions
//

def int_aarch64_sve_fcvt : AdvSIMD_SVE_FCVT_Intrinsic;
def int_aarch64_sve_fcvtzs : AdvSIMD_SVE_FCVTZS_Intrinsic;
def int_aarch64_sve_fcvtzu : AdvSIMD_SVE_FCVTZS_Intrinsic;
def int_aarch64_sve_scvtf : AdvSIMD_SVE_SCVTF_Intrinsic;
def int_aarch64_sve_ucvtf : AdvSIMD_SVE_SCVTF_Intrinsic;

//
// Floating-point comparisons
//

def int_aarch64_sve_facge : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_facgt : AdvSIMD_SVE_Compare_Intrinsic;

def int_aarch64_sve_fcmpeq : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_fcmpge : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_fcmpgt : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_fcmpne : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_fcmpuo : AdvSIMD_SVE_Compare_Intrinsic;

def int_aarch64_sve_fcvtzs_i32f16 : Builtin_SVCVT<llvm_nxv4i32_ty, llvm_nxv4i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtzs_i32f64 : Builtin_SVCVT<llvm_nxv4i32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;
def int_aarch64_sve_fcvtzs_i64f16 : Builtin_SVCVT<llvm_nxv2i64_ty, llvm_nxv2i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtzs_i64f32 : Builtin_SVCVT<llvm_nxv2i64_ty, llvm_nxv2i1_ty, llvm_nxv4f32_ty>;

def int_aarch64_sve_fcvt_bf16f32 : Builtin_SVCVT<llvm_nxv8bf16_ty, llvm_nxv8i1_ty, llvm_nxv4f32_ty>;
def int_aarch64_sve_fcvtnt_bf16f32 : Builtin_SVCVT<llvm_nxv8bf16_ty, llvm_nxv8i1_ty, llvm_nxv4f32_ty>;

def int_aarch64_sve_fcvtzu_i32f16 : Builtin_SVCVT<llvm_nxv4i32_ty, llvm_nxv4i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtzu_i32f64 : Builtin_SVCVT<llvm_nxv4i32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;
def int_aarch64_sve_fcvtzu_i64f16 : Builtin_SVCVT<llvm_nxv2i64_ty, llvm_nxv2i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtzu_i64f32 : Builtin_SVCVT<llvm_nxv2i64_ty, llvm_nxv2i1_ty, llvm_nxv4f32_ty>;

def int_aarch64_sve_fcvt_f16f32 : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv4i1_ty, llvm_nxv4f32_ty>;
def int_aarch64_sve_fcvt_f16f64 : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;
def int_aarch64_sve_fcvt_f32f64 : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;

def int_aarch64_sve_fcvt_f32f16 : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv4i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvt_f64f16 : Builtin_SVCVT<llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvt_f64f32 : Builtin_SVCVT<llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv4f32_ty>;

def int_aarch64_sve_fcvtlt_f32f16 : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv4i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtlt_f64f32 : Builtin_SVCVT<llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv4f32_ty>;
def int_aarch64_sve_fcvtnt_f16f32 : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv4i1_ty, llvm_nxv4f32_ty>;
def int_aarch64_sve_fcvtnt_f32f64 : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;

def int_aarch64_sve_fcvtx_f32f64 : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;
def int_aarch64_sve_fcvtxnt_f32f64 : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;

def int_aarch64_sve_scvtf_f16i32 : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv4i1_ty, llvm_nxv4i32_ty>;
def int_aarch64_sve_scvtf_f16i64 : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv2i1_ty, llvm_nxv2i64_ty>;
def int_aarch64_sve_scvtf_f32i64 : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2i64_ty>;
def int_aarch64_sve_scvtf_f64i32 : Builtin_SVCVT<llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv4i32_ty>;

def int_aarch64_sve_ucvtf_f16i32 : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv4i1_ty, llvm_nxv4i32_ty>;
def int_aarch64_sve_ucvtf_f16i64 : Builtin_SVCVT<llvm_nxv8f16_ty, llvm_nxv2i1_ty, llvm_nxv2i64_ty>;
def int_aarch64_sve_ucvtf_f32i64 : Builtin_SVCVT<llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2i64_ty>;
def int_aarch64_sve_ucvtf_f64i32 : Builtin_SVCVT<llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv4i32_ty>;

//
// Predicate creation
//

def int_aarch64_sve_ptrue : AdvSIMD_SVE_PTRUE_Intrinsic;

//
// Predicate operations
//

def int_aarch64_sve_and_z : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_bic_z : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_brka : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_brka_z : AdvSIMD_Pred1VectorArg_Intrinsic;
def int_aarch64_sve_brkb : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_brkb_z : AdvSIMD_Pred1VectorArg_Intrinsic;
def int_aarch64_sve_brkn_z : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_brkpa_z : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_brkpb_z : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_eor_z : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_nand_z : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_nor_z : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_orn_z : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_orr_z : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_pfirst : AdvSIMD_Pred1VectorArg_Intrinsic;
def int_aarch64_sve_pnext : AdvSIMD_Pred1VectorArg_Intrinsic;
def int_aarch64_sve_punpkhi : AdvSIMD_SVE_PUNPKHI_Intrinsic;
def int_aarch64_sve_punpklo : AdvSIMD_SVE_PUNPKHI_Intrinsic;

//
// Testing predicates
//

def int_aarch64_sve_ptest_any : AdvSIMD_SVE_PTEST_Intrinsic;
def int_aarch64_sve_ptest_first : AdvSIMD_SVE_PTEST_Intrinsic;
def int_aarch64_sve_ptest_last : AdvSIMD_SVE_PTEST_Intrinsic;

//
// Reinterpreting data
//

def int_aarch64_sve_convert_from_svbool : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                                                                [llvm_nxv16i1_ty],
                                                                [IntrNoMem]>;

def int_aarch64_sve_convert_to_svbool : DefaultAttrsIntrinsic<[llvm_nxv16i1_ty],
                                                              [llvm_anyvector_ty],
                                                              [IntrNoMem]>;

//
// Gather loads: scalar base + vector offsets
//

// 64 bit unscaled offsets
def int_aarch64_sve_ld1_gather : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic;

// 64 bit scaled offsets
def int_aarch64_sve_ld1_gather_index : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic;

// 32 bit unscaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
def int_aarch64_sve_ld1_gather_sxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;
def int_aarch64_sve_ld1_gather_uxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;

// 32 bit scaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
def int_aarch64_sve_ld1_gather_sxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;
def int_aarch64_sve_ld1_gather_uxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;
//
// Gather loads: vector base + scalar offset
//

def int_aarch64_sve_ld1_gather_scalar_offset : AdvSIMD_GatherLoad_VS_Intrinsic;

//
// First-faulting gather loads: scalar base + vector offsets
//

// 64 bit unscaled offsets
def int_aarch64_sve_ldff1_gather : AdvSIMD_GatherLoad_SV_64b_Offsets_WriteFFR_Intrinsic;

// 64 bit scaled offsets
def int_aarch64_sve_ldff1_gather_index : AdvSIMD_GatherLoad_SV_64b_Offsets_WriteFFR_Intrinsic;

// 32 bit unscaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
def int_aarch64_sve_ldff1_gather_sxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic;
def int_aarch64_sve_ldff1_gather_uxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic;

// 32 bit scaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
def int_aarch64_sve_ldff1_gather_sxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic;
def int_aarch64_sve_ldff1_gather_uxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic;

//
// First-faulting gather loads: vector base + scalar offset
//

def int_aarch64_sve_ldff1_gather_scalar_offset : AdvSIMD_GatherLoad_VS_WriteFFR_Intrinsic;

//
// Non-temporal gather loads: scalar base + vector offsets
//

// 64 bit unscaled offsets
def int_aarch64_sve_ldnt1_gather : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic;

// 64 bit indices
def int_aarch64_sve_ldnt1_gather_index : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic;

// 32 bit unscaled offsets, zero (uxtw) extended to 64 bits
def int_aarch64_sve_ldnt1_gather_uxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;

//
// Non-temporal gather loads: vector base + scalar offset
//

def int_aarch64_sve_ldnt1_gather_scalar_offset : AdvSIMD_GatherLoad_VS_Intrinsic;

//
// Scatter stores: scalar base + vector offsets
//

// 64 bit unscaled offsets
def int_aarch64_sve_st1_scatter : AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic;

// 64 bit scaled offsets
def int_aarch64_sve_st1_scatter_index
    : AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic;

// 32 bit unscaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
def int_aarch64_sve_st1_scatter_sxtw
    : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic;

def int_aarch64_sve_st1_scatter_uxtw
    : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic;

// 32 bit scaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
def int_aarch64_sve_st1_scatter_sxtw_index
    : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic;

def int_aarch64_sve_st1_scatter_uxtw_index
    : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic;

//
// Scatter stores: vector base + scalar offset
//

def int_aarch64_sve_st1_scatter_scalar_offset : AdvSIMD_ScatterStore_VS_Intrinsic;

//
// Non-temporal scatter stores: scalar base + vector offsets
//

// 64 bit unscaled offsets
def int_aarch64_sve_stnt1_scatter : AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic;

// 64 bit indices
def int_aarch64_sve_stnt1_scatter_index
    : AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic;

// 32 bit unscaled offsets, zero (uxtw) extended to 64 bits
def int_aarch64_sve_stnt1_scatter_uxtw : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic;

//
// Non-temporal scatter stores: vector base + scalar offset
//

def int_aarch64_sve_stnt1_scatter_scalar_offset : AdvSIMD_ScatterStore_VS_Intrinsic;

//
// SVE2 - Uniform DSP operations
//

def int_aarch64_sve_saba : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_shadd : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_shsub : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_shsubr : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sli : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_sqabs : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_sqadd : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sqdmulh : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_sqdmulh_lane : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_sqneg : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_sqrdmlah : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_sqrdmlah_lane : AdvSIMD_3VectorArgIndexed_Intrinsic;
def int_aarch64_sve_sqrdmlsh : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_sqrdmlsh_lane : AdvSIMD_3VectorArgIndexed_Intrinsic;
def int_aarch64_sve_sqrdmulh : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_sqrdmulh_lane : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_sqrshl : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sqshl : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sqshlu : AdvSIMD_SVE_ShiftByImm_Intrinsic;
def int_aarch64_sve_sqsub : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sqsubr : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_srhadd : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sri : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_srshl : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_srshr : AdvSIMD_SVE_ShiftByImm_Intrinsic;
def int_aarch64_sve_srsra : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_ssra : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_suqadd : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uaba : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_uhadd : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uhsub : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uhsubr : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uqadd : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uqrshl : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uqshl : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uqsub : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uqsubr : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_urecpe : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_urhadd : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_urshl : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_urshr : AdvSIMD_SVE_ShiftByImm_Intrinsic;
def int_aarch64_sve_ursqrte : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_ursra : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_usqadd : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_usra : AdvSIMD_2VectorArgIndexed_Intrinsic;

//
// SVE2 - Widening DSP operations
//

def int_aarch64_sve_sabalb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sabalt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sabdlb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_sabdlt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_saddlb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_saddlt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_saddwb : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_saddwt : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_sshllb : SVE2_1VectorArg_Long_Intrinsic;
def int_aarch64_sve_sshllt : SVE2_1VectorArg_Long_Intrinsic;
def int_aarch64_sve_ssublb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_ssublt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_ssubwb : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_ssubwt : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_uabalb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_uabalt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_uabdlb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_uabdlt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_uaddlb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_uaddlt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_uaddwb : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_uaddwt : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_ushllb : SVE2_1VectorArg_Long_Intrinsic;
def int_aarch64_sve_ushllt : SVE2_1VectorArg_Long_Intrinsic;
def int_aarch64_sve_usublb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_usublt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_usubwb : SVE2_2VectorArg_Wide_Intrinsic;
def int_aarch64_sve_usubwt : SVE2_2VectorArg_Wide_Intrinsic;

//
// SVE2 - Non-widening pairwise arithmetic
//

def int_aarch64_sve_addp : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_faddp : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fmaxp : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fmaxnmp : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fminp : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fminnmp : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_smaxp : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sminp : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_umaxp : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uminp : AdvSIMD_Pred2VectorArg_Intrinsic;

//
// SVE2 - Widening pairwise arithmetic
//

def int_aarch64_sve_sadalp : SVE2_2VectorArg_Pred_Long_Intrinsic;
def int_aarch64_sve_uadalp : SVE2_2VectorArg_Pred_Long_Intrinsic;

//
// SVE2 - Uniform complex integer arithmetic
//

def int_aarch64_sve_cadd_x : AdvSIMD_SVE2_CADD_Intrinsic;
def int_aarch64_sve_sqcadd_x : AdvSIMD_SVE2_CADD_Intrinsic;
def int_aarch64_sve_cmla_x : AdvSIMD_SVE2_CMLA_Intrinsic;
def int_aarch64_sve_cmla_lane_x : AdvSIMD_SVE_CMLA_LANE_Intrinsic;
def int_aarch64_sve_sqrdcmlah_x : AdvSIMD_SVE2_CMLA_Intrinsic;
def int_aarch64_sve_sqrdcmlah_lane_x : AdvSIMD_SVE_CMLA_LANE_Intrinsic;

//
// SVE2 - Widening complex integer arithmetic
//

def int_aarch64_sve_saddlbt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_ssublbt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_ssubltb : SVE2_2VectorArg_Long_Intrinsic;

//
// SVE2 - Widening complex integer dot product
//

def int_aarch64_sve_cdot : AdvSIMD_SVE_DOT_Indexed_Intrinsic;
def int_aarch64_sve_cdot_lane : AdvSIMD_SVE_CDOT_LANE_Intrinsic;

//
// SVE2 - Floating-point widening multiply-accumulate
//

def int_aarch64_sve_fmlalb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_fmlalb_lane : SVE2_3VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_fmlalt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_fmlalt_lane : SVE2_3VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_fmlslb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_fmlslb_lane : SVE2_3VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_fmlslt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_fmlslt_lane : SVE2_3VectorArgIndexed_Long_Intrinsic;

//
// SVE2 - Floating-point integer binary logarithm
//

def int_aarch64_sve_flogb : AdvSIMD_SVE_LOGB_Intrinsic;

//
// SVE2 - Vector histogram count
//

def int_aarch64_sve_histcnt : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_histseg : AdvSIMD_2VectorArg_Intrinsic;

//
// SVE2 - Character match
//

def int_aarch64_sve_match : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_nmatch : AdvSIMD_SVE_Compare_Intrinsic;

//
// SVE2 - Unary narrowing operations
//

def int_aarch64_sve_sqxtnb : SVE2_1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_sqxtnt : SVE2_Merged1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_sqxtunb : SVE2_1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_sqxtunt : SVE2_Merged1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_uqxtnb : SVE2_1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_uqxtnt : SVE2_Merged1VectorArg_Narrowing_Intrinsic;

//
// SVE2 - Binary narrowing DSP operations
//
def int_aarch64_sve_addhnb : SVE2_2VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_addhnt : SVE2_Merged2VectorArg_Narrowing_Intrinsic;

def int_aarch64_sve_raddhnb : SVE2_2VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_raddhnt : SVE2_Merged2VectorArg_Narrowing_Intrinsic;

def int_aarch64_sve_subhnb : SVE2_2VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_subhnt : SVE2_Merged2VectorArg_Narrowing_Intrinsic;

def int_aarch64_sve_rsubhnb : SVE2_2VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_rsubhnt : SVE2_Merged2VectorArg_Narrowing_Intrinsic;

// Narrowing shift right
def int_aarch64_sve_shrnb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_shrnt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

def int_aarch64_sve_rshrnb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_rshrnt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

// Saturating shift right - signed input/output
def int_aarch64_sve_sqshrnb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_sqshrnt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

def int_aarch64_sve_sqrshrnb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_sqrshrnt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

// Saturating shift right - unsigned input/output
def int_aarch64_sve_uqshrnb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_uqshrnt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

def int_aarch64_sve_uqrshrnb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_uqrshrnt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;
// Saturating shift right - signed input, unsigned output
def int_aarch64_sve_sqshrunb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_sqshrunt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

def int_aarch64_sve_sqrshrunb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_sqrshrunt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

// SVE2 MLA LANE.
// The Indexed classes add an immediate operand selecting the lane of the
// second multiplicand; sq* records are the saturating-doubling variants.
def int_aarch64_sve_smlalb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_smlalt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_umlalb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_umlalt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_smlslb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_smlslt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_umlslb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_umlslt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_smullb_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_smullt_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_umullb_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_umullt_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_sqdmlalb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_sqdmlalt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_sqdmlslb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_sqdmlslt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_sqdmullb_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_sqdmullt_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;

// SVE2 MLA Unpredicated.
def int_aarch64_sve_smlalb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_smlalt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_umlalb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_umlalt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_smlslb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_smlslt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_umlslb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_umlslt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_smullb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_smullt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_umullb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_umullt : SVE2_2VectorArg_Long_Intrinsic;

// Saturating doubling multiply-accumulate / multiply variants.
def int_aarch64_sve_sqdmlalb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlalt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlslb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlslt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmullb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmullt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlalbt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlslbt : SVE2_3VectorArg_Long_Intrinsic;

// SVE2 ADDSUB Long Unpredicated.
def int_aarch64_sve_adclb : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_adclt : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_sbclb : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_sbclt : AdvSIMD_3VectorArg_Intrinsic;

//
// SVE2 - Polynomial arithmetic
//
def int_aarch64_sve_eorbt : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_eortb : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_pmullb_pair : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_pmullt_pair : AdvSIMD_2VectorArg_Intrinsic;

//
// SVE2 bitwise ternary operations.
//
def int_aarch64_sve_eor3 : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_bcax : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_bsl : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_bsl1n : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_bsl2n : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_nbsl : AdvSIMD_3VectorArg_Intrinsic;
// xar carries a rotate-amount immediate, hence the Indexed class.
def int_aarch64_sve_xar : AdvSIMD_2VectorArgIndexed_Intrinsic;

//
// SVE2 - Optional AES, SHA-3 and SM4
//
// These use fixed scalable-vector element types (16 x i8, 2 x i64,
// 4 x i32) rather than the polymorphic any-vector classes, and each is
// tied to its ACLE builtin via GCCBuiltin.

def int_aarch64_sve_aesd : GCCBuiltin<"__builtin_sve_svaesd_u8">,
                           DefaultAttrsIntrinsic<[llvm_nxv16i8_ty],
                                     [llvm_nxv16i8_ty, llvm_nxv16i8_ty],
                                     [IntrNoMem]>;
def int_aarch64_sve_aesimc : GCCBuiltin<"__builtin_sve_svaesimc_u8">,
                             DefaultAttrsIntrinsic<[llvm_nxv16i8_ty],
                                       [llvm_nxv16i8_ty],
                                       [IntrNoMem]>;
def int_aarch64_sve_aese : GCCBuiltin<"__builtin_sve_svaese_u8">,
                           DefaultAttrsIntrinsic<[llvm_nxv16i8_ty],
                                     [llvm_nxv16i8_ty, llvm_nxv16i8_ty],
                                     [IntrNoMem]>;
def int_aarch64_sve_aesmc : GCCBuiltin<"__builtin_sve_svaesmc_u8">,
                            DefaultAttrsIntrinsic<[llvm_nxv16i8_ty],
                                      [llvm_nxv16i8_ty],
                                      [IntrNoMem]>;
def int_aarch64_sve_rax1 : GCCBuiltin<"__builtin_sve_svrax1_u64">,
                           DefaultAttrsIntrinsic<[llvm_nxv2i64_ty],
                                     [llvm_nxv2i64_ty, llvm_nxv2i64_ty],
                                     [IntrNoMem]>;
def int_aarch64_sve_sm4e : GCCBuiltin<"__builtin_sve_svsm4e_u32">,
                           DefaultAttrsIntrinsic<[llvm_nxv4i32_ty],
                                     [llvm_nxv4i32_ty, llvm_nxv4i32_ty],
                                     [IntrNoMem]>;
def int_aarch64_sve_sm4ekey : GCCBuiltin<"__builtin_sve_svsm4ekey_u32">,
                              DefaultAttrsIntrinsic<[llvm_nxv4i32_ty],
                                        [llvm_nxv4i32_ty, llvm_nxv4i32_ty],
                                        [IntrNoMem]>;
//
// SVE2 - Extended table lookup/permute
//

def int_aarch64_sve_tbl2 : AdvSIMD_SVE2_TBX_Intrinsic;
def int_aarch64_sve_tbx : AdvSIMD_SVE2_TBX_Intrinsic;

//
// SVE2 - Optional bit permutation
//

def int_aarch64_sve_bdep_x : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_bext_x : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_bgrp_x : AdvSIMD_2VectorArg_Intrinsic;


//
// SVE ACLE: 7.3. INT8 matrix multiply extensions
//
def int_aarch64_sve_ummla : SVE_MatMul_Intrinsic;
def int_aarch64_sve_smmla : SVE_MatMul_Intrinsic;
def int_aarch64_sve_usmmla : SVE_MatMul_Intrinsic;

def int_aarch64_sve_usdot : AdvSIMD_SVE_DOT_Intrinsic;
def int_aarch64_sve_usdot_lane : AdvSIMD_SVE_DOT_Indexed_Intrinsic;
def int_aarch64_sve_sudot_lane : AdvSIMD_SVE_DOT_Indexed_Intrinsic;

//
// SVE ACLE: 7.4/5. FP64/FP32 matrix multiply extensions
//
def int_aarch64_sve_fmmla : AdvSIMD_3VectorArg_Intrinsic;

//
// SVE ACLE: 7.2. BFloat16 extensions
//

def int_aarch64_sve_bfdot : SVE_4Vec_BF16;
def int_aarch64_sve_bfmlalb : SVE_4Vec_BF16;
def int_aarch64_sve_bfmlalt : SVE_4Vec_BF16;

def int_aarch64_sve_bfmmla : SVE_4Vec_BF16;

def int_aarch64_sve_bfdot_lane : SVE_4Vec_BF16_Indexed;
def int_aarch64_sve_bfmlalb_lane : SVE_4Vec_BF16_Indexed;
def int_aarch64_sve_bfmlalt_lane : SVE_4Vec_BF16_Indexed;
} // end let TargetPrefix = "aarch64"

//
// SVE2 - Contiguous conflict detection
//
// NOTE(review): these defs sit outside the `let TargetPrefix` block closed
// above; presumably they pick up the "aarch64" prefix from the
// SVE2_CONFLICT_DETECT_Intrinsic class (declared within a TargetPrefix
// scope) — confirm before relying on this placement.

def int_aarch64_sve_whilerw_b : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilerw_h : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilerw_s : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilerw_d : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilewr_b : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilewr_h : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilewr_s : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilewr_d : SVE2_CONFLICT_DETECT_Intrinsic;