//===- IntrinsicsAARCH64.td - Defines AARCH64 intrinsics ---*- tablegen -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines all of the AARCH64-specific intrinsics.
//
//===----------------------------------------------------------------------===//

let TargetPrefix = "aarch64" in {

// Exclusive load intrinsics (value is returned in an i64 regardless of the
// pointee width).
def int_aarch64_ldxr : Intrinsic<[llvm_i64_ty], [llvm_anyptr_ty],
                                 [IntrNoFree, IntrWillReturn]>;
def int_aarch64_ldaxr : Intrinsic<[llvm_i64_ty], [llvm_anyptr_ty],
                                  [IntrNoFree, IntrWillReturn]>;
// Exclusive store intrinsics (value to store is passed as an i64; an i32
// status result is returned).
def int_aarch64_stxr : Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_anyptr_ty],
                                 [IntrNoFree, IntrWillReturn]>;
def int_aarch64_stlxr : Intrinsic<[llvm_i32_ty], [llvm_i64_ty, llvm_anyptr_ty],
                                  [IntrNoFree, IntrWillReturn]>;

// Exclusive load/store-pair intrinsics (two i64 halves).
def int_aarch64_ldxp : Intrinsic<[llvm_i64_ty, llvm_i64_ty], [llvm_ptr_ty],
                                 [IntrNoFree, IntrWillReturn]>;
def int_aarch64_ldaxp : Intrinsic<[llvm_i64_ty, llvm_i64_ty], [llvm_ptr_ty],
                                  [IntrNoFree, IntrWillReturn]>;
def int_aarch64_stxp : Intrinsic<[llvm_i32_ty],
                                 [llvm_i64_ty, llvm_i64_ty, llvm_ptr_ty],
                                 [IntrNoFree, IntrWillReturn]>;
def int_aarch64_stlxp : Intrinsic<[llvm_i32_ty],
                                  [llvm_i64_ty, llvm_i64_ty, llvm_ptr_ty],
                                  [IntrNoFree, IntrWillReturn]>;

def int_aarch64_clrex : Intrinsic<[]>;

def int_aarch64_sdiv : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMMatchType<0>,
                                LLVMMatchType<0>], [IntrNoMem]>;
def int_aarch64_udiv : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMMatchType<0>,
                                LLVMMatchType<0>], [IntrNoMem]>;

// FJCVTZS: double -> i32 conversion.
def int_aarch64_fjcvtzs : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;

// Count leading sign bits (32- and 64-bit inputs, i32 result).
def int_aarch64_cls: DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem]>;
def int_aarch64_cls64: DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem]>;

def int_aarch64_frint32z
    : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
                            [ IntrNoMem ]>;
def int_aarch64_frint64z
    : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
                            [ IntrNoMem ]>;
def int_aarch64_frint32x
    : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
                            [ IntrNoMem ]>;
def int_aarch64_frint64x
    : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
                            [ IntrNoMem ]>;

//===----------------------------------------------------------------------===//
// HINT

def int_aarch64_hint : DefaultAttrsIntrinsic<[], [llvm_i32_ty]>;

//===----------------------------------------------------------------------===//
// Data Barrier Instructions

def int_aarch64_dmb : GCCBuiltin<"__builtin_arm_dmb">, MSBuiltin<"__dmb">,
                      Intrinsic<[], [llvm_i32_ty], [IntrNoFree, IntrWillReturn]>;
def int_aarch64_dsb : GCCBuiltin<"__builtin_arm_dsb">, MSBuiltin<"__dsb">,
                      Intrinsic<[], [llvm_i32_ty], [IntrNoFree, IntrWillReturn]>;
def int_aarch64_isb : GCCBuiltin<"__builtin_arm_isb">, MSBuiltin<"__isb">,
                      Intrinsic<[], [llvm_i32_ty], [IntrNoFree, IntrWillReturn]>;

// A space-consuming intrinsic primarily for testing block and jump table
// placements. The first argument is the number of bytes this "instruction"
// takes up, the second and return value are essentially chains, used to force
// ordering during ISel.
def int_aarch64_space : DefaultAttrsIntrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i64_ty], []>;

}

//===----------------------------------------------------------------------===//
// Advanced SIMD (NEON)

let TargetPrefix = "aarch64" in {  // All intrinsics start with "llvm.aarch64.".
  // Helper classes describing the common type shapes of AdvSIMD intrinsics.
  // The <N> suffixes (1/2/3 args; Int/Float/Vector; Long/Wide/Narrow) name the
  // operand pattern; the LLVM*Type<0> constraints tie operand types to the
  // overloaded result type.
  class AdvSIMD_2Scalar_Float_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
                            [IntrNoMem]>;

  class AdvSIMD_FPToIntRounding_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty], [IntrNoMem]>;

  class AdvSIMD_1IntArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMMatchType<0>], [IntrNoMem]>;
  class AdvSIMD_1FloatArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMMatchType<0>], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Expand_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMTruncatedType<0>], [IntrNoMem]>;
  class AdvSIMD_1IntArg_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyint_ty], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMExtendedType<0>], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Int_Across_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyvector_ty], [IntrNoMem]>;
  class AdvSIMD_1VectorArg_Float_Across_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [llvm_anyvector_ty], [IntrNoMem]>;

  class AdvSIMD_2IntArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_2FloatArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMMatchType<0>, LLVMMatchType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Compare_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty, LLVMMatchType<1>],
                            [IntrNoMem]>;
  class AdvSIMD_2Arg_FloatCompare_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty, LLVMMatchType<1>],
                            [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMTruncatedType<0>, LLVMTruncatedType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Wide_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, LLVMTruncatedType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMExtendedType<0>, LLVMExtendedType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_2Arg_Scalar_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty],
                            [LLVMExtendedType<0>, llvm_i32_ty],
                            [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Scalar_Expand_BySize_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [llvm_anyvector_ty],
                            [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Scalar_Wide_BySize_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMTruncatedType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Scalar_Wide_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMTruncatedType<0>, llvm_i32_ty],
                            [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Tied_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMHalfElementsVectorType<0>, llvm_anyvector_ty],
                            [IntrNoMem]>;
  class AdvSIMD_2VectorArg_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty],
                            [LLVMMatchType<0>, llvm_anyint_ty, llvm_i32_ty],
                            [IntrNoMem]>;

  class AdvSIMD_3VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_3VectorArg_Scalar_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, LLVMMatchType<0>, llvm_i32_ty],
                            [IntrNoMem]>;
  class AdvSIMD_3VectorArg_Tied_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMHalfElementsVectorType<0>, llvm_anyvector_ty,
                             LLVMMatchType<1>], [IntrNoMem]>;
  class AdvSIMD_3VectorArg_Scalar_Tied_Narrow_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMHalfElementsVectorType<0>, llvm_anyvector_ty, llvm_i32_ty],
                            [IntrNoMem]>;
  // Fixed-point <-> floating-point conversions; the trailing i32 is the
  // number of fractional bits.
  class AdvSIMD_CvtFxToFP_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [llvm_anyint_ty, llvm_i32_ty],
                            [IntrNoMem]>;
  class AdvSIMD_CvtFPToFx_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyint_ty], [llvm_anyfloat_ty, llvm_i32_ty],
                            [IntrNoMem]>;

  class AdvSIMD_1Arg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_any_ty], [LLVMMatchType<0>], [IntrNoMem]>;

  class AdvSIMD_Dot_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
                            [IntrNoMem]>;

  class AdvSIMD_FP16FML_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
                            [IntrNoMem]>;

  class AdvSIMD_MatMul_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
                            [IntrNoMem]>;

  class AdvSIMD_FML_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>],
                            [IntrNoMem]>;

  class AdvSIMD_BF16FML_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4f32_ty],
                            [llvm_v4f32_ty, llvm_v8bf16_ty, llvm_v8bf16_ty],
                            [IntrNoMem]>;
}

// Arithmetic ops

let TargetPrefix = "aarch64", IntrProperties = [IntrNoMem] in {
  // Vector Add Across Lanes
  def int_aarch64_neon_saddv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_uaddv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_faddv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;

  // Vector Long Add Across Lanes
  def int_aarch64_neon_saddlv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_uaddlv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;

  // Vector Halving Add
  def int_aarch64_neon_shadd : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_uhadd : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Rounding Halving Add
  def int_aarch64_neon_srhadd : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_urhadd : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Saturating Add
  def int_aarch64_neon_sqadd : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_suqadd : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_usqadd : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_uqadd : AdvSIMD_2IntArg_Intrinsic;

  // Vector Add High-Half
  // FIXME: this is a legacy intrinsic for aarch64_simd.h. Remove it when that
  // header is no longer supported.
  def int_aarch64_neon_addhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;

  // Vector Rounding Add High-Half
  def int_aarch64_neon_raddhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;

  // Vector Saturating Doubling Multiply High
  def int_aarch64_neon_sqdmulh : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_sqdmulh_lane : AdvSIMD_2VectorArg_Lane_Intrinsic;
  def int_aarch64_neon_sqdmulh_laneq : AdvSIMD_2VectorArg_Lane_Intrinsic;

  // Vector Saturating Rounding Doubling Multiply High
  def int_aarch64_neon_sqrdmulh : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_sqrdmulh_lane : AdvSIMD_2VectorArg_Lane_Intrinsic;
  def int_aarch64_neon_sqrdmulh_laneq : AdvSIMD_2VectorArg_Lane_Intrinsic;

  // Vector Polynomial Multiply
  def int_aarch64_neon_pmul : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Long Multiply
  def int_aarch64_neon_smull : AdvSIMD_2VectorArg_Long_Intrinsic;
  def int_aarch64_neon_umull : AdvSIMD_2VectorArg_Long_Intrinsic;
  def int_aarch64_neon_pmull : AdvSIMD_2VectorArg_Long_Intrinsic;

  // 64-bit polynomial multiply really returns an i128, which is not legal. Fake
  // it with a v16i8.
  def int_aarch64_neon_pmull64 :
        DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;

  // Vector Extending Multiply
  def int_aarch64_neon_fmulx : AdvSIMD_2FloatArg_Intrinsic {
    // Override the let-scoped default to additionally mark fmulx commutative.
    let IntrProperties = [IntrNoMem, Commutative];
  }

  // Vector Saturating Doubling Long Multiply
  def int_aarch64_neon_sqdmull : AdvSIMD_2VectorArg_Long_Intrinsic;
  def int_aarch64_neon_sqdmulls_scalar
    : DefaultAttrsIntrinsic<[llvm_i64_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;

  // Vector Halving Subtract
  def int_aarch64_neon_shsub : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_uhsub : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Saturating Subtract
  def int_aarch64_neon_sqsub : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_uqsub : AdvSIMD_2IntArg_Intrinsic;

  // Vector Subtract High-Half
  // FIXME: this is a legacy intrinsic for aarch64_simd.h. Remove it when that
  // header is no longer supported.
  def int_aarch64_neon_subhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;

  // Vector Rounding Subtract High-Half
  def int_aarch64_neon_rsubhn : AdvSIMD_2VectorArg_Narrow_Intrinsic;

  // Vector Compare Absolute Greater-than-or-equal
  def int_aarch64_neon_facge : AdvSIMD_2Arg_FloatCompare_Intrinsic;

  // Vector Compare Absolute Greater-than
  def int_aarch64_neon_facgt : AdvSIMD_2Arg_FloatCompare_Intrinsic;

  // Vector Absolute Difference
  def int_aarch64_neon_sabd : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_uabd : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fabd : AdvSIMD_2VectorArg_Intrinsic;

  // Scalar Absolute Difference
  def int_aarch64_sisd_fabd : AdvSIMD_2Scalar_Float_Intrinsic;

  // Vector Max
  def int_aarch64_neon_smax : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_umax : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fmax : AdvSIMD_2FloatArg_Intrinsic;
  def int_aarch64_neon_fmaxnmp : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Max Across Lanes
  def int_aarch64_neon_smaxv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_umaxv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_fmaxv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;
  def int_aarch64_neon_fmaxnmv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;

  // Vector Min
  def int_aarch64_neon_smin : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_umin : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fmin : AdvSIMD_2FloatArg_Intrinsic;
  def int_aarch64_neon_fminnmp : AdvSIMD_2VectorArg_Intrinsic;

  // Vector Min/Max Number
  def int_aarch64_neon_fminnm : AdvSIMD_2FloatArg_Intrinsic;
  def int_aarch64_neon_fmaxnm : AdvSIMD_2FloatArg_Intrinsic;

  // Vector Min Across Lanes
  def int_aarch64_neon_sminv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_uminv : AdvSIMD_1VectorArg_Int_Across_Intrinsic;
  def int_aarch64_neon_fminv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;
  def int_aarch64_neon_fminnmv : AdvSIMD_1VectorArg_Float_Across_Intrinsic;

  // Pairwise Add
  def int_aarch64_neon_addp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_faddp : AdvSIMD_2VectorArg_Intrinsic;

  // Long Pairwise Add
  // FIXME: In theory, we shouldn't need intrinsics for saddlp or
  // uaddlp, but tblgen's type inference currently can't handle the
  // pattern fragments this ends up generating.
  def int_aarch64_neon_saddlp : AdvSIMD_1VectorArg_Expand_Intrinsic;
  def int_aarch64_neon_uaddlp : AdvSIMD_1VectorArg_Expand_Intrinsic;

  // Folding Maximum
  def int_aarch64_neon_smaxp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_umaxp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fmaxp : AdvSIMD_2VectorArg_Intrinsic;

  // Folding Minimum
  def int_aarch64_neon_sminp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_uminp : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_fminp : AdvSIMD_2VectorArg_Intrinsic;

  // Reciprocal Estimate/Step
  def int_aarch64_neon_frecps : AdvSIMD_2FloatArg_Intrinsic;
  def int_aarch64_neon_frsqrts : AdvSIMD_2FloatArg_Intrinsic;

  // Reciprocal Exponent
  def int_aarch64_neon_frecpx : AdvSIMD_1FloatArg_Intrinsic;

  // Vector Saturating Shift Left
  def int_aarch64_neon_sqshl : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_uqshl : AdvSIMD_2IntArg_Intrinsic;

  // Vector Rounding Shift Left
  def int_aarch64_neon_srshl : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_urshl : AdvSIMD_2IntArg_Intrinsic;

  // Vector Saturating Rounding Shift Left
  def int_aarch64_neon_sqrshl : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_uqrshl : AdvSIMD_2IntArg_Intrinsic;

  // Vector Signed->Unsigned Shift Left by Constant
  def int_aarch64_neon_sqshlu : AdvSIMD_2IntArg_Intrinsic;

  // Vector Signed->Unsigned Narrowing Saturating Shift Right by Constant
  def int_aarch64_neon_sqshrun : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Signed->Unsigned Rounding Narrowing Saturating Shift Right by Const
  def int_aarch64_neon_sqrshrun : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Narrowing Shift Right by Constant
  def int_aarch64_neon_sqshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
  def int_aarch64_neon_uqshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Rounding Narrowing Shift Right by Constant
  def int_aarch64_neon_rshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Rounding Narrowing Saturating Shift Right by Constant
  def int_aarch64_neon_sqrshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;
  def int_aarch64_neon_uqrshrn : AdvSIMD_2Arg_Scalar_Narrow_Intrinsic;

  // Vector Shift Left
  def int_aarch64_neon_sshl : AdvSIMD_2IntArg_Intrinsic;
  def int_aarch64_neon_ushl : AdvSIMD_2IntArg_Intrinsic;

  // Vector Widening Shift Left by Constant
  def int_aarch64_neon_shll : AdvSIMD_2VectorArg_Scalar_Wide_BySize_Intrinsic;
  def int_aarch64_neon_sshll : AdvSIMD_2VectorArg_Scalar_Wide_Intrinsic;
  def int_aarch64_neon_ushll : AdvSIMD_2VectorArg_Scalar_Wide_Intrinsic;

  // Vector Shift Right by Constant and Insert
  def int_aarch64_neon_vsri : AdvSIMD_3VectorArg_Scalar_Intrinsic;

  // Vector Shift Left by Constant and Insert
  def int_aarch64_neon_vsli : AdvSIMD_3VectorArg_Scalar_Intrinsic;

  // Vector Saturating Narrow
  def int_aarch64_neon_scalar_sqxtn: AdvSIMD_1IntArg_Narrow_Intrinsic;
  def int_aarch64_neon_scalar_uqxtn : AdvSIMD_1IntArg_Narrow_Intrinsic;
  def int_aarch64_neon_sqxtn : AdvSIMD_1VectorArg_Narrow_Intrinsic;
  def int_aarch64_neon_uqxtn : AdvSIMD_1VectorArg_Narrow_Intrinsic;

  // Vector Saturating Extract and Unsigned Narrow
  def int_aarch64_neon_scalar_sqxtun : AdvSIMD_1IntArg_Narrow_Intrinsic;
  def int_aarch64_neon_sqxtun : AdvSIMD_1VectorArg_Narrow_Intrinsic;

  // Vector Absolute Value
  def int_aarch64_neon_abs : AdvSIMD_1Arg_Intrinsic;

  // Vector Saturating Absolute Value
  def int_aarch64_neon_sqabs : AdvSIMD_1IntArg_Intrinsic;

  // Vector Saturating Negation
  def int_aarch64_neon_sqneg : AdvSIMD_1IntArg_Intrinsic;

  // Vector Count Leading Sign Bits
  def int_aarch64_neon_cls : AdvSIMD_1VectorArg_Intrinsic;

  // Vector Reciprocal Estimate
  def int_aarch64_neon_urecpe : AdvSIMD_1VectorArg_Intrinsic;
  def int_aarch64_neon_frecpe : AdvSIMD_1FloatArg_Intrinsic;

  // Vector Square Root Estimate
  def int_aarch64_neon_ursqrte : AdvSIMD_1VectorArg_Intrinsic;
  def int_aarch64_neon_frsqrte : AdvSIMD_1FloatArg_Intrinsic;

  // Vector Conversions Between Half-Precision and Single-Precision.
  def int_aarch64_neon_vcvtfp2hf
    : DefaultAttrsIntrinsic<[llvm_v4i16_ty], [llvm_v4f32_ty], [IntrNoMem]>;
  def int_aarch64_neon_vcvthf2fp
    : DefaultAttrsIntrinsic<[llvm_v4f32_ty], [llvm_v4i16_ty], [IntrNoMem]>;

  // Vector Conversions Between Floating-point and Fixed-point.
  def int_aarch64_neon_vcvtfp2fxs : AdvSIMD_CvtFPToFx_Intrinsic;
  def int_aarch64_neon_vcvtfp2fxu : AdvSIMD_CvtFPToFx_Intrinsic;
  def int_aarch64_neon_vcvtfxs2fp : AdvSIMD_CvtFxToFP_Intrinsic;
  def int_aarch64_neon_vcvtfxu2fp : AdvSIMD_CvtFxToFP_Intrinsic;

  // Vector FP->Int Conversions
  def int_aarch64_neon_fcvtas : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtau : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtms : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtmu : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtns : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtnu : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtps : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtpu : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtzs : AdvSIMD_FPToIntRounding_Intrinsic;
  def int_aarch64_neon_fcvtzu : AdvSIMD_FPToIntRounding_Intrinsic;

  // v8.5-A Vector FP Rounding
  def int_aarch64_neon_frint32x : AdvSIMD_1FloatArg_Intrinsic;
  def int_aarch64_neon_frint32z : AdvSIMD_1FloatArg_Intrinsic;
  def int_aarch64_neon_frint64x : AdvSIMD_1FloatArg_Intrinsic;
  def int_aarch64_neon_frint64z : AdvSIMD_1FloatArg_Intrinsic;

  // Scalar FP->Int conversions

  // Vector FP Inexact Narrowing
  def int_aarch64_neon_fcvtxn : AdvSIMD_1VectorArg_Expand_Intrinsic;

  // Scalar FP Inexact Narrowing
  def int_aarch64_sisd_fcvtxn : DefaultAttrsIntrinsic<[llvm_float_ty], [llvm_double_ty],
                                                      [IntrNoMem]>;

  // v8.2-A Dot Product
  def int_aarch64_neon_udot : AdvSIMD_Dot_Intrinsic;
  def int_aarch64_neon_sdot : AdvSIMD_Dot_Intrinsic;

  // v8.6-A Matrix Multiply Intrinsics
  def int_aarch64_neon_ummla : AdvSIMD_MatMul_Intrinsic;
  def int_aarch64_neon_smmla : AdvSIMD_MatMul_Intrinsic;
  def int_aarch64_neon_usmmla : AdvSIMD_MatMul_Intrinsic;
  def int_aarch64_neon_usdot : AdvSIMD_Dot_Intrinsic;
  def int_aarch64_neon_bfdot : AdvSIMD_Dot_Intrinsic;
  def int_aarch64_neon_bfmmla
    : DefaultAttrsIntrinsic<[llvm_v4f32_ty],
                            [llvm_v4f32_ty, llvm_v8bf16_ty, llvm_v8bf16_ty],
                            [IntrNoMem]>;
  def int_aarch64_neon_bfmlalb : AdvSIMD_BF16FML_Intrinsic;
  def int_aarch64_neon_bfmlalt : AdvSIMD_BF16FML_Intrinsic;


  // v8.6-A Bfloat Intrinsics
  def int_aarch64_neon_bfcvt
    : DefaultAttrsIntrinsic<[llvm_bfloat_ty], [llvm_float_ty], [IntrNoMem]>;
  def int_aarch64_neon_bfcvtn
    : DefaultAttrsIntrinsic<[llvm_v8bf16_ty], [llvm_v4f32_ty], [IntrNoMem]>;
  def int_aarch64_neon_bfcvtn2
    : DefaultAttrsIntrinsic<[llvm_v8bf16_ty],
                            [llvm_v8bf16_ty, llvm_v4f32_ty],
                            [IntrNoMem]>;

  // v8.2-A FP16 Fused Multiply-Add Long
  def int_aarch64_neon_fmlal : AdvSIMD_FP16FML_Intrinsic;
  def int_aarch64_neon_fmlsl : AdvSIMD_FP16FML_Intrinsic;
  def int_aarch64_neon_fmlal2 : AdvSIMD_FP16FML_Intrinsic;
  def int_aarch64_neon_fmlsl2 : AdvSIMD_FP16FML_Intrinsic;

  // v8.3-A Floating-point complex add
  def int_aarch64_neon_vcadd_rot90 : AdvSIMD_2VectorArg_Intrinsic;
  def int_aarch64_neon_vcadd_rot270 : AdvSIMD_2VectorArg_Intrinsic;

  // v8.3-A Floating-point complex multiply-accumulate (by rotation).
  def int_aarch64_neon_vcmla_rot0 : AdvSIMD_3VectorArg_Intrinsic;
  def int_aarch64_neon_vcmla_rot90 : AdvSIMD_3VectorArg_Intrinsic;
  def int_aarch64_neon_vcmla_rot180 : AdvSIMD_3VectorArg_Intrinsic;
  def int_aarch64_neon_vcmla_rot270 : AdvSIMD_3VectorArg_Intrinsic;
}

let TargetPrefix = "aarch64" in { // All intrinsics start with "llvm.aarch64.".
  class AdvSIMD_2Vector2Index_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [llvm_anyvector_ty, llvm_i64_ty, LLVMMatchType<0>, llvm_i64_ty],
                            [IntrNoMem]>;
}

// Vector element to element moves
def int_aarch64_neon_vcopy_lane: AdvSIMD_2Vector2Index_Intrinsic;

let TargetPrefix = "aarch64" in { // All intrinsics start with "llvm.aarch64.".
  // Structured load/store helper classes. The NoCapture<ArgIndex<N>> index in
  // each class points at that signature's pointer operand.
  class AdvSIMD_1Vec_Load_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMAnyPointerType<LLVMMatchType<0>>],
                            [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_1Vec_Store_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, llvm_i64_ty, llvm_anyptr_ty],
                            [IntrArgMemOnly, NoCapture<ArgIndex<2>>]>;

  class AdvSIMD_2Vec_Load_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, llvm_anyvector_ty],
                            [LLVMAnyPointerType<LLVMMatchType<0>>],
                            [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_2Vec_Load_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>],
                            [LLVMMatchType<0>, llvm_anyvector_ty,
                             llvm_i64_ty, llvm_anyptr_ty],
                            [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_2Vec_Store_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                                 LLVMAnyPointerType<LLVMMatchType<0>>],
                            [IntrArgMemOnly, NoCapture<ArgIndex<2>>]>;
  class AdvSIMD_2Vec_Store_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                                 llvm_i64_ty, llvm_anyptr_ty],
                            [IntrArgMemOnly, NoCapture<ArgIndex<3>>]>;

  class AdvSIMD_3Vec_Load_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>, llvm_anyvector_ty],
                            [LLVMAnyPointerType<LLVMMatchType<0>>],
                            [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_3Vec_Load_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
                            [LLVMMatchType<0>, LLVMMatchType<0>, llvm_anyvector_ty,
                             llvm_i64_ty, llvm_anyptr_ty],
                            [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_3Vec_Store_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                                 LLVMMatchType<0>, LLVMAnyPointerType<LLVMMatchType<0>>],
                            [IntrArgMemOnly, NoCapture<ArgIndex<3>>]>;
  class AdvSIMD_3Vec_Store_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty,
                                 LLVMMatchType<0>, LLVMMatchType<0>,
                                 llvm_i64_ty, llvm_anyptr_ty],
                            [IntrArgMemOnly, NoCapture<ArgIndex<4>>]>;

  class AdvSIMD_4Vec_Load_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>,
                             LLVMMatchType<0>, llvm_anyvector_ty],
                            [LLVMAnyPointerType<LLVMMatchType<0>>],
                            [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_4Vec_Load_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMMatchType<0>, LLVMMatchType<0>,
                             LLVMMatchType<0>, LLVMMatchType<0>],
                            [LLVMMatchType<0>, LLVMMatchType<0>,
                             LLVMMatchType<0>, llvm_anyvector_ty,
                             llvm_i64_ty, llvm_anyptr_ty],
                            [IntrReadMem, IntrArgMemOnly]>;
  class AdvSIMD_4Vec_Store_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                                 LLVMMatchType<0>, LLVMMatchType<0>,
                                 LLVMAnyPointerType<LLVMMatchType<0>>],
                            [IntrArgMemOnly, NoCapture<ArgIndex<4>>]>;
  class AdvSIMD_4Vec_Store_Lane_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, LLVMMatchType<0>,
                                 LLVMMatchType<0>, LLVMMatchType<0>,
                                 llvm_i64_ty, llvm_anyptr_ty],
                            [IntrArgMemOnly, NoCapture<ArgIndex<5>>]>;
}

// Memory ops

def int_aarch64_neon_ld1x2 : AdvSIMD_2Vec_Load_Intrinsic;
def int_aarch64_neon_ld1x3 : AdvSIMD_3Vec_Load_Intrinsic;
def int_aarch64_neon_ld1x4 : AdvSIMD_4Vec_Load_Intrinsic;

def int_aarch64_neon_st1x2 : AdvSIMD_2Vec_Store_Intrinsic;
def int_aarch64_neon_st1x3 : AdvSIMD_3Vec_Store_Intrinsic;
def int_aarch64_neon_st1x4 : AdvSIMD_4Vec_Store_Intrinsic;

def int_aarch64_neon_ld2 : AdvSIMD_2Vec_Load_Intrinsic;
def int_aarch64_neon_ld3 : AdvSIMD_3Vec_Load_Intrinsic;
def int_aarch64_neon_ld4 : AdvSIMD_4Vec_Load_Intrinsic;

def int_aarch64_neon_ld2lane : AdvSIMD_2Vec_Load_Lane_Intrinsic;
def int_aarch64_neon_ld3lane : AdvSIMD_3Vec_Load_Lane_Intrinsic;
def int_aarch64_neon_ld4lane : AdvSIMD_4Vec_Load_Lane_Intrinsic;

def int_aarch64_neon_ld2r : AdvSIMD_2Vec_Load_Intrinsic;
def int_aarch64_neon_ld3r : AdvSIMD_3Vec_Load_Intrinsic;
def int_aarch64_neon_ld4r : AdvSIMD_4Vec_Load_Intrinsic;

def int_aarch64_neon_st2 : AdvSIMD_2Vec_Store_Intrinsic;
def int_aarch64_neon_st3 : AdvSIMD_3Vec_Store_Intrinsic;
def int_aarch64_neon_st4 : AdvSIMD_4Vec_Store_Intrinsic;

def int_aarch64_neon_st2lane : AdvSIMD_2Vec_Store_Lane_Intrinsic;
def int_aarch64_neon_st3lane : AdvSIMD_3Vec_Store_Lane_Intrinsic;
def int_aarch64_neon_st4lane : AdvSIMD_4Vec_Store_Lane_Intrinsic;

let TargetPrefix = "aarch64" in { // All intrinsics start with "llvm.aarch64.".
  // Table lookup (tbl) and table lookup extension (tbx) helper classes; the
  // table registers are always v16i8.
  class AdvSIMD_Tbl1_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_v16i8_ty, LLVMMatchType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_Tbl2_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [llvm_v16i8_ty, llvm_v16i8_ty, LLVMMatchType<0>], [IntrNoMem]>;
  class AdvSIMD_Tbl3_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty,
                             LLVMMatchType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_Tbl4_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty,
                             LLVMMatchType<0>],
                            [IntrNoMem]>;

  class AdvSIMD_Tbx1_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, llvm_v16i8_ty, LLVMMatchType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_Tbx2_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, llvm_v16i8_ty, llvm_v16i8_ty,
                             LLVMMatchType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_Tbx3_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, llvm_v16i8_ty, llvm_v16i8_ty,
                             llvm_v16i8_ty, LLVMMatchType<0>],
                            [IntrNoMem]>;
  class AdvSIMD_Tbx4_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, llvm_v16i8_ty, llvm_v16i8_ty,
                             llvm_v16i8_ty, llvm_v16i8_ty, LLVMMatchType<0>],
                            [IntrNoMem]>;
}
def int_aarch64_neon_tbl1 : AdvSIMD_Tbl1_Intrinsic;
def int_aarch64_neon_tbl2 : AdvSIMD_Tbl2_Intrinsic;
def int_aarch64_neon_tbl3 : AdvSIMD_Tbl3_Intrinsic;
def int_aarch64_neon_tbl4 : AdvSIMD_Tbl4_Intrinsic;

def int_aarch64_neon_tbx1 : AdvSIMD_Tbx1_Intrinsic;
def int_aarch64_neon_tbx2 : AdvSIMD_Tbx2_Intrinsic;
def int_aarch64_neon_tbx3 : AdvSIMD_Tbx3_Intrinsic;
def int_aarch64_neon_tbx4 : AdvSIMD_Tbx4_Intrinsic;

let TargetPrefix = "aarch64" in {
  // FPCR/RNDR accessors: modeled as IntrNoMem + IntrHasSideEffects so they
  // read/write system state without being treated as ordinary memory accesses.
  class FPCR_Get_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i64_ty], [], [IntrNoMem, IntrHasSideEffects]>;
  class FPCR_Set_Intrinsic
    : DefaultAttrsIntrinsic<[], [llvm_i64_ty], [IntrNoMem, IntrHasSideEffects]>;
  class RNDR_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i64_ty, llvm_i1_ty], [], [IntrNoMem, IntrHasSideEffects]>;
}

// FPCR
def int_aarch64_get_fpcr : FPCR_Get_Intrinsic;
def int_aarch64_set_fpcr : FPCR_Set_Intrinsic;

// Armv8.5-A Random number generation intrinsics
def int_aarch64_rndr : RNDR_Intrinsic;
def int_aarch64_rndrrs : RNDR_Intrinsic;

let TargetPrefix = "aarch64" in {
  class Crypto_AES_DataKey_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;

  class Crypto_AES_Data_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;

  // SHA intrinsic taking 5 words of the hash (v4i32, i32) and 4 of the schedule
  // (v4i32).
  class Crypto_SHA_5Hash4Schedule_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty, llvm_v4i32_ty],
                            [IntrNoMem]>;

  // SHA intrinsic taking 1 word of the hash (i32).
  class Crypto_SHA_1Hash_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem]>;

  // SHA intrinsic taking 8 words of the schedule
  class Crypto_SHA_8Schedule_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;

  // SHA intrinsic taking 12 words of the schedule
  class Crypto_SHA_12Schedule_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
                [IntrNoMem]>;

  // SHA intrinsic taking 8 words of the hash and 4 of the schedule.
  class Crypto_SHA_8Hash4Schedule_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
                [IntrNoMem]>;

  // SHA512 intrinsic taking 2 arguments
  class Crypto_SHA512_2Arg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;

  // SHA512 intrinsic taking 3 arguments
  class Crypto_SHA512_3Arg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
                [IntrNoMem]>;

  // SHA3 intrinsics taking 3 arguments
  class Crypto_SHA3_3Arg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
               [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
               [IntrNoMem]>;

  // SHA3 intrinsic taking 2 arguments
  class Crypto_SHA3_2Arg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
               [IntrNoMem]>;

  // SHA3 intrinsic taking 2 vector arguments and 1 immediate
  class Crypto_SHA3_2ArgImm_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i64_ty],
               [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  // NOTE(review): the SM3/SM4 classes below derive from plain Intrinsic while
  // every neighbouring class uses DefaultAttrsIntrinsic — confirm whether the
  // missing default attributes are intentional or an oversight.
  class Crypto_SM3_3Vector_Intrinsic
    : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
                [IntrNoMem]>;

  class Crypto_SM3_3VectorIndexed_Intrinsic
    : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty, llvm_i64_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  class Crypto_SM4_2Vector_Intrinsic
    : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
}

// AES
def int_aarch64_crypto_aese   : Crypto_AES_DataKey_Intrinsic;
def int_aarch64_crypto_aesd   : Crypto_AES_DataKey_Intrinsic;
def int_aarch64_crypto_aesmc  : Crypto_AES_Data_Intrinsic;
def int_aarch64_crypto_aesimc : Crypto_AES_Data_Intrinsic;

// SHA1
def int_aarch64_crypto_sha1c  : Crypto_SHA_5Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha1p  : Crypto_SHA_5Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha1m  : Crypto_SHA_5Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha1h  : Crypto_SHA_1Hash_Intrinsic;

def int_aarch64_crypto_sha1su0 : Crypto_SHA_12Schedule_Intrinsic;
def int_aarch64_crypto_sha1su1 : Crypto_SHA_8Schedule_Intrinsic;

// SHA256
def int_aarch64_crypto_sha256h   : Crypto_SHA_8Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha256h2  : Crypto_SHA_8Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha256su0 : Crypto_SHA_8Schedule_Intrinsic;
def int_aarch64_crypto_sha256su1 : Crypto_SHA_12Schedule_Intrinsic;

// SHA3
def int_aarch64_crypto_eor3s : Crypto_SHA3_3Arg_Intrinsic;
def int_aarch64_crypto_eor3u : Crypto_SHA3_3Arg_Intrinsic;
def int_aarch64_crypto_bcaxs : Crypto_SHA3_3Arg_Intrinsic;
def int_aarch64_crypto_bcaxu : Crypto_SHA3_3Arg_Intrinsic;
def int_aarch64_crypto_rax1  : Crypto_SHA3_2Arg_Intrinsic;
def int_aarch64_crypto_xar   : Crypto_SHA3_2ArgImm_Intrinsic;

// SHA512
def int_aarch64_crypto_sha512h   : Crypto_SHA512_3Arg_Intrinsic;
def int_aarch64_crypto_sha512h2  : Crypto_SHA512_3Arg_Intrinsic;
def int_aarch64_crypto_sha512su0 : Crypto_SHA512_2Arg_Intrinsic;
def int_aarch64_crypto_sha512su1 : Crypto_SHA512_3Arg_Intrinsic;

// SM3 & SM4
def int_aarch64_crypto_sm3partw1 : Crypto_SM3_3Vector_Intrinsic;
def int_aarch64_crypto_sm3partw2 : Crypto_SM3_3Vector_Intrinsic;
def int_aarch64_crypto_sm3ss1    : Crypto_SM3_3Vector_Intrinsic;
def int_aarch64_crypto_sm3tt1a   : Crypto_SM3_3VectorIndexed_Intrinsic;
def int_aarch64_crypto_sm3tt1b   : Crypto_SM3_3VectorIndexed_Intrinsic;
def int_aarch64_crypto_sm3tt2a   : Crypto_SM3_3VectorIndexed_Intrinsic;
def int_aarch64_crypto_sm3tt2b   : Crypto_SM3_3VectorIndexed_Intrinsic;
def int_aarch64_crypto_sm4e      : Crypto_SM4_2Vector_Intrinsic;
def int_aarch64_crypto_sm4ekey   : Crypto_SM4_2Vector_Intrinsic;

//===----------------------------------------------------------------------===//
// CRC32

let TargetPrefix = "aarch64" in {

def int_aarch64_crc32b  : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
    [IntrNoMem]>;
def int_aarch64_crc32cb : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
    [IntrNoMem]>;
def int_aarch64_crc32h  : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
    [IntrNoMem]>;
def int_aarch64_crc32ch : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
    [IntrNoMem]>;
def int_aarch64_crc32w  : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
    [IntrNoMem]>;
def int_aarch64_crc32cw : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
    [IntrNoMem]>;
// The 64-bit variants consume an i64 data word but still accumulate an i32 CRC.
def int_aarch64_crc32x  : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i64_ty],
    [IntrNoMem]>;
def int_aarch64_crc32cx : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i64_ty],
    [IntrNoMem]>;
}

//===----------------------------------------------------------------------===//
// Memory Tagging Extensions (MTE) Intrinsics
let TargetPrefix = "aarch64" in {
def int_aarch64_irg  : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_ptr_ty, llvm_i64_ty],
    [IntrNoMem, IntrHasSideEffects]>;
def int_aarch64_addg : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_ptr_ty, llvm_i64_ty],
    [IntrNoMem]>;
def int_aarch64_gmi  : DefaultAttrsIntrinsic<[llvm_i64_ty], [llvm_ptr_ty, llvm_i64_ty],
    [IntrNoMem]>;
def int_aarch64_ldg  : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_ptr_ty, llvm_ptr_ty],
    [IntrReadMem]>;
def int_aarch64_stg  : DefaultAttrsIntrinsic<[], [llvm_ptr_ty, llvm_ptr_ty],
    [IntrWriteMem]>;
def int_aarch64_subp : DefaultAttrsIntrinsic<[llvm_i64_ty], [llvm_ptr_ty, llvm_ptr_ty],
    [IntrNoMem]>;

// The following are codegen-only intrinsics for stack instrumentation.

// Generate a randomly tagged stack base pointer.
def int_aarch64_irg_sp : DefaultAttrsIntrinsic<[llvm_ptr_ty], [llvm_i64_ty],
    [IntrNoMem, IntrHasSideEffects]>;

// Transfer pointer tag with offset.
// ptr1 = tagp(ptr0, baseptr, tag_offset) returns a pointer where
// * address is the address in ptr0
// * tag is a function of (tag in baseptr, tag_offset).
// ** Beware, this is not the same function as implemented by the ADDG instruction!
// Backend optimizations may change tag_offset; the only guarantee is that calls
// to tagp with the same pair of (baseptr, tag_offset) will produce pointers
// with the same tag value, assuming the set of excluded tags has not changed.
// Address bits in baseptr and tag bits in ptr0 are ignored.
// When offset between ptr0 and baseptr is a compile time constant, this can be emitted as
// ADDG ptr1, baseptr, (ptr0 - baseptr), tag_offset
// It is intended that ptr0 is an alloca address, and baseptr is the direct output of llvm.aarch64.irg.sp.
def int_aarch64_tagp : DefaultAttrsIntrinsic<[llvm_anyptr_ty], [LLVMMatchType<0>, llvm_ptr_ty, llvm_i64_ty],
    [IntrNoMem, ImmArg<ArgIndex<2>>]>;

// Update allocation tags for the memory range to match the tag in the pointer argument.
def int_aarch64_settag : DefaultAttrsIntrinsic<[], [llvm_ptr_ty, llvm_i64_ty],
    [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<0>>, WriteOnly<ArgIndex<0>>]>;

// Update allocation tags for the memory range to match the tag in the pointer argument,
// and set memory contents to zero.
def int_aarch64_settag_zero : DefaultAttrsIntrinsic<[], [llvm_ptr_ty, llvm_i64_ty],
    [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<0>>, WriteOnly<ArgIndex<0>>]>;

// Update allocation tags for 16-aligned, 16-sized memory region, and store a pair 8-byte values.
def int_aarch64_stgp : DefaultAttrsIntrinsic<[], [llvm_ptr_ty, llvm_i64_ty, llvm_i64_ty],
    [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<0>>, WriteOnly<ArgIndex<0>>]>;
}

// Transactional Memory Extension (TME) Intrinsics
let TargetPrefix = "aarch64" in {
def int_aarch64_tstart  : GCCBuiltin<"__builtin_arm_tstart">,
                         Intrinsic<[llvm_i64_ty], [], [IntrWillReturn]>;

def int_aarch64_tcommit : GCCBuiltin<"__builtin_arm_tcommit">, Intrinsic<[], [], [IntrWillReturn]>;

def int_aarch64_tcancel : GCCBuiltin<"__builtin_arm_tcancel">,
                         Intrinsic<[], [llvm_i64_ty], [IntrWillReturn, ImmArg<ArgIndex<0>>]>;

def int_aarch64_ttest   : GCCBuiltin<"__builtin_arm_ttest">,
                         Intrinsic<[llvm_i64_ty], [],
                                   [IntrNoMem, IntrHasSideEffects, IntrWillReturn]>;

// Armv8.7-A load/store 64-byte intrinsics
// data512 expands to eight i64 values, i.e. a 64-byte payload.
defvar data512 = !listsplat(llvm_i64_ty, 8);
def int_aarch64_ld64b: Intrinsic<data512, [llvm_ptr_ty]>;
def int_aarch64_st64b: Intrinsic<[], !listconcat([llvm_ptr_ty], data512)>;
def int_aarch64_st64bv: Intrinsic<[llvm_i64_ty], !listconcat([llvm_ptr_ty], data512)>;
def int_aarch64_st64bv0: Intrinsic<[llvm_i64_ty], !listconcat([llvm_ptr_ty], data512)>;

}

// Scalable-vector (SVE) type aliases used by the intrinsic classes below.
def llvm_nxv2i1_ty   : LLVMType<nxv2i1>;
def llvm_nxv4i1_ty   : LLVMType<nxv4i1>;
def llvm_nxv8i1_ty   : LLVMType<nxv8i1>;
def llvm_nxv16i1_ty  : LLVMType<nxv16i1>;
def llvm_nxv16i8_ty  : LLVMType<nxv16i8>;
def llvm_nxv4i32_ty  : LLVMType<nxv4i32>;
def llvm_nxv2i64_ty  : LLVMType<nxv2i64>;
def llvm_nxv8f16_ty  : LLVMType<nxv8f16>;
def llvm_nxv8bf16_ty : LLVMType<nxv8bf16>;
def llvm_nxv4f32_ty  : LLVMType<nxv4f32>;
def llvm_nxv2f64_ty  : LLVMType<nxv2f64>;

let TargetPrefix = "aarch64" in {  // All intrinsics start with "llvm.aarch64.".

  class AdvSIMD_SVE_Create_2Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_anyvector_ty, LLVMMatchType<1>],
                [IntrReadMem]>;

  class AdvSIMD_SVE_Create_3Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_anyvector_ty, LLVMMatchType<1>, LLVMMatchType<1>],
                [IntrReadMem]>;

  class AdvSIMD_SVE_Create_4Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_anyvector_ty, LLVMMatchType<1>, LLVMMatchType<1>,
                 LLVMMatchType<1>],
                [IntrReadMem]>;

  // Insert one vector (arg 2) into a tuple at an immediate index (arg 1).
  class AdvSIMD_SVE_Set_Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>, llvm_i32_ty, llvm_anyvector_ty],
                [IntrReadMem, ImmArg<ArgIndex<1>>]>;

  // Extract one vector from a tuple at an immediate index (arg 1).
  class AdvSIMD_SVE_Get_Vector_Tuple
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty, llvm_i32_ty],
                [IntrReadMem, IntrArgMemOnly, ImmArg<ArgIndex<1>>]>;

  class AdvSIMD_ManyVec_PredLoad_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty, LLVMPointerToElt<0>],
                [IntrReadMem, IntrArgMemOnly]>;

  class AdvSIMD_1Vec_PredLoad_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMPointerToElt<0>],
                [IntrReadMem, IntrArgMemOnly]>;

  // As above, but also updates the FFR (modelled via inaccessible memory).
  class AdvSIMD_1Vec_PredLoad_WriteFFR_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMPointerToElt<0>],
                [IntrInaccessibleMemOrArgMemOnly]>;

  class AdvSIMD_1Vec_PredStore_Intrinsic
    : DefaultAttrsIntrinsic<[],
                [llvm_anyvector_ty,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMPointerToElt<0>],
                [IntrArgMemOnly, NoCapture<ArgIndex<2>>]>;

  class AdvSIMD_2Vec_PredStore_Intrinsic
      : DefaultAttrsIntrinsic<[],
                  [llvm_anyvector_ty, LLVMMatchType<0>,
                   LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, LLVMPointerToElt<0>],
                  [IntrArgMemOnly, NoCapture<ArgIndex<3>>]>;

  class AdvSIMD_3Vec_PredStore_Intrinsic
      : DefaultAttrsIntrinsic<[],
                  [llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>,
                   LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, LLVMPointerToElt<0>],
                  [IntrArgMemOnly, NoCapture<ArgIndex<4>>]>;

  class AdvSIMD_4Vec_PredStore_Intrinsic
      : DefaultAttrsIntrinsic<[],
                  [llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>,
                   LLVMMatchType<0>,
                   LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, LLVMPointerToElt<0>],
                  [IntrArgMemOnly, NoCapture<ArgIndex<5>>]>;

  // index(base, step): series starting at a scalar base with a scalar step.
  class AdvSIMD_SVE_Index_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMVectorElementType<0>,
                 LLVMVectorElementType<0>],
                [IntrNoMem]>;

  class AdvSIMD_Merged1VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  class AdvSIMD_2VectorArgIndexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  class AdvSIMD_3VectorArgIndexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  class AdvSIMD_Pred1VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  class AdvSIMD_Pred2VectorArg_Intrinsic
    :
      DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  class AdvSIMD_Pred3VectorArg_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  // Predicated compare: result is a predicate with the same element count.
  class AdvSIMD_SVE_Compare_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  // As above, but the second operand is always a 64-bit element vector.
  class AdvSIMD_SVE_CompareWide_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty,
                 llvm_nxv2i64_ty],
                [IntrNoMem]>;

  class AdvSIMD_SVE_Saturating_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>],
                [IntrNoMem]>;

  // Saturating inc/dec by (pattern, multiplier) — both immediates.
  class AdvSIMD_SVE_SaturatingWithPattern_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 llvm_i32_ty,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;

  class AdvSIMD_SVE_Saturating_N_Intrinsic<LLVMType T>
    : DefaultAttrsIntrinsic<[T],
                [T, llvm_anyvector_ty],
                [IntrNoMem]>;

  class AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<LLVMType T>
    : DefaultAttrsIntrinsic<[T],
                [T, llvm_i32_ty, llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;

  class AdvSIMD_SVE_CNT_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMVectorOfBitcastsToInt<0>],
                [LLVMVectorOfBitcastsToInt<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty],
                [IntrNoMem]>;

  class AdvSIMD_SVE_ReduceWithInit_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMVectorElementType<0>],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMVectorElementType<0>,
                 llvm_anyvector_ty],
                [IntrNoMem]>;

  class AdvSIMD_SVE_ShiftByImm_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  class AdvSIMD_SVE_ShiftWide_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 llvm_nxv2i64_ty],
                [IntrNoMem]>;

  class AdvSIMD_SVE_Unpack_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
               [LLVMSubdivide2VectorType<0>],
               [IntrNoMem]>;

  class AdvSIMD_SVE_CADD_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  class AdvSIMD_SVE_CMLA_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<4>>]>;

  class AdvSIMD_SVE_CMLA_LANE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 llvm_i32_ty,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>]>;

  class AdvSIMD_SVE_DUP_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMVectorElementType<0>],
                [IntrNoMem]>;

  class AdvSIMD_SVE_DUP_Unpred_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty], [LLVMVectorElementType<0>],
                [IntrNoMem]>;

  class AdvSIMD_SVE_DUPQ_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 llvm_i64_ty],
                [IntrNoMem]>;

  class AdvSIMD_SVE_EXPA_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMVectorOfBitcastsToInt<0>],
                [IntrNoMem]>;

  class AdvSIMD_SVE_FCVT_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty],
                [IntrNoMem]>;

  class AdvSIMD_SVE_FCVTZS_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMVectorOfBitcastsToInt<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty],
                [IntrNoMem]>;

  class AdvSIMD_SVE_INSR_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMVectorElementType<0>],
                [IntrNoMem]>;

  // ptrue(pattern): the predicate pattern is an immediate.
  class AdvSIMD_SVE_PTRUE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<0>>]>;

  class AdvSIMD_SVE_PUNPKHI_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMHalfElementsVectorType<0>],
                [llvm_anyvector_ty],
                [IntrNoMem]>;

  class AdvSIMD_SVE_SCALE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 LLVMVectorOfBitcastsToInt<0>],
                [IntrNoMem]>;

  class AdvSIMD_SVE_SCVTF_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 llvm_anyvector_ty],
                [IntrNoMem]>;

  class AdvSIMD_SVE_TSMUL_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMVectorOfBitcastsToInt<0>],
                [IntrNoMem]>;

  // cntb/cnth/cntw/cntd: element count for a predicate pattern (immediate).
  class AdvSIMD_SVE_CNTB_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i64_ty],
                [llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<0>>]>;

  class AdvSIMD_SVE_CNTP_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i64_ty],
                [llvm_anyvector_ty, LLVMMatchType<0>],
                [IntrNoMem]>;

  class
AdvSIMD_SVE_DOT_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide4VectorType<0>,
                 LLVMSubdivide4VectorType<0>],
                [IntrNoMem]>;

  class AdvSIMD_SVE_DOT_Indexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide4VectorType<0>,
                 LLVMSubdivide4VectorType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  class AdvSIMD_SVE_PTEST_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_i1_ty],
                [llvm_anyvector_ty,
                 LLVMMatchType<0>],
                [IntrNoMem]>;

  class AdvSIMD_SVE_TBL_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMVectorOfBitcastsToInt<0>],
                [IntrNoMem]>;

  class AdvSIMD_SVE2_TBX_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMMatchType<0>,
                 LLVMVectorOfBitcastsToInt<0>],
                [IntrNoMem]>;

  class SVE2_1VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMSubdivide2VectorType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<1>>]>;

  class SVE2_2VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMSubdivide2VectorType<0>,
                 LLVMSubdivide2VectorType<0>],
                [IntrNoMem]>;

  class SVE2_2VectorArgIndexed_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMSubdivide2VectorType<0>,
                 LLVMSubdivide2VectorType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  class SVE2_2VectorArg_Wide_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide2VectorType<0>],
                [IntrNoMem]>;

  class SVE2_2VectorArg_Pred_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                 LLVMMatchType<0>,
                 LLVMSubdivide2VectorType<0>],
                [IntrNoMem]>;

  class SVE2_3VectorArg_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide2VectorType<0>,
                 LLVMSubdivide2VectorType<0>],
                [IntrNoMem]>;

  class SVE2_3VectorArgIndexed_Long_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide2VectorType<0>,
                 LLVMSubdivide2VectorType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  class SVE2_1VectorArg_Narrowing_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMSubdivide2VectorType<0>],
                [llvm_anyvector_ty],
                [IntrNoMem]>;

  class SVE2_Merged1VectorArg_Narrowing_Intrinsic
    : DefaultAttrsIntrinsic<[LLVMSubdivide2VectorType<0>],
                [LLVMSubdivide2VectorType<0>,
                 llvm_anyvector_ty],
                [IntrNoMem]>;
  class SVE2_2VectorArg_Narrowing_Intrinsic
      : DefaultAttrsIntrinsic<
            [LLVMSubdivide2VectorType<0>],
            [llvm_anyvector_ty, LLVMMatchType<0>],
            [IntrNoMem]>;

  class SVE2_Merged2VectorArg_Narrowing_Intrinsic
      : DefaultAttrsIntrinsic<
            [LLVMSubdivide2VectorType<0>],
            [LLVMSubdivide2VectorType<0>, llvm_anyvector_ty, LLVMMatchType<0>],
            [IntrNoMem]>;

  class SVE2_1VectorArg_Imm_Narrowing_Intrinsic
      : DefaultAttrsIntrinsic<[LLVMSubdivide2VectorType<0>],
                  [llvm_anyvector_ty, llvm_i32_ty],
                  [IntrNoMem, ImmArg<ArgIndex<1>>]>;

  class SVE2_2VectorArg_Imm_Narrowing_Intrinsic
      : DefaultAttrsIntrinsic<[LLVMSubdivide2VectorType<0>],
                  [LLVMSubdivide2VectorType<0>, llvm_anyvector_ty,
                   llvm_i32_ty],
                  [IntrNoMem, ImmArg<ArgIndex<2>>]>;

  // NOTE(review): unlike every neighbouring class, this one carries no
  // attribute list (no IntrNoMem) — confirm whether that is intentional.
  class SVE2_CONFLICT_DETECT_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMAnyPointerType<llvm_any_ty>,
                 LLVMMatchType<1>]>;

  class SVE2_3VectorArg_Indexed_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide2VectorType<0>,
                 LLVMSubdivide2VectorType<0>,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>]>;

  class AdvSIMD_SVE_CDOT_LANE_Intrinsic
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                [LLVMMatchType<0>,
                 LLVMSubdivide4VectorType<0>,
                 LLVMSubdivide4VectorType<0>,
                 llvm_i32_ty,
                 llvm_i32_ty],
                [IntrNoMem, ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>]>;

  // NOTE: There is no relationship between these intrinsics beyond an attempt
  // to reuse currently identical class definitions.
  class AdvSIMD_SVE_LOGB_Intrinsic  : AdvSIMD_SVE_CNT_Intrinsic;
  class AdvSIMD_SVE2_CADD_Intrinsic : AdvSIMD_2VectorArgIndexed_Intrinsic;
  class AdvSIMD_SVE2_CMLA_Intrinsic : AdvSIMD_3VectorArgIndexed_Intrinsic;

  // This class of intrinsics are not intended to be useful within LLVM IR but
  // are instead here to support some of the more rigid parts of the ACLE.
  class Builtin_SVCVT<string name, LLVMType OUT, LLVMType PRED, LLVMType IN>
      : DefaultAttrsIntrinsic<[OUT], [OUT, PRED, IN], [IntrNoMem]>;
}

//===----------------------------------------------------------------------===//
// SVE

let TargetPrefix = "aarch64" in {  // All intrinsics start with "llvm.aarch64.".
class AdvSIMD_SVE_Reduce_Intrinsic
  : DefaultAttrsIntrinsic<[LLVMVectorElementType<0>],
              [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
               llvm_anyvector_ty],
              [IntrNoMem]>;

// Reduction that widens the accumulator to i64 (used by saddv/uaddv).
class AdvSIMD_SVE_SADDV_Reduce_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_i64_ty],
              [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
               llvm_anyvector_ty],
              [IntrNoMem]>;

class AdvSIMD_SVE_WHILE_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
              [llvm_anyint_ty, LLVMMatchType<1>],
              [IntrNoMem]>;

class AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
              [
                LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                LLVMPointerToElt<0>,
                LLVMScalarOrSameVectorWidth<0, llvm_i64_ty>
              ],
              [IntrReadMem, IntrArgMemOnly]>;

class AdvSIMD_GatherLoad_SV_64b_Offsets_WriteFFR_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
              [
                LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                LLVMPointerToElt<0>,
                LLVMScalarOrSameVectorWidth<0, llvm_i64_ty>
              ],
              [IntrInaccessibleMemOrArgMemOnly]>;

class AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
              [
                LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                LLVMPointerToElt<0>,
                LLVMScalarOrSameVectorWidth<0, llvm_i32_ty>
              ],
              [IntrReadMem, IntrArgMemOnly]>;

class AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
              [
                LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                LLVMPointerToElt<0>,
                LLVMScalarOrSameVectorWidth<0, llvm_i32_ty>
              ],
              [IntrInaccessibleMemOrArgMemOnly]>;

class AdvSIMD_GatherLoad_VS_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
              [
                LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                llvm_anyvector_ty,
                llvm_i64_ty
              ],
              [IntrReadMem]>;

class AdvSIMD_GatherLoad_VS_WriteFFR_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
              [
                LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                llvm_anyvector_ty,
                llvm_i64_ty
              ],
              [IntrInaccessibleMemOrArgMemOnly]>;

class AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic
  : DefaultAttrsIntrinsic<[],
              [
                llvm_anyvector_ty,
                LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                LLVMPointerToElt<0>,
                LLVMScalarOrSameVectorWidth<0, llvm_i64_ty>
              ],
              [IntrWriteMem, IntrArgMemOnly]>;

class AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic
  : DefaultAttrsIntrinsic<[],
              [
                llvm_anyvector_ty,
                LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                LLVMPointerToElt<0>,
                LLVMScalarOrSameVectorWidth<0, llvm_i32_ty>
              ],
              [IntrWriteMem, IntrArgMemOnly]>;

class AdvSIMD_ScatterStore_VS_Intrinsic
  : DefaultAttrsIntrinsic<[],
              [
                llvm_anyvector_ty,
                LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
                llvm_anyvector_ty, llvm_i64_ty
              ],
              [IntrWriteMem]>;


class SVE_gather_prf_SV
  : DefaultAttrsIntrinsic<[],
              [
                LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, // Predicate
                llvm_ptr_ty, // Base address
                llvm_anyvector_ty, // Offsets
                llvm_i32_ty // Prfop
              ],
              [IntrInaccessibleMemOrArgMemOnly, NoCapture<ArgIndex<1>>, ImmArg<ArgIndex<3>>]>;

class SVE_gather_prf_VS
  : DefaultAttrsIntrinsic<[],
              [
                LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, // Predicate
                llvm_anyvector_ty, // Base addresses
                llvm_i64_ty, // Scalar offset
                llvm_i32_ty // Prfop
              ],
              [IntrInaccessibleMemOrArgMemOnly, ImmArg<ArgIndex<3>>]>;

class SVE_MatMul_Intrinsic
  : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
              [LLVMMatchType<0>, LLVMSubdivide4VectorType<0>, LLVMSubdivide4VectorType<0>],
              [IntrNoMem]>;

class SVE_4Vec_BF16
  : DefaultAttrsIntrinsic<[llvm_nxv4f32_ty],
              [llvm_nxv4f32_ty, llvm_nxv8bf16_ty, llvm_nxv8bf16_ty],
              [IntrNoMem]>;

class SVE_4Vec_BF16_Indexed
  : DefaultAttrsIntrinsic<[llvm_nxv4f32_ty],
              [llvm_nxv4f32_ty, llvm_nxv8bf16_ty, llvm_nxv8bf16_ty, llvm_i64_ty],
              [IntrNoMem, ImmArg<ArgIndex<3>>]>;

//
// Vector tuple creation intrinsics (ACLE)
//

def int_aarch64_sve_tuple_create2 : AdvSIMD_SVE_Create_2Vector_Tuple;
def int_aarch64_sve_tuple_create3 : AdvSIMD_SVE_Create_3Vector_Tuple;
def int_aarch64_sve_tuple_create4 : AdvSIMD_SVE_Create_4Vector_Tuple;

//
// Vector tuple insertion/extraction intrinsics (ACLE)
//

def int_aarch64_sve_tuple_get : AdvSIMD_SVE_Get_Vector_Tuple;
def int_aarch64_sve_tuple_set : AdvSIMD_SVE_Set_Vector_Tuple;

//
// Loads
//

def int_aarch64_sve_ld1 : AdvSIMD_1Vec_PredLoad_Intrinsic;

def int_aarch64_sve_ld2 : AdvSIMD_ManyVec_PredLoad_Intrinsic;
def int_aarch64_sve_ld3 : AdvSIMD_ManyVec_PredLoad_Intrinsic;
def int_aarch64_sve_ld4 : AdvSIMD_ManyVec_PredLoad_Intrinsic;

// ldnf1/ldff1 update the FFR, hence the WriteFFR load class.
def int_aarch64_sve_ldnt1 : AdvSIMD_1Vec_PredLoad_Intrinsic;
def int_aarch64_sve_ldnf1 : AdvSIMD_1Vec_PredLoad_WriteFFR_Intrinsic;
def int_aarch64_sve_ldff1 : AdvSIMD_1Vec_PredLoad_WriteFFR_Intrinsic;

def int_aarch64_sve_ld1rq : AdvSIMD_1Vec_PredLoad_Intrinsic;
def int_aarch64_sve_ld1ro : AdvSIMD_1Vec_PredLoad_Intrinsic;

//
// Stores
//

def int_aarch64_sve_st1 : AdvSIMD_1Vec_PredStore_Intrinsic;
def int_aarch64_sve_st2 : AdvSIMD_2Vec_PredStore_Intrinsic;
def int_aarch64_sve_st3 : AdvSIMD_3Vec_PredStore_Intrinsic;
def int_aarch64_sve_st4 : AdvSIMD_4Vec_PredStore_Intrinsic;

def int_aarch64_sve_stnt1 : AdvSIMD_1Vec_PredStore_Intrinsic;

//
// Prefetches
//

def int_aarch64_sve_prf
  : DefaultAttrsIntrinsic<[], [llvm_anyvector_ty, llvm_ptr_ty, llvm_i32_ty],
              [IntrArgMemOnly, ImmArg<ArgIndex<2>>]>;

// Scalar + 32-bit scaled offset vector, zero extend, packed and
// unpacked.
def int_aarch64_sve_prfb_gather_uxtw_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfh_gather_uxtw_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfw_gather_uxtw_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfd_gather_uxtw_index : SVE_gather_prf_SV;

// Scalar + 32-bit scaled offset vector, sign extend, packed and
// unpacked.
def int_aarch64_sve_prfb_gather_sxtw_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfw_gather_sxtw_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfh_gather_sxtw_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfd_gather_sxtw_index : SVE_gather_prf_SV;

// Scalar + 64-bit scaled offset vector.
def int_aarch64_sve_prfb_gather_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfh_gather_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfw_gather_index : SVE_gather_prf_SV;
def int_aarch64_sve_prfd_gather_index : SVE_gather_prf_SV;

// Vector + scalar.
def int_aarch64_sve_prfb_gather_scalar_offset : SVE_gather_prf_VS;
def int_aarch64_sve_prfh_gather_scalar_offset : SVE_gather_prf_VS;
def int_aarch64_sve_prfw_gather_scalar_offset : SVE_gather_prf_VS;
def int_aarch64_sve_prfd_gather_scalar_offset : SVE_gather_prf_VS;

//
// Scalar to vector operations
//

def int_aarch64_sve_dup : AdvSIMD_SVE_DUP_Intrinsic;
def int_aarch64_sve_dup_x : AdvSIMD_SVE_DUP_Unpred_Intrinsic;

def int_aarch64_sve_index : AdvSIMD_SVE_Index_Intrinsic;

//
// Address calculation
//

def int_aarch64_sve_adrb : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_adrh : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_adrw : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_adrd : AdvSIMD_2VectorArg_Intrinsic;

//
// Integer arithmetic
//

def int_aarch64_sve_add  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sub  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_subr : AdvSIMD_Pred2VectorArg_Intrinsic;

def int_aarch64_sve_pmul : AdvSIMD_2VectorArg_Intrinsic;

def int_aarch64_sve_mul      : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_mul_lane : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_smulh    : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_umulh    : AdvSIMD_Pred2VectorArg_Intrinsic;

def int_aarch64_sve_sdiv  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_udiv  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sdivr : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_udivr : AdvSIMD_Pred2VectorArg_Intrinsic;

def int_aarch64_sve_smax : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_umax : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_smin : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_umin : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sabd : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_uabd : AdvSIMD_Pred2VectorArg_Intrinsic;

def int_aarch64_sve_mad      : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_msb      : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_mla      : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_mla_lane : AdvSIMD_3VectorArgIndexed_Intrinsic;
def int_aarch64_sve_mls      : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_mls_lane : AdvSIMD_3VectorArgIndexed_Intrinsic;

def int_aarch64_sve_saddv : AdvSIMD_SVE_SADDV_Reduce_Intrinsic;
def int_aarch64_sve_uaddv : AdvSIMD_SVE_SADDV_Reduce_Intrinsic;

def int_aarch64_sve_smaxv : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_umaxv : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_sminv : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_uminv : AdvSIMD_SVE_Reduce_Intrinsic;

def int_aarch64_sve_orv  : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_eorv : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_andv : AdvSIMD_SVE_Reduce_Intrinsic;

def int_aarch64_sve_abs : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_neg : AdvSIMD_Merged1VectorArg_Intrinsic;

def int_aarch64_sve_sdot      : AdvSIMD_SVE_DOT_Intrinsic;
def int_aarch64_sve_sdot_lane : AdvSIMD_SVE_DOT_Indexed_Intrinsic;

def int_aarch64_sve_udot      : AdvSIMD_SVE_DOT_Intrinsic;
def int_aarch64_sve_udot_lane : AdvSIMD_SVE_DOT_Indexed_Intrinsic;

def int_aarch64_sve_sqadd_x : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_sqsub_x : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_uqadd_x : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_uqsub_x : AdvSIMD_2VectorArg_Intrinsic;

// Shifts
// NOTE(review): insr (vector insert) is grouped under "Shifts" — possibly a
// misplaced definition; confirm before moving.

def int_aarch64_sve_asr      : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_asr_wide : AdvSIMD_SVE_ShiftWide_Intrinsic;
def int_aarch64_sve_asrd     : AdvSIMD_SVE_ShiftByImm_Intrinsic;
def int_aarch64_sve_insr     : AdvSIMD_SVE_INSR_Intrinsic;
def int_aarch64_sve_lsl      : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_lsl_wide : AdvSIMD_SVE_ShiftWide_Intrinsic;
def int_aarch64_sve_lsr      : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_lsr_wide : AdvSIMD_SVE_ShiftWide_Intrinsic;

//
// Integer comparisons
//

def int_aarch64_sve_cmpeq : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_cmpge : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_cmpgt : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_cmphi : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_cmphs : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_cmpne : AdvSIMD_SVE_Compare_Intrinsic;

def int_aarch64_sve_cmpeq_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
def int_aarch64_sve_cmpge_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
def int_aarch64_sve_cmpgt_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
def int_aarch64_sve_cmphi_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
def int_aarch64_sve_cmphs_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
def int_aarch64_sve_cmple_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
def int_aarch64_sve_cmplo_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
def int_aarch64_sve_cmpls_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
def int_aarch64_sve_cmplt_wide : AdvSIMD_SVE_CompareWide_Intrinsic;
def int_aarch64_sve_cmpne_wide : AdvSIMD_SVE_CompareWide_Intrinsic;

//
// Counting bits
//

def int_aarch64_sve_cls : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_clz : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_cnt : AdvSIMD_SVE_CNT_Intrinsic;

//
// Counting elements
//

def int_aarch64_sve_cntb : AdvSIMD_SVE_CNTB_Intrinsic;
def int_aarch64_sve_cnth : AdvSIMD_SVE_CNTB_Intrinsic;
def int_aarch64_sve_cntw : AdvSIMD_SVE_CNTB_Intrinsic;
def int_aarch64_sve_cntd : AdvSIMD_SVE_CNTB_Intrinsic;

def int_aarch64_sve_cntp : AdvSIMD_SVE_CNTP_Intrinsic;

//
// FFR manipulation
//

def int_aarch64_sve_rdffr : GCCBuiltin<"__builtin_sve_svrdffr">, DefaultAttrsIntrinsic<[llvm_nxv16i1_ty], [], [IntrReadMem, IntrInaccessibleMemOnly]>;
def int_aarch64_sve_rdffr_z : GCCBuiltin<"__builtin_sve_svrdffr_z">, DefaultAttrsIntrinsic<[llvm_nxv16i1_ty], [llvm_nxv16i1_ty], [IntrReadMem, IntrInaccessibleMemOnly]>;
def int_aarch64_sve_setffr : GCCBuiltin<"__builtin_sve_svsetffr">, DefaultAttrsIntrinsic<[], [], [IntrWriteMem, IntrInaccessibleMemOnly]>;
def int_aarch64_sve_wrffr : GCCBuiltin<"__builtin_sve_svwrffr">, DefaultAttrsIntrinsic<[], [llvm_nxv16i1_ty], [IntrWriteMem, IntrInaccessibleMemOnly]>;

//
// Saturating scalar arithmetic
//

def int_aarch64_sve_sqdech : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_sqdecw : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def int_aarch64_sve_sqdecd : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic;
def
int_aarch64_sve_sqdecp : AdvSIMD_SVE_Saturating_Intrinsic; 1737 1738def int_aarch64_sve_sqdecb_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 1739def int_aarch64_sve_sqdecb_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>; 1740def int_aarch64_sve_sqdech_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 1741def int_aarch64_sve_sqdech_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>; 1742def int_aarch64_sve_sqdecw_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 1743def int_aarch64_sve_sqdecw_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>; 1744def int_aarch64_sve_sqdecd_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 1745def int_aarch64_sve_sqdecd_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>; 1746def int_aarch64_sve_sqdecp_n32 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i32_ty>; 1747def int_aarch64_sve_sqdecp_n64 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i64_ty>; 1748 1749def int_aarch64_sve_sqinch : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic; 1750def int_aarch64_sve_sqincw : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic; 1751def int_aarch64_sve_sqincd : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic; 1752def int_aarch64_sve_sqincp : AdvSIMD_SVE_Saturating_Intrinsic; 1753 1754def int_aarch64_sve_sqincb_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 1755def int_aarch64_sve_sqincb_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>; 1756def int_aarch64_sve_sqinch_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 1757def int_aarch64_sve_sqinch_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>; 1758def int_aarch64_sve_sqincw_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 1759def int_aarch64_sve_sqincw_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>; 1760def int_aarch64_sve_sqincd_n32 : 
AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 1761def int_aarch64_sve_sqincd_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>; 1762def int_aarch64_sve_sqincp_n32 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i32_ty>; 1763def int_aarch64_sve_sqincp_n64 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i64_ty>; 1764 1765def int_aarch64_sve_uqdech : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic; 1766def int_aarch64_sve_uqdecw : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic; 1767def int_aarch64_sve_uqdecd : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic; 1768def int_aarch64_sve_uqdecp : AdvSIMD_SVE_Saturating_Intrinsic; 1769 1770def int_aarch64_sve_uqdecb_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 1771def int_aarch64_sve_uqdecb_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>; 1772def int_aarch64_sve_uqdech_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 1773def int_aarch64_sve_uqdech_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>; 1774def int_aarch64_sve_uqdecw_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 1775def int_aarch64_sve_uqdecw_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>; 1776def int_aarch64_sve_uqdecd_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 1777def int_aarch64_sve_uqdecd_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>; 1778def int_aarch64_sve_uqdecp_n32 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i32_ty>; 1779def int_aarch64_sve_uqdecp_n64 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i64_ty>; 1780 1781def int_aarch64_sve_uqinch : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic; 1782def int_aarch64_sve_uqincw : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic; 1783def int_aarch64_sve_uqincd : AdvSIMD_SVE_SaturatingWithPattern_Intrinsic; 1784def int_aarch64_sve_uqincp : AdvSIMD_SVE_Saturating_Intrinsic; 1785 1786def int_aarch64_sve_uqincb_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>; 
def int_aarch64_sve_uqincb_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_uqinch_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqinch_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_uqincw_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqincw_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_uqincd_n32 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqincd_n64 : AdvSIMD_SVE_SaturatingWithPattern_N_Intrinsic<llvm_i64_ty>;
def int_aarch64_sve_uqincp_n32 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i32_ty>;
def int_aarch64_sve_uqincp_n64 : AdvSIMD_SVE_Saturating_N_Intrinsic<llvm_i64_ty>;

//
// Reversal
//

def int_aarch64_sve_rbit : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_revb : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_revh : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_revw : AdvSIMD_Merged1VectorArg_Intrinsic;

//
// Permutations and selection
//

def int_aarch64_sve_clasta    : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_clasta_n  : AdvSIMD_SVE_ReduceWithInit_Intrinsic;
def int_aarch64_sve_clastb    : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_clastb_n  : AdvSIMD_SVE_ReduceWithInit_Intrinsic;
def int_aarch64_sve_compact   : AdvSIMD_Pred1VectorArg_Intrinsic;
def int_aarch64_sve_dupq_lane : AdvSIMD_SVE_DUPQ_Intrinsic;
def int_aarch64_sve_ext       : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_sel       : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_lasta     : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_lastb     : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_rev       : AdvSIMD_1VectorArg_Intrinsic;
def int_aarch64_sve_splice    : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_sunpkhi   : AdvSIMD_SVE_Unpack_Intrinsic;
def int_aarch64_sve_sunpklo   : AdvSIMD_SVE_Unpack_Intrinsic;
def int_aarch64_sve_tbl       : AdvSIMD_SVE_TBL_Intrinsic;
def int_aarch64_sve_trn1      : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_trn2      : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_trn1q     : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_trn2q     : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_uunpkhi   : AdvSIMD_SVE_Unpack_Intrinsic;
def int_aarch64_sve_uunpklo   : AdvSIMD_SVE_Unpack_Intrinsic;
def int_aarch64_sve_uzp1      : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_uzp2      : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_uzp1q     : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_uzp2q     : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_zip1      : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_zip2      : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_zip1q     : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_zip2q     : AdvSIMD_2VectorArg_Intrinsic;

//
// Logical operations
//

def int_aarch64_sve_and  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_bic  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_cnot : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_eor  : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_not  : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_orr  : AdvSIMD_Pred2VectorArg_Intrinsic;

//
// Conversion
//

def int_aarch64_sve_sxtb : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_sxth : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_sxtw : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_uxtb : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_uxth : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_uxtw : AdvSIMD_Merged1VectorArg_Intrinsic;

//
// While comparisons
//

def int_aarch64_sve_whilele : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilelo : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilels : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilelt : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilege : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilegt : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilehs : AdvSIMD_SVE_WHILE_Intrinsic;
def int_aarch64_sve_whilehi : AdvSIMD_SVE_WHILE_Intrinsic;

//
// Floating-point arithmetic
//

def int_aarch64_sve_fabd       : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fabs       : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_fadd       : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fcadd      : AdvSIMD_SVE_CADD_Intrinsic;
def int_aarch64_sve_fcmla      : AdvSIMD_SVE_CMLA_Intrinsic;
def int_aarch64_sve_fcmla_lane : AdvSIMD_SVE_CMLA_LANE_Intrinsic;
def int_aarch64_sve_fdiv       : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fdivr      : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fexpa_x    : AdvSIMD_SVE_EXPA_Intrinsic;
def int_aarch64_sve_fmad       : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fmax       : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fmaxnm     : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fmin       : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fminnm     : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fmla       : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fmla_lane  : AdvSIMD_3VectorArgIndexed_Intrinsic;
def int_aarch64_sve_fmls       : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fmls_lane  : AdvSIMD_3VectorArgIndexed_Intrinsic;
def int_aarch64_sve_fmsb       : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fmul       : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fmulx      : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fneg       : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_fmul_lane  : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_fnmad      : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fnmla      : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fnmls      : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_fnmsb      : AdvSIMD_Pred3VectorArg_Intrinsic;
def int_aarch64_sve_frecpe_x   : AdvSIMD_1VectorArg_Intrinsic;
def int_aarch64_sve_frecps_x   : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_frecpx     : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frinta     : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frinti     : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frintm     : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frintn     : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frintp     : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frintx     : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frintz     : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_frsqrte_x  : AdvSIMD_1VectorArg_Intrinsic;
def int_aarch64_sve_frsqrts_x  : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_fscale     : AdvSIMD_SVE_SCALE_Intrinsic;
def int_aarch64_sve_fsqrt      : AdvSIMD_Merged1VectorArg_Intrinsic;
def int_aarch64_sve_fsub       : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_fsubr      : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_ftmad_x    : AdvSIMD_2VectorArgIndexed_Intrinsic;
def int_aarch64_sve_ftsmul_x   : AdvSIMD_SVE_TSMUL_Intrinsic;
def int_aarch64_sve_ftssel_x   : AdvSIMD_SVE_TSMUL_Intrinsic;

//
// Floating-point reductions
//

def int_aarch64_sve_fadda   : AdvSIMD_SVE_ReduceWithInit_Intrinsic;
def int_aarch64_sve_faddv   : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_fmaxv   : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_fmaxnmv : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_fminv   : AdvSIMD_SVE_Reduce_Intrinsic;
def int_aarch64_sve_fminnmv : AdvSIMD_SVE_Reduce_Intrinsic;

//
// Floating-point conversions
//

def int_aarch64_sve_fcvt   : AdvSIMD_SVE_FCVT_Intrinsic;
def int_aarch64_sve_fcvtzs : AdvSIMD_SVE_FCVTZS_Intrinsic;
def int_aarch64_sve_fcvtzu : AdvSIMD_SVE_FCVTZS_Intrinsic;
def int_aarch64_sve_scvtf  : AdvSIMD_SVE_SCVTF_Intrinsic;
def int_aarch64_sve_ucvtf  : AdvSIMD_SVE_SCVTF_Intrinsic;

//
// Floating-point comparisons
//

def int_aarch64_sve_facge : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_facgt : AdvSIMD_SVE_Compare_Intrinsic;

def int_aarch64_sve_fcmpeq : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_fcmpge : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_fcmpgt : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_fcmpne : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_fcmpuo : AdvSIMD_SVE_Compare_Intrinsic;

// Fixed-type conversions mapped to ACLE builtins via Builtin_SVCVT (note:
// these are conversions even though they follow the comparison banner).
def int_aarch64_sve_fcvtzs_i32f16 : Builtin_SVCVT<"svcvt_s32_f16_m", llvm_nxv4i32_ty, llvm_nxv4i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtzs_i32f64 : Builtin_SVCVT<"svcvt_s32_f64_m", llvm_nxv4i32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;
def int_aarch64_sve_fcvtzs_i64f16 : Builtin_SVCVT<"svcvt_s64_f16_m", llvm_nxv2i64_ty, llvm_nxv2i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtzs_i64f32 : Builtin_SVCVT<"svcvt_s64_f32_m", llvm_nxv2i64_ty, llvm_nxv2i1_ty, llvm_nxv4f32_ty>;

def int_aarch64_sve_fcvt_bf16f32   : Builtin_SVCVT<"svcvt_bf16_f32_m",   llvm_nxv8bf16_ty, llvm_nxv8i1_ty, llvm_nxv4f32_ty>;
def int_aarch64_sve_fcvtnt_bf16f32 : Builtin_SVCVT<"svcvtnt_bf16_f32_m", llvm_nxv8bf16_ty, llvm_nxv8i1_ty, llvm_nxv4f32_ty>;

def int_aarch64_sve_fcvtzu_i32f16 : Builtin_SVCVT<"svcvt_u32_f16_m", llvm_nxv4i32_ty, llvm_nxv4i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtzu_i32f64 : Builtin_SVCVT<"svcvt_u32_f64_m", llvm_nxv4i32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;
def int_aarch64_sve_fcvtzu_i64f16 : Builtin_SVCVT<"svcvt_u64_f16_m", llvm_nxv2i64_ty, llvm_nxv2i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtzu_i64f32 : Builtin_SVCVT<"svcvt_u64_f32_m", llvm_nxv2i64_ty, llvm_nxv2i1_ty, llvm_nxv4f32_ty>;

def int_aarch64_sve_fcvt_f16f32 : Builtin_SVCVT<"svcvt_f16_f32_m", llvm_nxv8f16_ty, llvm_nxv4i1_ty, llvm_nxv4f32_ty>;
def int_aarch64_sve_fcvt_f16f64 : Builtin_SVCVT<"svcvt_f16_f64_m", llvm_nxv8f16_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;
def int_aarch64_sve_fcvt_f32f64 : Builtin_SVCVT<"svcvt_f32_f64_m", llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;

def int_aarch64_sve_fcvt_f32f16 : Builtin_SVCVT<"svcvt_f32_f16_m", llvm_nxv4f32_ty, llvm_nxv4i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvt_f64f16 : Builtin_SVCVT<"svcvt_f64_f16_m", llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvt_f64f32 : Builtin_SVCVT<"svcvt_f64_f32_m", llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv4f32_ty>;

def int_aarch64_sve_fcvtlt_f32f16 : Builtin_SVCVT<"svcvtlt_f32_f16_m", llvm_nxv4f32_ty, llvm_nxv4i1_ty, llvm_nxv8f16_ty>;
def int_aarch64_sve_fcvtlt_f64f32 : Builtin_SVCVT<"svcvtlt_f64_f32_m", llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv4f32_ty>;
def int_aarch64_sve_fcvtnt_f16f32 : Builtin_SVCVT<"svcvtnt_f16_f32_m", llvm_nxv8f16_ty, llvm_nxv4i1_ty, llvm_nxv4f32_ty>;
def int_aarch64_sve_fcvtnt_f32f64 : Builtin_SVCVT<"svcvtnt_f32_f64_m", llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;

def int_aarch64_sve_fcvtx_f32f64   : Builtin_SVCVT<"svcvtx_f32_f64_m",   llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;
def int_aarch64_sve_fcvtxnt_f32f64 : Builtin_SVCVT<"svcvtxnt_f32_f64_m", llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2f64_ty>;

def int_aarch64_sve_scvtf_f16i32 : Builtin_SVCVT<"svcvt_f16_s32_m", llvm_nxv8f16_ty, llvm_nxv4i1_ty, llvm_nxv4i32_ty>;
def int_aarch64_sve_scvtf_f16i64 : Builtin_SVCVT<"svcvt_f16_s64_m", llvm_nxv8f16_ty, llvm_nxv2i1_ty, llvm_nxv2i64_ty>;
1991def int_aarch64_sve_scvtf_f32i64 : Builtin_SVCVT<"svcvt_f32_s64_m", llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2i64_ty>; 1992def int_aarch64_sve_scvtf_f64i32 : Builtin_SVCVT<"svcvt_f64_s32_m", llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv4i32_ty>; 1993 1994def int_aarch64_sve_ucvtf_f16i32 : Builtin_SVCVT<"svcvt_f16_u32_m", llvm_nxv8f16_ty, llvm_nxv4i1_ty, llvm_nxv4i32_ty>; 1995def int_aarch64_sve_ucvtf_f16i64 : Builtin_SVCVT<"svcvt_f16_u64_m", llvm_nxv8f16_ty, llvm_nxv2i1_ty, llvm_nxv2i64_ty>; 1996def int_aarch64_sve_ucvtf_f32i64 : Builtin_SVCVT<"svcvt_f32_u64_m", llvm_nxv4f32_ty, llvm_nxv2i1_ty, llvm_nxv2i64_ty>; 1997def int_aarch64_sve_ucvtf_f64i32 : Builtin_SVCVT<"svcvt_f64_u32_m", llvm_nxv2f64_ty, llvm_nxv2i1_ty, llvm_nxv4i32_ty>; 1998 1999// 2000// Predicate creation 2001// 2002 2003def int_aarch64_sve_ptrue : AdvSIMD_SVE_PTRUE_Intrinsic; 2004 2005// 2006// Predicate operations 2007// 2008 2009def int_aarch64_sve_and_z : AdvSIMD_Pred2VectorArg_Intrinsic; 2010def int_aarch64_sve_bic_z : AdvSIMD_Pred2VectorArg_Intrinsic; 2011def int_aarch64_sve_brka : AdvSIMD_Merged1VectorArg_Intrinsic; 2012def int_aarch64_sve_brka_z : AdvSIMD_Pred1VectorArg_Intrinsic; 2013def int_aarch64_sve_brkb : AdvSIMD_Merged1VectorArg_Intrinsic; 2014def int_aarch64_sve_brkb_z : AdvSIMD_Pred1VectorArg_Intrinsic; 2015def int_aarch64_sve_brkn_z : AdvSIMD_Pred2VectorArg_Intrinsic; 2016def int_aarch64_sve_brkpa_z : AdvSIMD_Pred2VectorArg_Intrinsic; 2017def int_aarch64_sve_brkpb_z : AdvSIMD_Pred2VectorArg_Intrinsic; 2018def int_aarch64_sve_eor_z : AdvSIMD_Pred2VectorArg_Intrinsic; 2019def int_aarch64_sve_nand_z : AdvSIMD_Pred2VectorArg_Intrinsic; 2020def int_aarch64_sve_nor_z : AdvSIMD_Pred2VectorArg_Intrinsic; 2021def int_aarch64_sve_orn_z : AdvSIMD_Pred2VectorArg_Intrinsic; 2022def int_aarch64_sve_orr_z : AdvSIMD_Pred2VectorArg_Intrinsic; 2023def int_aarch64_sve_pfirst : AdvSIMD_Pred1VectorArg_Intrinsic; 2024def int_aarch64_sve_pnext : AdvSIMD_Pred1VectorArg_Intrinsic; 2025def 
int_aarch64_sve_punpkhi : AdvSIMD_SVE_PUNPKHI_Intrinsic; 2026def int_aarch64_sve_punpklo : AdvSIMD_SVE_PUNPKHI_Intrinsic; 2027 2028// 2029// Testing predicates 2030// 2031 2032def int_aarch64_sve_ptest_any : AdvSIMD_SVE_PTEST_Intrinsic; 2033def int_aarch64_sve_ptest_first : AdvSIMD_SVE_PTEST_Intrinsic; 2034def int_aarch64_sve_ptest_last : AdvSIMD_SVE_PTEST_Intrinsic; 2035 2036// 2037// Reinterpreting data 2038// 2039 2040def int_aarch64_sve_convert_from_svbool : DefaultAttrsIntrinsic<[llvm_anyvector_ty], 2041 [llvm_nxv16i1_ty], 2042 [IntrNoMem]>; 2043 2044def int_aarch64_sve_convert_to_svbool : DefaultAttrsIntrinsic<[llvm_nxv16i1_ty], 2045 [llvm_anyvector_ty], 2046 [IntrNoMem]>; 2047 2048// 2049// Gather loads: scalar base + vector offsets 2050// 2051 2052// 64 bit unscaled offsets 2053def int_aarch64_sve_ld1_gather : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic; 2054 2055// 64 bit scaled offsets 2056def int_aarch64_sve_ld1_gather_index : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic; 2057 2058// 32 bit unscaled offsets, sign (sxtw) or zero (zxtw) extended to 64 bits 2059def int_aarch64_sve_ld1_gather_sxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic; 2060def int_aarch64_sve_ld1_gather_uxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic; 2061 2062// 32 bit scaled offsets, sign (sxtw) or zero (zxtw) extended to 64 bits 2063def int_aarch64_sve_ld1_gather_sxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic; 2064def int_aarch64_sve_ld1_gather_uxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic; 2065 2066// 2067// Gather loads: vector base + scalar offset 2068// 2069 2070def int_aarch64_sve_ld1_gather_scalar_offset : AdvSIMD_GatherLoad_VS_Intrinsic; 2071 2072 2073// 2074// First-faulting gather loads: scalar base + vector offsets 2075// 2076 2077// 64 bit unscaled offsets 2078def int_aarch64_sve_ldff1_gather : AdvSIMD_GatherLoad_SV_64b_Offsets_WriteFFR_Intrinsic; 2079 2080// 64 bit scaled offsets 2081def int_aarch64_sve_ldff1_gather_index : 
AdvSIMD_GatherLoad_SV_64b_Offsets_WriteFFR_Intrinsic; 2082 2083// 32 bit unscaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits 2084def int_aarch64_sve_ldff1_gather_sxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic; 2085def int_aarch64_sve_ldff1_gather_uxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic; 2086 2087// 32 bit scaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits 2088def int_aarch64_sve_ldff1_gather_sxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic; 2089def int_aarch64_sve_ldff1_gather_uxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic; 2090 2091// 2092// First-faulting gather loads: vector base + scalar offset 2093// 2094 2095def int_aarch64_sve_ldff1_gather_scalar_offset : AdvSIMD_GatherLoad_VS_WriteFFR_Intrinsic; 2096 2097 2098// 2099// Non-temporal gather loads: scalar base + vector offsets 2100// 2101 2102// 64 bit unscaled offsets 2103def int_aarch64_sve_ldnt1_gather : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic; 2104 2105// 64 bit indices 2106def int_aarch64_sve_ldnt1_gather_index : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic; 2107 2108// 32 bit unscaled offsets, zero (zxtw) extended to 64 bits 2109def int_aarch64_sve_ldnt1_gather_uxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic; 2110 2111// 2112// Non-temporal gather loads: vector base + scalar offset 2113// 2114 2115def int_aarch64_sve_ldnt1_gather_scalar_offset : AdvSIMD_GatherLoad_VS_Intrinsic; 2116 2117// 2118// Scatter stores: scalar base + vector offsets 2119// 2120 2121// 64 bit unscaled offsets 2122def int_aarch64_sve_st1_scatter : AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic; 2123 2124// 64 bit scaled offsets 2125def int_aarch64_sve_st1_scatter_index 2126 : AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic; 2127 2128// 32 bit unscaled offsets, sign (sxtw) or zero (zxtw) extended to 64 bits 2129def int_aarch64_sve_st1_scatter_sxtw 2130 : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic; 2131 2132def 
int_aarch64_sve_st1_scatter_uxtw 2133 : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic; 2134 2135// 32 bit scaled offsets, sign (sxtw) or zero (zxtw) extended to 64 bits 2136def int_aarch64_sve_st1_scatter_sxtw_index 2137 : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic; 2138 2139def int_aarch64_sve_st1_scatter_uxtw_index 2140 : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic; 2141 2142// 2143// Scatter stores: vector base + scalar offset 2144// 2145 2146def int_aarch64_sve_st1_scatter_scalar_offset : AdvSIMD_ScatterStore_VS_Intrinsic; 2147 2148// 2149// Non-temporal scatter stores: scalar base + vector offsets 2150// 2151 2152// 64 bit unscaled offsets 2153def int_aarch64_sve_stnt1_scatter : AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic; 2154 2155// 64 bit indices 2156def int_aarch64_sve_stnt1_scatter_index 2157 : AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic; 2158 2159// 32 bit unscaled offsets, zero (zxtw) extended to 64 bits 2160def int_aarch64_sve_stnt1_scatter_uxtw : AdvSIMD_ScatterStore_SV_32b_Offsets_Intrinsic; 2161 2162// 2163// Non-temporal scatter stores: vector base + scalar offset 2164// 2165 2166def int_aarch64_sve_stnt1_scatter_scalar_offset : AdvSIMD_ScatterStore_VS_Intrinsic; 2167 2168// 2169// SVE2 - Uniform DSP operations 2170// 2171 2172def int_aarch64_sve_saba : AdvSIMD_3VectorArg_Intrinsic; 2173def int_aarch64_sve_shadd : AdvSIMD_Pred2VectorArg_Intrinsic; 2174def int_aarch64_sve_shsub : AdvSIMD_Pred2VectorArg_Intrinsic; 2175def int_aarch64_sve_shsubr : AdvSIMD_Pred2VectorArg_Intrinsic; 2176def int_aarch64_sve_sli : AdvSIMD_2VectorArgIndexed_Intrinsic; 2177def int_aarch64_sve_sqabs : AdvSIMD_Merged1VectorArg_Intrinsic; 2178def int_aarch64_sve_sqadd : AdvSIMD_Pred2VectorArg_Intrinsic; 2179def int_aarch64_sve_sqdmulh : AdvSIMD_2VectorArg_Intrinsic; 2180def int_aarch64_sve_sqdmulh_lane : AdvSIMD_2VectorArgIndexed_Intrinsic; 2181def int_aarch64_sve_sqneg : AdvSIMD_Merged1VectorArg_Intrinsic; 2182def int_aarch64_sve_sqrdmlah : 
AdvSIMD_3VectorArg_Intrinsic; 2183def int_aarch64_sve_sqrdmlah_lane : AdvSIMD_3VectorArgIndexed_Intrinsic; 2184def int_aarch64_sve_sqrdmlsh : AdvSIMD_3VectorArg_Intrinsic; 2185def int_aarch64_sve_sqrdmlsh_lane : AdvSIMD_3VectorArgIndexed_Intrinsic; 2186def int_aarch64_sve_sqrdmulh : AdvSIMD_2VectorArg_Intrinsic; 2187def int_aarch64_sve_sqrdmulh_lane : AdvSIMD_2VectorArgIndexed_Intrinsic; 2188def int_aarch64_sve_sqrshl : AdvSIMD_Pred2VectorArg_Intrinsic; 2189def int_aarch64_sve_sqshl : AdvSIMD_Pred2VectorArg_Intrinsic; 2190def int_aarch64_sve_sqshlu : AdvSIMD_SVE_ShiftByImm_Intrinsic; 2191def int_aarch64_sve_sqsub : AdvSIMD_Pred2VectorArg_Intrinsic; 2192def int_aarch64_sve_sqsubr : AdvSIMD_Pred2VectorArg_Intrinsic; 2193def int_aarch64_sve_srhadd : AdvSIMD_Pred2VectorArg_Intrinsic; 2194def int_aarch64_sve_sri : AdvSIMD_2VectorArgIndexed_Intrinsic; 2195def int_aarch64_sve_srshl : AdvSIMD_Pred2VectorArg_Intrinsic; 2196def int_aarch64_sve_srshr : AdvSIMD_SVE_ShiftByImm_Intrinsic; 2197def int_aarch64_sve_srsra : AdvSIMD_2VectorArgIndexed_Intrinsic; 2198def int_aarch64_sve_ssra : AdvSIMD_2VectorArgIndexed_Intrinsic; 2199def int_aarch64_sve_suqadd : AdvSIMD_Pred2VectorArg_Intrinsic; 2200def int_aarch64_sve_uaba : AdvSIMD_3VectorArg_Intrinsic; 2201def int_aarch64_sve_uhadd : AdvSIMD_Pred2VectorArg_Intrinsic; 2202def int_aarch64_sve_uhsub : AdvSIMD_Pred2VectorArg_Intrinsic; 2203def int_aarch64_sve_uhsubr : AdvSIMD_Pred2VectorArg_Intrinsic; 2204def int_aarch64_sve_uqadd : AdvSIMD_Pred2VectorArg_Intrinsic; 2205def int_aarch64_sve_uqrshl : AdvSIMD_Pred2VectorArg_Intrinsic; 2206def int_aarch64_sve_uqshl : AdvSIMD_Pred2VectorArg_Intrinsic; 2207def int_aarch64_sve_uqsub : AdvSIMD_Pred2VectorArg_Intrinsic; 2208def int_aarch64_sve_uqsubr : AdvSIMD_Pred2VectorArg_Intrinsic; 2209def int_aarch64_sve_urecpe : AdvSIMD_Merged1VectorArg_Intrinsic; 2210def int_aarch64_sve_urhadd : AdvSIMD_Pred2VectorArg_Intrinsic; 2211def int_aarch64_sve_urshl : AdvSIMD_Pred2VectorArg_Intrinsic; 2212def 
int_aarch64_sve_urshr : AdvSIMD_SVE_ShiftByImm_Intrinsic; 2213def int_aarch64_sve_ursqrte : AdvSIMD_Merged1VectorArg_Intrinsic; 2214def int_aarch64_sve_ursra : AdvSIMD_2VectorArgIndexed_Intrinsic; 2215def int_aarch64_sve_usqadd : AdvSIMD_Pred2VectorArg_Intrinsic; 2216def int_aarch64_sve_usra : AdvSIMD_2VectorArgIndexed_Intrinsic; 2217 2218// 2219// SVE2 - Widening DSP operations 2220// 2221 2222def int_aarch64_sve_sabalb : SVE2_3VectorArg_Long_Intrinsic; 2223def int_aarch64_sve_sabalt : SVE2_3VectorArg_Long_Intrinsic; 2224def int_aarch64_sve_sabdlb : SVE2_2VectorArg_Long_Intrinsic; 2225def int_aarch64_sve_sabdlt : SVE2_2VectorArg_Long_Intrinsic; 2226def int_aarch64_sve_saddlb : SVE2_2VectorArg_Long_Intrinsic; 2227def int_aarch64_sve_saddlt : SVE2_2VectorArg_Long_Intrinsic; 2228def int_aarch64_sve_saddwb : SVE2_2VectorArg_Wide_Intrinsic; 2229def int_aarch64_sve_saddwt : SVE2_2VectorArg_Wide_Intrinsic; 2230def int_aarch64_sve_sshllb : SVE2_1VectorArg_Long_Intrinsic; 2231def int_aarch64_sve_sshllt : SVE2_1VectorArg_Long_Intrinsic; 2232def int_aarch64_sve_ssublb : SVE2_2VectorArg_Long_Intrinsic; 2233def int_aarch64_sve_ssublt : SVE2_2VectorArg_Long_Intrinsic; 2234def int_aarch64_sve_ssubwb : SVE2_2VectorArg_Wide_Intrinsic; 2235def int_aarch64_sve_ssubwt : SVE2_2VectorArg_Wide_Intrinsic; 2236def int_aarch64_sve_uabalb : SVE2_3VectorArg_Long_Intrinsic; 2237def int_aarch64_sve_uabalt : SVE2_3VectorArg_Long_Intrinsic; 2238def int_aarch64_sve_uabdlb : SVE2_2VectorArg_Long_Intrinsic; 2239def int_aarch64_sve_uabdlt : SVE2_2VectorArg_Long_Intrinsic; 2240def int_aarch64_sve_uaddlb : SVE2_2VectorArg_Long_Intrinsic; 2241def int_aarch64_sve_uaddlt : SVE2_2VectorArg_Long_Intrinsic; 2242def int_aarch64_sve_uaddwb : SVE2_2VectorArg_Wide_Intrinsic; 2243def int_aarch64_sve_uaddwt : SVE2_2VectorArg_Wide_Intrinsic; 2244def int_aarch64_sve_ushllb : SVE2_1VectorArg_Long_Intrinsic; 2245def int_aarch64_sve_ushllt : SVE2_1VectorArg_Long_Intrinsic; 2246def int_aarch64_sve_usublb : 
SVE2_2VectorArg_Long_Intrinsic; 2247def int_aarch64_sve_usublt : SVE2_2VectorArg_Long_Intrinsic; 2248def int_aarch64_sve_usubwb : SVE2_2VectorArg_Wide_Intrinsic; 2249def int_aarch64_sve_usubwt : SVE2_2VectorArg_Wide_Intrinsic; 2250 2251// 2252// SVE2 - Non-widening pairwise arithmetic 2253// 2254 2255def int_aarch64_sve_addp : AdvSIMD_Pred2VectorArg_Intrinsic; 2256def int_aarch64_sve_faddp : AdvSIMD_Pred2VectorArg_Intrinsic; 2257def int_aarch64_sve_fmaxp : AdvSIMD_Pred2VectorArg_Intrinsic; 2258def int_aarch64_sve_fmaxnmp : AdvSIMD_Pred2VectorArg_Intrinsic; 2259def int_aarch64_sve_fminp : AdvSIMD_Pred2VectorArg_Intrinsic; 2260def int_aarch64_sve_fminnmp : AdvSIMD_Pred2VectorArg_Intrinsic; 2261def int_aarch64_sve_smaxp : AdvSIMD_Pred2VectorArg_Intrinsic; 2262def int_aarch64_sve_sminp : AdvSIMD_Pred2VectorArg_Intrinsic; 2263def int_aarch64_sve_umaxp : AdvSIMD_Pred2VectorArg_Intrinsic; 2264def int_aarch64_sve_uminp : AdvSIMD_Pred2VectorArg_Intrinsic; 2265 2266// 2267// SVE2 - Widening pairwise arithmetic 2268// 2269 2270def int_aarch64_sve_sadalp : SVE2_2VectorArg_Pred_Long_Intrinsic; 2271def int_aarch64_sve_uadalp : SVE2_2VectorArg_Pred_Long_Intrinsic; 2272 2273// 2274// SVE2 - Uniform complex integer arithmetic 2275// 2276 2277def int_aarch64_sve_cadd_x : AdvSIMD_SVE2_CADD_Intrinsic; 2278def int_aarch64_sve_sqcadd_x : AdvSIMD_SVE2_CADD_Intrinsic; 2279def int_aarch64_sve_cmla_x : AdvSIMD_SVE2_CMLA_Intrinsic; 2280def int_aarch64_sve_cmla_lane_x : AdvSIMD_SVE_CMLA_LANE_Intrinsic; 2281def int_aarch64_sve_sqrdcmlah_x : AdvSIMD_SVE2_CMLA_Intrinsic; 2282def int_aarch64_sve_sqrdcmlah_lane_x : AdvSIMD_SVE_CMLA_LANE_Intrinsic; 2283 2284// 2285// SVE2 - Widening complex integer arithmetic 2286// 2287 2288def int_aarch64_sve_saddlbt : SVE2_2VectorArg_Long_Intrinsic; 2289def int_aarch64_sve_ssublbt : SVE2_2VectorArg_Long_Intrinsic; 2290def int_aarch64_sve_ssubltb : SVE2_2VectorArg_Long_Intrinsic; 2291 2292// 2293// SVE2 - Widening complex integer dot product 2294// 2295 2296def 
int_aarch64_sve_cdot : AdvSIMD_SVE_DOT_Indexed_Intrinsic;
def int_aarch64_sve_cdot_lane : AdvSIMD_SVE_CDOT_LANE_Intrinsic;

//
// SVE2 - Floating-point widening multiply-accumulate
//

def int_aarch64_sve_fmlalb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_fmlalb_lane : SVE2_3VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_fmlalt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_fmlalt_lane : SVE2_3VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_fmlslb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_fmlslb_lane : SVE2_3VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_fmlslt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_fmlslt_lane : SVE2_3VectorArgIndexed_Long_Intrinsic;

//
// SVE2 - Floating-point integer binary logarithm
//

def int_aarch64_sve_flogb : AdvSIMD_SVE_LOGB_Intrinsic;

//
// SVE2 - Vector histogram count
//

def int_aarch64_sve_histcnt : AdvSIMD_Pred2VectorArg_Intrinsic;
def int_aarch64_sve_histseg : AdvSIMD_2VectorArg_Intrinsic;

//
// SVE2 - Character match
//

def int_aarch64_sve_match : AdvSIMD_SVE_Compare_Intrinsic;
def int_aarch64_sve_nmatch : AdvSIMD_SVE_Compare_Intrinsic;

//
// SVE2 - Unary narrowing operations
//
// The "t" (top) variants use the merged class: they write only the top halves
// and so take the existing destination as an extra operand.

def int_aarch64_sve_sqxtnb : SVE2_1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_sqxtnt : SVE2_Merged1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_sqxtunb : SVE2_1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_sqxtunt : SVE2_Merged1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_uqxtnb : SVE2_1VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_uqxtnt : SVE2_Merged1VectorArg_Narrowing_Intrinsic;

//
// SVE2 - Binary narrowing DSP operations
//
def int_aarch64_sve_addhnb : SVE2_2VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_addhnt : SVE2_Merged2VectorArg_Narrowing_Intrinsic;

def int_aarch64_sve_raddhnb : SVE2_2VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_raddhnt : SVE2_Merged2VectorArg_Narrowing_Intrinsic;

def int_aarch64_sve_subhnb : SVE2_2VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_subhnt : SVE2_Merged2VectorArg_Narrowing_Intrinsic;

def int_aarch64_sve_rsubhnb : SVE2_2VectorArg_Narrowing_Intrinsic;
def int_aarch64_sve_rsubhnt : SVE2_Merged2VectorArg_Narrowing_Intrinsic;

// Narrowing shift right
def int_aarch64_sve_shrnb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_shrnt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

def int_aarch64_sve_rshrnb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_rshrnt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

// Saturating shift right - signed input/output
def int_aarch64_sve_sqshrnb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_sqshrnt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

def int_aarch64_sve_sqrshrnb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_sqrshrnt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

// Saturating shift right - unsigned input/output
def int_aarch64_sve_uqshrnb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_uqshrnt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

def int_aarch64_sve_uqrshrnb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_uqrshrnt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

// Saturating shift right - signed input, unsigned output
def int_aarch64_sve_sqshrunb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_sqshrunt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

def int_aarch64_sve_sqrshrunb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
def int_aarch64_sve_sqrshrunt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;

// SVE2 MLA LANE.
// Indexed (lane) forms take an immediate lane index as their final operand.
def int_aarch64_sve_smlalb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_smlalt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_umlalb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_umlalt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_smlslb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_smlslt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_umlslb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_umlslt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_smullb_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_smullt_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_umullb_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_umullt_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_sqdmlalb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_sqdmlalt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_sqdmlslb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_sqdmlslt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
def int_aarch64_sve_sqdmullb_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;
def int_aarch64_sve_sqdmullt_lane : SVE2_2VectorArgIndexed_Long_Intrinsic;

// SVE2 MLA Unpredicated.
def int_aarch64_sve_smlalb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_smlalt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_umlalb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_umlalt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_smlslb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_smlslt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_umlslb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_umlslt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_smullb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_smullt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_umullb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_umullt : SVE2_2VectorArg_Long_Intrinsic;

def int_aarch64_sve_sqdmlalb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlalt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlslb : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlslt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmullb : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmullt : SVE2_2VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlalbt : SVE2_3VectorArg_Long_Intrinsic;
def int_aarch64_sve_sqdmlslbt : SVE2_3VectorArg_Long_Intrinsic;

// SVE2 ADDSUB Long Unpredicated.
def int_aarch64_sve_adclb : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_adclt : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_sbclb : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_sbclt : AdvSIMD_3VectorArg_Intrinsic;

//
// SVE2 - Polynomial arithmetic
//
def int_aarch64_sve_eorbt : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_eortb : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_pmullb_pair : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_pmullt_pair : AdvSIMD_2VectorArg_Intrinsic;

//
// SVE2 bitwise ternary operations.
//
def int_aarch64_sve_eor3 : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_bcax : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_bsl : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_bsl1n : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_bsl2n : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_nbsl : AdvSIMD_3VectorArg_Intrinsic;
def int_aarch64_sve_xar : AdvSIMD_2VectorArgIndexed_Intrinsic;

//
// SVE2 - Optional AES, SHA-3 and SM4
//
// These are spelled out with explicit fixed-element vector types (nxv16i8 /
// nxv2i64 / nxv4i32) rather than the generic AdvSIMD_* classes, since each
// crypto instruction exists for exactly one element size.  The GCCBuiltin
// mixin binds each intrinsic to its clang-level builtin name.

def int_aarch64_sve_aesd : GCCBuiltin<"__builtin_sve_svaesd_u8">,
                           DefaultAttrsIntrinsic<[llvm_nxv16i8_ty],
                                                 [llvm_nxv16i8_ty, llvm_nxv16i8_ty],
                                                 [IntrNoMem]>;
def int_aarch64_sve_aesimc : GCCBuiltin<"__builtin_sve_svaesimc_u8">,
                             DefaultAttrsIntrinsic<[llvm_nxv16i8_ty],
                                                   [llvm_nxv16i8_ty],
                                                   [IntrNoMem]>;
def int_aarch64_sve_aese : GCCBuiltin<"__builtin_sve_svaese_u8">,
                           DefaultAttrsIntrinsic<[llvm_nxv16i8_ty],
                                                 [llvm_nxv16i8_ty, llvm_nxv16i8_ty],
                                                 [IntrNoMem]>;
def int_aarch64_sve_aesmc : GCCBuiltin<"__builtin_sve_svaesmc_u8">,
                            DefaultAttrsIntrinsic<[llvm_nxv16i8_ty],
                                                  [llvm_nxv16i8_ty],
                                                  [IntrNoMem]>;
def int_aarch64_sve_rax1 : GCCBuiltin<"__builtin_sve_svrax1_u64">,
                           DefaultAttrsIntrinsic<[llvm_nxv2i64_ty],
                                                 [llvm_nxv2i64_ty, llvm_nxv2i64_ty],
                                                 [IntrNoMem]>;
def int_aarch64_sve_sm4e : GCCBuiltin<"__builtin_sve_svsm4e_u32">,
                           DefaultAttrsIntrinsic<[llvm_nxv4i32_ty],
                                                 [llvm_nxv4i32_ty, llvm_nxv4i32_ty],
                                                 [IntrNoMem]>;
def int_aarch64_sve_sm4ekey : GCCBuiltin<"__builtin_sve_svsm4ekey_u32">,
                              DefaultAttrsIntrinsic<[llvm_nxv4i32_ty],
                                                    [llvm_nxv4i32_ty, llvm_nxv4i32_ty],
                                                    [IntrNoMem]>;
//
// SVE2 - Extended table lookup/permute
//

def int_aarch64_sve_tbl2 : AdvSIMD_SVE2_TBX_Intrinsic;
def int_aarch64_sve_tbx : AdvSIMD_SVE2_TBX_Intrinsic;

//
// SVE2 - Optional bit permutation
//

def int_aarch64_sve_bdep_x : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_bext_x : AdvSIMD_2VectorArg_Intrinsic;
def int_aarch64_sve_bgrp_x : AdvSIMD_2VectorArg_Intrinsic;


//
// SVE ACLE: 7.3. INT8 matrix multiply extensions
//
def int_aarch64_sve_ummla : SVE_MatMul_Intrinsic;
def int_aarch64_sve_smmla : SVE_MatMul_Intrinsic;
def int_aarch64_sve_usmmla : SVE_MatMul_Intrinsic;

def int_aarch64_sve_usdot : AdvSIMD_SVE_DOT_Intrinsic;
def int_aarch64_sve_usdot_lane : AdvSIMD_SVE_DOT_Indexed_Intrinsic;
def int_aarch64_sve_sudot_lane : AdvSIMD_SVE_DOT_Indexed_Intrinsic;

//
// SVE ACLE: 7.4/5. FP64/FP32 matrix multiply extensions
//
def int_aarch64_sve_fmmla : AdvSIMD_3VectorArg_Intrinsic;

//
// SVE ACLE: 7.2. BFloat16 extensions
//

def int_aarch64_sve_bfdot : SVE_4Vec_BF16;
def int_aarch64_sve_bfmlalb : SVE_4Vec_BF16;
def int_aarch64_sve_bfmlalt : SVE_4Vec_BF16;

def int_aarch64_sve_bfmmla : SVE_4Vec_BF16;

def int_aarch64_sve_bfdot_lane : SVE_4Vec_BF16_Indexed;
def int_aarch64_sve_bfmlalb_lane : SVE_4Vec_BF16_Indexed;
def int_aarch64_sve_bfmlalt_lane : SVE_4Vec_BF16_Indexed;
} // NOTE(review): presumably closes a let TargetPrefix = "aarch64" block -- confirm

//
// SVE2 - Contiguous conflict detection
//

def int_aarch64_sve_whilerw_b : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilerw_h : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilerw_s : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilerw_d : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilewr_b : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilewr_h : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilewr_s : SVE2_CONFLICT_DETECT_Intrinsic;
def int_aarch64_sve_whilewr_d : SVE2_CONFLICT_DETECT_Intrinsic;