1 /*===---- wasm_simd128.h - WebAssembly portable SIMD intrinsics ------------=== 2 * 3 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 * See https://llvm.org/LICENSE.txt for license information. 5 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 * 7 *===-----------------------------------------------------------------------=== 8 */ 9 10 #ifndef __WASM_SIMD128_H 11 #define __WASM_SIMD128_H 12 13 #include <stdbool.h> 14 #include <stdint.h> 15 16 // User-facing type 17 typedef int32_t v128_t __attribute__((__vector_size__(16), __aligned__(16))); 18 19 // Internal types determined by clang builtin definitions 20 typedef int32_t __v128_u __attribute__((__vector_size__(16), __aligned__(1))); 21 typedef char __i8x16 __attribute__((__vector_size__(16), __aligned__(16))); 22 typedef signed char __s8x16 23 __attribute__((__vector_size__(16), __aligned__(16))); 24 typedef unsigned char __u8x16 25 __attribute__((__vector_size__(16), __aligned__(16))); 26 typedef short __i16x8 __attribute__((__vector_size__(16), __aligned__(16))); 27 typedef unsigned short __u16x8 28 __attribute__((__vector_size__(16), __aligned__(16))); 29 typedef int __i32x4 __attribute__((__vector_size__(16), __aligned__(16))); 30 typedef unsigned int __u32x4 31 __attribute__((__vector_size__(16), __aligned__(16))); 32 typedef long long __i64x2 __attribute__((__vector_size__(16), __aligned__(16))); 33 typedef unsigned long long __u64x2 34 __attribute__((__vector_size__(16), __aligned__(16))); 35 typedef float __f32x4 __attribute__((__vector_size__(16), __aligned__(16))); 36 typedef double __f64x2 __attribute__((__vector_size__(16), __aligned__(16))); 37 38 #define __DEFAULT_FN_ATTRS \ 39 __attribute__((__always_inline__, __nodebug__, __target__("simd128"), \ 40 __min_vector_width__(128))) 41 42 #define __REQUIRE_CONSTANT(e) \ 43 _Static_assert(__builtin_constant_p(e), "Expected constant") 44 45 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_load(const void 
*__mem) { 46 // UB-free unaligned access copied from xmmintrin.h 47 struct __wasm_v128_load_struct { 48 __v128_u __v; 49 } __attribute__((__packed__, __may_alias__)); 50 return ((const struct __wasm_v128_load_struct *)__mem)->__v; 51 } 52 53 static __inline__ v128_t __DEFAULT_FN_ATTRS 54 wasm_v8x16_load_splat(const void *__mem) { 55 struct __wasm_v8x16_load_splat_struct { 56 uint8_t __v; 57 } __attribute__((__packed__, __may_alias__)); 58 uint8_t __v = ((const struct __wasm_v8x16_load_splat_struct *)__mem)->__v; 59 return (v128_t)(__u8x16){__v, __v, __v, __v, __v, __v, __v, __v, 60 __v, __v, __v, __v, __v, __v, __v, __v}; 61 } 62 63 static __inline__ v128_t __DEFAULT_FN_ATTRS 64 wasm_v16x8_load_splat(const void *__mem) { 65 struct __wasm_v16x8_load_splat_struct { 66 uint16_t __v; 67 } __attribute__((__packed__, __may_alias__)); 68 uint16_t __v = ((const struct __wasm_v16x8_load_splat_struct *)__mem)->__v; 69 return (v128_t)(__u16x8){__v, __v, __v, __v, __v, __v, __v, __v}; 70 } 71 72 static __inline__ v128_t __DEFAULT_FN_ATTRS 73 wasm_v32x4_load_splat(const void *__mem) { 74 struct __wasm_v32x4_load_splat_struct { 75 uint32_t __v; 76 } __attribute__((__packed__, __may_alias__)); 77 uint32_t __v = ((const struct __wasm_v32x4_load_splat_struct *)__mem)->__v; 78 return (v128_t)(__u32x4){__v, __v, __v, __v}; 79 } 80 81 static __inline__ v128_t __DEFAULT_FN_ATTRS 82 wasm_v64x2_load_splat(const void *__mem) { 83 struct __wasm_v64x2_load_splat_struct { 84 uint64_t __v; 85 } __attribute__((__packed__, __may_alias__)); 86 uint64_t __v = ((const struct __wasm_v64x2_load_splat_struct *)__mem)->__v; 87 return (v128_t)(__u64x2){__v, __v}; 88 } 89 90 static __inline__ v128_t __DEFAULT_FN_ATTRS 91 wasm_i16x8_load_8x8(const void *__mem) { 92 typedef int8_t __i8x8 __attribute__((__vector_size__(8), __aligned__(8))); 93 struct __wasm_i16x8_load_8x8_struct { 94 __i8x8 __v; 95 } __attribute__((__packed__, __may_alias__)); 96 __i8x8 __v = ((const struct __wasm_i16x8_load_8x8_struct 
*)__mem)->__v; 97 return (v128_t) __builtin_convertvector(__v, __i16x8); 98 } 99 100 static __inline__ v128_t __DEFAULT_FN_ATTRS 101 wasm_u16x8_load_8x8(const void *__mem) { 102 typedef uint8_t __u8x8 __attribute__((__vector_size__(8), __aligned__(8))); 103 struct __wasm_u16x8_load_8x8_struct { 104 __u8x8 __v; 105 } __attribute__((__packed__, __may_alias__)); 106 __u8x8 __v = ((const struct __wasm_u16x8_load_8x8_struct *)__mem)->__v; 107 return (v128_t) __builtin_convertvector(__v, __u16x8); 108 } 109 110 static __inline__ v128_t __DEFAULT_FN_ATTRS 111 wasm_i32x4_load_16x4(const void *__mem) { 112 typedef int16_t __i16x4 __attribute__((__vector_size__(8), __aligned__(8))); 113 struct __wasm_i32x4_load_16x4_struct { 114 __i16x4 __v; 115 } __attribute__((__packed__, __may_alias__)); 116 __i16x4 __v = ((const struct __wasm_i32x4_load_16x4_struct *)__mem)->__v; 117 return (v128_t) __builtin_convertvector(__v, __i32x4); 118 } 119 120 static __inline__ v128_t __DEFAULT_FN_ATTRS 121 wasm_u32x4_load_16x4(const void *__mem) { 122 typedef uint16_t __u16x4 __attribute__((__vector_size__(8), __aligned__(8))); 123 struct __wasm_u32x4_load_16x4_struct { 124 __u16x4 __v; 125 } __attribute__((__packed__, __may_alias__)); 126 __u16x4 __v = ((const struct __wasm_u32x4_load_16x4_struct *)__mem)->__v; 127 return (v128_t) __builtin_convertvector(__v, __u32x4); 128 } 129 130 static __inline__ v128_t __DEFAULT_FN_ATTRS 131 wasm_i64x2_load_32x2(const void *__mem) { 132 typedef int32_t __i32x2 __attribute__((__vector_size__(8), __aligned__(8))); 133 struct __wasm_i64x2_load_32x2_struct { 134 __i32x2 __v; 135 } __attribute__((__packed__, __may_alias__)); 136 __i32x2 __v = ((const struct __wasm_i64x2_load_32x2_struct *)__mem)->__v; 137 return (v128_t) __builtin_convertvector(__v, __i64x2); 138 } 139 140 static __inline__ v128_t __DEFAULT_FN_ATTRS 141 wasm_u64x2_load_32x2(const void *__mem) { 142 typedef uint32_t __u32x2 __attribute__((__vector_size__(8), __aligned__(8))); 143 struct 
__wasm_u64x2_load_32x2_struct { 144 __u32x2 __v; 145 } __attribute__((__packed__, __may_alias__)); 146 __u32x2 __v = ((const struct __wasm_u64x2_load_32x2_struct *)__mem)->__v; 147 return (v128_t) __builtin_convertvector(__v, __u64x2); 148 } 149 150 static __inline__ void __DEFAULT_FN_ATTRS wasm_v128_store(void *__mem, 151 v128_t __a) { 152 // UB-free unaligned access copied from xmmintrin.h 153 struct __wasm_v128_store_struct { 154 __v128_u __v; 155 } __attribute__((__packed__, __may_alias__)); 156 ((struct __wasm_v128_store_struct *)__mem)->__v = __a; 157 } 158 159 static __inline__ v128_t __DEFAULT_FN_ATTRS 160 wasm_i8x16_make(int8_t __c0, int8_t __c1, int8_t __c2, int8_t __c3, int8_t __c4, 161 int8_t __c5, int8_t __c6, int8_t __c7, int8_t __c8, int8_t __c9, 162 int8_t __c10, int8_t __c11, int8_t __c12, int8_t __c13, 163 int8_t __c14, int8_t __c15) { 164 return (v128_t)(__i8x16){__c0, __c1, __c2, __c3, __c4, __c5, 165 __c6, __c7, __c8, __c9, __c10, __c11, 166 __c12, __c13, __c14, __c15}; 167 } 168 169 static __inline__ v128_t __DEFAULT_FN_ATTRS 170 wasm_i16x8_make(int16_t __c0, int16_t __c1, int16_t __c2, int16_t __c3, 171 int16_t __c4, int16_t __c5, int16_t __c6, int16_t __c7) { 172 return (v128_t)(__i16x8){__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7}; 173 } 174 175 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_make(int32_t __c0, 176 int32_t __c1, 177 int32_t __c2, 178 int32_t __c3) { 179 return (v128_t)(__i32x4){__c0, __c1, __c2, __c3}; 180 } 181 182 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_make(float __c0, 183 float __c1, 184 float __c2, 185 float __c3) { 186 return (v128_t)(__f32x4){__c0, __c1, __c2, __c3}; 187 } 188 189 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_make(int64_t __c0, 190 int64_t __c1) { 191 return (v128_t)(__i64x2){__c0, __c1}; 192 } 193 194 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_make(double __c0, 195 double __c1) { 196 return (v128_t)(__f64x2){__c0, __c1}; 197 } 198 199 #define 
wasm_i8x16_const(__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7, __c8, \ 200 __c9, __c10, __c11, __c12, __c13, __c14, __c15) \ 201 __extension__({ \ 202 __REQUIRE_CONSTANT(__c0); \ 203 __REQUIRE_CONSTANT(__c1); \ 204 __REQUIRE_CONSTANT(__c2); \ 205 __REQUIRE_CONSTANT(__c3); \ 206 __REQUIRE_CONSTANT(__c4); \ 207 __REQUIRE_CONSTANT(__c5); \ 208 __REQUIRE_CONSTANT(__c6); \ 209 __REQUIRE_CONSTANT(__c7); \ 210 __REQUIRE_CONSTANT(__c8); \ 211 __REQUIRE_CONSTANT(__c9); \ 212 __REQUIRE_CONSTANT(__c10); \ 213 __REQUIRE_CONSTANT(__c11); \ 214 __REQUIRE_CONSTANT(__c12); \ 215 __REQUIRE_CONSTANT(__c13); \ 216 __REQUIRE_CONSTANT(__c14); \ 217 __REQUIRE_CONSTANT(__c15); \ 218 (v128_t)(__i8x16){__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7, \ 219 __c8, __c9, __c10, __c11, __c12, __c13, __c14, __c15}; \ 220 }) 221 222 #define wasm_i16x8_const(__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7) \ 223 __extension__({ \ 224 __REQUIRE_CONSTANT(__c0); \ 225 __REQUIRE_CONSTANT(__c1); \ 226 __REQUIRE_CONSTANT(__c2); \ 227 __REQUIRE_CONSTANT(__c3); \ 228 __REQUIRE_CONSTANT(__c4); \ 229 __REQUIRE_CONSTANT(__c5); \ 230 __REQUIRE_CONSTANT(__c6); \ 231 __REQUIRE_CONSTANT(__c7); \ 232 (v128_t)(__i16x8){__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7}; \ 233 }) 234 235 #define wasm_i32x4_const(__c0, __c1, __c2, __c3) \ 236 __extension__({ \ 237 __REQUIRE_CONSTANT(__c0); \ 238 __REQUIRE_CONSTANT(__c1); \ 239 __REQUIRE_CONSTANT(__c2); \ 240 __REQUIRE_CONSTANT(__c3); \ 241 (v128_t)(__i32x4){__c0, __c1, __c2, __c3}; \ 242 }) 243 244 #define wasm_f32x4_const(__c0, __c1, __c2, __c3) \ 245 __extension__({ \ 246 __REQUIRE_CONSTANT(__c0); \ 247 __REQUIRE_CONSTANT(__c1); \ 248 __REQUIRE_CONSTANT(__c2); \ 249 __REQUIRE_CONSTANT(__c3); \ 250 (v128_t)(__f32x4){__c0, __c1, __c2, __c3}; \ 251 }) 252 253 #define wasm_i64x2_const(__c0, __c1) \ 254 __extension__({ \ 255 __REQUIRE_CONSTANT(__c0); \ 256 __REQUIRE_CONSTANT(__c1); \ 257 (v128_t)(__i64x2){__c0, __c1}; \ 258 }) 259 260 #define wasm_f64x2_const(__c0, __c1) 
\ 261 __extension__({ \ 262 __REQUIRE_CONSTANT(__c0); \ 263 __REQUIRE_CONSTANT(__c1); \ 264 (v128_t)(__f64x2){__c0, __c1}; \ 265 }) 266 267 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_splat(int8_t __a) { 268 return (v128_t)(__i8x16){__a, __a, __a, __a, __a, __a, __a, __a, 269 __a, __a, __a, __a, __a, __a, __a, __a}; 270 } 271 272 #define wasm_i8x16_extract_lane(__a, __i) \ 273 (__builtin_wasm_extract_lane_s_i8x16((__i8x16)(__a), __i)) 274 275 #define wasm_u8x16_extract_lane(__a, __i) \ 276 (__builtin_wasm_extract_lane_u_i8x16((__i8x16)(__a), __i)) 277 278 #define wasm_i8x16_replace_lane(__a, __i, __b) \ 279 ((v128_t)__builtin_wasm_replace_lane_i8x16((__i8x16)(__a), __i, __b)) 280 281 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_splat(int16_t __a) { 282 return (v128_t)(__i16x8){__a, __a, __a, __a, __a, __a, __a, __a}; 283 } 284 285 #define wasm_i16x8_extract_lane(__a, __i) \ 286 (__builtin_wasm_extract_lane_s_i16x8((__i16x8)(__a), __i)) 287 288 #define wasm_u16x8_extract_lane(__a, __i) \ 289 (__builtin_wasm_extract_lane_u_i16x8((__i16x8)(__a), __i)) 290 291 #define wasm_i16x8_replace_lane(__a, __i, __b) \ 292 ((v128_t)__builtin_wasm_replace_lane_i16x8((__i16x8)(__a), __i, __b)) 293 294 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_splat(int32_t __a) { 295 return (v128_t)(__i32x4){__a, __a, __a, __a}; 296 } 297 298 #define wasm_i32x4_extract_lane(__a, __i) \ 299 (__builtin_wasm_extract_lane_i32x4((__i32x4)(__a), __i)) 300 301 #define wasm_i32x4_replace_lane(__a, __i, __b) \ 302 ((v128_t)__builtin_wasm_replace_lane_i32x4((__i32x4)(__a), __i, __b)) 303 304 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_splat(int64_t __a) { 305 return (v128_t)(__i64x2){__a, __a}; 306 } 307 308 #define wasm_i64x2_extract_lane(__a, __i) \ 309 (__builtin_wasm_extract_lane_i64x2((__i64x2)(__a), __i)) 310 311 #define wasm_i64x2_replace_lane(__a, __i, __b) \ 312 ((v128_t)__builtin_wasm_replace_lane_i64x2((__i64x2)(__a), __i, __b)) 313 314 static 
__inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_splat(float __a) { 315 return (v128_t)(__f32x4){__a, __a, __a, __a}; 316 } 317 318 #define wasm_f32x4_extract_lane(__a, __i) \ 319 (__builtin_wasm_extract_lane_f32x4((__f32x4)(__a), __i)) 320 321 #define wasm_f32x4_replace_lane(__a, __i, __b) \ 322 ((v128_t)__builtin_wasm_replace_lane_f32x4((__f32x4)(__a), __i, __b)) 323 324 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_splat(double __a) { 325 return (v128_t)(__f64x2){__a, __a}; 326 } 327 328 #define wasm_f64x2_extract_lane(__a, __i) \ 329 (__builtin_wasm_extract_lane_f64x2((__f64x2)(__a), __i)) 330 331 #define wasm_f64x2_replace_lane(__a, __i, __b) \ 332 ((v128_t)__builtin_wasm_replace_lane_f64x2((__f64x2)(__a), __i, __b)) 333 334 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_eq(v128_t __a, 335 v128_t __b) { 336 return (v128_t)((__s8x16)__a == (__s8x16)__b); 337 } 338 339 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_ne(v128_t __a, 340 v128_t __b) { 341 return (v128_t)((__s8x16)__a != (__s8x16)__b); 342 } 343 344 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_lt(v128_t __a, 345 v128_t __b) { 346 return (v128_t)((__s8x16)__a < (__s8x16)__b); 347 } 348 349 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_lt(v128_t __a, 350 v128_t __b) { 351 return (v128_t)((__u8x16)__a < (__u8x16)__b); 352 } 353 354 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_gt(v128_t __a, 355 v128_t __b) { 356 return (v128_t)((__s8x16)__a > (__s8x16)__b); 357 } 358 359 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_gt(v128_t __a, 360 v128_t __b) { 361 return (v128_t)((__u8x16)__a > (__u8x16)__b); 362 } 363 364 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_le(v128_t __a, 365 v128_t __b) { 366 return (v128_t)((__s8x16)__a <= (__s8x16)__b); 367 } 368 369 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_le(v128_t __a, 370 v128_t __b) { 371 return (v128_t)((__u8x16)__a <= (__u8x16)__b); 372 } 373 374 static __inline__ v128_t 
__DEFAULT_FN_ATTRS wasm_i8x16_ge(v128_t __a, 375 v128_t __b) { 376 return (v128_t)((__s8x16)__a >= (__s8x16)__b); 377 } 378 379 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_ge(v128_t __a, 380 v128_t __b) { 381 return (v128_t)((__u8x16)__a >= (__u8x16)__b); 382 } 383 384 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_eq(v128_t __a, 385 v128_t __b) { 386 return (v128_t)((__i16x8)__a == (__i16x8)__b); 387 } 388 389 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_ne(v128_t __a, 390 v128_t __b) { 391 return (v128_t)((__u16x8)__a != (__u16x8)__b); 392 } 393 394 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_lt(v128_t __a, 395 v128_t __b) { 396 return (v128_t)((__i16x8)__a < (__i16x8)__b); 397 } 398 399 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_lt(v128_t __a, 400 v128_t __b) { 401 return (v128_t)((__u16x8)__a < (__u16x8)__b); 402 } 403 404 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_gt(v128_t __a, 405 v128_t __b) { 406 return (v128_t)((__i16x8)__a > (__i16x8)__b); 407 } 408 409 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_gt(v128_t __a, 410 v128_t __b) { 411 return (v128_t)((__u16x8)__a > (__u16x8)__b); 412 } 413 414 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_le(v128_t __a, 415 v128_t __b) { 416 return (v128_t)((__i16x8)__a <= (__i16x8)__b); 417 } 418 419 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_le(v128_t __a, 420 v128_t __b) { 421 return (v128_t)((__u16x8)__a <= (__u16x8)__b); 422 } 423 424 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_ge(v128_t __a, 425 v128_t __b) { 426 return (v128_t)((__i16x8)__a >= (__i16x8)__b); 427 } 428 429 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_ge(v128_t __a, 430 v128_t __b) { 431 return (v128_t)((__u16x8)__a >= (__u16x8)__b); 432 } 433 434 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_eq(v128_t __a, 435 v128_t __b) { 436 return (v128_t)((__i32x4)__a == (__i32x4)__b); 437 } 438 439 static __inline__ v128_t 
__DEFAULT_FN_ATTRS wasm_i32x4_ne(v128_t __a, 440 v128_t __b) { 441 return (v128_t)((__i32x4)__a != (__i32x4)__b); 442 } 443 444 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_lt(v128_t __a, 445 v128_t __b) { 446 return (v128_t)((__i32x4)__a < (__i32x4)__b); 447 } 448 449 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_lt(v128_t __a, 450 v128_t __b) { 451 return (v128_t)((__u32x4)__a < (__u32x4)__b); 452 } 453 454 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_gt(v128_t __a, 455 v128_t __b) { 456 return (v128_t)((__i32x4)__a > (__i32x4)__b); 457 } 458 459 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_gt(v128_t __a, 460 v128_t __b) { 461 return (v128_t)((__u32x4)__a > (__u32x4)__b); 462 } 463 464 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_le(v128_t __a, 465 v128_t __b) { 466 return (v128_t)((__i32x4)__a <= (__i32x4)__b); 467 } 468 469 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_le(v128_t __a, 470 v128_t __b) { 471 return (v128_t)((__u32x4)__a <= (__u32x4)__b); 472 } 473 474 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_ge(v128_t __a, 475 v128_t __b) { 476 return (v128_t)((__i32x4)__a >= (__i32x4)__b); 477 } 478 479 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_ge(v128_t __a, 480 v128_t __b) { 481 return (v128_t)((__u32x4)__a >= (__u32x4)__b); 482 } 483 484 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_eq(v128_t __a, 485 v128_t __b) { 486 return (v128_t)((__f32x4)__a == (__f32x4)__b); 487 } 488 489 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_ne(v128_t __a, 490 v128_t __b) { 491 return (v128_t)((__f32x4)__a != (__f32x4)__b); 492 } 493 494 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_lt(v128_t __a, 495 v128_t __b) { 496 return (v128_t)((__f32x4)__a < (__f32x4)__b); 497 } 498 499 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_gt(v128_t __a, 500 v128_t __b) { 501 return (v128_t)((__f32x4)__a > (__f32x4)__b); 502 } 503 504 static __inline__ v128_t 
__DEFAULT_FN_ATTRS wasm_f32x4_le(v128_t __a, 505 v128_t __b) { 506 return (v128_t)((__f32x4)__a <= (__f32x4)__b); 507 } 508 509 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_ge(v128_t __a, 510 v128_t __b) { 511 return (v128_t)((__f32x4)__a >= (__f32x4)__b); 512 } 513 514 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_eq(v128_t __a, 515 v128_t __b) { 516 return (v128_t)((__f64x2)__a == (__f64x2)__b); 517 } 518 519 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_ne(v128_t __a, 520 v128_t __b) { 521 return (v128_t)((__f64x2)__a != (__f64x2)__b); 522 } 523 524 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_lt(v128_t __a, 525 v128_t __b) { 526 return (v128_t)((__f64x2)__a < (__f64x2)__b); 527 } 528 529 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_gt(v128_t __a, 530 v128_t __b) { 531 return (v128_t)((__f64x2)__a > (__f64x2)__b); 532 } 533 534 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_le(v128_t __a, 535 v128_t __b) { 536 return (v128_t)((__f64x2)__a <= (__f64x2)__b); 537 } 538 539 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_ge(v128_t __a, 540 v128_t __b) { 541 return (v128_t)((__f64x2)__a >= (__f64x2)__b); 542 } 543 544 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_not(v128_t __a) { 545 return ~__a; 546 } 547 548 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_and(v128_t __a, 549 v128_t __b) { 550 return __a & __b; 551 } 552 553 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_or(v128_t __a, 554 v128_t __b) { 555 return __a | __b; 556 } 557 558 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_xor(v128_t __a, 559 v128_t __b) { 560 return __a ^ __b; 561 } 562 563 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_andnot(v128_t __a, 564 v128_t __b) { 565 return __a & ~__b; 566 } 567 568 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_bitselect(v128_t __a, 569 v128_t __b, 570 v128_t __mask) { 571 return (v128_t)__builtin_wasm_bitselect((__i32x4)__a, (__i32x4)__b, 572 
(__i32x4)__mask); 573 } 574 575 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_abs(v128_t __a) { 576 return (v128_t)__builtin_wasm_abs_i8x16((__i8x16)__a); 577 } 578 579 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_neg(v128_t __a) { 580 return (v128_t)(-(__u8x16)__a); 581 } 582 583 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i8x16_any_true(v128_t __a) { 584 return __builtin_wasm_any_true_i8x16((__i8x16)__a); 585 } 586 587 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i8x16_all_true(v128_t __a) { 588 return __builtin_wasm_all_true_i8x16((__i8x16)__a); 589 } 590 591 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_shl(v128_t __a, 592 int32_t __b) { 593 return (v128_t)((__i8x16)__a << __b); 594 } 595 596 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_shr(v128_t __a, 597 int32_t __b) { 598 return (v128_t)((__s8x16)__a >> __b); 599 } 600 601 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_shr(v128_t __a, 602 int32_t __b) { 603 return (v128_t)((__u8x16)__a >> __b); 604 } 605 606 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_add(v128_t __a, 607 v128_t __b) { 608 return (v128_t)((__u8x16)__a + (__u8x16)__b); 609 } 610 611 static __inline__ v128_t __DEFAULT_FN_ATTRS 612 wasm_i8x16_add_saturate(v128_t __a, v128_t __b) { 613 return (v128_t)__builtin_wasm_add_saturate_s_i8x16((__i8x16)__a, 614 (__i8x16)__b); 615 } 616 617 static __inline__ v128_t __DEFAULT_FN_ATTRS 618 wasm_u8x16_add_saturate(v128_t __a, v128_t __b) { 619 return (v128_t)__builtin_wasm_add_saturate_u_i8x16((__i8x16)__a, 620 (__i8x16)__b); 621 } 622 623 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_sub(v128_t __a, 624 v128_t __b) { 625 return (v128_t)((__u8x16)__a - (__u8x16)__b); 626 } 627 628 static __inline__ v128_t __DEFAULT_FN_ATTRS 629 wasm_i8x16_sub_saturate(v128_t __a, v128_t __b) { 630 return (v128_t)__builtin_wasm_sub_saturate_s_i8x16((__i8x16)__a, 631 (__i8x16)__b); 632 } 633 634 static __inline__ v128_t __DEFAULT_FN_ATTRS 635 
wasm_u8x16_sub_saturate(v128_t __a, v128_t __b) { 636 return (v128_t)__builtin_wasm_sub_saturate_u_i8x16((__i8x16)__a, 637 (__i8x16)__b); 638 } 639 640 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_min(v128_t __a, 641 v128_t __b) { 642 return (v128_t)__builtin_wasm_min_s_i8x16((__i8x16)__a, (__i8x16)__b); 643 } 644 645 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_min(v128_t __a, 646 v128_t __b) { 647 return (v128_t)__builtin_wasm_min_u_i8x16((__i8x16)__a, (__i8x16)__b); 648 } 649 650 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_max(v128_t __a, 651 v128_t __b) { 652 return (v128_t)__builtin_wasm_max_s_i8x16((__i8x16)__a, (__i8x16)__b); 653 } 654 655 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_max(v128_t __a, 656 v128_t __b) { 657 return (v128_t)__builtin_wasm_max_u_i8x16((__i8x16)__a, (__i8x16)__b); 658 } 659 660 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_avgr(v128_t __a, 661 v128_t __b) { 662 return (v128_t)__builtin_wasm_avgr_u_i8x16((__i8x16)__a, (__i8x16)__b); 663 } 664 665 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_abs(v128_t __a) { 666 return (v128_t)__builtin_wasm_abs_i16x8((__i16x8)__a); 667 } 668 669 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_neg(v128_t __a) { 670 return (v128_t)(-(__u16x8)__a); 671 } 672 673 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i16x8_any_true(v128_t __a) { 674 return __builtin_wasm_any_true_i16x8((__i16x8)__a); 675 } 676 677 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i16x8_all_true(v128_t __a) { 678 return __builtin_wasm_all_true_i16x8((__i16x8)__a); 679 } 680 681 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_shl(v128_t __a, 682 int32_t __b) { 683 return (v128_t)((__i16x8)__a << __b); 684 } 685 686 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_shr(v128_t __a, 687 int32_t __b) { 688 return (v128_t)((__i16x8)__a >> __b); 689 } 690 691 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_shr(v128_t __a, 692 int32_t __b) { 693 return 
(v128_t)((__u16x8)__a >> __b); 694 } 695 696 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_add(v128_t __a, 697 v128_t __b) { 698 return (v128_t)((__u16x8)__a + (__u16x8)__b); 699 } 700 701 static __inline__ v128_t __DEFAULT_FN_ATTRS 702 wasm_i16x8_add_saturate(v128_t __a, v128_t __b) { 703 return (v128_t)__builtin_wasm_add_saturate_s_i16x8((__i16x8)__a, 704 (__i16x8)__b); 705 } 706 707 static __inline__ v128_t __DEFAULT_FN_ATTRS 708 wasm_u16x8_add_saturate(v128_t __a, v128_t __b) { 709 return (v128_t)__builtin_wasm_add_saturate_u_i16x8((__i16x8)__a, 710 (__i16x8)__b); 711 } 712 713 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_sub(v128_t __a, 714 v128_t __b) { 715 return (v128_t)((__i16x8)__a - (__i16x8)__b); 716 } 717 718 static __inline__ v128_t __DEFAULT_FN_ATTRS 719 wasm_i16x8_sub_saturate(v128_t __a, v128_t __b) { 720 return (v128_t)__builtin_wasm_sub_saturate_s_i16x8((__i16x8)__a, 721 (__i16x8)__b); 722 } 723 724 static __inline__ v128_t __DEFAULT_FN_ATTRS 725 wasm_u16x8_sub_saturate(v128_t __a, v128_t __b) { 726 return (v128_t)__builtin_wasm_sub_saturate_u_i16x8((__i16x8)__a, 727 (__i16x8)__b); 728 } 729 730 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_mul(v128_t __a, 731 v128_t __b) { 732 return (v128_t)((__u16x8)__a * (__u16x8)__b); 733 } 734 735 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_min(v128_t __a, 736 v128_t __b) { 737 return (v128_t)__builtin_wasm_min_s_i16x8((__i16x8)__a, (__i16x8)__b); 738 } 739 740 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_min(v128_t __a, 741 v128_t __b) { 742 return (v128_t)__builtin_wasm_min_u_i16x8((__i16x8)__a, (__i16x8)__b); 743 } 744 745 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_max(v128_t __a, 746 v128_t __b) { 747 return (v128_t)__builtin_wasm_max_s_i16x8((__i16x8)__a, (__i16x8)__b); 748 } 749 750 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_max(v128_t __a, 751 v128_t __b) { 752 return (v128_t)__builtin_wasm_max_u_i16x8((__i16x8)__a, 
(__i16x8)__b); 753 } 754 755 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_avgr(v128_t __a, 756 v128_t __b) { 757 return (v128_t)__builtin_wasm_avgr_u_i16x8((__i16x8)__a, (__i16x8)__b); 758 } 759 760 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_abs(v128_t __a) { 761 return (v128_t)__builtin_wasm_abs_i32x4((__i32x4)__a); 762 } 763 764 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_neg(v128_t __a) { 765 return (v128_t)(-(__u32x4)__a); 766 } 767 768 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i32x4_any_true(v128_t __a) { 769 return __builtin_wasm_any_true_i32x4((__i32x4)__a); 770 } 771 772 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i32x4_all_true(v128_t __a) { 773 return __builtin_wasm_all_true_i32x4((__i32x4)__a); 774 } 775 776 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_shl(v128_t __a, 777 int32_t __b) { 778 return (v128_t)((__i32x4)__a << __b); 779 } 780 781 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_shr(v128_t __a, 782 int32_t __b) { 783 return (v128_t)((__i32x4)__a >> __b); 784 } 785 786 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_shr(v128_t __a, 787 int32_t __b) { 788 return (v128_t)((__u32x4)__a >> __b); 789 } 790 791 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_add(v128_t __a, 792 v128_t __b) { 793 return (v128_t)((__u32x4)__a + (__u32x4)__b); 794 } 795 796 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_sub(v128_t __a, 797 v128_t __b) { 798 return (v128_t)((__u32x4)__a - (__u32x4)__b); 799 } 800 801 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_mul(v128_t __a, 802 v128_t __b) { 803 return (v128_t)((__u32x4)__a * (__u32x4)__b); 804 } 805 806 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_min(v128_t __a, 807 v128_t __b) { 808 return (v128_t)__builtin_wasm_min_s_i32x4((__i32x4)__a, (__i32x4)__b); 809 } 810 811 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_min(v128_t __a, 812 v128_t __b) { 813 return (v128_t)__builtin_wasm_min_u_i32x4((__i32x4)__a, 
(__i32x4)__b); 814 } 815 816 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_max(v128_t __a, 817 v128_t __b) { 818 return (v128_t)__builtin_wasm_max_s_i32x4((__i32x4)__a, (__i32x4)__b); 819 } 820 821 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_max(v128_t __a, 822 v128_t __b) { 823 return (v128_t)__builtin_wasm_max_u_i32x4((__i32x4)__a, (__i32x4)__b); 824 } 825 826 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_neg(v128_t __a) { 827 return (v128_t)(-(__u64x2)__a); 828 } 829 830 #ifdef __wasm_unimplemented_simd128__ 831 832 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i64x2_any_true(v128_t __a) { 833 return __builtin_wasm_any_true_i64x2((__i64x2)__a); 834 } 835 836 static __inline__ bool __DEFAULT_FN_ATTRS wasm_i64x2_all_true(v128_t __a) { 837 return __builtin_wasm_all_true_i64x2((__i64x2)__a); 838 } 839 840 #endif // __wasm_unimplemented_simd128__ 841 842 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_shl(v128_t __a, 843 int32_t __b) { 844 return (v128_t)((__i64x2)__a << (int64_t)__b); 845 } 846 847 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_shr(v128_t __a, 848 int32_t __b) { 849 return (v128_t)((__i64x2)__a >> (int64_t)__b); 850 } 851 852 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u64x2_shr(v128_t __a, 853 int32_t __b) { 854 return (v128_t)((__u64x2)__a >> (int64_t)__b); 855 } 856 857 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_add(v128_t __a, 858 v128_t __b) { 859 return (v128_t)((__u64x2)__a + (__u64x2)__b); 860 } 861 862 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_sub(v128_t __a, 863 v128_t __b) { 864 return (v128_t)((__u64x2)__a - (__u64x2)__b); 865 } 866 867 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_mul(v128_t __a, 868 v128_t __b) { 869 return (v128_t)((__u64x2)__a * (__u64x2)__b); 870 } 871 872 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_abs(v128_t __a) { 873 return (v128_t)__builtin_wasm_abs_f32x4((__f32x4)__a); 874 } 875 876 static __inline__ v128_t 
__DEFAULT_FN_ATTRS wasm_f32x4_neg(v128_t __a) { 877 return (v128_t)(-(__f32x4)__a); 878 } 879 880 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_sqrt(v128_t __a) { 881 return (v128_t)__builtin_wasm_sqrt_f32x4((__f32x4)__a); 882 } 883 884 #ifdef __wasm_unimplemented_simd128__ 885 886 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_qfma(v128_t __a, 887 v128_t __b, 888 v128_t __c) { 889 return (v128_t)__builtin_wasm_qfma_f32x4((__f32x4)__a, (__f32x4)__b, 890 (__f32x4)__c); 891 } 892 893 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_qfms(v128_t __a, 894 v128_t __b, 895 v128_t __c) { 896 return (v128_t)__builtin_wasm_qfms_f32x4((__f32x4)__a, (__f32x4)__b, 897 (__f32x4)__c); 898 } 899 900 #endif // __wasm_unimplemented_simd128__ 901 902 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_add(v128_t __a, 903 v128_t __b) { 904 return (v128_t)((__f32x4)__a + (__f32x4)__b); 905 } 906 907 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_sub(v128_t __a, 908 v128_t __b) { 909 return (v128_t)((__f32x4)__a - (__f32x4)__b); 910 } 911 912 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_mul(v128_t __a, 913 v128_t __b) { 914 return (v128_t)((__f32x4)__a * (__f32x4)__b); 915 } 916 917 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_div(v128_t __a, 918 v128_t __b) { 919 return (v128_t)((__f32x4)__a / (__f32x4)__b); 920 } 921 922 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_min(v128_t __a, 923 v128_t __b) { 924 return (v128_t)__builtin_wasm_min_f32x4((__f32x4)__a, (__f32x4)__b); 925 } 926 927 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_max(v128_t __a, 928 v128_t __b) { 929 return (v128_t)__builtin_wasm_max_f32x4((__f32x4)__a, (__f32x4)__b); 930 } 931 932 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_pmin(v128_t __a, 933 v128_t __b) { 934 return (v128_t)__builtin_wasm_pmin_f32x4((__f32x4)__a, (__f32x4)__b); 935 } 936 937 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_pmax(v128_t __a, 938 v128_t __b) { 
939 return (v128_t)__builtin_wasm_pmax_f32x4((__f32x4)__a, (__f32x4)__b); 940 } 941 942 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_abs(v128_t __a) { 943 return (v128_t)__builtin_wasm_abs_f64x2((__f64x2)__a); 944 } 945 946 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_neg(v128_t __a) { 947 return (v128_t)(-(__f64x2)__a); 948 } 949 950 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_sqrt(v128_t __a) { 951 return (v128_t)__builtin_wasm_sqrt_f64x2((__f64x2)__a); 952 } 953 954 #ifdef __wasm_unimplemented_simd128__ 955 956 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_qfma(v128_t __a, 957 v128_t __b, 958 v128_t __c) { 959 return (v128_t)__builtin_wasm_qfma_f64x2((__f64x2)__a, (__f64x2)__b, 960 (__f64x2)__c); 961 } 962 963 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_qfms(v128_t __a, 964 v128_t __b, 965 v128_t __c) { 966 return (v128_t)__builtin_wasm_qfms_f64x2((__f64x2)__a, (__f64x2)__b, 967 (__f64x2)__c); 968 } 969 970 #endif // __wasm_unimplemented_simd128__ 971 972 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_add(v128_t __a, 973 v128_t __b) { 974 return (v128_t)((__f64x2)__a + (__f64x2)__b); 975 } 976 977 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_sub(v128_t __a, 978 v128_t __b) { 979 return (v128_t)((__f64x2)__a - (__f64x2)__b); 980 } 981 982 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_mul(v128_t __a, 983 v128_t __b) { 984 return (v128_t)((__f64x2)__a * (__f64x2)__b); 985 } 986 987 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_div(v128_t __a, 988 v128_t __b) { 989 return (v128_t)((__f64x2)__a / (__f64x2)__b); 990 } 991 992 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_min(v128_t __a, 993 v128_t __b) { 994 return (v128_t)__builtin_wasm_min_f64x2((__f64x2)__a, (__f64x2)__b); 995 } 996 997 static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_max(v128_t __a, 998 v128_t __b) { 999 return (v128_t)__builtin_wasm_max_f64x2((__f64x2)__a, (__f64x2)__b); 1000 } 1001 1002 static 
__inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_pmin(v128_t __a,
                                                     v128_t __b) {
  return (v128_t)__builtin_wasm_pmin_f64x2((__f64x2)__a, (__f64x2)__b);
}

// Pseudo-maximum (pmax) builtin over the f64x2 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_pmax(v128_t __a,
                                                            v128_t __b) {
  return (v128_t)__builtin_wasm_pmax_f64x2((__f64x2)__a, (__f64x2)__b);
}

// Truncating conversion of f32 lanes to signed i32 lanes, saturating on
// overflow (the _s_ builtin variant).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_trunc_saturate_f32x4(v128_t __a) {
  return (v128_t)__builtin_wasm_trunc_saturate_s_i32x4_f32x4((__f32x4)__a);
}

// Truncating conversion of f32 lanes to unsigned i32 lanes, saturating on
// overflow (the _u_ builtin variant).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_trunc_saturate_f32x4(v128_t __a) {
  return (v128_t)__builtin_wasm_trunc_saturate_u_i32x4_f32x4((__f32x4)__a);
}

// Convert signed i32 lanes to f32 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_convert_i32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector((__i32x4)__a, __f32x4);
}

// Convert unsigned i32 lanes to f32 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_convert_u32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector((__u32x4)__a, __f32x4);
}

// Byte-granularity shuffle: each __cN picks one of the 32 source bytes
// (0-15 from __a, 16-31 from __b).  These are macros, not functions, so the
// lane indices reach the builtin as integer constant expressions.
#define wasm_v8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7, __c8, __c9, __c10, __c11, __c12, __c13,       \
                           __c14, __c15)                                       \
  ((v128_t)__builtin_wasm_shuffle_v8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), __c0, __c1, __c2, __c3, __c4, __c5,      \
      __c6, __c7, __c8, __c9, __c10, __c11, __c12, __c13, __c14, __c15))

// 16-bit-lane shuffle, implemented by expanding each 16-bit lane index into
// its two constituent byte indices.
#define wasm_v16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7)                                               \
  ((v128_t)__builtin_wasm_shuffle_v8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*2, (__c0)*2 + 1, (__c1)*2,        \
      (__c1)*2 + 1, (__c2)*2, (__c2)*2 + 1, (__c3)*2, (__c3)*2 + 1, (__c4)*2,  \
      (__c4)*2 + 1, (__c5)*2, (__c5)*2 + 1, (__c6)*2, (__c6)*2 + 1, (__c7)*2,  \
      (__c7)*2 + 1))

// 32-bit-lane shuffle, expanded to four byte indices per lane.
#define wasm_v32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)                   \
  ((v128_t)__builtin_wasm_shuffle_v8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*4, (__c0)*4 + 1, (__c0)*4 + 2,    \
      (__c0)*4 + 3, (__c1)*4, (__c1)*4 + 1, (__c1)*4 + 2, (__c1)*4 + 3,        \
      (__c2)*4, (__c2)*4 + 1, (__c2)*4 + 2, (__c2)*4 + 3, (__c3)*4,            \
      (__c3)*4 + 1, (__c3)*4 + 2, (__c3)*4 + 3))

// 64-bit-lane shuffle, expanded to eight byte indices per lane.
#define wasm_v64x2_shuffle(__a, __b, __c0, __c1)                               \
  ((v128_t)__builtin_wasm_shuffle_v8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*8, (__c0)*8 + 1, (__c0)*8 + 2,    \
      (__c0)*8 + 3, (__c0)*8 + 4, (__c0)*8 + 5, (__c0)*8 + 6, (__c0)*8 + 7,    \
      (__c1)*8, (__c1)*8 + 1, (__c1)*8 + 2, (__c1)*8 + 3, (__c1)*8 + 4,        \
      (__c1)*8 + 5, (__c1)*8 + 6, (__c1)*8 + 7))

// Runtime byte shuffle via the v8x16.swizzle builtin: bytes of __a are
// selected by the index bytes held in __b, so the indices need not be
// compile-time constants (unlike the *_shuffle macros above).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v8x16_swizzle(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_swizzle_v8x16((__i8x16)__a, (__i8x16)__b);
}

// Narrow 16-bit lanes of __a and __b into 8-bit lanes, signed saturating.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_narrow_i16x8(v128_t __a, v128_t __b) {
  return (v128_t)__builtin_wasm_narrow_s_i8x16_i16x8((__i16x8)__a,
                                                     (__i16x8)__b);
}

// Narrow 16-bit lanes of __a and __b into 8-bit lanes, unsigned saturating.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_narrow_i16x8(v128_t __a, v128_t __b) {
  return (v128_t)__builtin_wasm_narrow_u_i8x16_i16x8((__i16x8)__a,
                                                     (__i16x8)__b);
}

// Narrow 32-bit lanes of __a and __b into 16-bit lanes, signed saturating.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_narrow_i32x4(v128_t __a, v128_t __b) {
  return (v128_t)__builtin_wasm_narrow_s_i16x8_i32x4((__i32x4)__a,
                                                     (__i32x4)__b);
}

// Narrow 32-bit lanes of __a and __b into 16-bit lanes, unsigned saturating.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_narrow_i32x4(v128_t __a, v128_t __b) {
  return (v128_t)__builtin_wasm_narrow_u_i16x8_i32x4((__i32x4)__a,
                                                     (__i32x4)__b);
}

// Sign-extend the low eight 8-bit lanes of __a to 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_widen_low_i8x16(v128_t __a) {
  return (v128_t)__builtin_wasm_widen_low_s_i16x8_i8x16((__i8x16)__a);
}

// Sign-extend the high eight 8-bit lanes of __a to 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_widen_high_i8x16(v128_t __a) {
  return (v128_t)__builtin_wasm_widen_high_s_i16x8_i8x16((__i8x16)__a);
}

// Zero-extend the low eight 8-bit lanes of __a to 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_widen_low_u8x16(v128_t __a) {
  return (v128_t)__builtin_wasm_widen_low_u_i16x8_i8x16((__i8x16)__a);
}

// Zero-extend the high eight 8-bit lanes of __a to 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_widen_high_u8x16(v128_t __a) {
  return (v128_t)__builtin_wasm_widen_high_u_i16x8_i8x16((__i8x16)__a);
}

// Sign-extend the low four 16-bit lanes of __a to 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_widen_low_i16x8(v128_t __a) {
  return (v128_t)__builtin_wasm_widen_low_s_i32x4_i16x8((__i16x8)__a);
}

// Sign-extend the high four 16-bit lanes of __a to 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_widen_high_i16x8(v128_t __a) {
  return (v128_t)__builtin_wasm_widen_high_s_i32x4_i16x8((__i16x8)__a);
}

// Zero-extend the low four 16-bit lanes of __a to 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_widen_low_u16x8(v128_t __a) {
  return (v128_t)__builtin_wasm_widen_low_u_i32x4_i16x8((__i16x8)__a);
}

// Zero-extend the high four 16-bit lanes of __a to 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_widen_high_u16x8(v128_t __a) {
  return (v128_t)__builtin_wasm_widen_high_u_i32x4_i16x8((__i16x8)__a);
}

// Undefine helper macros
#undef __DEFAULT_FN_ATTRS

#endif // __WASM_SIMD128_H