//===----- sifive_vector.h - SiFive Vector definitions --------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _SIFIVE_VECTOR_H_
#define _SIFIVE_VECTOR_H_

#include "riscv_vector.h"

#pragma clang riscv intrinsic sifive_vector

#define __riscv_sf_vc_x_se_u8mf4(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint8_t)rs1, 8, 6, vl)
#define __riscv_sf_vc_x_se_u8mf2(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint8_t)rs1, 8, 7, vl)
#define __riscv_sf_vc_x_se_u8m1(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint8_t)rs1, 8, 0, vl)
#define __riscv_sf_vc_x_se_u8m2(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint8_t)rs1, 8, 1, vl)
#define __riscv_sf_vc_x_se_u8m4(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint8_t)rs1, 8, 2, vl)
#define __riscv_sf_vc_x_se_u8m8(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint8_t)rs1, 8, 3, vl)

#define __riscv_sf_vc_x_se_u16mf2(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint16_t)rs1, 16, 7, vl)
#define __riscv_sf_vc_x_se_u16m1(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint16_t)rs1, 16, 0, vl)
#define __riscv_sf_vc_x_se_u16m2(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint16_t)rs1, 16, 1, vl)
#define __riscv_sf_vc_x_se_u16m4(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint16_t)rs1, 16, 2, vl)
#define __riscv_sf_vc_x_se_u16m8(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint16_t)rs1, 16, 3, vl)

#define __riscv_sf_vc_x_se_u32m1(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint32_t)rs1, 32, 0, vl)
#define __riscv_sf_vc_x_se_u32m2(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint32_t)rs1, 32, 1, vl)
#define __riscv_sf_vc_x_se_u32m4(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint32_t)rs1, 32, 2, vl)
#define __riscv_sf_vc_x_se_u32m8(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint32_t)rs1, 32, 3, vl)
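
// The *_x_se_<type> macros above forward a scalar operand (rs1) to the generic
// __riscv_sf_vc_x_se builtin for the SiFive VCIX (XSfvcp) sf.vc.x custom
// instructions, appending the element width in bits and what appears to be the
// vtype vlmul field encoding (m1=0, m2=1, m4=2, m8=3, mf8=5, mf4=6, mf2=7).
// The p27_26, p24_20, and p11_7 parameters presumably name the instruction bit
// ranges they are encoded into. A minimal usage sketch, assuming XSfvcp is
// enabled; the field values, `n`, and `scalar_payload` below are illustrative
// placeholders only:
//
//   size_t vl = __riscv_vsetvl_e32m1(n);
//   __riscv_sf_vc_x_se_u32m1(0x3, 0x1A, 0x05, scalar_payload, vl);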

#define __riscv_sf_vc_i_se_u8mf4(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 8, 7, vl)
#define __riscv_sf_vc_i_se_u8mf2(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 8, 6, vl)
#define __riscv_sf_vc_i_se_u8m1(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 8, 0, vl)
#define __riscv_sf_vc_i_se_u8m2(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 8, 1, vl)
#define __riscv_sf_vc_i_se_u8m4(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 8, 2, vl)
#define __riscv_sf_vc_i_se_u8m8(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 8, 3, vl)

#define __riscv_sf_vc_i_se_u16mf2(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 16, 7, vl)
#define __riscv_sf_vc_i_se_u16m1(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 16, 0, vl)
#define __riscv_sf_vc_i_se_u16m2(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 16, 1, vl)
#define __riscv_sf_vc_i_se_u16m4(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 16, 2, vl)
#define __riscv_sf_vc_i_se_u16m8(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 16, 3, vl)

#define __riscv_sf_vc_i_se_u32m1(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 32, 0, vl)
#define __riscv_sf_vc_i_se_u32m2(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 32, 1, vl)
#define __riscv_sf_vc_i_se_u32m4(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 32, 2, vl)
#define __riscv_sf_vc_i_se_u32m8(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 32, 3, vl)
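
// The *_i_se_<type> macros above mirror the *_x_se_* forms but take what is
// presumably a 5-bit signed immediate (simm5) instead of a scalar register
// operand, forwarding it unchanged to the generic __riscv_sf_vc_i_se builtin
// with the same element-width and vlmul arguments. A minimal usage sketch,
// with the field values and `n` as illustrative placeholders only:
//
//   size_t vl = __riscv_vsetvl_e16m2(n);
//   __riscv_sf_vc_i_se_u16m2(0x1, 0x0C, 0x02, -4, vl);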

#if __riscv_v_elen >= 64
#define __riscv_sf_vc_x_se_u8mf8(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint8_t)rs1, 8, 5, vl)
#define __riscv_sf_vc_x_se_u16mf4(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint16_t)rs1, 16, 6, vl)
#define __riscv_sf_vc_x_se_u32mf2(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint32_t)rs1, 32, 7, vl)

#define __riscv_sf_vc_i_se_u8mf8(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 8, 5, vl)
#define __riscv_sf_vc_i_se_u16mf4(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 16, 6, vl)
#define __riscv_sf_vc_i_se_u32mf2(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 32, 7, vl)

#define __riscv_sf_vc_i_se_u64m1(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 64, 0, vl)
#define __riscv_sf_vc_i_se_u64m2(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 64, 1, vl)
#define __riscv_sf_vc_i_se_u64m4(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 64, 2, vl)
#define __riscv_sf_vc_i_se_u64m8(p27_26, p24_20, p11_7, simm5, vl) \
  __riscv_sf_vc_i_se(p27_26, p24_20, p11_7, simm5, 64, 3, vl)

#if __riscv_xlen >= 64
#define __riscv_sf_vc_x_se_u64m1(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint64_t)rs1, 64, 0, vl)
#define __riscv_sf_vc_x_se_u64m2(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint64_t)rs1, 64, 1, vl)
#define __riscv_sf_vc_x_se_u64m4(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint64_t)rs1, 64, 2, vl)
#define __riscv_sf_vc_x_se_u64m8(p27_26, p24_20, p11_7, rs1, vl) \
  __riscv_sf_vc_x_se(p27_26, p24_20, p11_7, (uint64_t)rs1, 64, 3, vl)
#endif
#endif

#endif //_SIFIVE_VECTOR_H_
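
// As the guards above show, the u8mf8, u16mf4, and u32mf2 combinations and all
// 64-bit-element variants are only defined when __riscv_v_elen >= 64, and the
// 64-bit *_x_se_* forms additionally require __riscv_xlen >= 64, presumably
// because they pass a (uint64_t) scalar operand in a single GPR. A minimal
// usage sketch under those guards; the field values, `n`, and `wide_payload`
// are illustrative placeholders only:
//
//   #if __riscv_v_elen >= 64 && __riscv_xlen >= 64
//     size_t vl = __riscv_vsetvl_e64m1(n);
//     __riscv_sf_vc_x_se_u64m1(0x2, 0x10, 0x01, wide_payload, vl);
//   #endif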