xref: /freebsd/sys/contrib/openzfs/module/zfs/vdev_raidz_math_avx512f.c (revision 61145dc2b94f12f6a47344fb9aac702321880e43)
1*61145dc2SMartin Matuska // SPDX-License-Identifier: CDDL-1.0
2eda14cbcSMatt Macy /*
3eda14cbcSMatt Macy  * CDDL HEADER START
4eda14cbcSMatt Macy  *
5eda14cbcSMatt Macy  * The contents of this file are subject to the terms of the
6eda14cbcSMatt Macy  * Common Development and Distribution License (the "License").
7eda14cbcSMatt Macy  * You may not use this file except in compliance with the License.
8eda14cbcSMatt Macy  *
9eda14cbcSMatt Macy  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
10271171e0SMartin Matuska  * or https://opensource.org/licenses/CDDL-1.0.
11eda14cbcSMatt Macy  * See the License for the specific language governing permissions
12eda14cbcSMatt Macy  * and limitations under the License.
13eda14cbcSMatt Macy  *
14eda14cbcSMatt Macy  * When distributing Covered Code, include this CDDL HEADER in each
15eda14cbcSMatt Macy  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
16eda14cbcSMatt Macy  * If applicable, add the following below this CDDL HEADER, with the
17eda14cbcSMatt Macy  * fields enclosed by brackets "[]" replaced with your own identifying
18eda14cbcSMatt Macy  * information: Portions Copyright [yyyy] [name of copyright owner]
19eda14cbcSMatt Macy  *
20eda14cbcSMatt Macy  * CDDL HEADER END
21eda14cbcSMatt Macy  */
22eda14cbcSMatt Macy /*
23eda14cbcSMatt Macy  * Copyright (C) 2016 Romain Dolbeau. All rights reserved.
24eda14cbcSMatt Macy  * Copyright (C) 2016 Gvozden Nešković. All rights reserved.
25eda14cbcSMatt Macy  */
26eda14cbcSMatt Macy 
27eda14cbcSMatt Macy #include <sys/isa_defs.h>
28eda14cbcSMatt Macy 
29eda14cbcSMatt Macy #if defined(__x86_64) && defined(HAVE_AVX512F)
30eda14cbcSMatt Macy 
31eda14cbcSMatt Macy #include <sys/types.h>
32eda14cbcSMatt Macy #include <sys/simd.h>
33eda14cbcSMatt Macy #include <sys/debug.h>
34eda14cbcSMatt Macy 
35eda14cbcSMatt Macy #ifdef __linux__
36eda14cbcSMatt Macy #define	__asm __asm__ __volatile__
37eda14cbcSMatt Macy #endif
38eda14cbcSMatt Macy 
/* Evaluates to the number of register arguments passed (1..8). */
#define	_REG_CNT(_0, _1, _2, _3, _4, _5, _6, _7, N, ...) N
#define	REG_CNT(r...) _REG_CNT(r, 8, 7, 6, 5, 4, 3, 2, 1)

/*
 * VRn_() picks the (n+1)-th argument from a register-number list and
 * pastes it into a 512-bit AVX-512 register name string ("zmmN") for
 * use inside the inline asm templates below.
 */
#define	VR0_(REG, ...) "zmm"#REG
#define	VR1_(_1, REG, ...) "zmm"#REG
#define	VR2_(_1, _2, REG, ...) "zmm"#REG
#define	VR3_(_1, _2, _3, REG, ...) "zmm"#REG
#define	VR4_(_1, _2, _3, _4, REG, ...) "zmm"#REG
#define	VR5_(_1, _2, _3, _4, _5, REG, ...) "zmm"#REG
#define	VR6_(_1, _2, _3, _4, _5, _6, REG, ...) "zmm"#REG
#define	VR7_(_1, _2, _3, _4, _5, _6, _7, REG, ...) "zmm"#REG

/*
 * The extra trailing numbers pad short register lists so the
 * position-picking VRn_() helpers always receive enough arguments.
 */
#define	VR0(r...) VR0_(r)
#define	VR1(r...) VR1_(r)
#define	VR2(r...) VR2_(r, 1)
#define	VR3(r...) VR3_(r, 1, 2)
#define	VR4(r...) VR4_(r, 1, 2)
#define	VR5(r...) VR5_(r, 1, 2, 3)
#define	VR6(r...) VR6_(r, 1, 2, 3, 4)
#define	VR7(r...) VR7_(r, 1, 2, 3, 4, 5)

/*
 * Same selection producing 256-bit "ymm" names.  NOTE(review): these do
 * not appear to be referenced by any macro in this file — presumably
 * kept for parity with the other x86 raidz math variants; confirm
 * before removing.
 */
#define	VRy0_(REG, ...) "ymm"#REG
#define	VRy1_(_1, REG, ...) "ymm"#REG
#define	VRy2_(_1, _2, REG, ...) "ymm"#REG
#define	VRy3_(_1, _2, _3, REG, ...) "ymm"#REG
#define	VRy4_(_1, _2, _3, _4, REG, ...) "ymm"#REG
#define	VRy5_(_1, _2, _3, _4, _5, REG, ...) "ymm"#REG
#define	VRy6_(_1, _2, _3, _4, _5, _6, REG, ...) "ymm"#REG
#define	VRy7_(_1, _2, _3, _4, _5, _6, _7, REG, ...) "ymm"#REG

#define	VRy0(r...) VRy0_(r)
#define	VRy1(r...) VRy1_(r)
#define	VRy2(r...) VRy2_(r, 1)
#define	VRy3(r...) VRy3_(r, 1, 2)
#define	VRy4(r...) VRy4_(r, 1, 2)
#define	VRy5(r...) VRy5_(r, 1, 2, 3)
#define	VRy6(r...) VRy6_(r, 1, 2, 3, 4)
#define	VRy7(r...) VRy7_(r, 1, 2, 3, 4, 5)

/* Split a 4-register list into its first and second pairs. */
#define	R_01(REG1, REG2, ...) REG1, REG2
#define	_R_23(_0, _1, REG2, REG3, ...) REG2, REG3
#define	R_23(REG...) _R_23(REG, 1, 2, 3)
/* Width of one vector element: a full 512-bit zmm register (64 bytes). */
#define	ELEM_SIZE 64

/*
 * One 64-byte vector element, aligned so the aligned-move forms
 * (vmovdqa64) used by LOAD()/STORE() are legal on it.
 */
typedef struct v {
	uint8_t b[ELEM_SIZE] __attribute__((aligned(ELEM_SIZE)));
} v_t;
87eda14cbcSMatt Macy 
88eda14cbcSMatt Macy 
/*
 * XOR 256 bytes of memory at src into four zmm accumulator registers.
 * Only the four-register case is implemented; this file configures all
 * template strides to 4 (see the *_STRIDE defines below).
 */
#define	XOR_ACC(src, r...)						\
{									\
	switch (REG_CNT(r)) {						\
	case 4:								\
		__asm(							\
		    "vpxorq 0x00(%[SRC]), %%" VR0(r)", %%" VR0(r) "\n"	\
		    "vpxorq 0x40(%[SRC]), %%" VR1(r)", %%" VR1(r) "\n"	\
		    "vpxorq 0x80(%[SRC]), %%" VR2(r)", %%" VR2(r) "\n"	\
		    "vpxorq 0xc0(%[SRC]), %%" VR3(r)", %%" VR3(r) "\n"	\
		    : : [SRC] "r" (src));				\
		break;							\
	}								\
}

/*
 * Pairwise XOR of the first half of the register list into the second
 * half: 8 registers -> four XORs, 4 registers -> two XORs.
 */
#define	XOR(r...)							\
{									\
	switch (REG_CNT(r)) {						\
	case 8:								\
		__asm(							\
		    "vpxorq %" VR0(r) ", %" VR4(r)", %" VR4(r) "\n"	\
		    "vpxorq %" VR1(r) ", %" VR5(r)", %" VR5(r) "\n"	\
		    "vpxorq %" VR2(r) ", %" VR6(r)", %" VR6(r) "\n"	\
		    "vpxorq %" VR3(r) ", %" VR7(r)", %" VR7(r));	\
		break;							\
	case 4:								\
		__asm(							\
		    "vpxorq %" VR0(r) ", %" VR2(r)", %" VR2(r) "\n"	\
		    "vpxorq %" VR1(r) ", %" VR3(r)", %" VR3(r));	\
		break;							\
	}								\
}


/* XOR a register set with itself: clears it to all-zero. */
#define	ZERO(r...)	XOR(r, r)
123eda14cbcSMatt Macy 
124eda14cbcSMatt Macy 
/*
 * Register-to-register copy of the first half of the list into the
 * second half (same pairing scheme as XOR()).
 */
#define	COPY(r...) 							\
{									\
	switch (REG_CNT(r)) {						\
	case 8:								\
		__asm(							\
		    "vmovdqa64 %" VR0(r) ", %" VR4(r) "\n"		\
		    "vmovdqa64 %" VR1(r) ", %" VR5(r) "\n"		\
		    "vmovdqa64 %" VR2(r) ", %" VR6(r) "\n"		\
		    "vmovdqa64 %" VR3(r) ", %" VR7(r));			\
		break;							\
	case 4:								\
		__asm(							\
		    "vmovdqa64 %" VR0(r) ", %" VR2(r) "\n"		\
		    "vmovdqa64 %" VR1(r) ", %" VR3(r));			\
		break;							\
	}								\
}

/* Aligned load of 256 bytes at src into four zmm registers. */
#define	LOAD(src, r...) 						\
{									\
	switch (REG_CNT(r)) {						\
	case 4:								\
		__asm(							\
		    "vmovdqa64 0x00(%[SRC]), %%" VR0(r) "\n"		\
		    "vmovdqa64 0x40(%[SRC]), %%" VR1(r) "\n"		\
		    "vmovdqa64 0x80(%[SRC]), %%" VR2(r) "\n"		\
		    "vmovdqa64 0xc0(%[SRC]), %%" VR3(r) "\n"		\
		    : : [SRC] "r" (src));				\
		break;							\
	}								\
}

/* Aligned store of four zmm registers to 256 bytes at dst. */
#define	STORE(dst, r...)   						\
{									\
	switch (REG_CNT(r)) {						\
	case 4:								\
		__asm(							\
		    "vmovdqa64 %%" VR0(r) ", 0x00(%[DST])\n"		\
		    "vmovdqa64 %%" VR1(r) ", 0x40(%[DST])\n"		\
		    "vmovdqa64 %%" VR2(r) ", 0x80(%[DST])\n"		\
		    "vmovdqa64 %%" VR3(r) ", 0xc0(%[DST])\n"		\
		    : : [DST] "r" (dst));				\
		break;							\
	}								\
}
170eda14cbcSMatt Macy 
/*
 * Broadcast the GF(2^8) multiply-by-2 constants into zmm29..zmm31:
 *   zmm31 = 0x1d repeated (low byte of the RAID-Z generator polynomial)
 *   zmm30 = 0x80 repeated (per-byte high-bit mask)
 *   zmm29 = 0xfe repeated (bits of a byte that survive a left shift)
 * zmm25..zmm28 are scratch for _MUL2() below; none of these registers
 * overlap the data/parity register numbers used by the template.
 */
#define	MUL2_SETUP() 							\
{   									\
	__asm("vmovq %0,   %%xmm31" :: "r"(0x1d1d1d1d1d1d1d1d));	\
	__asm("vpbroadcastq %xmm31, %zmm31");				\
	__asm("vmovq %0,   %%xmm30" :: "r"(0x8080808080808080));	\
	__asm("vpbroadcastq %xmm30, %zmm30");				\
	__asm("vmovq %0,   %%xmm29" :: "r"(0xfefefefefefefefe));	\
	__asm("vpbroadcastq %xmm29, %zmm29");				\
}

/*
 * Multiply every byte of two registers by 2 in GF(2^8).  The
 * and/psrlq/psllq/psubq sequence expands each byte's high bit into a
 * full 0xff byte mask ((0x80 << 1) - (0x80 >> 7) == 0xff, with no
 * cross-byte interference since only bit 7 of each byte is set), which
 * ANDed with zmm31 yields the 0x1d reduction term.  The final
 * vpternlogd $0x6c computes dst = src1 ^ (dst & src2), i.e.
 * ((x << 1) & 0xfe) ^ reduction — the standard GF(2^8) "xtime".
 */
#define	_MUL2(r...) 							\
{									\
	switch	(REG_CNT(r)) {						\
	case 2:								\
		__asm(							\
		    "vpandq   %" VR0(r)", %zmm30, %zmm26\n"		\
		    "vpandq   %" VR1(r)", %zmm30, %zmm25\n"		\
		    "vpsrlq   $7, %zmm26, %zmm28\n"			\
		    "vpsrlq   $7, %zmm25, %zmm27\n"			\
		    "vpsllq   $1, %zmm26, %zmm26\n"			\
		    "vpsllq   $1, %zmm25, %zmm25\n"			\
		    "vpsubq   %zmm28, %zmm26, %zmm26\n"			\
		    "vpsubq   %zmm27, %zmm25, %zmm25\n"			\
		    "vpsllq   $1, %" VR0(r)", %" VR0(r) "\n"		\
		    "vpsllq   $1, %" VR1(r)", %" VR1(r) "\n"		\
		    "vpandq   %zmm26, %zmm31, %zmm26\n" 		\
		    "vpandq   %zmm25, %zmm31, %zmm25\n" 		\
		    "vpternlogd $0x6c,%zmm29, %zmm26, %" VR0(r) "\n"	\
		    "vpternlogd $0x6c,%zmm29, %zmm25, %" VR1(r));	\
		break;							\
	default:							\
		VERIFY(0);						\
	}								\
}

/* Multiply 2 or 4 registers by 2; the 4-register case is done pairwise. */
#define	MUL2(r...)							\
{									\
	switch (REG_CNT(r)) {						\
	case 4:								\
	    _MUL2(R_01(r));						\
	    _MUL2(R_23(r));						\
	    break;							\
	case 2:								\
	    _MUL2(r);							\
	    break;							\
	}								\
}

/* Multiply by 4 == two successive doublings in GF(2^8). */
#define	MUL4(r...)							\
{									\
	MUL2(r);							\
	MUL2(r);							\
}
224eda14cbcSMatt Macy 
225eda14cbcSMatt Macy 
/* General multiplication by adding powers of two */

/* Scratch register pairs used by the generated mul_x2_* routines. */
#define	_mul_x2_in	21, 22
#define	_mul_x2_acc	23, 24

/*
 * acc = x * in over GF(2^8), for a compile-time constant x: classic
 * shift-and-add.  Bit i of x contributes in * 2^i, accumulated by XOR.
 * The 0xfe/0xfc/... guards skip the trailing doublings once no higher
 * bits remain.  Because x is always a literal, every `if` folds away
 * and each mul_x2_<x>() compiles to straight-line SIMD code.
 */
#define	_MUL_PARAM(x, in, acc)						\
{									\
	if (x & 0x01) {	COPY(in, acc); } else { ZERO(acc); }		\
	if (x & 0xfe) { MUL2(in); }					\
	if (x & 0x02) { XOR(in, acc); }					\
	if (x & 0xfc) { MUL2(in); }					\
	if (x & 0x04) { XOR(in, acc); }					\
	if (x & 0xf8) { MUL2(in); }					\
	if (x & 0x08) { XOR(in, acc); }					\
	if (x & 0xf0) { MUL2(in); }					\
	if (x & 0x10) { XOR(in, acc); }					\
	if (x & 0xe0) { MUL2(in); }					\
	if (x & 0x20) { XOR(in, acc); }					\
	if (x & 0xc0) { MUL2(in); }					\
	if (x & 0x40) { XOR(in, acc); }					\
	if (x & 0x80) { MUL2(in); XOR(in, acc); }			\
}

/* Define static void mul_x2_<x>(void) over the scratch register pairs. */
#define	MUL_x2_DEFINE(x)						\
static void 								\
mul_x2_ ## x(void) { _MUL_PARAM(x, _mul_x2_in, _mul_x2_acc); }
252eda14cbcSMatt Macy 
253eda14cbcSMatt Macy 
/*
 * Instantiate one constant-folded GF(2^8) multiply routine for every
 * possible scalar 0..255; gf_x2_mul_fns[] below indexes them by scalar.
 */
MUL_x2_DEFINE(0); MUL_x2_DEFINE(1); MUL_x2_DEFINE(2); MUL_x2_DEFINE(3);
MUL_x2_DEFINE(4); MUL_x2_DEFINE(5); MUL_x2_DEFINE(6); MUL_x2_DEFINE(7);
MUL_x2_DEFINE(8); MUL_x2_DEFINE(9); MUL_x2_DEFINE(10); MUL_x2_DEFINE(11);
MUL_x2_DEFINE(12); MUL_x2_DEFINE(13); MUL_x2_DEFINE(14); MUL_x2_DEFINE(15);
MUL_x2_DEFINE(16); MUL_x2_DEFINE(17); MUL_x2_DEFINE(18); MUL_x2_DEFINE(19);
MUL_x2_DEFINE(20); MUL_x2_DEFINE(21); MUL_x2_DEFINE(22); MUL_x2_DEFINE(23);
MUL_x2_DEFINE(24); MUL_x2_DEFINE(25); MUL_x2_DEFINE(26); MUL_x2_DEFINE(27);
MUL_x2_DEFINE(28); MUL_x2_DEFINE(29); MUL_x2_DEFINE(30); MUL_x2_DEFINE(31);
MUL_x2_DEFINE(32); MUL_x2_DEFINE(33); MUL_x2_DEFINE(34); MUL_x2_DEFINE(35);
MUL_x2_DEFINE(36); MUL_x2_DEFINE(37); MUL_x2_DEFINE(38); MUL_x2_DEFINE(39);
MUL_x2_DEFINE(40); MUL_x2_DEFINE(41); MUL_x2_DEFINE(42); MUL_x2_DEFINE(43);
MUL_x2_DEFINE(44); MUL_x2_DEFINE(45); MUL_x2_DEFINE(46); MUL_x2_DEFINE(47);
MUL_x2_DEFINE(48); MUL_x2_DEFINE(49); MUL_x2_DEFINE(50); MUL_x2_DEFINE(51);
MUL_x2_DEFINE(52); MUL_x2_DEFINE(53); MUL_x2_DEFINE(54); MUL_x2_DEFINE(55);
MUL_x2_DEFINE(56); MUL_x2_DEFINE(57); MUL_x2_DEFINE(58); MUL_x2_DEFINE(59);
MUL_x2_DEFINE(60); MUL_x2_DEFINE(61); MUL_x2_DEFINE(62); MUL_x2_DEFINE(63);
MUL_x2_DEFINE(64); MUL_x2_DEFINE(65); MUL_x2_DEFINE(66); MUL_x2_DEFINE(67);
MUL_x2_DEFINE(68); MUL_x2_DEFINE(69); MUL_x2_DEFINE(70); MUL_x2_DEFINE(71);
MUL_x2_DEFINE(72); MUL_x2_DEFINE(73); MUL_x2_DEFINE(74); MUL_x2_DEFINE(75);
MUL_x2_DEFINE(76); MUL_x2_DEFINE(77); MUL_x2_DEFINE(78); MUL_x2_DEFINE(79);
MUL_x2_DEFINE(80); MUL_x2_DEFINE(81); MUL_x2_DEFINE(82); MUL_x2_DEFINE(83);
MUL_x2_DEFINE(84); MUL_x2_DEFINE(85); MUL_x2_DEFINE(86); MUL_x2_DEFINE(87);
MUL_x2_DEFINE(88); MUL_x2_DEFINE(89); MUL_x2_DEFINE(90); MUL_x2_DEFINE(91);
MUL_x2_DEFINE(92); MUL_x2_DEFINE(93); MUL_x2_DEFINE(94); MUL_x2_DEFINE(95);
MUL_x2_DEFINE(96); MUL_x2_DEFINE(97); MUL_x2_DEFINE(98); MUL_x2_DEFINE(99);
MUL_x2_DEFINE(100); MUL_x2_DEFINE(101); MUL_x2_DEFINE(102); MUL_x2_DEFINE(103);
MUL_x2_DEFINE(104); MUL_x2_DEFINE(105); MUL_x2_DEFINE(106); MUL_x2_DEFINE(107);
MUL_x2_DEFINE(108); MUL_x2_DEFINE(109); MUL_x2_DEFINE(110); MUL_x2_DEFINE(111);
MUL_x2_DEFINE(112); MUL_x2_DEFINE(113); MUL_x2_DEFINE(114); MUL_x2_DEFINE(115);
MUL_x2_DEFINE(116); MUL_x2_DEFINE(117); MUL_x2_DEFINE(118); MUL_x2_DEFINE(119);
MUL_x2_DEFINE(120); MUL_x2_DEFINE(121); MUL_x2_DEFINE(122); MUL_x2_DEFINE(123);
MUL_x2_DEFINE(124); MUL_x2_DEFINE(125); MUL_x2_DEFINE(126); MUL_x2_DEFINE(127);
MUL_x2_DEFINE(128); MUL_x2_DEFINE(129); MUL_x2_DEFINE(130); MUL_x2_DEFINE(131);
MUL_x2_DEFINE(132); MUL_x2_DEFINE(133); MUL_x2_DEFINE(134); MUL_x2_DEFINE(135);
MUL_x2_DEFINE(136); MUL_x2_DEFINE(137); MUL_x2_DEFINE(138); MUL_x2_DEFINE(139);
MUL_x2_DEFINE(140); MUL_x2_DEFINE(141); MUL_x2_DEFINE(142); MUL_x2_DEFINE(143);
MUL_x2_DEFINE(144); MUL_x2_DEFINE(145); MUL_x2_DEFINE(146); MUL_x2_DEFINE(147);
MUL_x2_DEFINE(148); MUL_x2_DEFINE(149); MUL_x2_DEFINE(150); MUL_x2_DEFINE(151);
MUL_x2_DEFINE(152); MUL_x2_DEFINE(153); MUL_x2_DEFINE(154); MUL_x2_DEFINE(155);
MUL_x2_DEFINE(156); MUL_x2_DEFINE(157); MUL_x2_DEFINE(158); MUL_x2_DEFINE(159);
MUL_x2_DEFINE(160); MUL_x2_DEFINE(161); MUL_x2_DEFINE(162); MUL_x2_DEFINE(163);
MUL_x2_DEFINE(164); MUL_x2_DEFINE(165); MUL_x2_DEFINE(166); MUL_x2_DEFINE(167);
MUL_x2_DEFINE(168); MUL_x2_DEFINE(169); MUL_x2_DEFINE(170); MUL_x2_DEFINE(171);
MUL_x2_DEFINE(172); MUL_x2_DEFINE(173); MUL_x2_DEFINE(174); MUL_x2_DEFINE(175);
MUL_x2_DEFINE(176); MUL_x2_DEFINE(177); MUL_x2_DEFINE(178); MUL_x2_DEFINE(179);
MUL_x2_DEFINE(180); MUL_x2_DEFINE(181); MUL_x2_DEFINE(182); MUL_x2_DEFINE(183);
MUL_x2_DEFINE(184); MUL_x2_DEFINE(185); MUL_x2_DEFINE(186); MUL_x2_DEFINE(187);
MUL_x2_DEFINE(188); MUL_x2_DEFINE(189); MUL_x2_DEFINE(190); MUL_x2_DEFINE(191);
MUL_x2_DEFINE(192); MUL_x2_DEFINE(193); MUL_x2_DEFINE(194); MUL_x2_DEFINE(195);
MUL_x2_DEFINE(196); MUL_x2_DEFINE(197); MUL_x2_DEFINE(198); MUL_x2_DEFINE(199);
MUL_x2_DEFINE(200); MUL_x2_DEFINE(201); MUL_x2_DEFINE(202); MUL_x2_DEFINE(203);
MUL_x2_DEFINE(204); MUL_x2_DEFINE(205); MUL_x2_DEFINE(206); MUL_x2_DEFINE(207);
MUL_x2_DEFINE(208); MUL_x2_DEFINE(209); MUL_x2_DEFINE(210); MUL_x2_DEFINE(211);
MUL_x2_DEFINE(212); MUL_x2_DEFINE(213); MUL_x2_DEFINE(214); MUL_x2_DEFINE(215);
MUL_x2_DEFINE(216); MUL_x2_DEFINE(217); MUL_x2_DEFINE(218); MUL_x2_DEFINE(219);
MUL_x2_DEFINE(220); MUL_x2_DEFINE(221); MUL_x2_DEFINE(222); MUL_x2_DEFINE(223);
MUL_x2_DEFINE(224); MUL_x2_DEFINE(225); MUL_x2_DEFINE(226); MUL_x2_DEFINE(227);
MUL_x2_DEFINE(228); MUL_x2_DEFINE(229); MUL_x2_DEFINE(230); MUL_x2_DEFINE(231);
MUL_x2_DEFINE(232); MUL_x2_DEFINE(233); MUL_x2_DEFINE(234); MUL_x2_DEFINE(235);
MUL_x2_DEFINE(236); MUL_x2_DEFINE(237); MUL_x2_DEFINE(238); MUL_x2_DEFINE(239);
MUL_x2_DEFINE(240); MUL_x2_DEFINE(241); MUL_x2_DEFINE(242); MUL_x2_DEFINE(243);
MUL_x2_DEFINE(244); MUL_x2_DEFINE(245); MUL_x2_DEFINE(246); MUL_x2_DEFINE(247);
MUL_x2_DEFINE(248); MUL_x2_DEFINE(249); MUL_x2_DEFINE(250); MUL_x2_DEFINE(251);
MUL_x2_DEFINE(252); MUL_x2_DEFINE(253); MUL_x2_DEFINE(254); MUL_x2_DEFINE(255);
318eda14cbcSMatt Macy 
319eda14cbcSMatt Macy 
typedef void (*mul_fn_ptr_t)(void);

/*
 * Jump table mapping a GF(2^8) scalar c to its specialized multiply
 * routine; MUL() dispatches through this.  gf_x2_mul_fns[c] computes
 * _mul_x2_acc = c * _mul_x2_in on the scratch register pairs.
 */
static const mul_fn_ptr_t __attribute__((aligned(256)))
gf_x2_mul_fns[256] = {
	mul_x2_0, mul_x2_1, mul_x2_2, mul_x2_3, mul_x2_4, mul_x2_5,
	mul_x2_6, mul_x2_7, mul_x2_8, mul_x2_9, mul_x2_10, mul_x2_11,
	mul_x2_12, mul_x2_13, mul_x2_14, mul_x2_15, mul_x2_16, mul_x2_17,
	mul_x2_18, mul_x2_19, mul_x2_20, mul_x2_21, mul_x2_22, mul_x2_23,
	mul_x2_24, mul_x2_25, mul_x2_26, mul_x2_27, mul_x2_28, mul_x2_29,
	mul_x2_30, mul_x2_31, mul_x2_32, mul_x2_33, mul_x2_34, mul_x2_35,
	mul_x2_36, mul_x2_37, mul_x2_38, mul_x2_39, mul_x2_40, mul_x2_41,
	mul_x2_42, mul_x2_43, mul_x2_44, mul_x2_45, mul_x2_46, mul_x2_47,
	mul_x2_48, mul_x2_49, mul_x2_50, mul_x2_51, mul_x2_52, mul_x2_53,
	mul_x2_54, mul_x2_55, mul_x2_56, mul_x2_57, mul_x2_58, mul_x2_59,
	mul_x2_60, mul_x2_61, mul_x2_62, mul_x2_63, mul_x2_64, mul_x2_65,
	mul_x2_66, mul_x2_67, mul_x2_68, mul_x2_69, mul_x2_70, mul_x2_71,
	mul_x2_72, mul_x2_73, mul_x2_74, mul_x2_75, mul_x2_76, mul_x2_77,
	mul_x2_78, mul_x2_79, mul_x2_80, mul_x2_81, mul_x2_82, mul_x2_83,
	mul_x2_84, mul_x2_85, mul_x2_86, mul_x2_87, mul_x2_88, mul_x2_89,
	mul_x2_90, mul_x2_91, mul_x2_92, mul_x2_93, mul_x2_94, mul_x2_95,
	mul_x2_96, mul_x2_97, mul_x2_98, mul_x2_99, mul_x2_100, mul_x2_101,
	mul_x2_102, mul_x2_103, mul_x2_104, mul_x2_105, mul_x2_106, mul_x2_107,
	mul_x2_108, mul_x2_109, mul_x2_110, mul_x2_111, mul_x2_112, mul_x2_113,
	mul_x2_114, mul_x2_115, mul_x2_116, mul_x2_117, mul_x2_118, mul_x2_119,
	mul_x2_120, mul_x2_121, mul_x2_122, mul_x2_123, mul_x2_124, mul_x2_125,
	mul_x2_126, mul_x2_127, mul_x2_128, mul_x2_129, mul_x2_130, mul_x2_131,
	mul_x2_132, mul_x2_133, mul_x2_134, mul_x2_135, mul_x2_136, mul_x2_137,
	mul_x2_138, mul_x2_139, mul_x2_140, mul_x2_141, mul_x2_142, mul_x2_143,
	mul_x2_144, mul_x2_145, mul_x2_146, mul_x2_147, mul_x2_148, mul_x2_149,
	mul_x2_150, mul_x2_151, mul_x2_152, mul_x2_153, mul_x2_154, mul_x2_155,
	mul_x2_156, mul_x2_157, mul_x2_158, mul_x2_159, mul_x2_160, mul_x2_161,
	mul_x2_162, mul_x2_163, mul_x2_164, mul_x2_165, mul_x2_166, mul_x2_167,
	mul_x2_168, mul_x2_169, mul_x2_170, mul_x2_171, mul_x2_172, mul_x2_173,
	mul_x2_174, mul_x2_175, mul_x2_176, mul_x2_177, mul_x2_178, mul_x2_179,
	mul_x2_180, mul_x2_181, mul_x2_182, mul_x2_183, mul_x2_184, mul_x2_185,
	mul_x2_186, mul_x2_187, mul_x2_188, mul_x2_189, mul_x2_190, mul_x2_191,
	mul_x2_192, mul_x2_193, mul_x2_194, mul_x2_195, mul_x2_196, mul_x2_197,
	mul_x2_198, mul_x2_199, mul_x2_200, mul_x2_201, mul_x2_202, mul_x2_203,
	mul_x2_204, mul_x2_205, mul_x2_206, mul_x2_207, mul_x2_208, mul_x2_209,
	mul_x2_210, mul_x2_211, mul_x2_212, mul_x2_213, mul_x2_214, mul_x2_215,
	mul_x2_216, mul_x2_217, mul_x2_218, mul_x2_219, mul_x2_220, mul_x2_221,
	mul_x2_222, mul_x2_223, mul_x2_224, mul_x2_225, mul_x2_226, mul_x2_227,
	mul_x2_228, mul_x2_229, mul_x2_230, mul_x2_231, mul_x2_232, mul_x2_233,
	mul_x2_234, mul_x2_235, mul_x2_236, mul_x2_237, mul_x2_238, mul_x2_239,
	mul_x2_240, mul_x2_241, mul_x2_242, mul_x2_243, mul_x2_244, mul_x2_245,
	mul_x2_246, mul_x2_247, mul_x2_248, mul_x2_249, mul_x2_250, mul_x2_251,
	mul_x2_252, mul_x2_253, mul_x2_254, mul_x2_255
};
368eda14cbcSMatt Macy 
/*
 * Multiply four data registers by constant c: each pair is copied to
 * the _mul_x2_in scratch pair, run through the table-dispatched
 * mul_x2_<c>() routine, and read back from _mul_x2_acc.  Only the
 * four-register case is supported; anything else is a template bug.
 */
#define	MUL(c, r...) 							\
{									\
	switch (REG_CNT(r)) {						\
	case 4:								\
		COPY(R_01(r), _mul_x2_in);				\
		gf_x2_mul_fns[c]();					\
		COPY(_mul_x2_acc, R_01(r));				\
		COPY(R_23(r), _mul_x2_in);				\
		gf_x2_mul_fns[c]();					\
		COPY(_mul_x2_acc, R_23(r));				\
		break;							\
	default:							\
		VERIFY(0);						\
	}								\
}
384eda14cbcSMatt Macy 
385eda14cbcSMatt Macy 
/* Enter/leave the kernel FPU context around all vector code. */
#define	raidz_math_begin()	kfpu_begin()
#define	raidz_math_end()	kfpu_end()


/*
 * Parameters for the generic RAID-Z math template included below
 * (vdev_raidz_math_impl.h): per-operation stride (number of vector
 * registers processed per iteration), a one-time setup hook, and the
 * register numbers assigned to each operand set.  Registers 21-24 are
 * reserved as MUL() scratch and 25-31 by MUL2_SETUP()/_MUL2().
 */
#define	SYN_STRIDE		4

#define	ZERO_STRIDE		4
#define	ZERO_DEFINE()		{}
#define	ZERO_D			0, 1, 2, 3

#define	COPY_STRIDE		4
#define	COPY_DEFINE()		{}
#define	COPY_D			0, 1, 2, 3

#define	ADD_STRIDE		4
#define	ADD_DEFINE()		{}
#define	ADD_D 			0, 1, 2, 3

#define	MUL_STRIDE		4
#define	MUL_DEFINE() 		MUL2_SETUP()
#define	MUL_D			0, 1, 2, 3

#define	GEN_P_STRIDE		4
#define	GEN_P_DEFINE()		{}
#define	GEN_P_P			0, 1, 2, 3

#define	GEN_PQ_STRIDE		4
#define	GEN_PQ_DEFINE() 	{}
#define	GEN_PQ_D		0, 1, 2, 3
#define	GEN_PQ_C		4, 5, 6, 7

#define	GEN_PQR_STRIDE		4
#define	GEN_PQR_DEFINE() 	{}
#define	GEN_PQR_D		0, 1, 2, 3
#define	GEN_PQR_C		4, 5, 6, 7

#define	SYN_Q_DEFINE()		{}
#define	SYN_Q_D			0, 1, 2, 3
#define	SYN_Q_X			4, 5, 6, 7

#define	SYN_R_DEFINE()		{}
#define	SYN_R_D			0, 1, 2, 3
#define	SYN_R_X			4, 5, 6, 7

#define	SYN_PQ_DEFINE() 	{}
#define	SYN_PQ_D		0, 1, 2, 3
#define	SYN_PQ_X		4, 5, 6, 7

#define	REC_PQ_STRIDE		4
#define	REC_PQ_DEFINE()		MUL2_SETUP()
#define	REC_PQ_X		0, 1, 2, 3
#define	REC_PQ_Y		4, 5, 6, 7
#define	REC_PQ_T		8, 9, 10, 11

#define	SYN_PR_DEFINE() 	{}
#define	SYN_PR_D		0, 1, 2, 3
#define	SYN_PR_X		4, 5, 6, 7

#define	REC_PR_STRIDE		4
#define	REC_PR_DEFINE() 	MUL2_SETUP()
#define	REC_PR_X		0, 1, 2, 3
#define	REC_PR_Y		4, 5, 6, 7
#define	REC_PR_T		8, 9, 10, 11

#define	SYN_QR_DEFINE() 	{}
#define	SYN_QR_D		0, 1, 2, 3
#define	SYN_QR_X		4, 5, 6, 7

#define	REC_QR_STRIDE		4
#define	REC_QR_DEFINE() 	MUL2_SETUP()
#define	REC_QR_X		0, 1, 2, 3
#define	REC_QR_Y		4, 5, 6, 7
#define	REC_QR_T		8, 9, 10, 11

#define	SYN_PQR_DEFINE() 	{}
#define	SYN_PQR_D		0, 1, 2, 3
#define	SYN_PQR_X		4, 5, 6, 7

#define	REC_PQR_STRIDE		4
#define	REC_PQR_DEFINE() 	MUL2_SETUP()
#define	REC_PQR_X		0, 1, 2, 3
#define	REC_PQR_Y		4, 5, 6, 7
#define	REC_PQR_Z		8, 9, 10, 11
#define	REC_PQR_XS		12, 13, 14, 15
#define	REC_PQR_YS		16, 17, 18, 19


#include <sys/vdev_raidz_impl.h>
#include "vdev_raidz_math_impl.h"

/* Instantiate the parity generation and reconstruction method tables. */
DEFINE_GEN_METHODS(avx512f);
DEFINE_REC_METHODS(avx512f);
478eda14cbcSMatt Macy 
479eda14cbcSMatt Macy static boolean_t
raidz_will_avx512f_work(void)480eda14cbcSMatt Macy raidz_will_avx512f_work(void)
481eda14cbcSMatt Macy {
482eda14cbcSMatt Macy 	return (kfpu_allowed() && zfs_avx_available() &&
483eda14cbcSMatt Macy 	    zfs_avx2_available() && zfs_avx512f_available());
484eda14cbcSMatt Macy }
485eda14cbcSMatt Macy 
486eda14cbcSMatt Macy const raidz_impl_ops_t vdev_raidz_avx512f_impl = {
487eda14cbcSMatt Macy 	.init = NULL,
488eda14cbcSMatt Macy 	.fini = NULL,
489eda14cbcSMatt Macy 	.gen = RAIDZ_GEN_METHODS(avx512f),
490eda14cbcSMatt Macy 	.rec = RAIDZ_REC_METHODS(avx512f),
491eda14cbcSMatt Macy 	.is_supported = &raidz_will_avx512f_work,
492eda14cbcSMatt Macy 	.name = "avx512f"
493eda14cbcSMatt Macy };
494eda14cbcSMatt Macy 
495eda14cbcSMatt Macy #endif /* defined(__x86_64) && defined(HAVE_AVX512F) */
496