/* xref: /linux/arch/s390/include/asm/fpu-insn.h (revision b731bc5f49651bb85ef31fa1db6e76a0fe10d572) */
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * Support for Floating Point and Vector Instructions
4  *
5  */
6 
7 #ifndef __ASM_S390_FPU_INSN_H
8 #define __ASM_S390_FPU_INSN_H
9 
10 #include <asm/fpu-insn-asm.h>
11 
12 #ifndef __ASSEMBLY__
13 
14 #include <linux/instrumented.h>
15 #include <asm/asm-extable.h>
16 
17 asm(".include \"asm/fpu-insn-asm.h\"\n");
18 
19 /*
20  * Various small helper functions, which can and should be used within
21  * kernel fpu code sections. Each function represents only one floating
22  * point or vector instruction (except for helper functions which require
23  * exception handling).
24  *
25  * This allows to use floating point and vector instructions like C
26  * functions, which has the advantage that all supporting code, like
27  * e.g. loops, can be written in easy to read C code.
28  *
29  * Each of the helper functions provides support for code instrumentation,
30  * like e.g. KASAN. Therefore instrumentation is also covered automatically
31  * when using these functions.
32  *
33  * In order to ensure that code generated with the helper functions stays
34  * within kernel fpu sections, which are guarded with kernel_fpu_begin()
35  * and kernel_fpu_end() calls, each function has a mandatory "memory"
36  * barrier.
37  */
38 
/* CONVERT FROM FIXED (cefbr): convert the s32 @val to short BFP in FPR @f1. */
static __always_inline void fpu_cefbr(u8 f1, s32 val)
{
	asm volatile("cefbr	%[f1],%[val]\n"
		     :
		     : [f1] "I" (f1), [val] "d" (val)
		     : "memory");
}
46 
/*
 * CONVERT TO FIXED (cgebr): convert short BFP in FPR @f2 to a 64-bit
 * integer, using rounding mode @mode. Returns the converted value.
 */
static __always_inline unsigned long fpu_cgebr(u8 f2, u8 mode)
{
	unsigned long val;

	asm volatile("cgebr	%[val],%[mode],%[f2]\n"
		     : [val] "=d" (val)
		     : [f2] "I" (f2), [mode] "I" (mode)
		     : "memory");
	return val;
}
57 
/* DIVIDE (debr, short BFP): FPR @f1 = FPR @f1 / FPR @f2. */
static __always_inline void fpu_debr(u8 f1, u8 f2)
{
	asm volatile("debr	%[f1],%[f2]\n"
		     :
		     : [f1] "I" (f1), [f2] "I" (f2)
		     : "memory");
}
65 
/* LOAD (ld): load the 8 bytes at @reg into FPR @fpr (KASAN-instrumented read). */
static __always_inline void fpu_ld(unsigned short fpr, freg_t *reg)
{
	instrument_read(reg, sizeof(*reg));
	asm volatile("ld	 %[fpr],%[reg]\n"
		     :
		     : [fpr] "I" (fpr), [reg] "Q" (reg->ui)
		     : "memory");
}
74 
/* LOAD FPR FROM GR (ldgr): load FPR @f1 from the general register holding @val. */
static __always_inline void fpu_ldgr(u8 f1, u32 val)
{
	asm volatile("ldgr	%[f1],%[val]\n"
		     :
		     : [f1] "I" (f1), [val] "d" (val)
		     : "memory");
}
82 
/*
 * LOAD FPC (lfpc): load the floating point control register from *@fpc.
 * May raise an exception on an invalid value; use fpu_lfpc_safe() when
 * the source may have been modified by user space.
 */
static __always_inline void fpu_lfpc(unsigned int *fpc)
{
	instrument_read(fpc, sizeof(*fpc));
	asm volatile("lfpc	%[fpc]"
		     :
		     : [fpc] "Q" (*fpc)
		     : "memory");
}
91 
92 /**
93  * fpu_lfpc_safe - Load floating point control register safely.
94  * @fpc: new value for floating point control register
95  *
96  * Load floating point control register. This may lead to an exception,
97  * since a saved value may have been modified by user space (ptrace,
98  * signal return, kvm registers) to an invalid value. In such a case
99  * set the floating point control register to zero.
100  */
/* See the kernel-doc comment above: contract and failure handling. */
static inline void fpu_lfpc_safe(unsigned int *fpc)
{
	instrument_read(fpc, sizeof(*fpc));
	asm_inline volatile(
		"	lfpc	%[fpc]\n"
		"0:	nopr	%%r7\n"
		/* On exception the fixup sets the fpc to zero (see comment above) */
		EX_TABLE_FPC(0b, 0b)
		:
		: [fpc] "Q" (*fpc)
		: "memory");
}
112 
/* STORE (std): store FPR @fpr into *@reg (KASAN-instrumented write). */
static __always_inline void fpu_std(unsigned short fpr, freg_t *reg)
{
	instrument_write(reg, sizeof(*reg));
	asm volatile("std	 %[fpr],%[reg]\n"
		     : [reg] "=Q" (reg->ui)
		     : [fpr] "I" (fpr)
		     : "memory");
}
121 
/* SET FPC (sfpc): set the floating point control register from @fpc. */
static __always_inline void fpu_sfpc(unsigned int fpc)
{
	asm volatile("sfpc	%[fpc]"
		     :
		     : [fpc] "d" (fpc)
		     : "memory");
}
129 
/* STORE FPC (stfpc): store the floating point control register to *@fpc. */
static __always_inline void fpu_stfpc(unsigned int *fpc)
{
	instrument_write(fpc, sizeof(*fpc));
	asm volatile("stfpc	%[fpc]"
		     : [fpc] "=Q" (*fpc)
		     :
		     : "memory");
}
138 
/* VECTOR ADD (byte elements): VR @v1 = VR @v2 + VR @v3. */
static __always_inline void fpu_vab(u8 v1, u8 v2, u8 v3)
{
	asm volatile("VAB	%[v1],%[v2],%[v3]"
		     :
		     : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3)
		     : "memory");
}
146 
/* VECTOR CHECKSUM: VR @v1 = checksum of VR @v2, starting value from VR @v3. */
static __always_inline void fpu_vcksm(u8 v1, u8 v2, u8 v3)
{
	asm volatile("VCKSM	%[v1],%[v2],%[v3]"
		     :
		     : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3)
		     : "memory");
}
154 
/* VECTOR ELEMENT SHIFT RIGHT ARITHMETIC (byte): @v1 = @v2 shifted per @v3. */
static __always_inline void fpu_vesravb(u8 v1, u8 v2, u8 v3)
{
	asm volatile("VESRAVB	%[v1],%[v2],%[v3]"
		     :
		     : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3)
		     : "memory");
}
162 
/*
 * VECTOR GALOIS FIELD MULTIPLY SUM AND ACCUMULATE (doubleword):
 * @v1 = GF(2) product-sum of @v2 and @v3, accumulated with @v4.
 */
static __always_inline void fpu_vgfmag(u8 v1, u8 v2, u8 v3, u8 v4)
{
	asm volatile("VGFMAG	%[v1],%[v2],%[v3],%[v4]"
		     :
		     : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3), [v4] "I" (v4)
		     : "memory");
}
170 
/* VECTOR GALOIS FIELD MULTIPLY SUM (doubleword): @v1 = GF(2) product-sum of @v2, @v3. */
static __always_inline void fpu_vgfmg(u8 v1, u8 v2, u8 v3)
{
	asm volatile("VGFMG	%[v1],%[v2],%[v3]"
		     :
		     : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3)
		     : "memory");
}
178 
179 #ifdef CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS
180 
/*
 * VECTOR LOAD: load 16 bytes from @vxr into VR @v1.
 * Uses the %O/%R operand modifiers to split the "Q" memory operand into
 * displacement and base register for the VL machine instruction.
 */
static __always_inline void fpu_vl(u8 v1, const void *vxr)
{
	instrument_read(vxr, sizeof(__vector128));
	asm volatile("VL	%[v1],%O[vxr],,%R[vxr]\n"
		     :
		     : [vxr] "Q" (*(__vector128 *)vxr),
		       [v1] "I" (v1)
		     : "memory");
}
190 
191 #else /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
192 
/*
 * VECTOR LOAD: load 16 bytes from @vxr into VR @v1.
 * Fallback for compilers without %O/%R asm format flags: compute the
 * address into GR 1 with "la" first, hence the extra "1" clobber.
 */
static __always_inline void fpu_vl(u8 v1, const void *vxr)
{
	instrument_read(vxr, sizeof(__vector128));
	asm volatile(
		"	la	1,%[vxr]\n"
		"	VL	%[v1],0,,1\n"
		:
		: [vxr] "R" (*(__vector128 *)vxr),
		  [v1] "I" (v1)
		: "memory", "1");
}
204 
205 #endif /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
206 
/* VECTOR LOAD ELEMENT IMMEDIATE (byte): set byte element @index of VR @v to @val. */
static __always_inline void fpu_vleib(u8 v, s16 val, u8 index)
{
	asm volatile("VLEIB	%[v],%[val],%[index]"
		     :
		     : [v] "I" (v), [val] "K" (val), [index] "I" (index)
		     : "memory");
}
214 
/*
 * VECTOR LOAD ELEMENT IMMEDIATE (doubleword): set doubleword element
 * @index of VR @v to the sign-extended 16-bit immediate @val.
 */
static __always_inline void fpu_vleig(u8 v, s16 val, u8 index)
{
	asm volatile("VLEIG	%[v],%[val],%[index]"
		     :
		     : [v] "I" (v), [val] "K" (val), [index] "I" (index)
		     : "memory");
}
222 
/*
 * VECTOR LOAD GR FROM VR ELEMENT (word): return word element @index of
 * VR @v in a general register.
 */
static __always_inline u64 fpu_vlgvf(u8 v, u16 index)
{
	u64 val;

	asm volatile("VLGVF	%[val],%[v],%[index]"
		     : [val] "=d" (val)
		     : [v] "I" (v), [index] "L" (index)
		     : "memory");
	return val;
}
233 
234 #ifdef CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS
235 
/*
 * VECTOR LOAD WITH LENGTH: load @index + 1 bytes (capped at 16) from
 * @vxr into VR @v1; remaining bytes of the register are zeroed.
 */
static __always_inline void fpu_vll(u8 v1, u32 index, const void *vxr)
{
	unsigned int size;

	/* VLL loads index + 1 bytes, at most a full vector register */
	size = min(index + 1, sizeof(__vector128));
	instrument_read(vxr, size);
	asm volatile("VLL	%[v1],%[index],%O[vxr],%R[vxr]\n"
		     :
		     : [vxr] "Q" (*(u8 *)vxr),
		       [index] "d" (index),
		       [v1] "I" (v1)
		     : "memory");
}
249 
250 #else /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
251 
/*
 * VECTOR LOAD WITH LENGTH: load @index + 1 bytes (capped at 16) from
 * @vxr into VR @v1. Fallback for compilers without %O/%R asm format
 * flags: address computed into GR 1, hence the extra "1" clobber.
 */
static __always_inline void fpu_vll(u8 v1, u32 index, const void *vxr)
{
	unsigned int size;

	/* VLL loads index + 1 bytes, at most a full vector register */
	size = min(index + 1, sizeof(__vector128));
	instrument_read(vxr, size);
	asm volatile(
		"	la	1,%[vxr]\n"
		"	VLL	%[v1],%[index],0,1\n"
		:
		: [vxr] "R" (*(u8 *)vxr),
		  [index] "d" (index),
		  [v1] "I" (v1)
		: "memory", "1");
}
267 
268 #endif /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
269 
270 #ifdef CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS
271 
/*
 * VECTOR LOAD MULTIPLE: load vector registers _v1 through _v3 from the
 * memory area at _vxrs. Evaluates to the number of registers loaded.
 * The anonymous struct gives the asm a memory operand of exactly the
 * accessed size, so instrumentation covers the whole range.
 */
#define fpu_vlm(_v1, _v3, _vxrs)					\
({									\
	unsigned int size = ((_v3) - (_v1) + 1) * sizeof(__vector128);	\
	struct {							\
		__vector128 _v[(_v3) - (_v1) + 1];			\
	} *_v = (void *)(_vxrs);					\
									\
	instrument_read(_v, size);					\
	asm volatile("VLM	%[v1],%[v3],%O[vxrs],%R[vxrs]\n"	\
		     :							\
		     : [vxrs] "Q" (*_v),				\
		       [v1] "I" (_v1), [v3] "I" (_v3)			\
		     : "memory");					\
	(_v3) - (_v1) + 1;						\
})
287 
288 #else /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
289 
/*
 * VECTOR LOAD MULTIPLE: load vector registers _v1 through _v3 from the
 * memory area at _vxrs. Evaluates to the number of registers loaded.
 * Fallback for compilers without %O/%R asm format flags: address is
 * computed into GR 1, hence the extra "1" clobber.
 */
#define fpu_vlm(_v1, _v3, _vxrs)					\
({									\
	unsigned int size = ((_v3) - (_v1) + 1) * sizeof(__vector128);	\
	struct {							\
		__vector128 _v[(_v3) - (_v1) + 1];			\
	} *_v = (void *)(_vxrs);					\
									\
	instrument_read(_v, size);					\
	asm volatile(							\
		"	la	1,%[vxrs]\n"				\
		"	VLM	%[v1],%[v3],0,1\n"			\
		:							\
		: [vxrs] "R" (*_v),					\
		  [v1] "I" (_v1), [v3] "I" (_v3)			\
		: "memory", "1");					\
	(_v3) - (_v1) + 1;						\
})
307 
308 #endif /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
309 
/* VECTOR LOAD REGISTER: copy VR @v2 to VR @v1. */
static __always_inline void fpu_vlr(u8 v1, u8 v2)
{
	asm volatile("VLR	%[v1],%[v2]"
		     :
		     : [v1] "I" (v1), [v2] "I" (v2)
		     : "memory");
}
317 
/* VECTOR LOAD VR ELEMENT FROM GR (word): set word element @index of VR @v to @val. */
static __always_inline void fpu_vlvgf(u8 v, u32 val, u16 index)
{
	asm volatile("VLVGF	%[v],%[val],%[index]"
		     :
		     : [v] "I" (v), [val] "d" (val), [index] "L" (index)
		     : "memory");
}
325 
/* VECTOR AND: VR @v1 = VR @v2 & VR @v3. */
static __always_inline void fpu_vn(u8 v1, u8 v2, u8 v3)
{
	asm volatile("VN	%[v1],%[v2],%[v3]"
		     :
		     : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3)
		     : "memory");
}
333 
/* VECTOR PERMUTE: select bytes from VRs @v2/@v3 per the pattern in VR @v4 into @v1. */
static __always_inline void fpu_vperm(u8 v1, u8 v2, u8 v3, u8 v4)
{
	asm volatile("VPERM	%[v1],%[v2],%[v3],%[v4]"
		     :
		     : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3), [v4] "I" (v4)
		     : "memory");
}
341 
/* VECTOR REPLICATE IMMEDIATE (byte): fill every byte element of VR @v1 with @i2. */
static __always_inline void fpu_vrepib(u8 v1, s16 i2)
{
	asm volatile("VREPIB	%[v1],%[i2]"
		     :
		     : [v1] "I" (v1), [i2] "K" (i2)
		     : "memory");
}
349 
/* VECTOR SHIFT RIGHT LOGICAL BY BYTE: @v1 = @v2 shifted right per VR @v3. */
static __always_inline void fpu_vsrlb(u8 v1, u8 v2, u8 v3)
{
	asm volatile("VSRLB	%[v1],%[v2],%[v3]"
		     :
		     : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3)
		     : "memory");
}
357 
358 #ifdef CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS
359 
/*
 * VECTOR STORE: store the 16 bytes of VR @v1 to @vxr.
 * NOTE(review): @vxr is const-qualified yet written through via the
 * "=Q" output cast — callers must pass writable memory.
 */
static __always_inline void fpu_vst(u8 v1, const void *vxr)
{
	instrument_write(vxr, sizeof(__vector128));
	asm volatile("VST	%[v1],%O[vxr],,%R[vxr]\n"
		     : [vxr] "=Q" (*(__vector128 *)vxr)
		     : [v1] "I" (v1)
		     : "memory");
}
368 
369 #else /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
370 
/*
 * VECTOR STORE: store the 16 bytes of VR @v1 to @vxr.
 * Fallback for compilers without %O/%R asm format flags: address is
 * computed into GR 1, hence the extra "1" clobber.
 */
static __always_inline void fpu_vst(u8 v1, const void *vxr)
{
	instrument_write(vxr, sizeof(__vector128));
	asm volatile(
		"	la	1,%[vxr]\n"
		"	VST	%[v1],0,,1\n"
		: [vxr] "=R" (*(__vector128 *)vxr)
		: [v1] "I" (v1)
		: "memory", "1");
}
381 
382 #endif /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
383 
384 #ifdef CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS
385 
/*
 * VECTOR STORE WITH LENGTH: store @index + 1 bytes (capped at 16) of
 * VR @v1 to @vxr.
 */
static __always_inline void fpu_vstl(u8 v1, u32 index, const void *vxr)
{
	unsigned int size;

	/* VSTL stores index + 1 bytes, at most a full vector register */
	size = min(index + 1, sizeof(__vector128));
	instrument_write(vxr, size);
	asm volatile("VSTL	%[v1],%[index],%O[vxr],%R[vxr]\n"
		     : [vxr] "=Q" (*(u8 *)vxr)
		     : [index] "d" (index), [v1] "I" (v1)
		     : "memory");
}
397 
398 #else /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
399 
/*
 * VECTOR STORE WITH LENGTH: store @index + 1 bytes (capped at 16) of
 * VR @v1 to @vxr. Fallback for compilers without %O/%R asm format
 * flags: address computed into GR 1, hence the extra "1" clobber.
 */
static __always_inline void fpu_vstl(u8 v1, u32 index, const void *vxr)
{
	unsigned int size;

	/* VSTL stores index + 1 bytes, at most a full vector register */
	size = min(index + 1, sizeof(__vector128));
	instrument_write(vxr, size);
	asm volatile(
		"	la	1,%[vxr]\n"
		"	VSTL	%[v1],%[index],0,1\n"
		: [vxr] "=R" (*(u8 *)vxr)
		: [index] "d" (index), [v1] "I" (v1)
		: "memory", "1");
}
413 
414 #endif /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
415 
416 #ifdef CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS
417 
/*
 * VECTOR STORE MULTIPLE: store vector registers _v1 through _v3 to the
 * memory area at _vxrs. Evaluates to the number of registers stored.
 * The anonymous struct gives the asm an output operand of exactly the
 * written size, so instrumentation covers the whole range.
 */
#define fpu_vstm(_v1, _v3, _vxrs)					\
({									\
	unsigned int size = ((_v3) - (_v1) + 1) * sizeof(__vector128);	\
	struct {							\
		__vector128 _v[(_v3) - (_v1) + 1];			\
	} *_v = (void *)(_vxrs);					\
									\
	instrument_write(_v, size);					\
	asm volatile("VSTM	%[v1],%[v3],%O[vxrs],%R[vxrs]\n"	\
		     : [vxrs] "=Q" (*_v)				\
		     : [v1] "I" (_v1), [v3] "I" (_v3)			\
		     : "memory");					\
	(_v3) - (_v1) + 1;						\
})
432 
433 #else /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
434 
/*
 * VECTOR STORE MULTIPLE: store vector registers _v1 through _v3 to the
 * memory area at _vxrs. Evaluates to the number of registers stored.
 * Fallback for compilers without %O/%R asm format flags: address is
 * computed into GR 1, hence the extra "1" clobber.
 */
#define fpu_vstm(_v1, _v3, _vxrs)					\
({									\
	unsigned int size = ((_v3) - (_v1) + 1) * sizeof(__vector128);	\
	struct {							\
		__vector128 _v[(_v3) - (_v1) + 1];			\
	} *_v = (void *)(_vxrs);					\
									\
	instrument_write(_v, size);					\
	asm volatile(							\
		"	la	1,%[vxrs]\n"				\
		"	VSTM	%[v1],%[v3],0,1\n"			\
		: [vxrs] "=R" (*_v)					\
		: [v1] "I" (_v1), [v3] "I" (_v3)			\
		: "memory", "1");					\
	(_v3) - (_v1) + 1;						\
})
451 
452 #endif /* CONFIG_CC_HAS_ASM_AOR_FORMAT_FLAGS */
453 
/* VECTOR UNPACK LOGICAL LOW (word): zero-extend the low word elements of @v2 into @v1. */
static __always_inline void fpu_vupllf(u8 v1, u8 v2)
{
	asm volatile("VUPLLF	%[v1],%[v2]"
		     :
		     : [v1] "I" (v1), [v2] "I" (v2)
		     : "memory");
}
461 
/* VECTOR EXCLUSIVE OR: VR @v1 = VR @v2 ^ VR @v3. */
static __always_inline void fpu_vx(u8 v1, u8 v2, u8 v3)
{
	asm volatile("VX	%[v1],%[v2],%[v3]"
		     :
		     : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3)
		     : "memory");
}
469 
/* VECTOR ZERO: clear all 128 bits of VR @v. */
static __always_inline void fpu_vzero(u8 v)
{
	asm volatile("VZERO	%[v]"
		     :
		     : [v] "I" (v)
		     : "memory");
}
477 
478 #endif /* __ASSEMBLY__ */
479 #endif	/* __ASM_S390_FPU_INSN_H */
480