/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_FP_H
#define __ASM_FP_H

#include <asm/errno.h>
#include <asm/ptrace.h>
#include <asm/processor.h>
#include <asm/sigcontext.h>
#include <asm/sysreg.h>

#ifndef __ASSEMBLY__

#include <linux/bitmap.h>
#include <linux/build_bug.h>
#include <linux/bug.h>
#include <linux/cache.h>
#include <linux/init.h>
#include <linux/stddef.h>
#include <linux/types.h>

#ifdef CONFIG_COMPAT
/* Masks for extracting the FPSR and FPCR from the FPSCR */
#define VFP_FPSCR_STAT_MASK	0xf800009f
#define VFP_FPSCR_CTRL_MASK	0x07f79f00
/*
 * The VFP state has 32x64-bit registers and a single 32-bit
 * control/status register.
 */
#define VFP_STATE_SIZE		((32 * 8) + 4)
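/*
 * Illustrative check only, not part of the ABI definition above: 32
 * doubleword registers at 8 bytes each plus one 32-bit FPSCR word come
 * to 260 bytes, which can be pinned down at compile time with
 * static_assert() from <linux/build_bug.h>.
 */
static_assert(VFP_STATE_SIZE == 260);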
#endif /* CONFIG_COMPAT */

struct task_struct;

extern void fpsimd_save_state(struct user_fpsimd_state *state);
extern void fpsimd_load_state(struct user_fpsimd_state *state);

extern void fpsimd_thread_switch(struct task_struct *next);
extern void fpsimd_flush_thread(void);

extern void fpsimd_signal_preserve_current_state(void);
extern void fpsimd_preserve_current_state(void);
extern void fpsimd_restore_current_state(void);
extern void fpsimd_update_current_state(struct user_fpsimd_state const *state);

extern void fpsimd_bind_task_to_cpu(void);
extern void fpsimd_bind_state_to_cpu(struct user_fpsimd_state *state,
				     void *sve_state, unsigned int sve_vl);

extern void fpsimd_flush_task_state(struct task_struct *target);
extern void fpsimd_save_and_flush_cpu_state(void);

/* Maximum VL that SVE VL-agnostic software can transparently support */
#define SVE_VL_ARCH_MAX 0x100

/* Offset of FFR in the SVE register dump */
static inline size_t sve_ffr_offset(int vl)
{
	return SVE_SIG_FFR_OFFSET(sve_vq_from_vl(vl)) - SVE_SIG_REGS_OFFSET;
}

static inline void *sve_pffr(struct thread_struct *thread)
{
	return (char *)thread->sve_state + sve_ffr_offset(thread->sve_vl);
}
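/*
 * A sketch of the calling pattern for the helper above, assuming
 * preemption is disabled and thread->sve_state has been allocated:
 * sve_pffr() computes the address of the FFR within the task's SVE
 * register dump, and that is the state pointer which sve_save_state()
 * and sve_load_state() (declared below) expect, e.g.:
 *
 *	sve_load_state(sve_pffr(&current->thread),
 *		       &current->thread.uw.fpsimd_state.fpsr,
 *		       sve_vq_from_vl(current->thread.sve_vl) - 1);
 */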

extern void sve_save_state(void *state, u32 *pfpsr);
extern void sve_load_state(void const *state, u32 const *pfpsr,
			   unsigned long vq_minus_1);
extern void sve_flush_live(void);
extern void sve_load_from_fpsimd_state(struct user_fpsimd_state const *state,
				       unsigned long vq_minus_1);
extern unsigned int sve_get_vl(void);
extern void sve_set_vq(unsigned long vq_minus_1);

struct arm64_cpu_capabilities;
extern void sve_kernel_enable(const struct arm64_cpu_capabilities *__unused);

extern u64 read_zcr_features(void);

extern int __ro_after_init sve_max_vl;
extern int __ro_after_init sve_max_virtualisable_vl;
extern __ro_after_init DECLARE_BITMAP(sve_vq_map, SVE_VQ_MAX);

/*
 * Helpers to translate bit indices in sve_vq_map to VQ values (and
 * vice versa).  This allows find_next_bit() to be used to find the
 * _maximum_ VQ not exceeding a certain value.
 */
static inline unsigned int __vq_to_bit(unsigned int vq)
{
	return SVE_VQ_MAX - vq;
}

static inline unsigned int __bit_to_vq(unsigned int bit)
{
	return SVE_VQ_MAX - bit;
}

/* Ensure vq >= SVE_VQ_MIN && vq <= SVE_VQ_MAX before calling this function */
static inline bool sve_vq_available(unsigned int vq)
{
	return test_bit(__vq_to_bit(vq), sve_vq_map);
}
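/*
 * A sketch of the look-up the comment above alludes to: because
 * __vq_to_bit() maps larger VQs to smaller bit indices, a
 * find_next_bit() search starting at __vq_to_bit(vq) yields the
 * largest supported VQ not exceeding vq. Variable names here are
 * illustrative only:
 *
 *	unsigned int bit = find_next_bit(sve_vq_map, SVE_VQ_MAX,
 *					 __vq_to_bit(vq));
 *	if (bit < SVE_VQ_MAX)
 *		max_vq = __bit_to_vq(bit);
 *
 * where bit == SVE_VQ_MAX means no supported VQ is <= vq.
 */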

#ifdef CONFIG_ARM64_SVE

extern size_t sve_state_size(struct task_struct const *task);

extern void sve_alloc(struct task_struct *task);
extern void fpsimd_release_task(struct task_struct *task);
extern void fpsimd_sync_to_sve(struct task_struct *task);
extern void sve_sync_to_fpsimd(struct task_struct *task);
extern void sve_sync_from_fpsimd_zeropad(struct task_struct *task);

extern int sve_set_vector_length(struct task_struct *task,
				 unsigned long vl, unsigned long flags);

extern int sve_set_current_vl(unsigned long arg);
extern int sve_get_current_vl(void);

static inline void sve_user_disable(void)
{
	sysreg_clear_set(cpacr_el1, CPACR_EL1_ZEN_EL0EN, 0);
}

static inline void sve_user_enable(void)
{
	sysreg_clear_set(cpacr_el1, 0, CPACR_EL1_ZEN_EL0EN);
}

#define sve_cond_update_zcr_vq(val, reg)		\
	do {						\
		u64 __zcr = read_sysreg_s((reg));	\
		u64 __new = __zcr & ~ZCR_ELx_LEN_MASK;	\
		__new |= (val) & ZCR_ELx_LEN_MASK;	\
		if (__zcr != __new)			\
			write_sysreg_s(__new, (reg));	\
	} while (0)
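/*
 * Usage sketch with illustrative values: if vq holds the desired
 * vector length in quadwords,
 *
 *	sve_cond_update_zcr_vq(vq - 1, SYS_ZCR_EL1);
 *
 * constrains ZCR_EL1.LEN accordingly while skipping the register write
 * when it would be a no-op; passing ZCR_ELx_LEN_MASK as the value
 * requests the maximum vector length the hardware implements.
 */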

/*
 * Probing and setup functions.
 * Calls to these functions must be serialised with one another.
 */
extern void __init sve_init_vq_map(void);
extern void sve_update_vq_map(void);
extern int sve_verify_vq_map(void);
extern void __init sve_setup(void);

#else /* ! CONFIG_ARM64_SVE */

static inline void sve_alloc(struct task_struct *task) { }
static inline void fpsimd_release_task(struct task_struct *task) { }
static inline void sve_sync_to_fpsimd(struct task_struct *task) { }
static inline void sve_sync_from_fpsimd_zeropad(struct task_struct *task) { }

static inline int sve_set_current_vl(unsigned long arg)
{
	return -EINVAL;
}

static inline int sve_get_current_vl(void)
{
	return -EINVAL;
}

static inline void sve_user_disable(void) { BUILD_BUG(); }
static inline void sve_user_enable(void) { BUILD_BUG(); }

#define sve_cond_update_zcr_vq(val, reg) do { } while (0)

static inline void sve_init_vq_map(void) { }
static inline void sve_update_vq_map(void) { }
static inline int sve_verify_vq_map(void) { return 0; }
static inline void sve_setup(void) { }

#endif /* ! CONFIG_ARM64_SVE */

/* For use by EFI runtime services calls only */
extern void __efi_fpsimd_begin(void);
extern void __efi_fpsimd_end(void);
#endif /* ! __ASSEMBLY__ */

#endif /* __ASM_FP_H */