xref: /linux/arch/loongarch/include/asm/fpu.h (revision e742bd199092e4991b559ca63d565457b519153a)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * Author: Huacai Chen <chenhuacai@loongson.cn>
4  * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
5  */
6 #ifndef _ASM_FPU_H
7 #define _ASM_FPU_H
8 
9 #include <linux/sched.h>
10 #include <linux/sched/task_stack.h>
11 #include <linux/ptrace.h>
12 #include <linux/thread_info.h>
13 #include <linux/bitops.h>
14 
15 #include <asm/cpu.h>
16 #include <asm/cpu-features.h>
17 #include <asm/current.h>
18 #include <asm/loongarch.h>
19 #include <asm/processor.h>
20 #include <asm/ptrace.h>
21 
22 struct sigcontext;
23 
24 #define kernel_fpu_available() cpu_has_fpu
25 
26 void kernel_fpu_begin(void);
27 void kernel_fpu_end(void);
28 
29 asmlinkage void _init_fpu(unsigned int);
30 asmlinkage void _save_fp(struct loongarch_fpu *);
31 asmlinkage void _restore_fp(struct loongarch_fpu *);
32 asmlinkage int _save_fp_context(void __user *fpregs, void __user *fcc, void __user *csr);
33 asmlinkage int _restore_fp_context(void __user *fpregs, void __user *fcc, void __user *csr);
34 
35 asmlinkage void _save_lsx(struct loongarch_fpu *fpu);
36 asmlinkage void _restore_lsx(struct loongarch_fpu *fpu);
37 asmlinkage void _init_lsx_upper(void);
38 asmlinkage void _restore_lsx_upper(struct loongarch_fpu *fpu);
39 asmlinkage int _save_lsx_context(void __user *fpregs, void __user *fcc, void __user *fcsr);
40 asmlinkage int _restore_lsx_context(void __user *fpregs, void __user *fcc, void __user *fcsr);
41 
42 asmlinkage void _save_lasx(struct loongarch_fpu *fpu);
43 asmlinkage void _restore_lasx(struct loongarch_fpu *fpu);
44 asmlinkage void _init_lasx_upper(void);
45 asmlinkage void _restore_lasx_upper(struct loongarch_fpu *fpu);
46 asmlinkage int _save_lasx_context(void __user *fpregs, void __user *fcc, void __user *fcsr);
47 asmlinkage int _restore_lasx_context(void __user *fpregs, void __user *fcc, void __user *fcsr);
48 
49 static inline void enable_lsx(void);
50 static inline void disable_lsx(void);
51 static inline void save_lsx(struct task_struct *t);
52 static inline void restore_lsx(struct task_struct *t);
53 
54 static inline void enable_lasx(void);
55 static inline void disable_lasx(void);
56 static inline void save_lasx(struct task_struct *t);
57 static inline void restore_lasx(struct task_struct *t);
58 
59 /*
60  * Mask the FCSR Cause bits according to the Enable bits, observing
61  * that Unimplemented is always enabled.
62  */
mask_fcsr_x(unsigned long fcsr)63 static inline unsigned long mask_fcsr_x(unsigned long fcsr)
64 {
65 	return fcsr & ((fcsr & FPU_CSR_ALL_E) <<
66 			(ffs(FPU_CSR_ALL_X) - ffs(FPU_CSR_ALL_E)));
67 }
68 
is_fp_enabled(void)69 static inline int is_fp_enabled(void)
70 {
71 	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_FPEN) ?
72 		1 : 0;
73 }
74 
is_lsx_enabled(void)75 static inline int is_lsx_enabled(void)
76 {
77 	if (!cpu_has_lsx)
78 		return 0;
79 
80 	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_LSXEN) ?
81 		1 : 0;
82 }
83 
is_lasx_enabled(void)84 static inline int is_lasx_enabled(void)
85 {
86 	if (!cpu_has_lasx)
87 		return 0;
88 
89 	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_LASXEN) ?
90 		1 : 0;
91 }
92 
/* Is any SIMD unit (LSX or LASX) currently enabled? */
static inline int is_simd_enabled(void)
{
	int lsx_on = is_lsx_enabled();
	int lasx_on = is_lasx_enabled();

	/* Bitwise OR (not ||) keeps both checks unconditionally evaluated. */
	return lsx_on | lasx_on;
}
97 
/* Set the FPU enable bit in the live EUEN CSR. */
#define enable_fpu()		set_csr_euen(CSR_EUEN_FPEN)

/* Clear the FPU enable bit in the live EUEN CSR. */
#define disable_fpu()		clear_csr_euen(CSR_EUEN_FPEN)

/* Mark the current task as no longer owning the hardware FPU. */
#define clear_fpu_owner()	clear_thread_flag(TIF_USEDFPU)
103 
/* Does the current task own the hardware FPU state? */
static inline int is_fpu_owner(void)
{
	return test_thread_flag(TIF_USEDFPU);
}
108 
/*
 * Take FPU ownership for the current task: enable the FPU in the live
 * EUEN CSR, flag the task as owner, and record the enable bit in the
 * task's saved EUEN image so it persists across context switches.
 * NOTE(review): callers appear to run non-preemptibly (see own_fpu());
 * confirm before adding new call sites.
 */
static inline void __own_fpu(void)
{
	enable_fpu();
	set_thread_flag(TIF_USEDFPU);
	KSTK_EUEN(current) |= CSR_EUEN_FPEN;
}
115 
own_fpu_inatomic(int restore)116 static inline void own_fpu_inatomic(int restore)
117 {
118 	if (cpu_has_fpu && !is_fpu_owner()) {
119 		__own_fpu();
120 		if (restore)
121 			_restore_fp(&current->thread.fpu);
122 	}
123 }
124 
/*
 * Take FPU ownership for the current task with preemption disabled
 * around the ownership change; @restore != 0 also reloads the task's
 * saved FP register state.
 */
static inline void own_fpu(int restore)
{
	preempt_disable();
	own_fpu_inatomic(restore);
	preempt_enable();
}
131 
/*
 * Release FPU/SIMD ownership for @tsk, optionally saving the live
 * register state into @tsk's thread struct first (@save != 0).
 * Callers run with preemption disabled (see lose_fpu()).
 */
static inline void lose_fpu_inatomic(int save, struct task_struct *tsk)
{
	if (is_fpu_owner()) {
		if (!is_simd_enabled()) {
			/* Scalar FPU only: save just the FP registers. */
			if (save)
				_save_fp(&tsk->thread.fpu);
			disable_fpu();
		} else {
			/*
			 * SIMD is live: save via the widest enabled unit.
			 * NOTE(review): assumes the LSX/LASX save paths
			 * also capture the scalar FP state — confirm
			 * against the asm helpers.
			 */
			if (save) {
				if (!is_lasx_enabled())
					save_lsx(tsk);
				else
					save_lasx(tsk);
			}
			disable_fpu();
			disable_lsx();
			disable_lasx();
			clear_tsk_thread_flag(tsk, TIF_USEDSIMD);
		}
		clear_tsk_thread_flag(tsk, TIF_USEDFPU);
	}
	/* Clear all FP/SIMD enables in the task's saved EUEN image. */
	KSTK_EUEN(tsk) &= ~(CSR_EUEN_FPEN | CSR_EUEN_LSXEN | CSR_EUEN_LASXEN);
}
155 
/*
 * Release FPU/SIMD ownership for the current task, with preemption
 * disabled around the teardown; @save != 0 saves the live registers
 * into the thread struct first.
 */
static inline void lose_fpu(int save)
{
	preempt_disable();
	lose_fpu_inatomic(save, current);
	preempt_enable();
}
162 
/*
 * Take FPU ownership and initialize the hardware FP unit for the
 * current task, programming the task's saved FCSR value, then mark
 * the task as having used math.
 */
static inline void init_fpu(void)
{
	unsigned int fcsr = current->thread.fpu.fcsr;

	__own_fpu();
	_init_fpu(fcsr);
	set_used_math();
}
171 
/* Save @tsk's FP registers into its thread struct (no-op without FPU). */
static inline void save_fp(struct task_struct *tsk)
{
	if (cpu_has_fpu)
		_save_fp(&tsk->thread.fpu);
}
177 
/* Restore @tsk's FP registers from its thread struct (no-op without FPU). */
static inline void restore_fp(struct task_struct *tsk)
{
	if (cpu_has_fpu)
		_restore_fp(&tsk->thread.fpu);
}
183 
/*
 * Save @tsk's live FP/SIMD registers into its thread struct.  Only the
 * current task can have live registers, so this is a no-op for any
 * other @tsk.  The widest currently-enabled unit is saved (LASX, else
 * LSX, else scalar FP); the #ifdef blocks chain into a single
 * if/else-if ladder.
 */
static inline void save_fpu_regs(struct task_struct *tsk)
{
	unsigned int euen;

	if (tsk == current) {
		preempt_disable();

		euen = csr_read32(LOONGARCH_CSR_EUEN);

#ifdef CONFIG_CPU_HAS_LASX
		if (euen & CSR_EUEN_LASXEN)
			_save_lasx(&current->thread.fpu);
		else
#endif
#ifdef CONFIG_CPU_HAS_LSX
		if (euen & CSR_EUEN_LSXEN)
			_save_lsx(&current->thread.fpu);
		else
#endif
		if (euen & CSR_EUEN_FPEN)
			_save_fp(&current->thread.fpu);

		preempt_enable();
	}
}
209 
/* Does the current task own live SIMD (LSX/LASX) hardware state? */
static inline int is_simd_owner(void)
{
	return test_thread_flag(TIF_USEDSIMD);
}
214 
215 #ifdef CONFIG_CPU_HAS_LSX
216 
/* Set the LSX enable bit in the live EUEN CSR (no-op without LSX). */
static inline void enable_lsx(void)
{
	if (cpu_has_lsx)
		csr_xchg32(CSR_EUEN_LSXEN, CSR_EUEN_LSXEN, LOONGARCH_CSR_EUEN);
}
222 
/* Clear the LSX enable bit in the live EUEN CSR (no-op without LSX). */
static inline void disable_lsx(void)
{
	if (cpu_has_lsx)
		csr_xchg32(0, CSR_EUEN_LSXEN, LOONGARCH_CSR_EUEN);
}
228 
/* Save @t's LSX state via the _save_lsx() asm helper (no-op without LSX). */
static inline void save_lsx(struct task_struct *t)
{
	if (cpu_has_lsx)
		_save_lsx(&t->thread.fpu);
}
234 
/* Restore @t's LSX state via the _restore_lsx() asm helper (no-op without LSX). */
static inline void restore_lsx(struct task_struct *t)
{
	if (cpu_has_lsx)
		_restore_lsx(&t->thread.fpu);
}
240 
/* Availability-guarded wrapper around the _init_lsx_upper() asm helper. */
static inline void init_lsx_upper(void)
{
	if (cpu_has_lsx)
		_init_lsx_upper();
}
246 
/* Availability-guarded wrapper around the _restore_lsx_upper() asm helper. */
static inline void restore_lsx_upper(struct task_struct *t)
{
	if (cpu_has_lsx)
		_restore_lsx_upper(&t->thread.fpu);
}
252 
253 #else
/* Stubs used when the kernel is built without LSX support. */
static inline void enable_lsx(void) {}
static inline void disable_lsx(void) {}
static inline void save_lsx(struct task_struct *t) {}
static inline void restore_lsx(struct task_struct *t) {}
static inline void init_lsx_upper(void) {}
static inline void restore_lsx_upper(struct task_struct *t) {}
260 #endif
261 
262 #ifdef CONFIG_CPU_HAS_LASX
263 
enable_lasx(void)264 static inline void enable_lasx(void)
265 {
266 
267 	if (cpu_has_lasx)
268 		csr_xchg32(CSR_EUEN_LASXEN, CSR_EUEN_LASXEN, LOONGARCH_CSR_EUEN);
269 }
270 
/* Clear the LASX enable bit in the live EUEN CSR (no-op without LASX). */
static inline void disable_lasx(void)
{
	if (cpu_has_lasx)
		csr_xchg32(0, CSR_EUEN_LASXEN, LOONGARCH_CSR_EUEN);
}
276 
/* Save @t's LASX state via the _save_lasx() asm helper (no-op without LASX). */
static inline void save_lasx(struct task_struct *t)
{
	if (cpu_has_lasx)
		_save_lasx(&t->thread.fpu);
}
282 
/* Restore @t's LASX state via the _restore_lasx() asm helper (no-op without LASX). */
static inline void restore_lasx(struct task_struct *t)
{
	if (cpu_has_lasx)
		_restore_lasx(&t->thread.fpu);
}
288 
/* Availability-guarded wrapper around the _init_lasx_upper() asm helper. */
static inline void init_lasx_upper(void)
{
	if (cpu_has_lasx)
		_init_lasx_upper();
}
294 
/* Availability-guarded wrapper around the _restore_lasx_upper() asm helper. */
static inline void restore_lasx_upper(struct task_struct *t)
{
	if (cpu_has_lasx)
		_restore_lasx_upper(&t->thread.fpu);
}
300 
301 #else
/* Stubs used when the kernel is built without LASX support. */
static inline void enable_lasx(void) {}
static inline void disable_lasx(void) {}
static inline void save_lasx(struct task_struct *t) {}
static inline void restore_lasx(struct task_struct *t) {}
static inline void init_lasx_upper(void) {}
static inline void restore_lasx_upper(struct task_struct *t) {}
308 #endif
309 
thread_lsx_context_live(void)310 static inline int thread_lsx_context_live(void)
311 {
312 	if (!cpu_has_lsx)
313 		return 0;
314 
315 	return test_thread_flag(TIF_LSX_CTX_LIVE);
316 }
317 
thread_lasx_context_live(void)318 static inline int thread_lasx_context_live(void)
319 {
320 	if (!cpu_has_lasx)
321 		return 0;
322 
323 	return test_thread_flag(TIF_LASX_CTX_LIVE);
324 }
325 
326 #endif /* _ASM_FPU_H */
327