xref: /linux/arch/mips/kernel/r4k_fpu.S (revision ca64d84e93762f4e587e040a44ad9f6089afc777)
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 98, 99, 2000, 01 Ralf Baechle
 *
 * Multi-arch abstraction and asm macros for easier reading:
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 *
 * Carsten Langgaard, carstenl@mips.com
 * Copyright (C) 2000 MIPS Technologies, Inc.
 * Copyright (C) 1999, 2001 Silicon Graphics, Inc.
 */
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/errno.h>
#include <asm/export.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

/* The preprocessor would otherwise replace the fp in ".set fp=64" below with $30 */
#undef fp

	.macro	EX insn, reg, src
	.set	push
	SET_HARDFLOAT
	.set	nomacro
.ex\@:	\insn	\reg, \src
	.set	pop
	.section __ex_table,"a"
	PTR	.ex\@, fault
	.previous
	.endm
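
/*
 * Each EX access gets an entry in __ex_table, so a fault on the user
 * sigcontext pointer branches to the fault handler at the bottom of this
 * file and the enclosing routine returns -EFAULT instead of oopsing.
 */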

/*
 * Save a thread's fp context.
 */
LEAF(_save_fp)
EXPORT_SYMBOL(_save_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPSR2) || \
		defined(CONFIG_CPU_MIPSR6)
	mfc0	t0, CP0_STATUS
#endif
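	/*
	 * a0 points at the task_struct of the thread being saved; the
	 * fpu_save_double macro (asm/asmmacro.h) indexes it with the
	 * THREAD_FPR* offsets.  On configurations that may run with
	 * Status.FR=1 (64-bit, R2, R6), t0 carries CP0 Status so the
	 * macro can skip the odd-numbered FP registers when FR=0.
	 */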
	fpu_save_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPSR2) || \
		defined(CONFIG_CPU_MIPSR6)
	mfc0	t0, CP0_STATUS
#endif
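	/* As in _save_fp: t0 carries CP0 Status for the Status.FR check in fpu_restore_double. */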
	fpu_restore_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_restore_fp)

#ifdef CONFIG_CPU_HAS_MSA

/*
 * Save a thread's MSA vector context.
 */
LEAF(_save_msa)
EXPORT_SYMBOL(_save_msa)
	msa_save_all	a0
	jr	ra
	END(_save_msa)

/*
 * Restore a thread's MSA vector context.
 */
LEAF(_restore_msa)
	msa_restore_all	a0
	jr	ra
	END(_restore_msa)

LEAF(_init_msa_upper)
	msa_init_all_upper
	jr	ra
	END(_init_msa_upper)

#endif

	.set	noreorder

/**
 * _save_fp_context() - save FP context from the FPU
 * @a0 - pointer to fpregs field of sigcontext
 * @a1 - pointer to fpc_csr field of sigcontext
 *
 * Save FP context, including the 32 FP data registers and the FP
 * control & status register, from the FPU to signal context.
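 *
 * Returns 0 in v0 on success, or -EFAULT if one of the stores to the
 * signal context faults.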
 */
LEAF(_save_fp_context)
	.set	push
	SET_HARDFLOAT
	cfc1	t1, fcr31
	.set	pop

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPSR2) || \
		defined(CONFIG_CPU_MIPSR6)
	.set	push
	SET_HARDFLOAT
#ifdef CONFIG_CPU_MIPSR2
	.set	mips32r2
	.set	fp=64
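	/*
	 * Status.FR is bit 26; shifting it up to the sign bit lets the
	 * bgez below skip the odd-numbered registers when FR=0, where
	 * they only alias the upper halves of the even-numbered doubles.
	 */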
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip storing odd if FR=0
	 nop
#endif
	/* Store the 16 odd double precision registers */
	EX	sdc1 $f1, 8(a0)
	EX	sdc1 $f3, 24(a0)
	EX	sdc1 $f5, 40(a0)
	EX	sdc1 $f7, 56(a0)
	EX	sdc1 $f9, 72(a0)
	EX	sdc1 $f11, 88(a0)
	EX	sdc1 $f13, 104(a0)
	EX	sdc1 $f15, 120(a0)
	EX	sdc1 $f17, 136(a0)
	EX	sdc1 $f19, 152(a0)
	EX	sdc1 $f21, 168(a0)
	EX	sdc1 $f23, 184(a0)
	EX	sdc1 $f25, 200(a0)
	EX	sdc1 $f27, 216(a0)
	EX	sdc1 $f29, 232(a0)
	EX	sdc1 $f31, 248(a0)
1:	.set	pop
#endif

	.set push
	SET_HARDFLOAT
	/* Store the 16 even double precision registers */
	EX	sdc1 $f0, 0(a0)
	EX	sdc1 $f2, 16(a0)
	EX	sdc1 $f4, 32(a0)
	EX	sdc1 $f6, 48(a0)
	EX	sdc1 $f8, 64(a0)
	EX	sdc1 $f10, 80(a0)
	EX	sdc1 $f12, 96(a0)
	EX	sdc1 $f14, 112(a0)
	EX	sdc1 $f16, 128(a0)
	EX	sdc1 $f18, 144(a0)
	EX	sdc1 $f20, 160(a0)
	EX	sdc1 $f22, 176(a0)
	EX	sdc1 $f24, 192(a0)
	EX	sdc1 $f26, 208(a0)
	EX	sdc1 $f28, 224(a0)
	EX	sdc1 $f30, 240(a0)
	EX	sw t1, 0(a1)
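	/*
	 * Success path: a faulting EX access above branches to fault at
	 * the bottom of this file and returns -EFAULT instead.  The li
	 * executes in the jr delay slot (.set noreorder is in effect).
	 */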
	jr	ra
	 li	v0, 0					# success
	.set pop
	END(_save_fp_context)

/**
 * _restore_fp_context() - restore FP context to the FPU
 * @a0 - pointer to fpregs field of sigcontext
 * @a1 - pointer to fpc_csr field of sigcontext
 *
 * Restore FP context, including the 32 FP data registers and the FP
 * control & status register, from signal context to the FPU.
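 *
 * Returns 0 in v0 on success, or -EFAULT if one of the loads from the
 * signal context faults.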
 */
LEAF(_restore_fp_context)
	EX	lw t1, 0(a1)

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPSR2) || \
		defined(CONFIG_CPU_MIPSR6)
	.set	push
	SET_HARDFLOAT
#ifdef CONFIG_CPU_MIPSR2
	.set	mips32r2
	.set	fp=64
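	/* Same Status.FR test as in _save_fp_context: skip the odd registers when FR=0. */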
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip loading odd if FR=0
	 nop
#endif
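	/* Load the 16 odd double precision registers */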
	EX	ldc1 $f1, 8(a0)
	EX	ldc1 $f3, 24(a0)
	EX	ldc1 $f5, 40(a0)
	EX	ldc1 $f7, 56(a0)
	EX	ldc1 $f9, 72(a0)
	EX	ldc1 $f11, 88(a0)
	EX	ldc1 $f13, 104(a0)
	EX	ldc1 $f15, 120(a0)
	EX	ldc1 $f17, 136(a0)
	EX	ldc1 $f19, 152(a0)
	EX	ldc1 $f21, 168(a0)
	EX	ldc1 $f23, 184(a0)
	EX	ldc1 $f25, 200(a0)
	EX	ldc1 $f27, 216(a0)
	EX	ldc1 $f29, 232(a0)
	EX	ldc1 $f31, 248(a0)
1:	.set pop
#endif
	.set push
	SET_HARDFLOAT
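	/* Load the 16 even double precision registers */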
	EX	ldc1 $f0, 0(a0)
	EX	ldc1 $f2, 16(a0)
	EX	ldc1 $f4, 32(a0)
	EX	ldc1 $f6, 48(a0)
	EX	ldc1 $f8, 64(a0)
	EX	ldc1 $f10, 80(a0)
	EX	ldc1 $f12, 96(a0)
	EX	ldc1 $f14, 112(a0)
	EX	ldc1 $f16, 128(a0)
	EX	ldc1 $f18, 144(a0)
	EX	ldc1 $f20, 160(a0)
	EX	ldc1 $f22, 176(a0)
	EX	ldc1 $f24, 192(a0)
	EX	ldc1 $f26, 208(a0)
	EX	ldc1 $f28, 224(a0)
	EX	ldc1 $f30, 240(a0)
	ctc1	t1, fcr31
	.set pop
	jr	ra
	 li	v0, 0					# success
	END(_restore_fp_context)

#ifdef CONFIG_CPU_HAS_MSA

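/*
 * MSA register index to instruction dispatch: .align 4 pads each
 * op_one_wr stub to 16 bytes, so op_msa_wr can jump to 0f + (a0 << 4)
 * to reach the stub that moves vector register a0 to/from the buffer
 * at a1.
 */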
	.macro	op_one_wr	op, idx, base
	.align	4
\idx:	\op	\idx, 0, \base
	jr	ra
	 nop
	.endm

	.macro	op_msa_wr	name, op
LEAF(\name)
	.set		push
	.set		noreorder
	sll		t0, a0, 4
	PTR_LA		t1, 0f
	PTR_ADDU	t0, t0, t1
	jr		t0
	  nop
	op_one_wr	\op, 0, a1
	op_one_wr	\op, 1, a1
	op_one_wr	\op, 2, a1
	op_one_wr	\op, 3, a1
	op_one_wr	\op, 4, a1
	op_one_wr	\op, 5, a1
	op_one_wr	\op, 6, a1
	op_one_wr	\op, 7, a1
	op_one_wr	\op, 8, a1
	op_one_wr	\op, 9, a1
	op_one_wr	\op, 10, a1
	op_one_wr	\op, 11, a1
	op_one_wr	\op, 12, a1
	op_one_wr	\op, 13, a1
	op_one_wr	\op, 14, a1
	op_one_wr	\op, 15, a1
	op_one_wr	\op, 16, a1
	op_one_wr	\op, 17, a1
	op_one_wr	\op, 18, a1
	op_one_wr	\op, 19, a1
	op_one_wr	\op, 20, a1
	op_one_wr	\op, 21, a1
	op_one_wr	\op, 22, a1
	op_one_wr	\op, 23, a1
	op_one_wr	\op, 24, a1
	op_one_wr	\op, 25, a1
	op_one_wr	\op, 26, a1
	op_one_wr	\op, 27, a1
	op_one_wr	\op, 28, a1
	op_one_wr	\op, 29, a1
	op_one_wr	\op, 30, a1
	op_one_wr	\op, 31, a1
	.set		pop
	END(\name)
	.endm

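/*
 * Instantiate one accessor per MSA element format: read_msa_wr_* store
 * vector register a0 to the buffer at a1, write_msa_wr_* load it back.
 * The b/h/w/d suffix selects the element width used for the ld/st.
 */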
	op_msa_wr	read_msa_wr_b, st_b
	op_msa_wr	read_msa_wr_h, st_h
	op_msa_wr	read_msa_wr_w, st_w
	op_msa_wr	read_msa_wr_d, st_d

	op_msa_wr	write_msa_wr_b, ld_b
	op_msa_wr	write_msa_wr_h, ld_h
	op_msa_wr	write_msa_wr_w, ld_w
	op_msa_wr	write_msa_wr_d, ld_d

#endif /* CONFIG_CPU_HAS_MSA */

#ifdef CONFIG_CPU_HAS_MSA

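/*
 * Save the upper 64 bits of MSA vector register \wr (the half not
 * covered by the scalar FP save) to \off(\base).  On 64-bit kernels
 * this is element 1 as a doubleword; on 32-bit kernels elements 2 and 3
 * are stored as two words, ordered so the in-memory layout matches the
 * 64-bit doubleword store on either endianness.  Goes through $1,
 * hence .set noat.
 */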
	.macro	save_msa_upper	wr, off, base
	.set	push
	.set	noat
#ifdef CONFIG_64BIT
	copy_s_d \wr, 1
	EX sd	$1, \off(\base)
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	copy_s_w \wr, 2
	EX sw	$1, \off(\base)
	copy_s_w \wr, 3
	EX sw	$1, (\off+4)(\base)
#else /* CONFIG_CPU_BIG_ENDIAN */
	copy_s_w \wr, 2
	EX sw	$1, (\off+4)(\base)
	copy_s_w \wr, 3
	EX sw	$1, \off(\base)
#endif
	.set	pop
	.endm

LEAF(_save_msa_all_upper)
	save_msa_upper	0, 0x00, a0
	save_msa_upper	1, 0x08, a0
	save_msa_upper	2, 0x10, a0
	save_msa_upper	3, 0x18, a0
	save_msa_upper	4, 0x20, a0
	save_msa_upper	5, 0x28, a0
	save_msa_upper	6, 0x30, a0
	save_msa_upper	7, 0x38, a0
	save_msa_upper	8, 0x40, a0
	save_msa_upper	9, 0x48, a0
	save_msa_upper	10, 0x50, a0
	save_msa_upper	11, 0x58, a0
	save_msa_upper	12, 0x60, a0
	save_msa_upper	13, 0x68, a0
	save_msa_upper	14, 0x70, a0
	save_msa_upper	15, 0x78, a0
	save_msa_upper	16, 0x80, a0
	save_msa_upper	17, 0x88, a0
	save_msa_upper	18, 0x90, a0
	save_msa_upper	19, 0x98, a0
	save_msa_upper	20, 0xa0, a0
	save_msa_upper	21, 0xa8, a0
	save_msa_upper	22, 0xb0, a0
	save_msa_upper	23, 0xb8, a0
	save_msa_upper	24, 0xc0, a0
	save_msa_upper	25, 0xc8, a0
	save_msa_upper	26, 0xd0, a0
	save_msa_upper	27, 0xd8, a0
	save_msa_upper	28, 0xe0, a0
	save_msa_upper	29, 0xe8, a0
	save_msa_upper	30, 0xf0, a0
	save_msa_upper	31, 0xf8, a0
	jr	ra
	 li	v0, 0
	END(_save_msa_all_upper)

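/* Inverse of save_msa_upper: reload the upper 64 bits of \wr via insert_d/insert_w. */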
	.macro	restore_msa_upper	wr, off, base
	.set	push
	.set	noat
#ifdef CONFIG_64BIT
	EX ld	$1, \off(\base)
	insert_d \wr, 1
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	EX lw	$1, \off(\base)
	insert_w \wr, 2
	EX lw	$1, (\off+4)(\base)
	insert_w \wr, 3
#else /* CONFIG_CPU_BIG_ENDIAN */
	EX lw	$1, (\off+4)(\base)
	insert_w \wr, 2
	EX lw	$1, \off(\base)
	insert_w \wr, 3
#endif
	.set	pop
	.endm

LEAF(_restore_msa_all_upper)
	restore_msa_upper	0, 0x00, a0
	restore_msa_upper	1, 0x08, a0
	restore_msa_upper	2, 0x10, a0
	restore_msa_upper	3, 0x18, a0
	restore_msa_upper	4, 0x20, a0
	restore_msa_upper	5, 0x28, a0
	restore_msa_upper	6, 0x30, a0
	restore_msa_upper	7, 0x38, a0
	restore_msa_upper	8, 0x40, a0
	restore_msa_upper	9, 0x48, a0
	restore_msa_upper	10, 0x50, a0
	restore_msa_upper	11, 0x58, a0
	restore_msa_upper	12, 0x60, a0
	restore_msa_upper	13, 0x68, a0
	restore_msa_upper	14, 0x70, a0
	restore_msa_upper	15, 0x78, a0
	restore_msa_upper	16, 0x80, a0
	restore_msa_upper	17, 0x88, a0
	restore_msa_upper	18, 0x90, a0
	restore_msa_upper	19, 0x98, a0
	restore_msa_upper	20, 0xa0, a0
	restore_msa_upper	21, 0xa8, a0
	restore_msa_upper	22, 0xb0, a0
	restore_msa_upper	23, 0xb8, a0
	restore_msa_upper	24, 0xc0, a0
	restore_msa_upper	25, 0xc8, a0
	restore_msa_upper	26, 0xd0, a0
	restore_msa_upper	27, 0xd8, a0
	restore_msa_upper	28, 0xe0, a0
	restore_msa_upper	29, 0xe8, a0
	restore_msa_upper	30, 0xf0, a0
	restore_msa_upper	31, 0xf8, a0
	jr	ra
	 li	v0, 0
	END(_restore_msa_all_upper)

#endif /* CONFIG_CPU_HAS_MSA */

	.set	reorder

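/*
 * Landing pad for the __ex_table entries generated by the EX macro
 * above: a faulting load/store branches here and the enclosing routine
 * returns -EFAULT in v0.
 */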
	.type	fault, @function
	.ent	fault
fault:	li	v0, -EFAULT				# failure
	jr	ra
	.end	fault