xref: /linux/arch/mips/kernel/r4k_fpu.S (revision 4ab5a5d2a4a2289c2af07accbec7170ca5671f41)
1/*
2 * This file is subject to the terms and conditions of the GNU General Public
3 * License.  See the file "COPYING" in the main directory of this archive
4 * for more details.
5 *
6 * Copyright (C) 1996, 98, 99, 2000, 01 Ralf Baechle
7 *
8 * Multi-arch abstraction and asm macros for easier reading:
9 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
10 *
11 * Carsten Langgaard, carstenl@mips.com
12 * Copyright (C) 2000 MIPS Technologies, Inc.
13 * Copyright (C) 1999, 2001 Silicon Graphics, Inc.
14 */
15#include <asm/asm.h>
16#include <asm/asmmacro.h>
17#include <asm/errno.h>
18#include <asm/export.h>
19#include <asm/fpregdef.h>
20#include <asm/mipsregs.h>
21#include <asm/asm-offsets.h>
22#include <asm/regdef.h>
23
24/* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
25#undef fp
26
/*
 * EX: emit "\insn \reg, \src" together with an __ex_table entry so
 * that a fault on the memory operand (e.g. a bad user-space
 * sigcontext pointer) is fixed up by branching to the "fault" stub
 * at the bottom of this file, which returns -EFAULT in v0.
 */
27	.macro	EX insn, reg, src
28	.set	push
29	SET_HARDFLOAT
30	.set	nomacro			# keep this a single real instruction
31.ex\@:	\insn	\reg, \src
32	.set	pop
33	.section __ex_table,"a"
34	PTR	.ex\@, fault		# fixup: resume at "fault" on exception
35	.previous
36	.endm
37
38/*
39 * Save a thread's fp context.
 *
 * a0 = pointer to the FP register save area.
 * On 64-bit / R2 / R6 kernels, CP0 Status is read into t0 first and
 * consumed by fpu_save_double (asmmacro.h) — presumably to test
 * Status.FR and pick the 16- vs 32-double save format, matching the
 * explicit FR test in _save_fp_context below; confirm in asmmacro.h.
 * Clobbers t0 and t1.
40 */
41LEAF(_save_fp)
42EXPORT_SYMBOL(_save_fp)
43#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPSR2) || \
44		defined(CONFIG_CPU_MIPSR6)
45	mfc0	t0, CP0_STATUS		# Status consumed by fpu_save_double
46#endif
47	fpu_save_double a0 t0 t1		# clobbers t1
48	jr	ra
49	END(_save_fp)
50
51/*
52 * Restore a thread's fp context.
 *
 * a0 = pointer to the FP register save area.
 * As in _save_fp above, CP0 Status is read into t0 on 64-bit / R2 /
 * R6 kernels for fpu_restore_double (asmmacro.h) — presumably the
 * Status.FR register-mode test; confirm in asmmacro.h.
 * Clobbers t0 and t1.
53 */
54LEAF(_restore_fp)
55#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPSR2) || \
56		defined(CONFIG_CPU_MIPSR6)
57	mfc0	t0, CP0_STATUS		# Status consumed by fpu_restore_double
58#endif
59	fpu_restore_double a0 t0 t1		# clobbers t1
60	jr	ra
61	END(_restore_fp)
62
63#ifdef CONFIG_CPU_HAS_MSA
64
65/*
66 * Save a thread's MSA vector context.
 *
 * a0 = pointer to the MSA register save area; the actual register
 * walk is done by msa_save_all (asmmacro.h).
67 */
68LEAF(_save_msa)
69EXPORT_SYMBOL(_save_msa)
70	msa_save_all	a0
71	jr	ra
72	END(_save_msa)
73
74/*
75 * Restore a thread's MSA vector context.
 *
 * a0 = pointer to the MSA register save area; the actual register
 * walk is done by msa_restore_all (asmmacro.h).
76 */
77LEAF(_restore_msa)
78	msa_restore_all	a0
79	jr	ra
80	END(_restore_msa)
81
/*
 * Initialise the upper halves of the MSA vector registers via
 * msa_init_all_upper (asmmacro.h).  Takes no arguments.
 */
82LEAF(_init_msa_upper)
83	msa_init_all_upper
84	jr	ra
85	END(_init_msa_upper)
86
87#endif /* CONFIG_CPU_HAS_MSA */
88
89/*
90 * Load the FPU with signalling NANS.  This bit pattern we're using has
91 * the property that no matter whether considered as single or as double
92 * precision represents signaling NANS.
93 *
94 * The value to initialize fcr31 to comes in $a0.
 *
 * Side effects: Status.CU1 is set and left set, so coprocessor 1 is
 * usable when this returns.  Clobbers t0 and t1.
95 */
96
97	.set push
98	SET_HARDFLOAT
99
100LEAF(_init_fpu)
101	mfc0	t0, CP0_STATUS
102	li	t1, ST0_CU1
103	or	t0, t1
104	mtc0	t0, CP0_STATUS		# enable coprocessor 1
105	enable_fpu_hazard		# wait for the CU1 write to take effect
106
107	ctc1	a0, fcr31		# FP control/status from caller
108
109	li	t1, -1				# SNaN
110
111#ifdef CONFIG_64BIT
	/* Shift Status.FR into the sign bit: FR=0 means 16-register mode */
112	sll	t0, t0, 5
113	bgez	t0, 1f				# 16 / 32 register mode?
114
	/* FR=1: also fill the 16 odd-numbered 64-bit registers */
115	dmtc1	t1, $f1
116	dmtc1	t1, $f3
117	dmtc1	t1, $f5
118	dmtc1	t1, $f7
119	dmtc1	t1, $f9
120	dmtc1	t1, $f11
121	dmtc1	t1, $f13
122	dmtc1	t1, $f15
123	dmtc1	t1, $f17
124	dmtc1	t1, $f19
125	dmtc1	t1, $f21
126	dmtc1	t1, $f23
127	dmtc1	t1, $f25
128	dmtc1	t1, $f27
129	dmtc1	t1, $f29
130	dmtc1	t1, $f31
1311:
132#endif

133
134#ifdef CONFIG_CPU_MIPS32
	/* 32-bit FPU view: fill the low 32 bits of all 32 registers */
135	mtc1	t1, $f0
136	mtc1	t1, $f1
137	mtc1	t1, $f2
138	mtc1	t1, $f3
139	mtc1	t1, $f4
140	mtc1	t1, $f5
141	mtc1	t1, $f6
142	mtc1	t1, $f7
143	mtc1	t1, $f8
144	mtc1	t1, $f9
145	mtc1	t1, $f10
146	mtc1	t1, $f11
147	mtc1	t1, $f12
148	mtc1	t1, $f13
149	mtc1	t1, $f14
150	mtc1	t1, $f15
151	mtc1	t1, $f16
152	mtc1	t1, $f17
153	mtc1	t1, $f18
154	mtc1	t1, $f19
155	mtc1	t1, $f20
156	mtc1	t1, $f21
157	mtc1	t1, $f22
158	mtc1	t1, $f23
159	mtc1	t1, $f24
160	mtc1	t1, $f25
161	mtc1	t1, $f26
162	mtc1	t1, $f27
163	mtc1	t1, $f28
164	mtc1	t1, $f29
165	mtc1	t1, $f30
166	mtc1	t1, $f31

167
168#if defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS32_R6)
169	.set    push
170	.set    MIPS_ISA_LEVEL_RAW
171	.set	fp=64
172	sll     t0, t0, 5			# is Status.FR set?
173	bgez    t0, 1f				# no: skip setting upper 32b
174
	/* FR=1 on a 32-bit R2/R6 kernel: fill the upper halves too */
175	mthc1   t1, $f0
176	mthc1   t1, $f1
177	mthc1   t1, $f2
178	mthc1   t1, $f3
179	mthc1   t1, $f4
180	mthc1   t1, $f5
181	mthc1   t1, $f6
182	mthc1   t1, $f7
183	mthc1   t1, $f8
184	mthc1   t1, $f9
185	mthc1   t1, $f10
186	mthc1   t1, $f11
187	mthc1   t1, $f12
188	mthc1   t1, $f13
189	mthc1   t1, $f14
190	mthc1   t1, $f15
191	mthc1   t1, $f16
192	mthc1   t1, $f17
193	mthc1   t1, $f18
194	mthc1   t1, $f19
195	mthc1   t1, $f20
196	mthc1   t1, $f21
197	mthc1   t1, $f22
198	mthc1   t1, $f23
199	mthc1   t1, $f24
200	mthc1   t1, $f25
201	mthc1   t1, $f26
202	mthc1   t1, $f27
203	mthc1   t1, $f28
204	mthc1   t1, $f29
205	mthc1   t1, $f30
206	mthc1   t1, $f31
2071:	.set    pop
208#endif /* CONFIG_CPU_MIPS32_R2 || CONFIG_CPU_MIPS32_R6 */
209#else
	/*
	 * 64-bit FPU: write the even-numbered doubles, which exist in
	 * both 16- and 32-register modes; the odd ones were handled in
	 * the CONFIG_64BIT block above when FR=1.
	 */
210	.set	MIPS_ISA_ARCH_LEVEL_RAW
211	dmtc1	t1, $f0
212	dmtc1	t1, $f2
213	dmtc1	t1, $f4
214	dmtc1	t1, $f6
215	dmtc1	t1, $f8
216	dmtc1	t1, $f10
217	dmtc1	t1, $f12
218	dmtc1	t1, $f14
219	dmtc1	t1, $f16
220	dmtc1	t1, $f18
221	dmtc1	t1, $f20
222	dmtc1	t1, $f22
223	dmtc1	t1, $f24
224	dmtc1	t1, $f26
225	dmtc1	t1, $f28
226	dmtc1	t1, $f30
227#endif
228	jr	ra
229	END(_init_fpu)

230
231	.set pop	/* SET_HARDFLOAT */
232
233	.set	noreorder

234
235/**
236 * _save_fp_context() - save FP context from the FPU
237 * @a0 - pointer to fpregs field of sigcontext
238 * @a1 - pointer to fpc_csr field of sigcontext
239 *
240 * Save FP context, including the 32 FP data registers and the FP
241 * control & status register, from the FPU to signal context.
 *
 * Returns v0 = 0 on success; if any EX-wrapped store to the user
 * pointers faults, the __ex_table fixup returns -EFAULT instead.
 * Clobbers t0 and t1.
242 */
243LEAF(_save_fp_context)
244	.set	push
245	SET_HARDFLOAT
246	cfc1	t1, fcr31		# grab FCSR now; stored last, below
247	.set	pop

248
249#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPSR2) || \
250		defined(CONFIG_CPU_MIPSR6)
251	.set	push
252	SET_HARDFLOAT
253#ifdef CONFIG_CPU_MIPSR2
254	.set	mips32r2
255	.set	fp=64
256	mfc0	t0, CP0_STATUS
257	sll	t0, t0, 5		# Status.FR -> sign bit
258	bgez	t0, 1f			# skip storing odd if FR=0
259	 nop				# branch delay slot
260#endif
261	/* Store the 16 odd double precision registers */
262	EX	sdc1 $f1, 8(a0)
263	EX	sdc1 $f3, 24(a0)
264	EX	sdc1 $f5, 40(a0)
265	EX	sdc1 $f7, 56(a0)
266	EX	sdc1 $f9, 72(a0)
267	EX	sdc1 $f11, 88(a0)
268	EX	sdc1 $f13, 104(a0)
269	EX	sdc1 $f15, 120(a0)
270	EX	sdc1 $f17, 136(a0)
271	EX	sdc1 $f19, 152(a0)
272	EX	sdc1 $f21, 168(a0)
273	EX	sdc1 $f23, 184(a0)
274	EX	sdc1 $f25, 200(a0)
275	EX	sdc1 $f27, 216(a0)
276	EX	sdc1 $f29, 232(a0)
277	EX	sdc1 $f31, 248(a0)
2781:	.set	pop
279#endif

280
281	.set push
282	SET_HARDFLOAT
283	/* Store the 16 even double precision registers */
284	EX	sdc1 $f0, 0(a0)
285	EX	sdc1 $f2, 16(a0)
286	EX	sdc1 $f4, 32(a0)
287	EX	sdc1 $f6, 48(a0)
288	EX	sdc1 $f8, 64(a0)
289	EX	sdc1 $f10, 80(a0)
290	EX	sdc1 $f12, 96(a0)
291	EX	sdc1 $f14, 112(a0)
292	EX	sdc1 $f16, 128(a0)
293	EX	sdc1 $f18, 144(a0)
294	EX	sdc1 $f20, 160(a0)
295	EX	sdc1 $f22, 176(a0)
296	EX	sdc1 $f24, 192(a0)
297	EX	sdc1 $f26, 208(a0)
298	EX	sdc1 $f28, 224(a0)
299	EX	sdc1 $f30, 240(a0)
300	EX	sw t1, 0(a1)		# store FCSR read at entry
301	jr	ra
302	 li	v0, 0					# success
303	.set pop
304	END(_save_fp_context)
305
306/**
307 * _restore_fp_context() - restore FP context to the FPU
308 * @a0 - pointer to fpregs field of sigcontext
309 * @a1 - pointer to fpc_csr field of sigcontext
310 *
311 * Restore FP context, including the 32 FP data registers and the FP
312 * control & status register, from signal context to the FPU.
 *
 * Returns v0 = 0 on success; if any EX-wrapped load from the user
 * pointers faults, the __ex_table fixup returns -EFAULT instead.
 * Clobbers t0 and t1.
313 */
314LEAF(_restore_fp_context)
315	EX	lw t1, 0(a1)		# fetch FCSR value first; written last

316
317#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPSR2)  || \
318		defined(CONFIG_CPU_MIPSR6)
319	.set	push
320	SET_HARDFLOAT
321#ifdef CONFIG_CPU_MIPSR2
322	.set	mips32r2
323	.set	fp=64
324	mfc0	t0, CP0_STATUS
325	sll	t0, t0, 5		# Status.FR -> sign bit
326	bgez	t0, 1f			# skip loading odd if FR=0
327	 nop				# branch delay slot
328#endif
	/* Load the 16 odd double precision registers */
329	EX	ldc1 $f1, 8(a0)
330	EX	ldc1 $f3, 24(a0)
331	EX	ldc1 $f5, 40(a0)
332	EX	ldc1 $f7, 56(a0)
333	EX	ldc1 $f9, 72(a0)
334	EX	ldc1 $f11, 88(a0)
335	EX	ldc1 $f13, 104(a0)
336	EX	ldc1 $f15, 120(a0)
337	EX	ldc1 $f17, 136(a0)
338	EX	ldc1 $f19, 152(a0)
339	EX	ldc1 $f21, 168(a0)
340	EX	ldc1 $f23, 184(a0)
341	EX	ldc1 $f25, 200(a0)
342	EX	ldc1 $f27, 216(a0)
343	EX	ldc1 $f29, 232(a0)
344	EX	ldc1 $f31, 248(a0)
3451:	.set pop
346#endif
347	.set push
348	SET_HARDFLOAT
	/* Load the 16 even double precision registers */
349	EX	ldc1 $f0, 0(a0)
350	EX	ldc1 $f2, 16(a0)
351	EX	ldc1 $f4, 32(a0)
352	EX	ldc1 $f6, 48(a0)
353	EX	ldc1 $f8, 64(a0)
354	EX	ldc1 $f10, 80(a0)
355	EX	ldc1 $f12, 96(a0)
356	EX	ldc1 $f14, 112(a0)
357	EX	ldc1 $f16, 128(a0)
358	EX	ldc1 $f18, 144(a0)
359	EX	ldc1 $f20, 160(a0)
360	EX	ldc1 $f22, 176(a0)
361	EX	ldc1 $f24, 192(a0)
362	EX	ldc1 $f26, 208(a0)
363	EX	ldc1 $f28, 224(a0)
364	EX	ldc1 $f30, 240(a0)
365	ctc1	t1, fcr31		# restore FCSR loaded at entry
366	.set pop
367	jr	ra
368	 li	v0, 0					# success
369	END(_restore_fp_context)
370
371#ifdef CONFIG_CPU_HAS_MSA

372
/*
 * op_one_wr: one jump-table slot — perform \op on MSA register \idx
 * with memory operand \base, then return.  The .align 4 pads every
 * slot to 16 bytes so op_msa_wr below can index by (register << 4).
 */
373	.macro	op_one_wr	op, idx, base
374	.align	4
375\idx:	\op	\idx, 0, \base
376	jr	ra
377	 nop				# branch delay slot
378	.endm
379
/*
 * op_msa_wr: build a LEAF function \name taking
 *   a0 = MSA vector register index (0..31)
 *   a1 = memory operand address
 * It dispatches by computed goto: jump to 0f + (a0 << 4), i.e. into
 * the table of 16-byte op_one_wr slots emitted below ("0f" is the
 * "0:" label of the first slot).
 */
380	.macro	op_msa_wr	name, op
381LEAF(\name)
382	.set		push
383	.set		noreorder
384	sll		t0, a0, 4	# slot offset = index * 16
385	PTR_LA		t1, 0f		# table base
386	PTR_ADDU	t0, t0, t1
387	jr		t0
388	  nop				# branch delay slot
389	op_one_wr	\op, 0, a1
390	op_one_wr	\op, 1, a1
391	op_one_wr	\op, 2, a1
392	op_one_wr	\op, 3, a1
393	op_one_wr	\op, 4, a1
394	op_one_wr	\op, 5, a1
395	op_one_wr	\op, 6, a1
396	op_one_wr	\op, 7, a1
397	op_one_wr	\op, 8, a1
398	op_one_wr	\op, 9, a1
399	op_one_wr	\op, 10, a1
400	op_one_wr	\op, 11, a1
401	op_one_wr	\op, 12, a1
402	op_one_wr	\op, 13, a1
403	op_one_wr	\op, 14, a1
404	op_one_wr	\op, 15, a1
405	op_one_wr	\op, 16, a1
406	op_one_wr	\op, 17, a1
407	op_one_wr	\op, 18, a1
408	op_one_wr	\op, 19, a1
409	op_one_wr	\op, 20, a1
410	op_one_wr	\op, 21, a1
411	op_one_wr	\op, 22, a1
412	op_one_wr	\op, 23, a1
413	op_one_wr	\op, 24, a1
414	op_one_wr	\op, 25, a1
415	op_one_wr	\op, 26, a1
416	op_one_wr	\op, 27, a1
417	op_one_wr	\op, 28, a1
418	op_one_wr	\op, 29, a1
419	op_one_wr	\op, 30, a1
420	op_one_wr	\op, 31, a1
421	.set		pop
422	END(\name)
423	.endm
424
/*
 * Instantiate the dispatchers: read_msa_wr_* store MSA register a0
 * to memory at a1 (element stores st_*); write_msa_wr_* load it from
 * a1 (element loads ld_*), one variant per element width b/h/w/d.
 */
425	op_msa_wr	read_msa_wr_b, st_b
426	op_msa_wr	read_msa_wr_h, st_h
427	op_msa_wr	read_msa_wr_w, st_w
428	op_msa_wr	read_msa_wr_d, st_d

429
430	op_msa_wr	write_msa_wr_b, ld_b
431	op_msa_wr	write_msa_wr_h, ld_h
432	op_msa_wr	write_msa_wr_w, ld_w
433	op_msa_wr	write_msa_wr_d, ld_d

434
435#endif /* CONFIG_CPU_HAS_MSA */
436
437#ifdef CONFIG_CPU_HAS_MSA

438
/*
 * save_msa_upper: store the upper 64 bits of MSA vector register \wr
 * to \off(\base), using $1 ($at) as scratch — hence .set noat.
 * 64-bit kernels copy doubleword element 1 in one store; 32-bit
 * kernels copy word elements 2 and 3, with the store offsets swapped
 * between the endiannesses so the memory layout is identical.
 * Stores go through EX, so a faulting \base is fixed up to -EFAULT.
 */
439	.macro	save_msa_upper	wr, off, base
440	.set	push
441	.set	noat
442#ifdef CONFIG_64BIT
443	copy_s_d \wr, 1
444	EX sd	$1, \off(\base)
445#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
446	copy_s_w \wr, 2
447	EX sw	$1, \off(\base)
448	copy_s_w \wr, 3
449	EX sw	$1, (\off+4)(\base)
450#else /* CONFIG_CPU_BIG_ENDIAN */
451	copy_s_w \wr, 2
452	EX sw	$1, (\off+4)(\base)
453	copy_s_w \wr, 3
454	EX sw	$1, \off(\base)
455#endif
456	.set	pop
457	.endm
458
/*
 * _save_msa_all_upper(a0 = base of 32 x 8-byte slots): store the
 * upper 64 bits of every MSA vector register.  Returns v0 = 0 on
 * success, or -EFAULT via the "fault" fixup if a store faults.
 */
459LEAF(_save_msa_all_upper)
460	save_msa_upper	0, 0x00, a0
461	save_msa_upper	1, 0x08, a0
462	save_msa_upper	2, 0x10, a0
463	save_msa_upper	3, 0x18, a0
464	save_msa_upper	4, 0x20, a0
465	save_msa_upper	5, 0x28, a0
466	save_msa_upper	6, 0x30, a0
467	save_msa_upper	7, 0x38, a0
468	save_msa_upper	8, 0x40, a0
469	save_msa_upper	9, 0x48, a0
470	save_msa_upper	10, 0x50, a0
471	save_msa_upper	11, 0x58, a0
472	save_msa_upper	12, 0x60, a0
473	save_msa_upper	13, 0x68, a0
474	save_msa_upper	14, 0x70, a0
475	save_msa_upper	15, 0x78, a0
476	save_msa_upper	16, 0x80, a0
477	save_msa_upper	17, 0x88, a0
478	save_msa_upper	18, 0x90, a0
479	save_msa_upper	19, 0x98, a0
480	save_msa_upper	20, 0xa0, a0
481	save_msa_upper	21, 0xa8, a0
482	save_msa_upper	22, 0xb0, a0
483	save_msa_upper	23, 0xb8, a0
484	save_msa_upper	24, 0xc0, a0
485	save_msa_upper	25, 0xc8, a0
486	save_msa_upper	26, 0xd0, a0
487	save_msa_upper	27, 0xd8, a0
488	save_msa_upper	28, 0xe0, a0
489	save_msa_upper	29, 0xe8, a0
490	save_msa_upper	30, 0xf0, a0
491	save_msa_upper	31, 0xf8, a0
492	jr	ra
493	 li	v0, 0				# (delay slot) success
494	END(_save_msa_all_upper)
495
/*
 * restore_msa_upper: mirror of save_msa_upper — load the upper
 * 64 bits of MSA vector register \wr from \off(\base) through $1
 * ($at, hence .set noat).  64-bit kernels insert doubleword
 * element 1 in one go; 32-bit kernels insert word elements 2 and 3,
 * with the load offsets swapped between endiannesses.  Loads go
 * through EX, so a faulting \base is fixed up to -EFAULT.
 */
496	.macro	restore_msa_upper	wr, off, base
497	.set	push
498	.set	noat
499#ifdef CONFIG_64BIT
500	EX ld	$1, \off(\base)
501	insert_d \wr, 1
502#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
503	EX lw	$1, \off(\base)
504	insert_w \wr, 2
505	EX lw	$1, (\off+4)(\base)
506	insert_w \wr, 3
507#else /* CONFIG_CPU_BIG_ENDIAN */
508	EX lw	$1, (\off+4)(\base)
509	insert_w \wr, 2
510	EX lw	$1, \off(\base)
511	insert_w \wr, 3
512#endif
513	.set	pop
514	.endm
515
/*
 * _restore_msa_all_upper(a0 = base of 32 x 8-byte slots): load the
 * upper 64 bits of every MSA vector register.  Returns v0 = 0 on
 * success, or -EFAULT via the "fault" fixup if a load faults.
 */
516LEAF(_restore_msa_all_upper)
517	restore_msa_upper	0, 0x00, a0
518	restore_msa_upper	1, 0x08, a0
519	restore_msa_upper	2, 0x10, a0
520	restore_msa_upper	3, 0x18, a0
521	restore_msa_upper	4, 0x20, a0
522	restore_msa_upper	5, 0x28, a0
523	restore_msa_upper	6, 0x30, a0
524	restore_msa_upper	7, 0x38, a0
525	restore_msa_upper	8, 0x40, a0
526	restore_msa_upper	9, 0x48, a0
527	restore_msa_upper	10, 0x50, a0
528	restore_msa_upper	11, 0x58, a0
529	restore_msa_upper	12, 0x60, a0
530	restore_msa_upper	13, 0x68, a0
531	restore_msa_upper	14, 0x70, a0
532	restore_msa_upper	15, 0x78, a0
533	restore_msa_upper	16, 0x80, a0
534	restore_msa_upper	17, 0x88, a0
535	restore_msa_upper	18, 0x90, a0
536	restore_msa_upper	19, 0x98, a0
537	restore_msa_upper	20, 0xa0, a0
538	restore_msa_upper	21, 0xa8, a0
539	restore_msa_upper	22, 0xb0, a0
540	restore_msa_upper	23, 0xb8, a0
541	restore_msa_upper	24, 0xc0, a0
542	restore_msa_upper	25, 0xc8, a0
543	restore_msa_upper	26, 0xd0, a0
544	restore_msa_upper	27, 0xd8, a0
545	restore_msa_upper	28, 0xe0, a0
546	restore_msa_upper	29, 0xe8, a0
547	restore_msa_upper	30, 0xf0, a0
548	restore_msa_upper	31, 0xf8, a0
549	jr	ra
550	 li	v0, 0				# (delay slot) success
551	END(_restore_msa_all_upper)

552
553#endif /* CONFIG_CPU_HAS_MSA */
554
555	.set	reorder

556
/*
 * Common fixup target for every EX-wrapped user access above: the
 * __ex_table entry redirects a faulting access here, and we report
 * -EFAULT to the caller in v0.
 */
557	.type	fault, @function
558	.ent	fault
559fault:	li	v0, -EFAULT				# failure
560	jr	ra
561	.end	fault
562