xref: /linux/arch/mips/kernel/r4k_switch.S (revision 2fe05e1139a555ae91f00a812cb9520e7d3022ab)
1/*
2 * This file is subject to the terms and conditions of the GNU General Public
3 * License.  See the file "COPYING" in the main directory of this archive
4 * for more details.
5 *
6 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
7 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
8 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
9 * Copyright (C) 1999 Silicon Graphics, Inc.
10 * Copyright (C) 2000 MIPS Technologies, Inc.
11 *    written by Carsten Langgaard, carstenl@mips.com
12 */
13#include <asm/asm.h>
14#include <asm/cachectl.h>
15#include <asm/export.h>
16#include <asm/fpregdef.h>
17#include <asm/mipsregs.h>
18#include <asm/asm-offsets.h>
19#include <asm/regdef.h>
20#include <asm/stackframe.h>
21#include <asm/thread_info.h>
22
23#include <asm/asmmacro.h>
24
25/* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
26#undef fp
27
28#ifndef USE_ALTERNATE_RESUME_IMPL
29/*
30 * task_struct *resume(task_struct *prev, task_struct *next,
31 *		       struct thread_info *next_ti)
32 */
	/*
	 * Context switch: save @prev's non-scratch state, restore @next's,
	 * and switch $28 (current thread_info) and kernelsp over to @next.
	 * In:	a0 = prev task, a1 = next task, a2 = next's thread_info
	 * Out:	v0 = prev (returned to the caller running as @next)
	 */
33	.align	5
34	LEAF(resume)
35	mfc0	t1, CP0_STATUS
36	LONG_S	t1, THREAD_STATUS(a0)		/* save prev's c0_status */
37	cpu_save_nonscratch a0
38	LONG_S	ra, THREAD_REG31(a0)		/* prev resumes here when switched back in */
39
40#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
	/* On UP the stack canary is a single global; install next's value. */
41	PTR_LA	t8, __stack_chk_guard
42	LONG_L	t9, TASK_STACK_CANARY(a1)
43	LONG_S	t9, 0(t8)
44#endif
45
46	/*
47	 * The order of restoring the registers takes care of the race
48	 * updating $28, $29 and kernelsp without disabling ints.
49	 */
50	move	$28, a2				/* $28 = next's thread_info */
51	cpu_restore_nonscratch a1
52
	/* t0 = top of next's kernel stack (32-byte pad below the end) */
53	PTR_ADDU	t0, $28, _THREAD_SIZE - 32
54	set_saved_sp	t0, t1, t2
	/*
	 * Build next's c0_status: keep the live interrupt-enable and
	 * interrupt-mask bits (0xff01 = IE | IM0..IM7) from the current
	 * status, take every other bit from next's saved status.
	 */
55	mfc0	t1, CP0_STATUS		/* Do we really need this? */
56	li	a3, 0xff01
57	and	t1, a3				/* t1 = current IE/IM bits */
58	LONG_L	a2, THREAD_STATUS(a1)
59	nor	a3, $0, a3			/* a3 = ~0xff01 */
60	and	a2, a3				/* next's status sans IE/IM */
61	or	a2, t1				/* merge in live IE/IM */
62	mtc0	a2, CP0_STATUS
63	move	v0, a0				/* return prev */
64	jr	ra
65	END(resume)
66
67#endif /* USE_ALTERNATE_RESUME_IMPL */
68
69/*
70 * Save a thread's fp context.
71 */
/*
 * In:	a0 = pointer handed to fpu_save_double (presumably the thread's
 *	     FP save area — confirm against callers / asmmacro.h)
 * Clobbers: t0 (holds CP0 Status on 64-bit/R2/R6 builds), t1
 */
72LEAF(_save_fp)
73EXPORT_SYMBOL(_save_fp)
74#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
75		defined(CONFIG_CPU_MIPS32_R6)
	/* fpu_save_double consumes the current Status in t0 — presumably
	   to test Status.FR for 16- vs 32-double layout; see asmmacro.h */
76	mfc0	t0, CP0_STATUS
77#endif
78	fpu_save_double a0 t0 t1		# clobbers t1
79	jr	ra
80	END(_save_fp)
81
82/*
83 * Restore a thread's fp context.
84 */
/*
 * In:	a0 = pointer handed to fpu_restore_double (presumably the thread's
 *	     FP save area — confirm against callers / asmmacro.h)
 * Clobbers: t0 (holds CP0 Status on 64-bit/R2/R6 builds), t1
 */
85LEAF(_restore_fp)
86#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
87		defined(CONFIG_CPU_MIPS32_R6)
	/* fpu_restore_double consumes the current Status in t0 — presumably
	   to test Status.FR for 16- vs 32-double layout; see asmmacro.h */
88	mfc0	t0, CP0_STATUS
89#endif
90	fpu_restore_double a0 t0 t1		# clobbers t1
91	jr	ra
92	END(_restore_fp)
93
94#ifdef CONFIG_CPU_HAS_MSA
95
96/*
97 * Save a thread's MSA vector context.
98 *
99 * a0 = pointer passed to msa_save_all (presumably the thread's vector
100 * save area — confirm against asmmacro.h).
101 */
99LEAF(_save_msa)
100EXPORT_SYMBOL(_save_msa)
101	msa_save_all	a0
102	jr	ra
103	END(_save_msa)
104
105/*
106 * Restore a thread's MSA vector context.
107 *
108 * a0 = pointer passed to msa_restore_all (same area as _save_msa).
109 */
108LEAF(_restore_msa)
109	msa_restore_all	a0
110	jr	ra
111	END(_restore_msa)
112
/*
 * Initialize the vector registers via msa_init_all_upper — per the macro
 * name this touches the upper parts only, presumably leaving the FP
 * (lower 64-bit) halves intact; confirm against asmmacro.h.
 */
113LEAF(_init_msa_upper)
114	msa_init_all_upper
115	jr	ra
116	END(_init_msa_upper)
117
118#endif
119
120/*
121 * Load the FPU with signalling NANS.  This bit pattern we're using has
122 * the property that no matter whether considered as single or as double
123 * precision represents signaling NANS.
124 *
125 * The value to initialize fcr31 to comes in $a0.
126 */
127
128	.set push
129	SET_HARDFLOAT
130
/*
 * _init_fpu(unsigned long fcr31):
 *  - enables coprocessor 1 (leaves CU1 set in c0_status on return),
 *  - loads the FP control/status register from a0,
 *  - fills every FP register with the all-ones SNaN pattern, honouring
 *    the current Status.FR register-file mode.
 * Clobbers: t0, t1, all FP registers, fcr31.
 */
131LEAF(_init_fpu)
132	mfc0	t0, CP0_STATUS
133	li	t1, ST0_CU1
134	or	t0, t1
135	mtc0	t0, CP0_STATUS			# enable coprocessor 1 (FPU)
136	enable_fpu_hazard
137
138	ctc1	a0, fcr31			# FP control/status from caller
139
140	li	t1, -1				# SNaN
141
142#ifdef CONFIG_64BIT
	/*
	 * Shift Status.FR up into the sign bit, then test it: with FR
	 * clear (16/32-register mode) the odd-numbered registers are not
	 * separate, so skip initializing them.
	 */
143	sll	t0, t0, 5
144	bgez	t0, 1f				# 16 / 32 register mode?

146	dmtc1	t1, $f1
147	dmtc1	t1, $f3
148	dmtc1	t1, $f5
149	dmtc1	t1, $f7
150	dmtc1	t1, $f9
151	dmtc1	t1, $f11
152	dmtc1	t1, $f13
153	dmtc1	t1, $f15
154	dmtc1	t1, $f17
155	dmtc1	t1, $f19
156	dmtc1	t1, $f21
157	dmtc1	t1, $f23
158	dmtc1	t1, $f25
159	dmtc1	t1, $f27
160	dmtc1	t1, $f29
161	dmtc1	t1, $f31
1621:
163#endif

165#ifdef CONFIG_CPU_MIPS32
	/* 32-bit kernel: 32-bit writes cover all 32 single registers
	   (only the low words in FR=1 mode; uppers handled below). */
166	mtc1	t1, $f0
167	mtc1	t1, $f1
168	mtc1	t1, $f2
169	mtc1	t1, $f3
170	mtc1	t1, $f4
171	mtc1	t1, $f5
172	mtc1	t1, $f6
173	mtc1	t1, $f7
174	mtc1	t1, $f8
175	mtc1	t1, $f9
176	mtc1	t1, $f10
177	mtc1	t1, $f11
178	mtc1	t1, $f12
179	mtc1	t1, $f13
180	mtc1	t1, $f14
181	mtc1	t1, $f15
182	mtc1	t1, $f16
183	mtc1	t1, $f17
184	mtc1	t1, $f18
185	mtc1	t1, $f19
186	mtc1	t1, $f20
187	mtc1	t1, $f21
188	mtc1	t1, $f22
189	mtc1	t1, $f23
190	mtc1	t1, $f24
191	mtc1	t1, $f25
192	mtc1	t1, $f26
193	mtc1	t1, $f27
194	mtc1	t1, $f28
195	mtc1	t1, $f29
196	mtc1	t1, $f30
197	mtc1	t1, $f31

199#if defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS32_R6)
	/* R2/R6 with Status.FR set: the registers are 64-bit wide, so
	   also fill the upper halves (mthc1 needs fp=64 to assemble). */
200	.set    push
201	.set    MIPS_ISA_LEVEL_RAW
202	.set	fp=64
203	sll     t0, t0, 5			# is Status.FR set?
204	bgez    t0, 1f				# no: skip setting upper 32b

206	mthc1   t1, $f0
207	mthc1   t1, $f1
208	mthc1   t1, $f2
209	mthc1   t1, $f3
210	mthc1   t1, $f4
211	mthc1   t1, $f5
212	mthc1   t1, $f6
213	mthc1   t1, $f7
214	mthc1   t1, $f8
215	mthc1   t1, $f9
216	mthc1   t1, $f10
217	mthc1   t1, $f11
218	mthc1   t1, $f12
219	mthc1   t1, $f13
220	mthc1   t1, $f14
221	mthc1   t1, $f15
222	mthc1   t1, $f16
223	mthc1   t1, $f17
224	mthc1   t1, $f18
225	mthc1   t1, $f19
226	mthc1   t1, $f20
227	mthc1   t1, $f21
228	mthc1   t1, $f22
229	mthc1   t1, $f23
230	mthc1   t1, $f24
231	mthc1   t1, $f25
232	mthc1   t1, $f26
233	mthc1   t1, $f27
234	mthc1   t1, $f28
235	mthc1   t1, $f29
236	mthc1   t1, $f30
237	mthc1   t1, $f31
2381:	.set    pop
239#endif /* CONFIG_CPU_MIPS32_R2 || CONFIG_CPU_MIPS32_R6 */
240#else
	/* 64-bit kernel: 64-bit writes to the 16 even registers; the odd
	   registers were filled above when Status.FR was set. */
241	.set	MIPS_ISA_ARCH_LEVEL_RAW
242	dmtc1	t1, $f0
243	dmtc1	t1, $f2
244	dmtc1	t1, $f4
245	dmtc1	t1, $f6
246	dmtc1	t1, $f8
247	dmtc1	t1, $f10
248	dmtc1	t1, $f12
249	dmtc1	t1, $f14
250	dmtc1	t1, $f16
251	dmtc1	t1, $f18
252	dmtc1	t1, $f20
253	dmtc1	t1, $f22
254	dmtc1	t1, $f24
255	dmtc1	t1, $f26
256	dmtc1	t1, $f28
257	dmtc1	t1, $f30
258#endif
259	jr	ra
260	END(_init_fpu)

262	.set pop	/* SET_HARDFLOAT */
263