xref: /linux/arch/powerpc/kernel/head_32.h (revision 4fd18fc38757217c746aa063ba9e4729814dc737)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef __HEAD_32_H__
3 #define __HEAD_32_H__
4 
5 #include <asm/ptrace.h>	/* for STACK_FRAME_REGS_MARKER */
6 
7 /*
8  * Exception entry code.  This code runs with address translation
9  * turned off, i.e. using physical addresses.
10  * We assume sprg3 has the physical address of the current
11  * task's thread_struct.
12  */
/*
 * Full exception prologue: free scratch registers and capture SPRs
 * (PROLOG_0), select a kernel stack (PROLOG_1), then build the
 * pt_regs frame (PROLOG_2).  \handle_dar_dsisr is forwarded so that
 * DAR/DSISR are captured early for exceptions that need them.
 */
13 .macro EXCEPTION_PROLOG handle_dar_dsisr=0
14 	EXCEPTION_PROLOG_0	handle_dar_dsisr=\handle_dar_dsisr
15 	EXCEPTION_PROLOG_1
16 	EXCEPTION_PROLOG_2	handle_dar_dsisr=\handle_dar_dsisr
17 .endm
18 
/*
 * EXCEPTION_PROLOG_0: park r10/r11 in the SPRG scratch registers and
 * classify the interrupted context.  Under CONFIG_VMAP_STACK the
 * exception SPRs (SRR0/SRR1, optionally DAR/DSISR) are also copied
 * into the thread_struct, so their values survive any further
 * exceptions (e.g. the DTLB misses PROLOG_2 allows) that would
 * overwrite the SPRs.
 * Exit state: r10 = saved CR, r11 = SRR1,
 *             cr0.eq set iff we came from kernel mode (MSR_PR clear).
 */
19 .macro EXCEPTION_PROLOG_0 handle_dar_dsisr=0
20 	mtspr	SPRN_SPRG_SCRATCH0,r10
21 	mtspr	SPRN_SPRG_SCRATCH1,r11
22 #ifdef CONFIG_VMAP_STACK
23 	mfspr	r10, SPRN_SPRG_THREAD
24 	.if	\handle_dar_dsisr
25 	mfspr	r11, SPRN_DAR
26 	stw	r11, DAR(r10)
27 	mfspr	r11, SPRN_DSISR
28 	stw	r11, DSISR(r10)
29 	.endif
30 	mfspr	r11, SPRN_SRR0
31 	stw	r11, SRR0(r10)
32 #endif
33 	mfspr	r11, SPRN_SRR1		/* check whether user or kernel */
34 #ifdef CONFIG_VMAP_STACK
35 	stw	r11, SRR1(r10)
36 #endif
37 	mfcr	r10			/* preserve CR before andi. clobbers cr0 */
38 	andi.	r11, r11, MSR_PR	/* cr0.eq <=> interrupted in kernel mode */
39 .endm
40 
/*
 * EXCEPTION_PROLOG_1: select the kernel stack pointer.  If we came
 * from the kernel (cr0.eq from PROLOG_0) reuse the interrupted r1,
 * otherwise start from the top of the task's kernel stack; either way
 * carve out an INT_FRAME_SIZE frame.
 * VMAP_STACK: built directly in r1 (old r1 parked in SPRG_SCRATCH2),
 * with a stack-overflow check.  Non-VMAP: built in r11 and converted
 * to a physical address since translation is still off.
 * \for_rtas is accepted but unused in this body -- presumably for
 * call-site compatibility; confirm against users of this macro.
 */
41 .macro EXCEPTION_PROLOG_1 for_rtas=0
42 #ifdef CONFIG_VMAP_STACK
43 	mtspr	SPRN_SPRG_SCRATCH2,r1
44 	subi	r1, r1, INT_FRAME_SIZE		/* use r1 if kernel */
45 	beq	1f
46 	mfspr	r1,SPRN_SPRG_THREAD
47 	lwz	r1,TASK_STACK-THREAD(r1)
48 	addi	r1, r1, THREAD_SIZE - INT_FRAME_SIZE
49 1:
50 	mtcrf	0x7f, r1	/* mask 0x7f: cr0 (user/kernel result) kept intact */
51 	bt	32 - THREAD_ALIGN_SHIFT, stack_overflow	/* sp escaped the aligned stack area */
52 #else
53 	subi	r11, r1, INT_FRAME_SIZE		/* use r1 if kernel */
54 	beq	1f
55 	mfspr	r11,SPRN_SPRG_THREAD
56 	lwz	r11,TASK_STACK-THREAD(r11)
57 	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
58 1:	tophys(r11, r11)
59 #endif
60 .endm
61 
/*
 * EXCEPTION_PROLOG_2: build the pt_regs frame on the stack selected
 * by PROLOG_1 and recover everything stashed by PROLOG_0.
 * Entry: r10 = saved CR, frame pointer in r1 (VMAP_STACK) or r11
 *        (non-VMAP); old r1 in SPRG_SCRATCH2 (VMAP) or still in r1.
 * Exit:  r11 = frame pointer, r9 = SRR1, r12 = SRR0, CR/LR/GPR0-12
 *        saved in the frame, MSR switched so further exceptions (but
 *        not instruction translation) are tolerated.
 */
62 .macro EXCEPTION_PROLOG_2 handle_dar_dsisr=0
63 #ifdef CONFIG_VMAP_STACK
64 	li	r11, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
65 	mtmsr	r11
66 	isync
67 	mfspr	r11, SPRN_SPRG_SCRATCH2		/* interrupted r1, saved by PROLOG_1 */
68 	stw	r11,GPR1(r1)
69 	stw	r11,0(r1)			/* back-chain to old stack */
70 	mr	r11, r1
71 #else
72 	stw	r1,GPR1(r11)
73 	stw	r1,0(r11)
74 	tovirt(r1, r11)		/* set new kernel sp */
75 #endif
76 	stw	r10,_CCR(r11)		/* save registers */
77 	stw	r12,GPR12(r11)
78 	stw	r9,GPR9(r11)
79 	mfspr	r10,SPRN_SPRG_SCRATCH0	/* original r10/r11 from PROLOG_0 */
80 	mfspr	r12,SPRN_SPRG_SCRATCH1
81 	stw	r10,GPR10(r11)
82 	stw	r12,GPR11(r11)
83 	mflr	r10
84 	stw	r10,_LINK(r11)
85 #ifdef CONFIG_VMAP_STACK
86 	mfspr	r12, SPRN_SPRG_THREAD
87 	tovirt(r12, r12)
88 	.if	\handle_dar_dsisr
89 	lwz	r10, DAR(r12)		/* DAR/DSISR captured by PROLOG_0 */
90 	stw	r10, _DAR(r11)
91 	lwz	r10, DSISR(r12)
92 	stw	r10, _DSISR(r11)
93 	.endif
94 	lwz	r9, SRR1(r12)		/* SRRs from thread_struct: real SPRs */
95 	lwz	r12, SRR0(r12)		/* may have been clobbered by TLB miss */
96 #else
97 	mfspr	r12,SPRN_SRR0
98 	mfspr	r9,SPRN_SRR1
99 #endif
100 #ifdef CONFIG_40x
101 	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
102 #else
103 #ifdef CONFIG_VMAP_STACK
104 	li	r10, MSR_KERNEL & ~MSR_IR /* can take exceptions */
105 #else
106 	li	r10,MSR_KERNEL & ~(MSR_IR|MSR_DR) /* can take exceptions */
107 #endif
108 	mtmsr	r10			/* (except for mach check in rtas) */
109 #endif
110 	stw	r0,GPR0(r11)
111 	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
112 	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
113 	stw	r10,8(r11)
114 	SAVE_4GPRS(3, r11)
115 	SAVE_2GPRS(7, r11)
116 .endm
117 
/*
 * SYSCALL_ENTRY: system-call fast-path prologue.  Switches to the
 * task's kernel stack, builds a pt_regs frame, stores \trapno+1 as
 * the trap number, accounts user CPU time, handles 40x hardware
 * debug state, then rfi's to transfer_to_syscall with the MMU on.
 * Syscalls issued from kernel mode (MSR_PR clear) branch straight
 * to ret_from_kernel_syscall at 99: instead.
 * Runs with translation off on entry; r9 ends up holding SRR1.
 */
118 .macro SYSCALL_ENTRY trapno
119 	mfspr	r12,SPRN_SPRG_THREAD
120 	mfspr	r9, SPRN_SRR1
121 #ifdef CONFIG_VMAP_STACK
122 	mfspr	r11, SPRN_SRR0
123 	mtctr	r11			/* park SRR0 in CTR: SPRs may be lost once exceptions can occur */
124 	andi.	r11, r9, MSR_PR
125 	mr	r11, r1
126 	lwz	r1,TASK_STACK-THREAD(r12)
127 	beq-	99f			/* syscall from kernel mode */
128 	addi	r1, r1, THREAD_SIZE - INT_FRAME_SIZE
129 	li	r10, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
130 	mtmsr	r10
131 	isync
132 	tovirt(r12, r12)
133 	stw	r11,GPR1(r1)
134 	stw	r11,0(r1)		/* back-chain to user r1 */
135 	mr	r11, r1
136 #else
137 	andi.	r11, r9, MSR_PR
138 	lwz	r11,TASK_STACK-THREAD(r12)
139 	beq-	99f			/* syscall from kernel mode */
140 	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
141 	tophys(r11, r11)
142 	stw	r1,GPR1(r11)
143 	stw	r1,0(r11)
144 	tovirt(r1, r11)		/* set new kernel sp */
145 #endif
146 	mflr	r10
147 	stw	r10, _LINK(r11)
148 #ifdef CONFIG_VMAP_STACK
149 	mfctr	r10			/* SRR0 was parked in CTR above */
150 #else
151 	mfspr	r10,SPRN_SRR0
152 #endif
153 	stw	r10,_NIP(r11)
154 	mfcr	r10
155 	rlwinm	r10,r10,0,4,2	/* Clear SO bit in CR */
156 	stw	r10,_CCR(r11)		/* save registers */
157 #ifdef CONFIG_40x
158 	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
159 #else
160 #ifdef CONFIG_VMAP_STACK
161 	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~MSR_IR) /* can take exceptions */
162 #else
163 	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
164 #endif
165 	mtmsr	r10			/* (except for mach check in rtas) */
166 #endif
167 	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
168 	stw	r2,GPR2(r11)
169 	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
170 	stw	r9,_MSR(r11)
171 	li	r2, \trapno + 1	/* low bit set -- NOTE(review): presumably marks frame without NVGPRs; confirm vs entry_32.S */
172 	stw	r10,8(r11)
173 	stw	r2,_TRAP(r11)
174 	SAVE_GPR(0, r11)
175 	SAVE_4GPRS(3, r11)
176 	SAVE_2GPRS(7, r11)
177 	addi	r11,r1,STACK_FRAME_OVERHEAD
178 	addi	r2,r12,-THREAD		/* r2 = task_struct (thread_struct - THREAD) */
179 	stw	r11,PT_REGS(r12)
180 #if defined(CONFIG_40x)
181 	/* Check to see if the dbcr0 register is set up to debug.  Use the
182 	   internal debug mode bit to do this. */
183 	lwz	r12,THREAD_DBCR0(r12)
184 	andis.	r12,r12,DBCR0_IDM@h
185 #endif
186 	ACCOUNT_CPU_USER_ENTRY(r2, r11, r12)
187 #if defined(CONFIG_40x)
188 	beq+	3f			/* IDM clear: no debug setup needed */
189 	/* From user and task is ptraced - load up global dbcr0 */
190 	li	r12,-1			/* clear all pending debug events */
191 	mtspr	SPRN_DBSR,r12
192 	lis	r11,global_dbcr0@ha
193 	tophys(r11,r11)
194 	addi	r11,r11,global_dbcr0@l
195 	lwz	r12,0(r11)
196 	mtspr	SPRN_DBCR0,r12
197 	lwz	r12,4(r11)
198 	addi	r12,r12,-1		/* decrement use count at global_dbcr0+4 */
199 	stw	r12,4(r11)
200 #endif
201 
202 3:
203 	tovirt_novmstack r2, r2 	/* set r2 to current */
204 	lis	r11, transfer_to_syscall@h
205 	ori	r11, r11, transfer_to_syscall@l
206 #ifdef CONFIG_TRACE_IRQFLAGS
207 	/*
208 	 * If MSR is changing we need to keep interrupts disabled at this point
209 	 * otherwise we might risk taking an interrupt before we tell lockdep
210 	 * they are enabled.
211 	 */
212 	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL)
213 	rlwimi	r10, r9, 0, MSR_EE	/* inherit EE from the interrupted MSR */
214 #else
215 	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
216 #endif
217 #if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
218 	mtspr	SPRN_NRI, r0
219 #endif
220 	mtspr	SPRN_SRR1,r10
221 	mtspr	SPRN_SRR0,r11
222 	rfi				/* jump to handler, enable MMU */
223 #ifdef CONFIG_40x
224 	b .	/* Prevent prefetch past rfi */
225 #endif
226 99:	b	ret_from_kernel_syscall
227 .endm
228 
/*
 * Save DAR/DSISR into the pt_regs frame at \sp, clobbering \reg1 and
 * \reg2.  A no-op under CONFIG_VMAP_STACK, where EXCEPTION_PROLOG_0
 * already captured them and PROLOG_2 copied them into the frame.
 */
229 .macro save_dar_dsisr_on_stack reg1, reg2, sp
230 #ifndef CONFIG_VMAP_STACK
231 	mfspr	\reg1, SPRN_DAR
232 	mfspr	\reg2, SPRN_DSISR
233 	stw	\reg1, _DAR(\sp)
234 	stw	\reg2, _DSISR(\sp)
235 #endif
236 .endm
237 
/*
 * Leave DAR in \reg1 and DSISR in \reg2, ensuring both are also in
 * the frame at \sp.  Under CONFIG_VMAP_STACK the frame already holds
 * the values (the live SPRs may have been clobbered since entry), so
 * read them back; otherwise read the SPRs and store them.
 */
238 .macro get_and_save_dar_dsisr_on_stack reg1, reg2, sp
239 #ifdef CONFIG_VMAP_STACK
240 	lwz	\reg1, _DAR(\sp)
241 	lwz	\reg2, _DSISR(\sp)
242 #else
243 	save_dar_dsisr_on_stack \reg1, \reg2, \sp
244 #endif
245 .endm
246 
/*
 * tovirt only when CONFIG_VMAP_STACK (stack addresses are virtual);
 * otherwise just copy \src to \dst (skipped if same register).
 */
247 .macro tovirt_vmstack dst, src
248 #ifdef CONFIG_VMAP_STACK
249 	tovirt(\dst, \src)
250 #else
251 	.ifnc	\dst, \src
252 	mr	\dst, \src
253 	.endif
254 #endif
255 .endm
256 
/*
 * tovirt only when NOT CONFIG_VMAP_STACK (prologue ran on physical
 * addresses); otherwise just copy \src to \dst (skipped if same reg).
 */
257 .macro tovirt_novmstack dst, src
258 #ifndef CONFIG_VMAP_STACK
259 	tovirt(\dst, \src)
260 #else
261 	.ifnc	\dst, \src
262 	mr	\dst, \src
263 	.endif
264 #endif
265 .endm
266 
/*
 * tophys only when NOT CONFIG_VMAP_STACK; with VMAP stacks addresses
 * stay virtual, so just copy \src to \dst (skipped if same register).
 */
267 .macro tophys_novmstack dst, src
268 #ifndef CONFIG_VMAP_STACK
269 	tophys(\dst, \src)
270 #else
271 	.ifnc	\dst, \src
272 	mr	\dst, \src
273 	.endif
274 #endif
275 .endm
276 
277 /*
278  * Note: code which follows this uses cr0.eq (set if from kernel),
279  * r11, r12 (SRR0), and r9 (SRR1).
280  *
281  * Note2: once we have set r1 we are in a position to take exceptions
282  * again, and we could thus set MSR:RI at that point.
283  */
284 
285 /*
286  * Exception vectors.
287  */
/*
 * Emit an exception vector at fixed offset 'n' ('. = n') labelled
 * 'label'.  On Book3S a DO_KVM hook precedes the label -- presumably
 * so KVM can intercept the vector; confirm against the DO_KVM
 * definition.
 */
288 #ifdef CONFIG_PPC_BOOK3S
289 #define	START_EXCEPTION(n, label)		\
290 	. = n;					\
291 	DO_KVM n;				\
292 label:

293 #else
294 #define	START_EXCEPTION(n, label)		\
295 	. = n;					\
296 label:

297 #endif
300 
/*
 * Boilerplate exception vector: standard prologue, r3 = pt_regs
 * pointer, then transfer to C handler 'hdlr' via 'xfer'
 * (EXC_XFER_STD or EXC_XFER_LITE).
 */
301 #define EXCEPTION(n, label, hdlr, xfer)		\
302 	START_EXCEPTION(n, label)		\
303 	EXCEPTION_PROLOG;			\
304 	addi	r3,r1,STACK_FRAME_OVERHEAD;	\
305 	xfer(n, hdlr)
306 
/*
 * Hand off to a C exception handler: record the trap number in the
 * frame, put the handler MSR in r10, then branch-and-link to the
 * transfer stub 'tfer'.  The two .long words after the bl are data,
 * not code: the stub locates the handler and return addresses
 * through LR (confirm against transfer_to_handler in entry_32.S).
 * EXC_XFER_LITE stores n+1 (low bit set) and uses the lighter
 * transfer/return pair -- presumably a frame without non-volatile
 * GPRs saved; EXC_XFER_STD stores n and uses the full pair.
 */
307 #define EXC_XFER_TEMPLATE(hdlr, trap, msr, tfer, ret)		\
308 	li	r10,trap;					\
309 	stw	r10,_TRAP(r11);					\
310 	LOAD_REG_IMMEDIATE(r10, msr);				\
311 	bl	tfer;						\
312 	.long	hdlr;						\
313 	.long	ret

314 #define EXC_XFER_STD(n, hdlr)		\
315 	EXC_XFER_TEMPLATE(hdlr, n, MSR_KERNEL, transfer_to_handler_full,	\
316 			  ret_from_except_full)

317 #define EXC_XFER_LITE(n, hdlr)		\
318 	EXC_XFER_TEMPLATE(hdlr, n+1, MSR_KERNEL, transfer_to_handler, \
319 			  ret_from_except)
322 
/*
 * Handle the stack overflow detected by EXCEPTION_PROLOG_1: switch
 * to this CPU's emergency stack (emergency_ctx, indexed per CPU under
 * SMP), falling back to init_thread_union if the emergency context
 * is not yet set up, then build a frame and call
 * stack_overflow_exception.  Only built with CONFIG_VMAP_STACK.
 */
323 .macro vmap_stack_overflow_exception
324 #ifdef CONFIG_VMAP_STACK
325 #ifdef CONFIG_SMP
326 	mfspr	r1, SPRN_SPRG_THREAD
327 	lwz	r1, TASK_CPU - THREAD(r1)
328 	slwi	r1, r1, 3		/* 8 bytes per emergency_ctx entry -- TODO confirm layout */
329 	addis	r1, r1, emergency_ctx@ha
330 #else
331 	lis	r1, emergency_ctx@ha
332 #endif
333 	lwz	r1, emergency_ctx@l(r1)
334 	cmpwi	cr1, r1, 0		/* cr1: leave cr0 (user/kernel) intact for PROLOG_2 -- NOTE(review) confirm */
335 	bne	cr1, 1f
336 	lis	r1, init_thread_union@ha
337 	addi	r1, r1, init_thread_union@l
338 1:	addi	r1, r1, THREAD_SIZE - INT_FRAME_SIZE
339 	EXCEPTION_PROLOG_2
340 	SAVE_NVGPRS(r11)
341 	addi	r3, r1, STACK_FRAME_OVERHEAD
342 	EXC_XFER_STD(0, stack_overflow_exception)
343 #endif
344 .endm
345 
346 #endif /* __HEAD_32_H__ */
347