/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2018, Matthew Macy <mmacy@freebsd.org>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 */

/*
 * Assembly variants of various functions, for those that don't need generic C
 * implementations.  Currently this includes:
 *
 * - Direct-access versions of the copyin/copyout methods.
 *   - These are used by the Radix AIM pmap (ISA 3.0) and by Book-E to avoid
 *     unnecessary pmap_map_usr_ptr() calls.
 */

#include "assym.inc"
#include "opt_sched.h"

#include <sys/syscall.h>
#include <sys/errno.h>

#include <machine/param.h>
#include <machine/asm.h>
#include <machine/spr.h>
#include <machine/trap.h>
#include <machine/vmparam.h>

#ifdef _CALL_ELF
.abiversion _CALL_ELF
#endif

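/*
 * Width-dependent load/store/compare mnemonics, plus CURTHREAD, the
 * register that carries the curthread pointer (%r13 on powerpc64, %r2 on
 * 32-bit).
 */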
#ifdef __powerpc64__
#define	LOAD	ld
#define	STORE	std
#define	WORD	8
#define	CMPI	cmpdi
#define	CMPLI	cmpldi
/* log_2(8 * WORD) */
#define	LOOP_LOG	6
#define	LOG_WORD	3
#define	CURTHREAD	%r13
#else
#define	LOAD	lwz
#define	STORE	stw
#define	WORD	4
#define	CMPI	cmpwi
#define	CMPLI	cmplwi
/* log_2(8 * WORD) */
#define	LOOP_LOG	5
#define	LOG_WORD	2
#define	CURTHREAD	%r2
#endif

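/*
 * On AIM these direct-access routines coexist with the generic C
 * copyin/copyout implementations, so their symbols carry a "_direct"
 * suffix; on Book-E they are the only implementations and take the plain
 * names.
 */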
#ifdef AIM
#define	ENTRY_DIRECT(x)	ENTRY(x ## _direct)
#define	END_DIRECT(x)	END(x ## _direct)
#else
#define	ENTRY_DIRECT(x)	ENTRY(x)
#define	END_DIRECT(x)	END(x)
#endif

#ifdef __powerpc64__
#define	PROLOGUE		;\
	mflr	%r0		;\
	std	%r0, 16(%r1)

#define	EPILOGUE		;\
	ld	%r0, 16(%r1)	;\
	mtlr	%r0		;\
	blr			;\
	nop

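/*
 * On powerpc64 a cheap check that bits 63:52 of the user address are clear
 * rejects kernel-space pointers up front; any remaining invalid user
 * address simply faults and is caught through PCB_ONFAULT below.
 */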
#define	VALIDATE_TRUNCATE_ADDR_COPY	VALIDATE_ADDR_COPY
#define	VALIDATE_ADDR_COPY(raddr, len)	\
	srdi  %r0, raddr, 52		;\
	cmpwi %r0, 1			;\
	bge-	copy_fault		;\
	nop

#define	VALIDATE_ADDR_FUSU(raddr)	;\
	srdi  %r0, raddr, 52		;\
	cmpwi %r0, 1			;\
	bge-	fusufault		;\
	nop

#else
#define	PROLOGUE		;\
	mflr	%r0		;\
	stw	%r0, 4(%r1)

#define	EPILOGUE		;\
	lwz	%r0, 4(%r1)	;\
	mtlr	%r0		;\
	blr			;\
	nop

/* %r0 is used as a temporary */
/*
 * Check that the user address and length are valid (below
 * VM_MAXUSER_ADDRESS).  VALIDATE_ADDR_COPY() also has to account for
 * address wraparound.
 */
#define	VALIDATE_ADDR_COPY(raddr, len)		\
	lis	%r0, VM_MAXUSER_ADDRESS@h	;\
	ori	%r0, %r0, VM_MAXUSER_ADDRESS@l	;\
	cmplw	%r0, raddr			;\
	blt-	copy_fault			;\
	add	%r0, raddr, len			;\
	cmplw	7, %r0, raddr			;\
	blt-	7, copy_fault			;\
	mtcrf	0x80, %r0			;\
	bt-	0, copy_fault			;\
	nop

#define	VALIDATE_TRUNCATE_ADDR_COPY(raddr, len)		\
	lis	%r0, VM_MAXUSER_ADDRESS@h	;\
	ori	%r0, %r0, VM_MAXUSER_ADDRESS@l	;\
	cmplw	%r0, raddr			;\
	blt-	copy_fault			;\
	sub	%r0, %r0, raddr			;\
	cmplw	len, %r0			;\
	isel	len, len, %r0, 0

#define	VALIDATE_ADDR_FUSU(raddr)		\
	lis	%r0, VM_MAXUSER_ADDRESS@h	;\
	ori	%r0, %r0, VM_MAXUSER_ADDRESS@l	;\
	cmplw	%r0, raddr			;\
	ble-	fusufault

#endif

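/*
 * The SET_*FAULT macros validate the user address and record a fault token
 * (COPYFAULT or FUSUFAULT, from assym.inc) in the current thread's
 * pcb_onfault.  If the access faults, the machine-dependent trap code uses
 * that token to resume execution at copy_fault or fusufault at the bottom
 * of this file, which clear pcb_onfault and return an error.  The
 * CLEAR_FAULT macros disarm the handler on the successful path.
 */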
#define	SET_COPYFAULT(raddr, rpcb, len)	\
	VALIDATE_ADDR_COPY(raddr, len)	;\
	li	%r0, COPYFAULT		;\
	LOAD	rpcb, TD_PCB(CURTHREAD)	;\
	STORE	%r0, PCB_ONFAULT(rpcb)

#define	SET_COPYFAULT_TRUNCATE(raddr, rpcb, len)\
	VALIDATE_TRUNCATE_ADDR_COPY(raddr, len)	;\
	li	%r0, COPYFAULT			;\
	LOAD	rpcb, TD_PCB(CURTHREAD)		;\
	STORE	%r0, PCB_ONFAULT(rpcb)

#define	SET_FUSUFAULT(raddr, rpcb)	\
	VALIDATE_ADDR_FUSU(raddr)	;\
	li	%r0, FUSUFAULT		;\
	LOAD	rpcb, TD_PCB(CURTHREAD)	;\
	STORE	%r0, PCB_ONFAULT(rpcb)

#define	CLEAR_FAULT_NO_CLOBBER(rpcb)	\
	LOAD	rpcb, TD_PCB(CURTHREAD)	;\
	li	%r0, 0			;\
	STORE	%r0, PCB_ONFAULT(rpcb)

#define	CLEAR_FAULT(rpcb)		\
	CLEAR_FAULT_NO_CLOBBER(rpcb)	;\
	li	%r3, 0

/*
 *  bcopy(src, dst, len)
 *        %r3  %r4  %r5
 *
 *  %r7 is the pcb pointer
 *
 *  %r0 and %r8-%r10 are volatile
 *  %r11 and %r12 are generally volatile, used in linking and exception
 *  handling.  Can be clobbered here.
 *
 * Does not allocate or use stack space, but clobbers all volatile registers.
 */

#define	rs	%r3
#define	rd	%r4
#define	rl	%r5

#define	t1	%r6
#define	t2	%r7
#define	t3	%r8
#define	t4	%r9
#define	t5	%r10
#define	t6	%r11
#define	t7	%r12
#define	t8	%r0

#define	Thresh	WORD * 8
#define	W4	3
#define	W2	2
#define	W1	1
#define	WORDS(n)	(32 - LOG_WORD - W##n)
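/*
 * Thresh is the byte count below which bcopy_generic uses the CR-driven
 * small-copy path.  WORDS(n) names the CR bit that "mtcrf 0x3, rl" loads
 * from the length: the bit is set when the n*WORD-byte bit of the remaining
 * length is set, so each "bf WORDS(n)" below skips that chunk when it is
 * clear.
 */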
.text
ENTRY(bcopy_generic)
	CMPLI	0, %r5, 0
	beq	.Lend
	dcbtst	0, rd
	dcbt	0, rs
	CMPLI	rl, Thresh
	blt	.Lsmall
	b	.Llarge
/* memcpy */
/* ... */
.Lsmall:				/* < 8 words remaining */
	mtcrf	0x3, rl
.Lsmall_start:
	bf	WORDS(4), 0f
	LOAD	t1, 0(rs)
	LOAD	t2, WORD*1(rs)
	LOAD	t3, WORD*2(rs)
	LOAD	t4, WORD*3(rs)
	addi	rs, rs, WORD*4
	STORE	t1, 0(rd)
	STORE	t2, WORD*1(rd)
	STORE	t3, WORD*2(rd)
	STORE	t4, WORD*3(rd)
	addi	rd, rd, WORD*4
0:					/* < 4 words remaining */
	bf	WORDS(2), 1f
	LOAD	t1, 0(rs)
	LOAD	t2, WORD*1(rs)
	addi	rs, rs, WORD*2
	STORE	t1, 0(rd)
	STORE	t2, WORD*1(rd)
	addi	rd, rd, WORD*2
1:					/* < 2 words remaining */
	bf	WORDS(1), 2f
	LOAD	t1, 0(rs)
	addi	rs, rs, WORD
	STORE	t1, 0(rd)
	addi	rd, rd, WORD
2:					/* < 1 word remaining */
#ifdef __powerpc64__
	bf	29, 3f
	lwz	t1, 0(rs)
	addi	rs, rs, 4
	stw	t1, 0(rd)
	addi	rd, rd, 4
3:					/* < 4 bytes remaining */
#endif
	bf	30, 4f
	lhz	t1, 0(rs)
	addi	rs, rs, 2
	sth	t1, 0(rd)
	addi	rd, rd, 2
4:					/* < 2 bytes remaining */
	bf	31, .Lout
	lbz	t1, 0(rs)
	addi	rs, rs, 1
	stb	t1, 0(rd)
	addi	rd, rd, 1
	b	.Lout

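/*
 * Large copy: copy single bytes until the destination is word aligned,
 * then move eight words per iteration, and hand the remaining tail back
 * to the small-copy path above.
 */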
	.align 4
.Llarge:
	neg	t3, rd
	andi.	t6, t3, WORD-1		/* Align rd to word size */
	mtctr	t6
	sub	rl, rl, t6
	beq+	.Llargealigned
1:
	lbz	t1, 0(rs)
	addi	rs, rs, 1
	stb	t1, 0(rd)
	addi	rd, rd, 1
	bdnz	1b

.Llargealigned:
	srwi.	t2, rl, LOOP_LOG  /* length >> log_2(loop_size) => 8W iterations */
	mtcrf	0x3, rl
	beq	.Lsmall_start
	mtctr	t2
	b	1f

	.align 5
1:
	LOAD	t1, 0(rs)
	LOAD	t2, WORD(rs)
	LOAD	t3, WORD*2(rs)
	LOAD	t4, WORD*3(rs)
	LOAD	t5, WORD*4(rs)
	LOAD	t6, WORD*5(rs)
	LOAD	t7, WORD*6(rs)
	LOAD	t8, WORD*7(rs)
	addi	rs, rs, WORD*8
	STORE	t1, 0(rd)
	STORE	t2, WORD*1(rd)
	STORE	t3, WORD*2(rd)
	STORE	t4, WORD*3(rd)
	STORE	t5, WORD*4(rd)
	STORE	t6, WORD*5(rd)
	STORE	t7, WORD*6(rd)
	STORE	t8, WORD*7(rd)
	addi	rd, rd, WORD*8
	bdnz	1b

	b	.Lsmall_start
.Lout:
/* done */
.Lend:
	blr
END(bcopy_generic)

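/*
 * The user copy routines wrap bcopy_generic: SET_COPYFAULT validates the
 * user pointer and arms pcb_onfault, the copy accesses user memory
 * directly, and CLEAR_FAULT disarms the handler and returns 0.  A fault
 * anywhere in between lands in copy_fault and returns EFAULT instead.
 */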
/*
 * copyout(from_kernel, to_user, len)
 *         %r3,        %r4,    %r5
 */
ENTRY_DIRECT(copyout)
	PROLOGUE
	SET_COPYFAULT(%r4, %r7, %r5)
	bl bcopy_generic
	nop
	CLEAR_FAULT(%r7)
	EPILOGUE
END_DIRECT(copyout)

/*
 * copyin(from_user, to_kernel, len)
 *        %r3,        %r4,    %r5
 */
ENTRY_DIRECT(copyin)
	PROLOGUE
	SET_COPYFAULT(%r3, %r7, %r5)
	bl bcopy_generic
	nop
	CLEAR_FAULT(%r7)
	EPILOGUE
END_DIRECT(copyin)

/*
 * copyinstr(const void *udaddr, void *kaddr, size_t len, size_t *done)
 *			%r3          %r4         %r5        %r6
 */

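/*
 * Copies a NUL-terminated string of at most len bytes in from user space.
 * Returns 0 on success, ENAMETOOLONG if no NUL was found within len bytes,
 * or EFAULT (via copy_fault) on a bad user address.  On success, if done is
 * not NULL, the number of bytes copied including the NUL is stored there.
 */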
ENTRY_DIRECT(copyinstr)
	PROLOGUE
	SET_COPYFAULT_TRUNCATE(%r3, %r7, %r5)
	addi	%r9, %r5, 1	/* CTR is decremented before each byte, so allow len copies */
	mtctr	%r9
	/* bias the pointers for lbzu/stbu pre-increment addressing */
	mr	%r8, %r3
	addi	%r8, %r8, -1
	addi	%r4, %r4, -1
	li	%r3, ENAMETOOLONG
0:
	bdz-	2f
	lbzu	%r0, 1(%r8)
	stbu	%r0, 1(%r4)

	/* NUL terminator reached? */
	CMPI	%r0, 0
	beq-	1f
	b	0b
1:
	li	%r3, 0
2:
	/* skip storing length if done is NULL */
	CMPI	%r6, 0
	beq-	3f
	mfctr	%r0
	sub	%r0, %r9, %r0
	STORE	%r0, 0(%r6)
3:
	CLEAR_FAULT_NO_CLOBBER(%r7)
	EPILOGUE
END_DIRECT(copyinstr)

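/*
 * fetch(9)/store(9) primitives.  The su* routines store a value at a user
 * address and return 0 on success; fubyte/fuword16 return the fetched
 * value, while the fueword* routines write it through their second
 * argument and return 0.  All of them return -1, via fusufault, if the
 * user access faults.
 */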
ENTRY_DIRECT(subyte)
	PROLOGUE
	SET_FUSUFAULT(%r3, %r7)
	stb  %r4, 0(%r3)
	CLEAR_FAULT(%r7)
	EPILOGUE
END_DIRECT(subyte)

#ifndef __powerpc64__
ENTRY_DIRECT(suword)
	PROLOGUE
	SET_FUSUFAULT(%r3, %r7)
	stw  %r4, 0(%r3)
	CLEAR_FAULT(%r7)
	EPILOGUE
END_DIRECT(suword)
#endif

ENTRY_DIRECT(suword16)
	PROLOGUE
	SET_FUSUFAULT(%r3, %r7)
	sth  %r4, 0(%r3)
	CLEAR_FAULT(%r7)
	EPILOGUE
END_DIRECT(suword16)

ENTRY_DIRECT(suword32)
	PROLOGUE
	SET_FUSUFAULT(%r3, %r7)
	stw  %r4, 0(%r3)
	CLEAR_FAULT(%r7)
	EPILOGUE
END_DIRECT(suword32)

#ifdef __powerpc64__
ENTRY_DIRECT(suword64)
	PROLOGUE
	SET_FUSUFAULT(%r3, %r7)
	std  %r4, 0(%r3)
	CLEAR_FAULT(%r7)
	EPILOGUE
END_DIRECT(suword64)

ENTRY_DIRECT(suword)
	PROLOGUE
	SET_FUSUFAULT(%r3, %r7)
	std  %r4, 0(%r3)
	CLEAR_FAULT(%r7)
	EPILOGUE
END_DIRECT(suword)
#endif

ENTRY_DIRECT(fubyte)
	PROLOGUE
	SET_FUSUFAULT(%r3, %r7)
	lbz %r3, 0(%r3)
	CLEAR_FAULT_NO_CLOBBER(%r7)
	EPILOGUE
END_DIRECT(fubyte)

ENTRY_DIRECT(fuword16)
	PROLOGUE
	SET_FUSUFAULT(%r3, %r7)
	lhz %r3, 0(%r3)
	CLEAR_FAULT_NO_CLOBBER(%r7)
	EPILOGUE
END_DIRECT(fuword16)

#ifndef __powerpc64__
ENTRY_DIRECT(fueword)
	PROLOGUE
	SET_FUSUFAULT(%r3, %r7)
	lwz  %r0, 0(%r3)
	stw  %r0, 0(%r4)
	CLEAR_FAULT(%r7)
	EPILOGUE
END_DIRECT(fueword)
#endif

ENTRY_DIRECT(fueword32)
	PROLOGUE
	SET_FUSUFAULT(%r3, %r7)
	lwz  %r0, 0(%r3)
	stw  %r0, 0(%r4)
	CLEAR_FAULT(%r7)
	EPILOGUE
END_DIRECT(fueword32)

#ifdef __powerpc64__
ENTRY_DIRECT(fueword)
	PROLOGUE
	SET_FUSUFAULT(%r3, %r7)
	ld  %r0, 0(%r3)
	std %r0, 0(%r4)
	CLEAR_FAULT(%r7)
	EPILOGUE
END_DIRECT(fueword)

ENTRY_DIRECT(fueword64)
	PROLOGUE
	SET_FUSUFAULT(%r3, %r7)
	ld  %r0, 0(%r3)
	std %r0, 0(%r4)
	CLEAR_FAULT(%r7)
	EPILOGUE
END_DIRECT(fueword64)
#endif

/*
 * casueword(volatile u_long *base, u_long old, u_long *oldp, u_long new)
 *			      %r3          %r4           %r5         %r6
 */

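/*
 * Compare-and-set of a user-space word using lwarx/stwcx. (ldarx/stdcx.
 * in the 64-bit variant).  On a successful return (0 or 1) the value read
 * from *base is written to *oldp.  Returns 0 when *base matched old and
 * new was stored, 1 when the comparison or the store-conditional failed
 * (the caller may retry), and -1 via fusufault on a bad user address.
 */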
#define	CASUEWORD32(raddr, rpcb)					;\
	PROLOGUE							;\
	SET_FUSUFAULT(raddr, rpcb)					;\
	li	%r8, 0							;\
1:									;\
	lwarx	%r0, 0, %r3						;\
	cmplw	%r4, %r0						;\
	bne	2f							;\
	stwcx.	%r6, 0, %r3						;\
	bne-	3f							;\
	b	4f							;\
2:									;\
	stwcx.	%r0, 0, %r3       	/* clear reservation (74xx) */	;\
3:									;\
	li	%r8, 1							;\
4:									;\
	stw	%r0, 0(%r5)						;\
	CLEAR_FAULT_NO_CLOBBER(rpcb)					;\
	mr	%r3, %r8						;\
	EPILOGUE

ENTRY_DIRECT(casueword32)
	CASUEWORD32(%r3, %r7)
END_DIRECT(casueword32)

#ifdef __powerpc64__
#define	CASUEWORD64(raddr, rpcb)					;\
	PROLOGUE							;\
	SET_FUSUFAULT(raddr, rpcb)					;\
	li	%r8, 0							;\
1:									;\
	ldarx	%r0, 0, %r3						;\
	cmpld	%r4, %r0						;\
	bne	2f							;\
	stdcx.	%r6, 0, %r3						;\
	bne-	3f							;\
	b	4f							;\
2:									;\
	stdcx.	%r0, 0, %r3       	/* clear reservation (74xx) */	;\
3:									;\
	li	%r8, 1							;\
4:									;\
	std	%r0, 0(%r5)						;\
	CLEAR_FAULT_NO_CLOBBER(rpcb)					;\
	mr	%r3, %r8						;\
	EPILOGUE

ENTRY_DIRECT(casueword)
	CASUEWORD64(%r3, %r7)
END_DIRECT(casueword)

ENTRY_DIRECT(casueword64)
	CASUEWORD64(%r3, %r7)
END_DIRECT(casueword64)
#else
ENTRY_DIRECT(casueword)
	CASUEWORD32(%r3, %r7)
END_DIRECT(casueword)
#endif

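/*
 * Fault landing pads, reached from the trap handler via the token stored
 * in pcb_onfault when a guarded user access faults.  Both clear
 * pcb_onfault; fusufault returns -1 for the fetch/store/casueword
 * routines, while copy_fault returns EFAULT for the copy routines.
 */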
_NAKED_ENTRY(fusufault)
	CLEAR_FAULT_NO_CLOBBER(%r7)
	li %r3, -1
	EPILOGUE
_END(fusufault)

_NAKED_ENTRY(copy_fault)
	CLEAR_FAULT_NO_CLOBBER(%r7)
	li %r3, EFAULT
	EPILOGUE
_END(copy_fault)