xref: /linux/arch/powerpc/lib/mem_64.S (revision be58f7103700a68d5c7ca60a2bc0b309907599ab)
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * String handling functions for PowerPC.
 *
 * Copyright (C) 1996 Paul Mackerras.
 */
#include <asm/processor.h>
#include <asm/errno.h>
#include <asm/ppc_asm.h>
#include <asm/export.h>
#include <asm/kasan.h>

#ifndef CONFIG_KASAN
/*
 * void *__memset16(u16 *s, u16 v, __kernel_size_t count)
 * void *__memset32(u32 *s, u32 v, __kernel_size_t count)
 * void *__memset64(u64 *s, u64 v, __kernel_size_t count)
 *
 * In: r3 = dst, r4 = fill value (16/32/64 bits), r5 = length in BYTES
 * (callers pass count scaled by the element size).  Each entry point
 * replicates the value across all 64 bits of r4, then joins memset's
 * common fill path at .Lms; the value replication steps fall through.
 */
_GLOBAL(__memset16)
	rlwimi	r4,r4,16,0,15		/* replicate halfword into low word */
	/* fall through */

_GLOBAL(__memset32)
	rldimi	r4,r4,32,0		/* replicate word into doubleword */
	/* fall through */

_GLOBAL(__memset64)
	neg	r0,r3
	andi.	r0,r0,7			/* r0 = bytes to reach 8-byte alignment */
	cmpld	cr1,r5,r0		/* full 64-bit len vs head count (was cmplw,
					 * which ignored the upper 32 bits of r5) */
	b	.Lms
EXPORT_SYMBOL(__memset16)
EXPORT_SYMBOL(__memset32)
EXPORT_SYMBOL(__memset64)
#endif
31
/*
 * void *memset(void *s, int c, __kernel_size_t count)
 *
 * In:  r3 = s, r4 = fill byte, r5 = count.
 * Out: r3 = s (r3 is never written; all stores go through cursor r6).
 *
 * Strategy: replicate the fill byte across all 8 bytes of r4,
 * byte/half/word-fill the head up to an 8-byte boundary, blast the
 * bulk with a 64-byte unrolled std loop, then mirror the head logic
 * for the tail.  PPC_MTOCRF(1,reg) copies the low nibble of a count
 * into cr7, so bf on CR bits 29/30/31 tests the 4/2/1 components of
 * that count and selects exactly the stores needed.
 */
_GLOBAL_KASAN(memset)
	neg	r0,r3			/* r0 = bytes to next 8-byte boundary */
	rlwimi	r4,r4,8,16,23		/* replicate byte into halfword */
	andi.	r0,r0,7			/* # bytes to be 8-byte aligned */
	rlwimi	r4,r4,16,0,15		/* replicate halfword into word */
	cmpld	cr1,r5,r0		/* do we get that far?  64-bit compare
					 * (was cmplw: a len >= 4GiB with a small
					 * low word wrongly took the tail path) */
	rldimi	r4,r4,32,0		/* replicate word into doubleword */
.Lms:	PPC_MTOCRF(1,r0)		/* head count low nibble -> cr7 */
	mr	r6,r3			/* r6 = store cursor; r3 = return value */
	blt	cr1,8f			/* whole length fits in the head */
	beq	3f			/* if already 8-byte aligned */
	subf	r5,r0,r5		/* consume head bytes from the length */
	bf	31,1f			/* head & 1: one byte */
	stb	r4,0(r6)
	addi	r6,r6,1
1:	bf	30,2f			/* head & 2: one halfword */
	sth	r4,0(r6)
	addi	r6,r6,2
2:	bf	29,3f			/* head & 4: one word */
	stw	r4,0(r6)
	addi	r6,r6,4
3:	srdi.	r0,r5,6			/* r0 = # of 64-byte chunks */
	clrldi	r5,r5,58		/* r5 = remainder, < 64 */
	mtctr	r0
	beq	5f			/* no full chunks */
	.balign 16
4:	std	r4,0(r6)		/* 64 bytes per iteration */
	std	r4,8(r6)
	std	r4,16(r6)
	std	r4,24(r6)
	std	r4,32(r6)
	std	r4,40(r6)
	std	r4,48(r6)
	std	r4,56(r6)
	addi	r6,r6,64
	bdnz	4b
5:	srwi.	r0,r5,3			/* r0 = remaining doublewords (r5 < 64) */
	clrlwi	r5,r5,29		/* r5 = remaining bytes, < 8 */
	PPC_MTOCRF(1,r0)
	beq	8f
	bf	29,6f			/* r0 & 4: four doublewords */
	std	r4,0(r6)
	std	r4,8(r6)
	std	r4,16(r6)
	std	r4,24(r6)
	addi	r6,r6,32
6:	bf	30,7f			/* r0 & 2: two doublewords */
	std	r4,0(r6)
	std	r4,8(r6)
	addi	r6,r6,16
7:	bf	31,8f			/* r0 & 1: one doubleword */
	std	r4,0(r6)
	addi	r6,r6,8
8:	cmpwi	r5,0			/* tail: 0-7 bytes left */
	PPC_MTOCRF(1,r5)
	beqlr				/* nothing left: return s in r3 */
	bf	29,9f			/* r5 & 4 */
	stw	r4,0(r6)
	addi	r6,r6,4
9:	bf	30,10f			/* r5 & 2 */
	sth	r4,0(r6)
	addi	r6,r6,2
10:	bflr	31			/* r5 & 1 clear: done */
	stb	r4,0(r6)
	blr
EXPORT_SYMBOL(memset)
EXPORT_SYMBOL_KASAN(memset)
99
/*
 * void *memmove(void *dest, const void *src, __kernel_size_t n)
 *
 * Overlap-safe copy: when dest > src an ascending copy could read
 * bytes it has already overwritten, so copy descending via
 * backwards_memcpy; otherwise a plain forward memcpy is safe.
 */
_GLOBAL_TOC_KASAN(memmove)
	cmpld	0,r3,r4			/* compare full 64-bit dest vs src (was
					 * cmplw: low-word-only compare picked the
					 * wrong direction for overlapping buffers
					 * straddling a 4GiB boundary) */
	bgt	backwards_memcpy
	b	memcpy
104
/*
 * Descending copy of r5 bytes from src (r4) to dst (r3); used by
 * memmove when dst > src.  r3 is never written here, so the original
 * dest pointer survives as the return value.  r6/r4 are kept pointing
 * one past the next byte to store/load and are pre-decremented by the
 * update-form loads/stores (lwzu/stwu/lbzu/stbu).
 *
 * NOTE(review): the doubleword-pair count comes from rlwinm, which
 * reads only the low 32 bits of r5 -- lengths >= 4GiB are not handled
 * by this routine; confirm callers never pass such sizes.
 */
_GLOBAL(backwards_memcpy)
	rlwinm.	r7,r5,32-3,3,31		/* r7 = (u32)r5 >> 3 = # of 8-byte pairs */
	add	r6,r3,r5		/* r6 = dst + n (end of destination) */
	add	r4,r4,r5		/* r4 = src + n (end of source) */
	beq	2f			/* fewer than 8 bytes in total */
	andi.	r0,r6,3			/* is the end of dst word-aligned? */
	mtctr	r7
	bne	5f			/* no: byte-copy the top fragment first */
	.balign 16
1:	lwz	r7,-4(r4)		/* main loop: 8 bytes per iteration, */
	lwzu	r8,-8(r4)		/* two words at a time, descending */
	stw	r7,-4(r6)
	stwu	r8,-8(r6)
	bdnz	1b
	andi.	r5,r5,7			/* r5 = leftover bytes, < 8 */
2:	cmplwi	0,r5,4
	blt	3f
	lwzu	r0,-4(r4)		/* one leftover word */
	subi	r5,r5,4
	stwu	r0,-4(r6)
3:	cmpwi	0,r5,0
	beqlr				/* nothing left: done */
	mtctr	r5
4:	lbzu	r0,-1(r4)		/* final 1-3 bytes, one at a time */
	stbu	r0,-1(r6)
	bdnz	4b
	blr
5:	mtctr	r0			/* byte-copy r0 (1-3) bytes so the end
					 * of dst becomes word-aligned */
6:	lbzu	r7,-1(r4)
	stbu	r7,-1(r6)
	bdnz	6b
	subf	r5,r0,r5		/* account for the alignment bytes */
	rlwinm.	r7,r5,32-3,3,31		/* recompute the 8-byte pair count */
	beq	2b			/* < 8 bytes remain: word/byte tail */
	mtctr	r7
	b	1b			/* rejoin the aligned main loop */
EXPORT_SYMBOL(memmove)
EXPORT_SYMBOL_KASAN(memmove)
143