xref: /linux/arch/mips/lib/memset.S (revision b6ebbac51bedf9e98e837688bc838f400196da5e)
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif
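/*
 * LONG_S_L/LONG_S_R are the left/right partial stores (swl/swr for
 * 32-bit longs, sdl/sdr for 64-bit) used below to reach word/dword
 * alignment with a single store.  MIPS R6 removed these instructions,
 * which is why the CONFIG_CPU_MIPSR6 paths fall back to byte stores.
 */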

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif
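/*
 * On microMIPS, LONG_S is redefined to the paired store LONG_SP, so a
 * single "store" writes two longs and STORSIZE doubles accordingly.
 * FILL64RG and FILLPTRG name the registers holding the fill pattern
 * and the computed-jump offset for the partial block; microMIPS uses
 * t8/t7 here instead of a1/t0.
 */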

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

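/*
 * EX wraps a single store so that a fault in it can be recovered from:
 * the store gets a local label and a __ex_table entry pairing that
 * address with the fixup handler named by the caller.  In EVA_MODE the
 * store is rewritten into its EVA user-access form (e.g. sb -> sbe).
 */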
#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous

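/*
 * f_fill64 stores one complete 64-byte block.  The number of EX()
 * stores emitted varies with the configuration, but each one writes
 * STORSIZE bytes, so every variant covers exactly 64 bytes:
 * 16 x 4 (32-bit), 8 x 8 (64-bit), 8 x 8 (microMIPS store pairs,
 * 32-bit longs) or 4 x 16 (microMIPS store pairs, 64-bit longs).
 */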
	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm

	.set	noreorder
	.align	5

	/*
	 * Macro to generate the __bzero{,_user} symbol
	 * Arguments:
	 * mode: LEGACY_MODE or EVA_MODE
	 */
	.macro __BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set __memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif
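	/*
	 * __BUILD_BZERO may be expanded twice (LEGACY_MODE and, with EVA,
	 * EVA_MODE).  The __memset symbol defined above acts as a guard so
	 * that END(memset) further down is only emitted by the first
	 * expansion, i.e. the one that forms the body of memset itself.
	 */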

	sltiu		t0, a2, STORSIZE	/* very small region? */
	bnez		t0, .Lsmall_memset\@
	andi		t0, a0, STORMASK	/* aligned? */

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif

#ifndef CONFIG_CPU_MIPSR6
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_MIPSR6 */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	beqz		t0, 0f;			\
	PTR_ADDU	t0, 1;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
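	/*
	 * Round a0 up to the next STORSIZE boundary: the ori/xori pair
	 * clears the low bits (rounding down) and the ADDIU then steps
	 * over the STORSIZE bytes whose head was just filled bytewise,
	 * e.g. with STORSIZE == 8, a0 == 0x1003 becomes 0x1008.
	 */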
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_MIPSR6 */
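	/*
	 * a0 is now STORSIZE-aligned and a2 counts the bytes still to fill
	 * from it.  t1 below is a2 rounded down to a multiple of 64 (the
	 * bytes covered by full 64-byte blocks); t0 is the length of the
	 * trailing partial block, rounded down to STORSIZE.
	 */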
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	beqz		t1, .Lmemset_partial\@	/* no block to fill */
	andi		t0, a2, 0x40-STORSIZE

	PTR_ADDU	t1, a0			/* end address */
	.set		reorder
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b
	.set		noreorder

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	jr		t1
	PTR_ADDU	a0, t0			/* dest ptr */
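	/*
	 * Computed jump for the partial block: t1 starts at 2f, the end of
	 * the f_fill64 expansion below, and is pulled back by one store
	 * instruction per STORSIZE bytes still needed (t0), so the jr
	 * lands part-way into the unrolled sequence and only the trailing
	 * stores execute.  The delay slot advances a0 past the partial
	 * block so that the -64-relative offsets line up.
	 */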

	.set		push
	.set		noreorder
	.set		nomacro
	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	.set		pop
	andi		a2, STORMASK		/* At most one long to go */

	beqz		a2, 1f
#ifndef CONFIG_CPU_MIPSR6
	PTR_ADDU	a0, a2			/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else
	PTR_SUBU	t0, $0, a2
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif
1:	jr		ra
	move		a2, zero

.Lsmall_memset\@:
	beqz		a2, 2f
	PTR_ADDU	t1, a0, a2

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne		t1, a0, 1b
	sb		a1, -1(a0)

2:	jr		ra			/* done */
	move		a2, zero
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

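	/*
	 * Fault fixups.  A faulting EX() store is redirected here through
	 * its __ex_table entry; the fault handler records the faulting
	 * address in THREAD_BUADDR.  These handlers recompute a2, the
	 * residual byte count that __bzero hands back to its callers
	 * (memset itself still returns the original pointer in v0).
	 */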
#ifdef CONFIG_CPU_MIPSR6
.Lbyte_fixup\@:
	PTR_SUBU	a2, $0, t0
	jr		ra
	 PTR_ADDIU	a2, 1
#endif /* CONFIG_CPU_MIPSR6 */

.Lfirst_fixup\@:
	jr	ra
	nop

.Lfwd_fixup\@:
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr		ra
	LONG_SUBU	a2, t0

.Lpartial_fixup\@:
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr		ra
	LONG_SUBU	a2, t0

.Llast_fixup\@:
	jr		ra
	andi		v1, a2, STORMASK

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
	beqz		a1, 1f
	move		v0, a0			/* result */

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL		t1, a1, 8
	or		a1, t1
	LONG_SLL		t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL		t1, a1, 32
#endif
	or		a1, t1
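	/*
	 * a1 now holds the fill byte replicated across a long, i.e. roughly
	 * c * 0x01010101 (or c * 0x0101010101010101 on 64-bit kernels);
	 * e.g. c == 0xab yields 0xabababab.  The beqz above skips the
	 * replication when the fill byte is zero.
	 */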
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
#else
FEXPORT(__bzero_kernel)
#endif
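/*
 * The LEGACY_MODE expansion below forms the body of memset and is also
 * exported as __bzero (without EVA) or __bzero_kernel (with EVA).  When
 * EVA is enabled, a second EVA_MODE expansion further down provides
 * __bzero for user-space addresses.
 */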
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif
298