xref: /linux/arch/sparc/lib/blockops.S (revision 80d443e8876602be2c130f79c4de81e12e2a700d)
1/*
2 * blockops.S: Common block zero optimized routines.
3 *
4 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
5 */
6
7#include <linux/linkage.h>
8#include <asm/page.h>
9#include <asm/export.h>
10
	/* Zero out 64 bytes of memory at (buf + offset).
	 *
	 * Each "std %g0" stores the 8-byte register pair %g0:%g1;
	 * %g0 is the SPARC hardwired zero register, and %g1 must
	 * also hold zero (callers below clear it first), so eight
	 * stores clear 64 bytes.  (buf + offset) must be at least
	 * 8-byte aligned, as std traps on misaligned addresses.
	 * Stores run from the highest offset down to 0x00.
	 */
#define BLAST_BLOCK(buf, offset) \
	std	%g0, [buf + offset + 0x38]; \
	std	%g0, [buf + offset + 0x30]; \
	std	%g0, [buf + offset + 0x28]; \
	std	%g0, [buf + offset + 0x20]; \
	std	%g0, [buf + offset + 0x18]; \
	std	%g0, [buf + offset + 0x10]; \
	std	%g0, [buf + offset + 0x08]; \
	std	%g0, [buf + offset + 0x00];
23
	/* Copy 32 bytes of memory at (src + offset) to
	 * (dst + offset).
	 *
	 * Four ldd/std pairs move 8 bytes each.  t0/t2/t4/t6 must
	 * be EVEN-numbered registers: ldd/std operate on an even/odd
	 * register pair (tN, tN+1), so t1/t3/t5/t7 are implicitly
	 * written/read as the odd halves.  All four loads are issued
	 * before any store, which assumes the 32-byte src and dst
	 * windows do not overlap (true for the page-copy caller
	 * below -- presumably required of any other caller too).
	 * Both src+offset and dst+offset must be 8-byte aligned.
	 */
#define MIRROR_BLOCK(dst, src, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
	ldd	[src + offset + 0x18], t0; \
	ldd	[src + offset + 0x10], t2; \
	ldd	[src + offset + 0x08], t4; \
	ldd	[src + offset + 0x00], t6; \
	std	t0, [dst + offset + 0x18]; \
	std	t2, [dst + offset + 0x10]; \
	std	t4, [dst + offset + 0x08]; \
	std	t6, [dst + offset + 0x00];
36
37	/* Profiling evidence indicates that memset() is
38	 * commonly called for blocks of size PAGE_SIZE,
39	 * and (2 * PAGE_SIZE) (for kernel stacks)
40	 * and with a second arg of zero.  We assume in
41	 * all of these cases that the buffer is aligned
42	 * on at least an 8 byte boundary.
43	 *
44	 * Therefore we special case them to make them
45	 * as fast as possible.
46	 */
47
48	.text
ENTRY(bzero_1page)
/* NOTE: If you change the number of insns of this routine, please check
 * arch/sparc/mm/hypersparc.S */
	/* void bzero_1page(void *buf)
	 *
	 * Zero exactly one PAGE_SIZE buffer.  %o0 = buf, assumed
	 * page-aligned (at minimum 8-byte aligned for the std
	 * stores in BLAST_BLOCK).  Clobbers %g1, %g2, %o1.
	 *
	 * The instruction count is load-bearing: hypersparc.S
	 * depends on this routine's exact size (see NOTE above),
	 * so do not add, remove, or reorder instructions.
	 */
	/* %o0 = buf */
	or	%g0, %g0, %g1			/* %g1 = 0: odd half of the %g0:%g1 pair stored by std */
	or	%o0, %g0, %o1			/* %o1 = buf; kept for insn-count/ABI parity -- TODO confirm consumer */
	or	%g0, (PAGE_SIZE >> 8), %g2	/* %g2 = PAGE_SIZE / 256 = number of loop iterations */
1:
	/* Clear 256 bytes (4 x 64) per iteration. */
	BLAST_BLOCK(%o0, 0x00)
	BLAST_BLOCK(%o0, 0x40)
	BLAST_BLOCK(%o0, 0x80)
	BLAST_BLOCK(%o0, 0xc0)
	subcc	%g2, 1, %g2		/* decrement count, setting condition codes */
	bne	1b
	 add	%o0, 0x100, %o0		/* delay slot: advance buf by 256 bytes (executes either way) */

	retl
	 nop				/* delay slot of retl */
ENDPROC(bzero_1page)
EXPORT_SYMBOL(bzero_1page)
69
ENTRY(__copy_1page)
/* NOTE: If you change the number of insns of this routine, please check
 * arch/sparc/mm/hypersparc.S */
	/* void __copy_1page(void *dst, const void *src)
	 *
	 * Copy exactly one PAGE_SIZE buffer.  %o0 = dst, %o1 = src;
	 * both assumed 8-byte aligned (ldd/std trap otherwise) and
	 * non-overlapping.  Clobbers %g1-%g5, %o2-%o5.
	 *
	 * The instruction count is load-bearing: hypersparc.S
	 * depends on this routine's exact size (see NOTE above),
	 * so do not add, remove, or reorder instructions.
	 */
	/* %o0 = dst, %o1 = src */
	or	%g0, (PAGE_SIZE >> 8), %g1	/* %g1 = PAGE_SIZE / 256 = number of loop iterations */
1:
	/* Copy 256 bytes (8 x 32) per iteration using four
	 * even/odd register pairs as temporaries. */
	MIRROR_BLOCK(%o0, %o1, 0x00, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0x20, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0x40, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0x60, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0x80, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0xa0, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0xc0, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	MIRROR_BLOCK(%o0, %o1, 0xe0, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5)
	subcc	%g1, 1, %g1		/* decrement count, setting condition codes */
	add	%o0, 0x100, %o0		/* advance dst by 256 bytes */
	bne	1b
	 add	%o1, 0x100, %o1		/* delay slot: advance src by 256 bytes (executes either way) */

	retl
	 nop				/* delay slot of retl */
ENDPROC(__copy_1page)
EXPORT_SYMBOL(__copy_1page)
93