xref: /linux/arch/loongarch/lib/memcpy.S (revision 21ab7031cbff8c6b6f608234e18ffe0473e98f9d)
1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
4 */
5
6#include <asm/alternative-asm.h>
7#include <asm/asm.h>
8#include <asm/asmmacro.h>
9#include <asm/cpu.h>
10#include <asm/export.h>
11#include <asm/regdef.h>
12
SYM_FUNC_START(memcpy)
	/*
	 * void *memcpy(void *dst, const void *src, size_t n)
	 *
	 * Dispatch stub: the kernel alternatives mechanism patches this
	 * branch at boot. CPUs advertising CPU_FEATURE_UAL (hardware
	 * unaligned access) get the word-at-a-time __memcpy_fast;
	 * everything else falls back to the byte-wise __memcpy_generic.
	 * Both callees take the same (a0 = dst, a1 = src, a2 = n)
	 * arguments and return dst in a0, so a tail branch suffices.
	 */
	ALTERNATIVE	"b __memcpy_generic", \
			"b __memcpy_fast", CPU_FEATURE_UAL
SYM_FUNC_END(memcpy)

EXPORT_SYMBOL(memcpy)
22
/*
 * void *__memcpy_generic(void *dst, const void *src, size_t n)
 *
 * Byte-by-byte fallback for CPUs without hardware unaligned access.
 *
 * a0: dst
 * a1: src
 * a2: n
 * Returns the original dst in a0. Clobbers t0, a1, a2, a3.
 */
SYM_FUNC_START(__memcpy_generic)
	move	a3, a0			/* save dst for the return value */
	beqz	a2, 2f			/* n == 0: nothing to copy */

	/* a2 = bytes remaining; invariant: a2 > 0 at the loop head */
1:	ld.b	t0, a1, 0
	st.b	t0, a0, 0
	addi.d	a0, a0, 1
	addi.d	a1, a1, 1
	addi.d	a2, a2, -1
	/*
	 * n is a size_t, so avoid the signed "bgt a2, zero" (which
	 * would stop after one byte if n >= 2^63).  a2 decrements by
	 * exactly 1 per iteration and reaches 0, so bnez copies
	 * exactly n bytes for any nonzero n.
	 */
	bnez	a2, 1b

2:	move	a0, a3			/* return original dst */
	jr	ra
SYM_FUNC_END(__memcpy_generic)
44
/*
 * void *__memcpy_fast(void *dst, const void *src, size_t n)
 *
 * Fast path for CPUs with hardware unaligned access (CPU_FEATURE_UAL):
 * copy 64 bytes per iteration with doubleword loads/stores (alignment
 * irrelevant thanks to UAL), then mop up the tail byte-by-byte.
 *
 * a0: dst
 * a1: src
 * a2: n
 * Returns the original dst in a0. Clobbers t0-t7, a1, a2, a4.
 */
SYM_FUNC_START(__memcpy_fast)
	move	a3, a0			/* save dst for the return value */
	beqz	a2, 3f			/* n == 0: nothing to copy */

	ori	a4, zero, 64		/* a4 = unrolled-block size */
	/*
	 * n is a size_t: use the unsigned bltu (and bgeu/bnez below)
	 * rather than signed blt/bge/bgt, which would misroute sizes
	 * with the top bit set to the byte loop and then stop after a
	 * single byte.
	 */
	bltu	a2, a4, 2f		/* n < 64: skip the unrolled loop */

	/* copy 64 bytes at a time; a2 >= 64 at the loop head */
1:	ld.d	t0, a1, 0
	ld.d	t1, a1, 8
	ld.d	t2, a1, 16
	ld.d	t3, a1, 24
	ld.d	t4, a1, 32
	ld.d	t5, a1, 40
	ld.d	t6, a1, 48
	ld.d	t7, a1, 56
	st.d	t0, a0, 0
	st.d	t1, a0, 8
	st.d	t2, a0, 16
	st.d	t3, a0, 24
	st.d	t4, a0, 32
	st.d	t5, a0, 40
	st.d	t6, a0, 48
	st.d	t7, a0, 56

	addi.d	a0, a0, 64
	addi.d	a1, a1, 64
	addi.d	a2, a2, -64
	bgeu	a2, a4, 1b		/* unsigned: loop while >= 64 left */

	beqz	a2, 3f			/* multiple of 64: no tail */

	/* copy the remaining 1..63 bytes; a2 > 0 at the loop head */
2:	ld.b	t0, a1, 0
	st.b	t0, a0, 0
	addi.d	a0, a0, 1
	addi.d	a1, a1, 1
	addi.d	a2, a2, -1
	bnez	a2, 2b			/* a2 reaches exactly 0 */

	/* return */
3:	move	a0, a3			/* return original dst */
	jr	ra
SYM_FUNC_END(__memcpy_fast)
96