xref: /linux/arch/csky/abiv2/memset.S (revision 3eb66e91a25497065c5322b1268cbc3953642227)
/* SPDX-License-Identifier: GPL-2.0 */
// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.

#include <linux/linkage.h>
#include "sysdep.h"
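/* The LABLE_ALIGN, PRE_BNEZAD and BNEZAD helper macros used below are
   expected to come from "sysdep.h".  */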

	.weak memset
ENTRY(__memset)
ENTRY(memset)
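	/* On entry: r0 = dest, r1 = fill byte, r2 = len.
	   r12 preserves dest so it can be returned in r0.  */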
	/* Test if len less than 8 bytes.  */
	mov	r12, r0
	cmplti	r2, 8
	bt	.L_set_by_byte

	andi	r13, r0, 3
	movi	r19, 4
	/* Test if dest is not 4 bytes aligned.  */
	bnez	r13, .L_dest_not_aligned
	/* Hardware can handle unaligned access directly.  */
.L_dest_aligned:
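	/* Replicate the low byte of r1 into all four bytes of r3
	   (e.g. 0xcc -> 0xcccccccc) so whole words can be stored.  */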
	zextb	r3, r1
	lsli	r1, 8
	or	r1, r3
	lsli	r3, r1, 16
	or	r3, r1

	/* Dest is aligned: store the pattern a word at a time.
	   r18 = number of 16-byte blocks (len >> 4).  */
	zext	r18, r2, 31, 4
	/* Test if len less than 16 bytes.  */
	bez	r18, .L_len_less_16bytes

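	/* LABLE_ALIGN aligns the loop entry.  Each iteration stores
	   16 bytes; PRE_BNEZAD/BNEZAD decrement r18 and branch back
	   while it is non-zero.  */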
	LABLE_ALIGN
.L_len_larger_16bytes:
	stw	r3, (r0, 0)
	stw	r3, (r0, 4)
	stw	r3, (r0, 8)
	stw	r3, (r0, 12)
	PRE_BNEZAD (r18)
	addi	r0, 16
	BNEZAD (r18, .L_len_larger_16bytes)

.L_len_less_16bytes:
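	/* r18 = remaining 4-byte words (bits 3..2 of len); the low
	   two bits stay in r2 for the byte tail.  */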
	zext	r18, r2, 3, 2
	andi	r2, 3
	bez	r18, .L_set_by_byte
.L_len_less_16bytes_loop:
	stw	r3, (r0, 0)
	PRE_BNEZAD (r18)
	addi	r0, 4
	BNEZAD (r18, .L_len_less_16bytes_loop)

	/* Set the remaining 0-7 bytes one at a time.  */
.L_set_by_byte:
	zext	r18, r2, 2, 0
	bez	r18, .L_return
.L_set_by_byte_loop:
	stb	r1, (r0, 0)
	PRE_BNEZAD (r18)
	addi	r0, 1
	BNEZAD (r18, .L_set_by_byte_loop)

.L_return:
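	/* memset returns the original dest pointer.  */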
	mov	r0, r12
	rts

	/* If dest is not aligned, set bytes one at a time until it
	   becomes 4-byte aligned.  */

.L_dest_not_aligned:
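	/* r13 = 4 - (dest & 3): bytes needed to reach 4-byte
	   alignment; reduce len by the same amount.  */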
	sub	r13, r19, r13
	sub	r2, r13
.L_dest_not_aligned_loop:
	/* Store bytes until dest is aligned.  */
	stb	r1, (r0, 0)
	PRE_BNEZAD (r13)
	addi	r0, 1
	BNEZAD (r13, .L_dest_not_aligned_loop)
	cmplti	r2, 8
	bt	.L_set_by_byte
	/* Dest is now aligned; go back to the word-store path.  */
	jbr	.L_dest_aligned
ENDPROC(memset)
ENDPROC(__memset)