xref: /linux/arch/arm64/lib/clear_user.S (revision 8626afb170dc44ed0512e04131e4d8eac0c5ec57)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2021 Arm Ltd.
 */

#include <linux/linkage.h>
#include <asm/asm-uaccess.h>

	.text

/* Prototype: int __arch_clear_user(void *addr, size_t sz)
 * Purpose  : clear some user memory
 * Params   : addr - user memory address to clear
 *          : sz   - number of bytes to clear
 * Returns  : number of bytes NOT cleared
 *
 * Alignment fixed up by hardware.
 */

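/*
 * Clearing strategy: x2 holds the end of the buffer (addr + sz). Sizes
 * of 8 bytes or more are cleared with a loop of 8-byte unprivileged
 * stores plus one final, possibly overlapping, 8-byte store anchored to
 * the end; 4-7 bytes are handled with two possibly overlapping word
 * stores, and 0-3 bytes with halfword and byte stores as needed. Each
 * store is wrapped in USER(), so a fault on the user address diverts to
 * the exception fixups at the bottom of the function, which compute the
 * number of bytes left uncleared.
 */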
	.p2align 4
	// Alignment is for the loop, but since the prologue (including BTI)
	// is also 16 bytes we can keep any padding outside the function
SYM_FUNC_START(__arch_clear_user)
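	// On entry x0 = addr and x1 = sz; x2 is set to addr + sz so the
	// tail stores can address the last bytes relative to the end.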
	add	x2, x0, x1
	subs	x1, x1, #8
	b.mi	2f
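	// Clear 8 bytes per iteration while more than 8 bytes remain.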
1:
USER(9f, sttr	xzr, [x0])
	add	x0, x0, #8
	subs	x1, x1, #8
	b.hi	1b
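	// Clear the trailing 1-8 bytes with a store anchored to the end;
	// it may overlap the final loop iteration.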
USER(9f, sttr	xzr, [x2, #-8])
	mov	x0, #0
	ret

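	// Fewer than 8 bytes to clear: bit 2 of the size selects the 4-7
	// byte case, handled with two possibly overlapping word stores at
	// the start and the end of the buffer.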
2:	tbz	x1, #2, 3f
USER(9f, sttr	wzr, [x0])
USER(8f, sttr	wzr, [x2, #-4])
	mov	x0, #0
	ret

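	// 0-3 bytes left: bit 1 of the size selects a halfword store at
	// the start, bit 0 a byte store at the very end.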
3:	tbz	x1, #1, 4f
USER(9f, sttrh	wzr, [x0])
4:	tbz	x1, #0, 5f
USER(7f, sttrb	wzr, [x2, #-1])
5:	mov	x0, #0
	ret

	// Exception fixups
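	// Label 9 returns x2 (the end of the buffer) minus x0, i.e. the
	// number of bytes not known to have been cleared. Labels 7 and 8
	// first move x0 to the start of that region for the overlapping
	// tail stores, then fall through to 9.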
7:	sub	x0, x2, #5	// Adjust for faulting on the final byte...
8:	add	x0, x0, #4	// ...or the second word of the 4-7 byte case
9:	sub	x0, x2, x0
	ret
SYM_FUNC_END(__arch_clear_user)
EXPORT_SYMBOL(__arch_clear_user)