/*
 *  linux/arch/arm/lib/csumpartialcopyuser.S
 *
 *  Copyright (C) 1995-1998 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * 27/03/03 Ian Molton Clean up CONFIG_CPU
 *
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>

		.text

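/*
 * Two variants of the register save/restore macros follow.  With
 * CONFIG_CPU_SW_DOMAIN_PAN, save_regs also reads the domain access
 * control register (DACR, CP15 c3) into ip, stacks it, and then opens
 * up userspace access via uaccess_enable; load_regs restores the
 * original DACR before returning.  Without it, a plain stmfd/ldmfd
 * pair suffices, with the return folded into the ldmfd via pc.
 */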
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
		.macro	save_regs
		mrc	p15, 0, ip, c3, c0, 0
		stmfd	sp!, {r1, r2, r4 - r8, ip, lr}
		uaccess_enable ip
		.endm

		.macro	load_regs
		ldmfd	sp!, {r1, r2, r4 - r8, ip, lr}
		mcr	p15, 0, ip, c3, c0, 0
		ret	lr
		.endm
#else
		.macro	save_regs
		stmfd	sp!, {r1, r2, r4 - r8, lr}
		.endm

		.macro	load_regs
		ldmfd	sp!, {r1, r2, r4 - r8, pc}
		.endm
#endif

		.macro	load1b,	reg1
		ldrusr	\reg1, r0, 1
		.endm

		.macro	load2b, reg1, reg2
		ldrusr	\reg1, r0, 1
		ldrusr	\reg2, r0, 1
		.endm

		.macro	load1l, reg1
		ldrusr	\reg1, r0, 4
		.endm

		.macro	load2l, reg1, reg2
		ldrusr	\reg1, r0, 4
		ldrusr	\reg2, r0, 4
		.endm

		.macro	load4l, reg1, reg2, reg3, reg4
		ldrusr	\reg1, r0, 4
		ldrusr	\reg2, r0, 4
		ldrusr	\reg3, r0, 4
		ldrusr	\reg4, r0, 4
		.endm
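/*
 * Note: ldrusr is provided by <asm/assembler.h>; each use emits an
 * unprivileged user-space load (post-indexed by 1 or 4 bytes here)
 * together with an exception-table entry whose fixup, by default,
 * points at the local label 9001 -- i.e. the .text.fixup handler at
 * the end of this file.
 */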

/*
 * unsigned int
 * csum_partial_copy_from_user(const char *src, char *dst, int len, int sum, int *err_ptr)
 *  r0 = src, r1 = dst, r2 = len, r3 = sum, [sp] = *err_ptr
 *  Returns : r0 = checksum, [[sp, #0], #0] = 0 or -EFAULT
 */

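/*
 * A minimal C-level sketch of a typical call site (illustrative only;
 * the variable names and the fault handler are hypothetical):
 *
 *	int err = 0;
 *	csum = csum_partial_copy_from_user(src, dst, len, csum, &err);
 *	if (err)	// err was set to -EFAULT by the fixup below
 *		handle_fault();
 */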
#define FN_ENTRY	ENTRY(csum_partial_copy_from_user)
#define FN_EXIT		ENDPROC(csum_partial_copy_from_user)

#include "csumpartialcopygeneric.S"
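/*
 * csumpartialcopygeneric.S supplies the actual checksum-and-copy loop;
 * it is parameterised by the loadXX macros above and by FN_ENTRY/FN_EXIT,
 * so the same body also serves the kernel-to-kernel variant in
 * csumpartialcopy.S, which defines the macros with plain ldr/ldrb instead.
 */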

/*
 * FIXME: minor buglet here
 * We don't return the checksum for the data present in the buffer.  To do
 * so properly, we would have to add in whatever registers were loaded before
 * the fault, which, with the current asm above is not predictable.
 */
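/*
 * In C terms, the fixup below amounts to (illustrative sketch only):
 *
 *	*err_ptr = -EFAULT;
 *	memset(dst, 0, len);		--  zero the whole destination
 *	return sum;			--  partial sum; see FIXME above
 */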
		.pushsection .text.fixup,"ax"
		.align	4
9001:		mov	r4, #-EFAULT
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
		ldr	r5, [sp, #9*4]		@ *err_ptr
#else
		ldr	r5, [sp, #8*4]		@ *err_ptr
#endif
		str	r4, [r5]
		ldmia	sp, {r1, r2}		@ retrieve dst, len
		add	r2, r2, r1
		mov	r0, #0			@ zero the buffer
9002:		teq	r2, r1
		strneb	r0, [r1], #1
		bne	9002b
		load_regs
		.popsection
