/* SPDX-License-Identifier: GPL-2.0 */
/*
 * User space memory access functions
 *
 * Copyright (C) 1999, 2002  Niibe Yutaka
 * Copyright (C) 2003 - 2008  Paul Mundt
 *
 *  Based on:
 *     MIPS implementation version 1.15 by
 *              Copyright (C) 1996, 1997, 1998 by Ralf Baechle
 *     and i386 version.
 */
#ifndef __ASM_SH_UACCESS_32_H
#define __ASM_SH_UACCESS_32_H

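/*
 * __get_user_size() dispatches a user-space load on the operand size,
 * picking the byte/word/long form of mov or the 64-bit helper.  An
 * unhandled size falls through to __get_user_unknown(), which is never
 * defined, so the mistake surfaces as a link error.
 *
 * Illustrative sketch of a use site (these macros are normally reached
 * through get_user()/__get_user(), not called directly; "uptr" below
 * is just a stand-in __user pointer):
 *
 *	int err;
 *	u32 val;
 *	__get_user_size(val, uptr, sizeof(val), err);
 *	if (err)
 *		return -EFAULT;
 */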
#define __get_user_size(x,ptr,size,retval)			\
do {								\
	retval = 0;						\
	switch (size) {						\
	case 1:							\
		__get_user_asm(x, ptr, retval, "b");		\
		break;						\
	case 2:							\
		__get_user_asm(x, ptr, retval, "w");		\
		break;						\
	case 4:							\
		__get_user_asm(x, ptr, retval, "l");		\
		break;						\
	case 8:							\
		__get_user_u64(x, ptr, retval);			\
		break;						\
	default:						\
		__get_user_unknown();				\
		break;						\
	}							\
} while (0)

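/*
 * With an MMU the load at label 1: may fault.  The .fixup fragment at
 * label 3: zeroes the destination register, returns -EFAULT in err and
 * jumps back to label 2:; the __ex_table entry (1b, 3b) is what lets
 * the page-fault handler redirect a faulting access to that fixup.
 * Without an MMU the access cannot fault, so a plain load is enough.
 */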
#ifdef CONFIG_MMU
#define __get_user_asm(x, addr, err, insn) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov." insn "	%2, %1\n\t" \
	"2:\n" \
	".section	.fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov	#0, %1\n\t" \
	"mov.l	4f, %0\n\t" \
	"jmp	@%0\n\t" \
	" mov	%3, %0\n\t" \
	".balign	4\n" \
	"4:	.long	2b\n\t" \
	".previous\n" \
	".section	__ex_table,\"a\"\n\t" \
	".long	1b, 3b\n\t" \
	".previous" \
	:"=&r" (err), "=&r" (x) \
	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#else
#define __get_user_asm(x, addr, err, insn)		\
do {							\
	__asm__ __volatile__ (				\
		"mov." insn "	%1, %0\n\t"		\
		: "=&r" (x)				\
		: "m" (__m(addr))			\
	);						\
} while (0)
#endif /* CONFIG_MMU */

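/*
 * Deliberately never defined: a reference from the default case above
 * turns an unsupported access size into a link-time error.
 */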
extern void __get_user_unknown(void);

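/*
 * A 64-bit load is two 32-bit mov.l instructions, ordered by
 * endianness.  The %R/%S operand modifiers select the least/most
 * significant word of the 64-bit destination pair, and %T the memory
 * word following the operand.  Either load can fault; SH instructions
 * are 2 bytes, so the second __ex_table entry covers the mov.l at
 * 1b + 2.
 */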
#if defined(CONFIG_CPU_LITTLE_ENDIAN)
#define __get_user_u64(x, addr, err) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l	%2,%R1\n\t" \
	"mov.l	%T2,%S1\n\t" \
	"2:\n" \
	".section	.fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov  #0,%R1\n\t"   \
	"mov  #0,%S1\n\t"   \
	"mov.l	4f, %0\n\t" \
	"jmp	@%0\n\t" \
	" mov	%3, %0\n\t" \
	".balign	4\n" \
	"4:	.long	2b\n\t" \
	".previous\n" \
	".section	__ex_table,\"a\"\n\t" \
	".long	1b, 3b\n\t" \
	".long	1b + 2, 3b\n\t" \
	".previous" \
	:"=&r" (err), "=&r" (x) \
	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#else
#define __get_user_u64(x, addr, err) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l	%2,%S1\n\t" \
	"mov.l	%T2,%R1\n\t" \
	"2:\n" \
	".section	.fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov  #0,%S1\n\t"   \
	"mov  #0,%R1\n\t"   \
	"mov.l	4f, %0\n\t" \
	"jmp	@%0\n\t" \
	" mov	%3, %0\n\t" \
	".balign	4\n" \
	"4:	.long	2b\n\t" \
	".previous\n" \
	".section	__ex_table,\"a\"\n\t" \
	".long	1b, 3b\n\t" \
	".long	1b + 2, 3b\n\t" \
	".previous" \
	:"=&r" (err), "=&r" (x) \
	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#endif

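/*
 * __put_user_size() mirrors __get_user_size() for stores: dispatch on
 * the operand size, with __put_user_unknown() catching unsupported
 * sizes at link time.
 */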
#define __put_user_size(x,ptr,size,retval)		\
do {							\
	retval = 0;					\
	switch (size) {					\
	case 1:						\
		__put_user_asm(x, ptr, retval, "b");	\
		break;					\
	case 2:						\
		__put_user_asm(x, ptr, retval, "w");	\
		break;					\
	case 4:						\
		__put_user_asm(x, ptr, retval, "l");	\
		break;					\
	case 8:						\
		__put_user_u64(x, ptr, retval);		\
		break;					\
	default:					\
		__put_user_unknown();			\
	}						\
} while (0)

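/*
 * Same fixup/__ex_table scheme as the MMU __get_user_asm(), except a
 * faulting store has no destination register to zero, so the fixup
 * only sets -EFAULT.  The "memory" clobber keeps the compiler from
 * reordering accesses around the user-space store.
 */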
#ifdef CONFIG_MMU
#define __put_user_asm(x, addr, err, insn)			\
do {								\
	__asm__ __volatile__ (					\
		"1:\n\t"					\
		"mov." insn "	%1, %2\n\t"			\
		"2:\n"						\
		".section	.fixup,\"ax\"\n"		\
		"3:\n\t"					\
		"mov.l	4f, %0\n\t"				\
		"jmp	@%0\n\t"				\
		" mov	%3, %0\n\t"				\
		".balign	4\n"				\
		"4:	.long	2b\n\t"				\
		".previous\n"					\
		".section	__ex_table,\"a\"\n\t"		\
		".long	1b, 3b\n\t"				\
		".previous"					\
		: "=&r" (err)					\
		: "r" (x), "m" (__m(addr)), "i" (-EFAULT),	\
		  "0" (err)					\
		: "memory"					\
	);							\
} while (0)
#else
#define __put_user_asm(x, addr, err, insn)		\
do {							\
	__asm__ __volatile__ (				\
		"mov." insn "	%0, %1\n\t"		\
		: /* no outputs */			\
		: "r" (x), "m" (__m(addr))		\
		: "memory"				\
	);						\
} while (0)
#endif /* CONFIG_MMU */

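/*
 * 64-bit stores, again split into two mov.l instructions chosen by
 * endianness.  Note that, unlike __get_user_u64(), only the first
 * store is covered by an __ex_table entry here.
 */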
#if defined(CONFIG_CPU_LITTLE_ENDIAN)
#define __put_user_u64(val,addr,retval) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l	%R1,%2\n\t" \
	"mov.l	%S1,%T2\n\t" \
	"2:\n" \
	".section	.fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov.l	4f,%0\n\t" \
	"jmp	@%0\n\t" \
	" mov	%3,%0\n\t" \
	".balign	4\n" \
	"4:	.long	2b\n\t" \
	".previous\n" \
	".section	__ex_table,\"a\"\n\t" \
	".long	1b, 3b\n\t" \
	".previous" \
	: "=r" (retval) \
	: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
	: "memory"); })
#else
#define __put_user_u64(val,addr,retval) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l	%S1,%2\n\t" \
	"mov.l	%R1,%T2\n\t" \
	"2:\n" \
	".section	.fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov.l	4f,%0\n\t" \
	"jmp	@%0\n\t" \
	" mov	%3,%0\n\t" \
	".balign	4\n" \
	"4:	.long	2b\n\t" \
	".previous\n" \
	".section	__ex_table,\"a\"\n\t" \
	".long	1b, 3b\n\t" \
	".previous" \
	: "=r" (retval) \
	: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
	: "memory"); })
#endif

extern void __put_user_unknown(void);

#endif /* __ASM_SH_UACCESS_32_H */