xref: /linux/arch/arc/include/asm/uaccess.h (revision b6ebbac51bedf9e98e837688bc838f400196da5e)
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * vineetg: June 2010
 *    -__clear_user( ) called multiple times during elf load was byte loop
 *    converted to do as much word clear as possible.
 *
 * vineetg: Dec 2009
 *    -Hand crafted constant propagation for "constant" copy sizes
 *    -stock kernel shrunk by 33K at -O3
 *
 * vineetg: Sept 2009
 *    -Added option to (UN)inline copy_(to|from)_user to reduce code sz
 *    -kernel shrunk by 200K even at -O3 (gcc 4.2.1)
 *    -Enabled when doing -Os
 *
 * Amit Bhor, Sameer Dhavale: Codito Technologies 2004
 */

#ifndef _ASM_ARC_UACCESS_H
#define _ASM_ARC_UACCESS_H

#include <linux/sched.h>
#include <asm/errno.h>
#include <linux/string.h>	/* for generic string functions */


#define __kernel_ok		(segment_eq(get_fs(), KERNEL_DS))

/*
 * Algorithmically, for __user_ok() we want to do:
 * 	(start < TASK_SIZE) && (start+len < TASK_SIZE)
 * where TASK_SIZE could either be retrieved from thread_info->addr_limit or
 * emitted directly in code.
 *
 * This can however be rewritten as follows:
 *	(len <= TASK_SIZE) && (start+len < TASK_SIZE)
 *
 * because it essentially checks that the buffer end is within the limit and
 * that @len is non-negative, which together imply that the buffer start is
 * within the limit too.
 *
 * The reason for rewriting is that, for the majority of cases, @len is a
 * compile time constant, allowing the first sub-expression to be folded
 * away at compile time.
 *
 * The second part would generate weird large LIMMs e.g. (0x6000_0000 - 0x10),
 * so we check against TASK_SIZE using get_fs(), since the addr_limit load
 * from memory has already been done at this call site for __kernel_ok().
 */
#define __user_ok(addr, sz)	(((sz) <= TASK_SIZE) && \
				 ((addr) <= (get_fs() - (sz))))
#define __access_ok(addr, sz)	(unlikely(__kernel_ok) || \
				 likely(__user_ok((addr), (sz))))
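
/*
 * Worked example (illustrative only; assumes TASK_SIZE == 0x60000000, i.e.
 * get_fs() == USER_DS for a plain user process):
 *
 *	__user_ok(0x5fffff00, 0x10):
 *		0x10 <= 0x60000000			-> true
 *		0x5fffff00 <= (0x60000000 - 0x10)	-> true   => allowed
 *
 *	__user_ok(0x5ffffff8, 0x10):
 *		0x5ffffff8 <= 0x5ffffff0		-> false  => rejected
 *		(the buffer would end at 0x60000008, past TASK_SIZE)
 *
 * With a compile time constant @sz the first comparison folds away entirely,
 * which is the point of the rewrite described above.
 */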

/*********** Single byte/hword/word copies ******************/

#define __get_user_fn(sz, u, k)					\
({								\
	long __ret = 0;	/* success by default */	\
	switch (sz) {						\
	case 1: __arc_get_user_one(*(k), u, "ldb", __ret); break;	\
	case 2: __arc_get_user_one(*(k), u, "ldw", __ret); break;	\
	case 4: __arc_get_user_one(*(k), u, "ld", __ret);  break;	\
	case 8: __arc_get_user_one_64(*(k), u, __ret);     break;	\
	}							\
	__ret;							\
})

/*
 * Returns 0 on success, -EFAULT if not.
 * @ret is pre-set to 0 since success is the likelier outcome (hence the
 * "+r" asm constraint below); only on a fault does the fixup code
 * overwrite it with -EFAULT.
 */
#define __arc_get_user_one(dst, src, op, ret)	\
	__asm__ __volatile__(                   \
	"1:	"op"    %1,[%2]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))
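
/*
 * How the fixup above plays out at runtime (sketch): if the access at
 * label 1 faults, the fault handler calls fixup_exception() (declared at
 * the bottom of this file), which searches __ex_table for an entry whose
 * first word matches the faulting PC (1b) and, if found, resumes at the
 * second word (3b). The fixup stub there loads -EFAULT into @ret and
 * jumps back to the mainline at 2b. Roughly, given the faulting pt_regs:
 *
 *	const struct exception_table_entry *e;
 *
 *	e = search_exception_tables(instruction_pointer(regs));
 *	if (e)
 *		regs->ret = e->fixup;	// continue at "3:" instead of dying
 */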

#define __arc_get_user_one_64(dst, src, ret)	\
	__asm__ __volatile__(                   \
	"1:	ld   %1,[%2]\n"			\
	"4:	ld  %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))

#define __put_user_fn(sz, u, k)					\
({								\
	long __ret = 0;	/* success by default */	\
	switch (sz) {						\
	case 1: __arc_put_user_one(*(k), u, "stb", __ret); break;	\
	case 2: __arc_put_user_one(*(k), u, "stw", __ret); break;	\
	case 4: __arc_put_user_one(*(k), u, "st", __ret);  break;	\
	case 8: __arc_put_user_one_64(*(k), u, __ret);     break;	\
	}							\
	__ret;							\
})
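
/*
 * Usage sketch (illustrative; the real plumbing is the get_user()/put_user()
 * machinery in asm-generic/uaccess.h, which dispatches to these helpers):
 *
 *	int val;
 *	int __user *uptr = ...;
 *	long err = __get_user_fn(sizeof(*uptr), uptr, &val);
 *	if (!err)
 *		err = __put_user_fn(sizeof(*uptr), uptr, &val);
 *	// err is 0 on success, -EFAULT if either access faulted
 */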

#define __arc_put_user_one(src, dst, op, ret)	\
	__asm__ __volatile__(                   \
	"1:	"op"    %1,[%2]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))

#define __arc_put_user_one_64(src, dst, ret)	\
	__asm__ __volatile__(                   \
	"1:	st   %1,[%2]\n"			\
	"4:	st  %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))


static inline unsigned long
__arc_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* unaligned */
	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {

		unsigned char tmp;

		__asm__ __volatile__ (
		"	mov.f   lp_count, %0		\n"
		"	lpnz 2f				\n"
		"1:	ldb.ab  %1, [%3, 1]		\n"
		"	stb.ab  %1, [%2, 1]		\n"
		"	sub     %0,%0,1			\n"
		"2:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"3:	j   2b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 3b			\n"
		"	.previous			\n"

		: "+r" (n),
		/*
		 * Note: the '&' earlyclobber below ensures the temporary
		 * register used inside the loop is not the same as FROM
		 * or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "lp_start", "lp_end", "memory");

		return n;
	}

	/*
	 * Hand-crafted constant propagation to reduce code sz of the
	 * laddered copy 16x,8,4,2,1
	 */
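	/*
	 * Illustration: for a compile time constant n == 6, the divisions
	 * below are evaluated at build time -- 6/16 and 6/8 are 0, so those
	 * blocks vanish; 6/4 emits the single word copy (leaving orig_n == 2)
	 * and 6/2 the single halfword copy. Only two small asm blocks
	 * survive instead of the whole ladder.
	 */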
	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr   lp_count, %7,4		\n"
			"	lp    3f			\n"
			"1:	ld.ab   %3, [%2, 4]		\n"
			"11:	ld.ab   %4, [%2, 4]		\n"
			"12:	ld.ab   %5, [%2, 4]		\n"
			"13:	ld.ab   %6, [%2, 4]		\n"
			"	st.ab   %3, [%1, 4]		\n"
			"	st.ab   %4, [%1, 4]		\n"
			"	st.ab   %5, [%1, 4]		\n"
			"	st.ab   %6, [%1, 4]		\n"
			"	sub     %0,%0,16		\n"
			"3:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   3b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   1b, 4b			\n"
			"	.word   11b,4b			\n"
			"	.word   12b,4b			\n"
			"	.word   13b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"14:	ld.ab   %3, [%2,4]		\n"
			"15:	ld.ab   %4, [%2,4]		\n"
			"	st.ab   %3, [%1,4]		\n"
			"	st.ab   %4, [%1,4]		\n"
			"	sub     %0,%0,8			\n"
			"31:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   31b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   14b,4b			\n"
			"	.word   15b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"16:	ld.ab   %3, [%2,4]		\n"
			"	st.ab   %3, [%1,4]		\n"
			"	sub     %0,%0,4			\n"
			"32:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   32b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   16b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"17:	ldw.ab   %3, [%2,2]		\n"
			"	stw.ab   %3, [%1,2]		\n"
			"	sub      %0,%0,2		\n"
			"33:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   33b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   17b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"18:	ldb.ab   %3, [%2,2]		\n"
			"	stb.ab   %3, [%1,2]		\n"
			"	sub      %0,%0,1		\n"
			"34:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   34b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   18b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1  */

		__asm__ __volatile__(
		"	mov %0,%3			\n"
		"	lsr.f   lp_count, %3,4		\n"  /* 16x bytes */
		"	lpnz    3f			\n"
		"1:	ld.ab   %5, [%2, 4]		\n"
		"11:	ld.ab   %6, [%2, 4]		\n"
		"12:	ld.ab   %7, [%2, 4]		\n"
		"13:	ld.ab   %8, [%2, 4]		\n"
		"	st.ab   %5, [%1, 4]		\n"
		"	st.ab   %6, [%1, 4]		\n"
		"	st.ab   %7, [%1, 4]		\n"
		"	st.ab   %8, [%1, 4]		\n"
		"	sub     %0,%0,16		\n"
		"3:	and.f   %3,%3,0xf		\n"  /* stragglers */
		"	bz      34f			\n"
		"	bbit0   %3,3,31f		\n"  /* 8 bytes left */
		"14:	ld.ab   %5, [%2,4]		\n"
		"15:	ld.ab   %6, [%2,4]		\n"
		"	st.ab   %5, [%1,4]		\n"
		"	st.ab   %6, [%1,4]		\n"
		"	sub.f   %0,%0,8			\n"
		"31:	bbit0   %3,2,32f		\n"  /* 4 bytes left */
		"16:	ld.ab   %5, [%2,4]		\n"
		"	st.ab   %5, [%1,4]		\n"
		"	sub.f   %0,%0,4			\n"
		"32:	bbit0   %3,1,33f		\n"  /* 2 bytes left */
		"17:	ldw.ab  %5, [%2,2]		\n"
		"	stw.ab  %5, [%1,2]		\n"
		"	sub.f   %0,%0,2			\n"
		"33:	bbit0   %3,0,34f		\n"
		"18:	ldb.ab  %5, [%2,1]		\n"  /* 1 byte left */
		"	stb.ab  %5, [%1,1]		\n"
		"	sub.f   %0,%0,1			\n"
		"34:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   34b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.word   11b,4b			\n"
		"	.word   12b,4b			\n"
		"	.word   13b,4b			\n"
		"	.word   14b,4b			\n"
		"	.word   15b,4b			\n"
		"	.word   16b,4b			\n"
		"	.word   17b,4b			\n"
		"	.word   18b,4b			\n"
		"	.previous			\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}
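
/*
 * Like the generic copy_from_user(), the return value is the number of
 * bytes that could NOT be copied; 0 means complete success. Illustrative
 * caller sketch:
 *
 *	char kbuf[64];
 *
 *	if (__arc_copy_from_user(kbuf, ubuf, sizeof(kbuf)))
 *		return -EFAULT;		// partial or failed copy
 */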

extern unsigned long slowpath_copy_to_user(void __user *to, const void *from,
					   unsigned long n);

static inline unsigned long
__arc_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* unaligned */
	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {

		unsigned char tmp;

		__asm__ __volatile__(
		"	mov.f   lp_count, %0		\n"
		"	lpnz 3f				\n"
		"	ldb.ab  %1, [%3, 1]		\n"
		"1:	stb.ab  %1, [%2, 1]		\n"
		"	sub     %0, %0, 1		\n"
		"3:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   3b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.previous			\n"

		: "+r" (n),
		/*
		 * Note: the '&' earlyclobber below ensures the temporary
		 * register used inside the loop is not the same as FROM
		 * or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "lp_start", "lp_end", "memory");

		return n;
	}

	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr lp_count, %7,4		\n"
			"	lp  3f				\n"
			"	ld.ab %3, [%2, 4]		\n"
			"	ld.ab %4, [%2, 4]		\n"
			"	ld.ab %5, [%2, 4]		\n"
			"	ld.ab %6, [%2, 4]		\n"
			"1:	st.ab %3, [%1, 4]		\n"
			"11:	st.ab %4, [%1, 4]		\n"
			"12:	st.ab %5, [%1, 4]		\n"
			"13:	st.ab %6, [%1, 4]		\n"
			"	sub   %0, %0, 16		\n"
			"3:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   3b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   1b, 4b			\n"
			"	.word   11b,4b			\n"
			"	.word   12b,4b			\n"
			"	.word   13b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"	ld.ab   %3, [%2,4]		\n"
			"	ld.ab   %4, [%2,4]		\n"
			"14:	st.ab   %3, [%1,4]		\n"
			"15:	st.ab   %4, [%1,4]		\n"
			"	sub     %0, %0, 8		\n"
			"31:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   31b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   14b,4b			\n"
			"	.word   15b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"	ld.ab   %3, [%2,4]		\n"
			"16:	st.ab   %3, [%1,4]		\n"
			"	sub     %0, %0, 4		\n"
			"32:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   32b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   16b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"	ldw.ab    %3, [%2,2]		\n"
			"17:	stw.ab    %3, [%1,2]		\n"
			"	sub       %0, %0, 2		\n"
			"33:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   33b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   17b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"	ldb.ab  %3, [%2,1]		\n"
			"18:	stb.ab  %3, [%1,1]		\n"
			"	sub     %0, %0, 1		\n"
			"34:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   34b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   18b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1  */

		__asm__ __volatile__(
		"	mov   %0,%3			\n"
		"	lsr.f lp_count, %3,4		\n"  /* 16x bytes */
		"	lpnz  3f			\n"
		"	ld.ab %5, [%2, 4]		\n"
		"	ld.ab %6, [%2, 4]		\n"
		"	ld.ab %7, [%2, 4]		\n"
		"	ld.ab %8, [%2, 4]		\n"
		"1:	st.ab %5, [%1, 4]		\n"
		"11:	st.ab %6, [%1, 4]		\n"
		"12:	st.ab %7, [%1, 4]		\n"
		"13:	st.ab %8, [%1, 4]		\n"
		"	sub   %0, %0, 16		\n"
		"3:	and.f %3,%3,0xf			\n" /* stragglers */
		"	bz 34f				\n"
		"	bbit0   %3,3,31f		\n" /* 8 bytes left */
		"	ld.ab   %5, [%2,4]		\n"
		"	ld.ab   %6, [%2,4]		\n"
		"14:	st.ab   %5, [%1,4]		\n"
		"15:	st.ab   %6, [%1,4]		\n"
		"	sub.f   %0, %0, 8		\n"
		"31:	bbit0   %3,2,32f		\n"  /* 4 bytes left */
		"	ld.ab   %5, [%2,4]		\n"
		"16:	st.ab   %5, [%1,4]		\n"
		"	sub.f   %0, %0, 4		\n"
		"32:	bbit0 %3,1,33f			\n"  /* 2 bytes left */
		"	ldw.ab    %5, [%2,2]		\n"
		"17:	stw.ab    %5, [%1,2]		\n"
		"	sub.f %0, %0, 2			\n"
		"33:	bbit0 %3,0,34f			\n"
		"	ldb.ab    %5, [%2,1]		\n"  /* 1 byte left */
		"18:	stb.ab  %5, [%1,1]		\n"
		"	sub.f %0, %0, 1			\n"
		"34:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   34b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.word   11b,4b			\n"
		"	.word   12b,4b			\n"
		"	.word   13b,4b			\n"
		"	.word   14b,4b			\n"
		"	.word   15b,4b			\n"
		"	.word   16b,4b			\n"
		"	.word   17b,4b			\n"
		"	.word   18b,4b			\n"
		"	.previous			\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}

static inline unsigned long __arc_clear_user(void __user *to, unsigned long n)
{
	long res = n;
	unsigned char *d_char = to;

	__asm__ __volatile__(
	"	bbit0   %0, 0, 1f		\n"
	"75:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"1:	bbit0   %0, 1, 2f		\n"
	"76:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"2:	asr.f   lp_count, %1, 2		\n"
	"	lpnz    3f			\n"
	"77:	st.ab   %2, [%0,4]		\n"
	"	sub %1, %1, 4			\n"
	"3:	bbit0   %1, 1, 4f		\n"
	"78:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"4:	bbit0   %1, 0, 5f		\n"
	"79:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"5:					\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"3:	j   5b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   75b, 3b			\n"
	"	.word   76b, 3b			\n"
	"	.word   77b, 3b			\n"
	"	.word   78b, 3b			\n"
	"	.word   79b, 3b			\n"
	"	.previous			\n"
	: "+r"(d_char), "+r"(res)
	: "i"(0)
	: "lp_count", "lp_start", "lp_end", "memory");

	return res;
}
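
/*
 * As with the copy helpers, the return value is the number of bytes left
 * un-cleared (0 on success). Illustrative caller sketch:
 *
 *	if (__arc_clear_user(ubuf, len))
 *		return -EFAULT;
 */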

static inline long
__arc_strncpy_from_user(char *dst, const char __user *src, long count)
{
	long res = 0;
	char val;

	if (count == 0)
		return 0;

	__asm__ __volatile__(
	"	lp	3f			\n"
	"1:	ldb.ab  %3, [%2, 1]		\n"
	"	breq.d	%3, 0, 3f		\n"
	"	stb.ab  %3, [%1, 1]		\n"
	"	add	%0, %0, 1	# Num of NON NULL bytes copied	\n"
	"3:								\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"4:	mov %0, %4		# sets @res as -EFAULT	\n"
	"	j   3b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   1b, 4b			\n"
	"	.previous			\n"
	: "+r"(res), "+r"(dst), "+r"(src), "=r"(val)
	: "g"(-EFAULT), "l"(count)
	: "memory");

	return res;
}
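
/*
 * Returns the number of non-NUL bytes copied (a NUL hit within @count ends
 * the copy early), or -EFAULT on a faulting user access. Illustrative
 * caller sketch:
 *
 *	char name[32];
 *	long len = __arc_strncpy_from_user(name, uname, sizeof(name));
 *
 *	if (len < 0)
 *		return len;	// -EFAULT
 */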

static inline long __arc_strnlen_user(const char __user *s, long n)
{
	long res, tmp1, cnt;
	char val;

	__asm__ __volatile__(
	"	mov %2, %1			\n"
	"1:	ldb.ab  %3, [%0, 1]		\n"
	"	breq.d  %3, 0, 2f		\n"
	"	sub.f   %2, %2, 1		\n"
	"	bnz 1b				\n"
	"	sub %2, %2, 1			\n"
	"2:	sub %0, %1, %2			\n"
	"3:	;nop				\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"4:	mov %0, 0			\n"
	"	j   3b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word 1b, 4b			\n"
	"	.previous			\n"
	: "=r"(res), "=r"(tmp1), "=r"(cnt), "=r"(val)
	: "0"(s), "1"(n)
	: "memory");

	return res;
}
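
/*
 * Mirrors the generic strnlen_user() convention: returns the string length
 * INCLUDING the terminating NUL, @n + 1 if no NUL was found within the
 * first @n bytes, and 0 on a faulting access. Sketch:
 *
 *	long sz = __arc_strnlen_user(ustr, PATH_MAX);
 *
 *	if (sz == 0)
 *		return -EFAULT;
 */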

#ifndef CONFIG_CC_OPTIMIZE_FOR_SIZE
#define __copy_from_user(t, f, n)	__arc_copy_from_user(t, f, n)
#define __copy_to_user(t, f, n)		__arc_copy_to_user(t, f, n)
#define __clear_user(d, n)		__arc_clear_user(d, n)
#define __strncpy_from_user(d, s, n)	__arc_strncpy_from_user(d, s, n)
#define __strnlen_user(s, n)		__arc_strnlen_user(s, n)
#else
extern long arc_copy_from_user_noinline(void *to, const void __user *from,
		unsigned long n);
extern long arc_copy_to_user_noinline(void __user *to, const void *from,
		unsigned long n);
extern unsigned long arc_clear_user_noinline(void __user *to,
		unsigned long n);
extern long arc_strncpy_from_user_noinline(char *dst, const char __user *src,
		long count);
extern long arc_strnlen_user_noinline(const char __user *src, long n);

#define __copy_from_user(t, f, n)	arc_copy_from_user_noinline(t, f, n)
#define __copy_to_user(t, f, n)		arc_copy_to_user_noinline(t, f, n)
#define __clear_user(d, n)		arc_clear_user_noinline(d, n)
#define __strncpy_from_user(d, s, n)	arc_strncpy_from_user_noinline(d, s, n)
#define __strnlen_user(s, n)		arc_strnlen_user_noinline(s, n)

#endif

#include <asm-generic/uaccess.h>

extern int fixup_exception(struct pt_regs *regs);

#endif