/*
 * Excerpts (non-contiguous) from the m68k <asm/uaccess.h> user-space
 * access helpers; elided spans are marked "...".
 */
/* SPDX-License-Identifier: GPL-2.0 */

/* ... */

#include <asm-generic/access_ok.h>
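
/*
 * Usage sketch (hypothetical caller, not part of this header):
 * access_ok() from <asm-generic/access_ok.h> is the address-range
 * check performed by the checked accessors before the __-prefixed,
 * unchecked helpers below are used.
 */
static inline int demo_check_range(const void __user *uptr, unsigned long len)
{
	return access_ok(uptr, len) ? 0 : -EFAULT;
}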
/* ... */

/*
 * Body of the __put_user_asm() helper (its #define line is outside
 * this excerpt): a faulting store jumps to the fixup at 10:, which
 * loads the error code into the result operand.
 */
	asm volatile ("\n"					\
		"1:	"inst"."#bwl"	%2,%1\n"		\
		"2:\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"10:	moveq.l	%3,%0\n"			\
		"	jra	2b\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align	4\n"				\
		"	.long	1b,10b\n"			\
		"	.long	2b,10b\n"			\
/* ... */

/*
 * Body of the 64-bit put variant: the value sits in a register pair
 * and is stored as two longword moves (%2, then %R2 for the other
 * half of the pair).
 */
	asm volatile ("\n"					\
		"1:	"inst".l %2,(%1)+\n"			\
		"2:	"inst".l %R2,(%1)\n"			\
		"3:\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"10:	movel %3,%0\n"				\
		"	jra 3b\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align 4\n"				\
		"	.long 1b,10b\n"				\
		"	.long 2b,10b\n"				\
		"	.long 3b,10b\n"				\
		/* ... */
		: "r" (x), "i" (-EFAULT)			\
/* ... */

/*
 * These are the main single-value transfer routines.  They
 * automatically use the right size if we just have the right pointer
 * type.
 */
/* ... */

/* Alternative cases from __put_user()'s size switch (1, 2 and 4 bytes;
 * operand size b/w/l and register constraint d/r chosen per size): */
	__put_user_asm(MOVES, __pu_err, __pu_val, ptr, b, d, -EFAULT);	\
	__put_user_asm(MOVES, __pu_err, __pu_val, ptr, w, r, -EFAULT);	\
	__put_user_asm(MOVES, __pu_err, __pu_val, ptr, l, r, -EFAULT);	\
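
/*
 * Hypothetical driver-side usage: put_user() picks one of the cases
 * above from sizeof(*ptr) and returns 0, or -EFAULT if the MOVES
 * store faulted and the fixup ran.
 */
static int demo_report_status(u32 __user *statusp)
{
	u32 status = 0x55aa;

	return put_user(status, statusp);	/* 4-byte case: l, r */
}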
/* ... */

/*
 * Body of the __get_user_asm() helper: on a faulting load the fixup
 * at 10: returns the error code and clears the destination register
 * (sub.l %1,%1), so callers never see stale data.
 */
	asm volatile ("\n"					\
		"1:	"inst"."#bwl"	%2,%1\n"		\
		"2:\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"10:	move.l	%3,%0\n"			\
		"	sub.l	%1,%1\n"			\
		"	jra	2b\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align	4\n"				\
		"	.long	1b,10b\n"			\
/* ... */

/*
 * Body of the 64-bit get variant: two longword loads into the %1/%R1
 * register pair; the fixup clears both halves before returning the
 * error.
 */
	asm volatile ("\n"					\
		"1:	"inst".l (%2)+,%1\n"			\
		"2:	"inst".l (%2),%R1\n"			\
		"3:\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"10:	move.l	%3,%0\n"			\
		"	sub.l	%1,%1\n"			\
		"	sub.l	%R1,%R1\n"			\
		"	jra	3b\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align	4\n"				\
		"	.long	1b,10b\n"			\
		"	.long	2b,10b\n"			\
		/* ... */
		: "i" (-EFAULT)					\
/* ... */

/* Alternative cases from __get_user()'s size switch: */
	__get_user_asm(MOVES, __gu_err, x, ptr, u8, b, d, -EFAULT);	\
	__get_user_asm(MOVES, __gu_err, x, ptr, u16, w, r, -EFAULT);	\
	__get_user_asm(MOVES, __gu_err, x, ptr, u32, l, r, -EFAULT);	\
/* ... */

unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
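
/*
 * These, like raw_copy_*() below, follow the kernel's copy contract:
 * the return value is the number of bytes that could NOT be copied
 * (0 on success). A typical hypothetical caller maps any remainder
 * to -EFAULT:
 */
static int demo_read_config(void *dst, const void __user *src, unsigned long len)
{
	if (copy_from_user(dst, src, len))
		return -EFAULT;
	return 0;
}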
/* ... */

/*
 * Body of the constant-size copy-from-user helper: up to three MOVES
 * loads of sizes s1/s2/s3, one exception-table entry per load, and
 * cascading fixups (see the note after the block).
 */
	asm volatile ("\n"					\
		"1:	"MOVES"."#s1"	(%2)+,%3\n"		\
		"	move."#s1"	%3,(%1)+\n"		\
		"	.ifnc	\""#s2"\",\"\"\n"		\
		"2:	"MOVES"."#s2"	(%2)+,%3\n"		\
		"	move."#s2"	%3,(%1)+\n"		\
		"	.ifnc	\""#s3"\",\"\"\n"		\
		"3:	"MOVES"."#s3"	(%2)+,%3\n"		\
		"	move."#s3"	%3,(%1)+\n"		\
		"	.endif\n"				\
		"	.endif\n"				\
		"4:\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align	4\n"				\
		"	.long	1b,10f\n"			\
		"	.ifnc	\""#s2"\",\"\"\n"		\
		"	.long	2b,20f\n"			\
		"	.ifnc	\""#s3"\",\"\"\n"		\
		"	.long	3b,30f\n"			\
		"	.endif\n"				\
		"	.endif\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"10:	addq.l	#"#n1",%0\n"			\
		"	.ifnc	\""#s2"\",\"\"\n"		\
		"20:	addq.l	#"#n2",%0\n"			\
		"	.ifnc	\""#s3"\",\"\"\n"		\
		"30:	addq.l	#"#n3",%0\n"			\
		"	.endif\n"				\
		"	.endif\n"				\
		"	jra	4b\n"				\
		"	.previous\n"				\
/* ... */

static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	/* ... */
	switch (n) {
	/* ... */
	case 3:
		/* ... */
	default:
		/* we limit the inlined version to 3 moves */
		return __generic_copy_from_user(to, from, n);
	}
	/* ... */
}
/* ... */

#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)	\
	asm volatile ("\n"					\
		"	move."#s1"	(%2)+,%3\n"		\
		"11:	"MOVES"."#s1"	%3,(%1)+\n"		\
		"12:	move."#s2"	(%2)+,%3\n"		\
		"21:	"MOVES"."#s2"	%3,(%1)+\n"		\
		"22:\n"						\
		"	.ifnc	\""#s3"\",\"\"\n"		\
		"	move."#s3"	(%2)+,%3\n"		\
		"31:	"MOVES"."#s3"	%3,(%1)+\n"		\
		"32:\n"						\
		"	.endif\n"				\
		"4:\n"						\
		"\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align	4\n"				\
		"	.long	11b,5f\n"			\
		"	.long	12b,5f\n"			\
		"	.long	21b,5f\n"			\
		"	.long	22b,5f\n"			\
		"	.ifnc	\""#s3"\",\"\"\n"		\
		"	.long	31b,5f\n"			\
		"	.long	32b,5f\n"			\
		"	.endif\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"5:	moveq.l	#"#n",%0\n"			\
		"	jra	4b\n"				\
		"	.previous\n"				\
/* ... */

static __always_inline unsigned long
__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	/* ... */
	switch (n) {
	/* ... */
	case 3:
		__constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
		break;
	/* ... */
	default:
		/* limit the inlined version to 3 moves */
		return __generic_copy_to_user(to, from, n);
	}
	/* ... */
}
/* ... */

static inline unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	if (__builtin_constant_p(n))
		return __constant_copy_from_user(to, from, n);
	return __generic_copy_from_user(to, from, n);
}

static inline unsigned long
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	if (__builtin_constant_p(n))
		return __constant_copy_to_user(to, from, n);
	return __generic_copy_to_user(to, from, n);
}
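
/*
 * Hypothetical caller: a compile-time-constant length takes the
 * inlined path (here n == 3, the "case 3" shown earlier: a word move
 * plus a byte move), while a runtime length always goes through
 * __generic_copy_to_user().
 */
static unsigned long demo_copy3(void __user *dst, const char src[3])
{
	return raw_copy_to_user(dst, src, 3);
}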
/* ... */

/*
 * Continuation lines of further __get_user_asm() invocations (a size
 * switch whose leading call lines fall outside this excerpt):
 */
			/* ... */ u8, b, d, -EFAULT);	\
			/* ... */ u16, w, r, -EFAULT);	\
			/* ... */ u32, l, r, -EFAULT);	\

/* ... */

/* Likewise, continuation lines of further __put_user_asm() invocations: */
			/* ... */ b, d, -EFAULT);	\
			/* ... */ w, r, -EFAULT);	\
			/* ... */ l, r, -EFAULT);	\
/* ... */

extern __must_check long strnlen_user(const char __user *str, long n);
unsigned long __clear_user(void __user *to, unsigned long n);

/* ... */

#include <asm-generic/uaccess.h>
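
/*
 * Usage sketch (hypothetical): __clear_user() follows the same
 * "bytes not done" convention as the copy routines, so a nonzero
 * return is usually mapped to -EFAULT by the caller.
 */
static int demo_zero_user_buf(void __user *buf, unsigned long len)
{
	return __clear_user(buf, len) ? -EFAULT : 0;
}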