--- uaccess.h (01eec1af5ec49b331948ace8f2287580e1594383)
+++ uaccess.h (8ade83390930d61c64fe3ab49081990c9d43d0d2)

 /* SPDX-License-Identifier: GPL-2.0 */
 #ifndef __M68K_UACCESS_H
 #define __M68K_UACCESS_H
 
 #ifdef CONFIG_MMU
 
 /*
  * User space memory access functions
--- 25 unchanged lines hidden ---
  * So lets keep the code simple and just define in what we need to use.
  */
 #ifdef CONFIG_CPU_HAS_ADDRESS_SPACES
 #define MOVES "moves"
 #else
 #define MOVES "move"
 #endif
 
-#define __put_user_asm(res, x, ptr, bwl, reg, err) \
+#define __put_user_asm(inst, res, x, ptr, bwl, reg, err) \
 asm volatile ("\n" \
-	"1: "MOVES"."#bwl" %2,%1\n" \
+	"1: "inst"."#bwl" %2,%1\n" \
 	"2:\n" \
 	" .section .fixup,\"ax\"\n" \
 	" .even\n" \
 	"10: moveq.l %3,%0\n" \
 	" jra 2b\n" \
 	" .previous\n" \
 	"\n" \
 	" .section __ex_table,\"a\"\n" \
 	" .align 4\n" \
 	" .long 1b,10b\n" \
 	" .long 2b,10b\n" \
 	" .previous" \
 	: "+d" (res), "=m" (*(ptr)) \
 	: #reg (x), "i" (err))
 
-#define __put_user_asm8(res, x, ptr) \
+#define __put_user_asm8(inst, res, x, ptr) \
 do { \
 	const void *__pu_ptr = (const void __force *)(ptr); \
 	\
 	asm volatile ("\n" \
-		"1: "MOVES".l %2,(%1)+\n" \
-		"2: "MOVES".l %R2,(%1)\n" \
+		"1: "inst".l %2,(%1)+\n" \
+		"2: "inst".l %R2,(%1)\n" \
 		"3:\n" \
 		" .section .fixup,\"ax\"\n" \
 		" .even\n" \
 		"10: movel %3,%0\n" \
 		" jra 3b\n" \
 		" .previous\n" \
 		"\n" \
 		" .section __ex_table,\"a\"\n" \
--- 14 unchanged lines hidden ---
 
 #define __put_user(x, ptr) \
 ({ \
 	typeof(*(ptr)) __pu_val = (x); \
 	int __pu_err = 0; \
 	__chk_user_ptr(ptr); \
 	switch (sizeof (*(ptr))) { \
 	case 1: \
-		__put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT); \
+		__put_user_asm(MOVES, __pu_err, __pu_val, ptr, b, d, -EFAULT); \
 		break; \
 	case 2: \
-		__put_user_asm(__pu_err, __pu_val, ptr, w, r, -EFAULT); \
+		__put_user_asm(MOVES, __pu_err, __pu_val, ptr, w, r, -EFAULT); \
 		break; \
 	case 4: \
-		__put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT); \
+		__put_user_asm(MOVES, __pu_err, __pu_val, ptr, l, r, -EFAULT); \
 		break; \
 	case 8: \
-		__put_user_asm8(__pu_err, __pu_val, ptr); \
+		__put_user_asm8(MOVES, __pu_err, __pu_val, ptr); \
 		break; \
 	default: \
 		BUILD_BUG(); \
 	} \
 	__pu_err; \
 })
 #define put_user(x, ptr)	__put_user(x, ptr)
 
 
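For orientation only (not part of the file or the patch): callers use the standard get_user()/put_user() interfaces, and the switch on sizeof(*(ptr)) above selects the byte/word/long asm template or the split 64-bit variant, passing MOVES so the access is performed in the user address space. A minimal, hypothetical sketch of a caller:

```c
#include <linux/uaccess.h>

/* Hypothetical helpers, for illustration only. */
static int example_read_flag(const unsigned int __user *uptr, unsigned int *val)
{
	/* get_user() expands to __get_user(); returns 0 on success, -EFAULT on a faulting access. */
	return get_user(*val, uptr);
}

static int example_write_flag(unsigned int __user *uptr, unsigned int val)
{
	/* put_user() expands to __put_user(); same 0 / -EFAULT convention. */
	return put_user(val, uptr);
}
```

The same pattern covers 64-bit values: sizeof() selects __get_user_asm8()/__put_user_asm8(), which issue two long moves.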
-#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({ \
+#define __get_user_asm(inst, res, x, ptr, type, bwl, reg, err) ({ \
 	type __gu_val; \
 	asm volatile ("\n" \
-		"1: "MOVES"."#bwl" %2,%1\n" \
+		"1: "inst"."#bwl" %2,%1\n" \
 		"2:\n" \
 		" .section .fixup,\"ax\"\n" \
 		" .even\n" \
 		"10: move.l %3,%0\n" \
 		" sub.l %1,%1\n" \
 		" jra 2b\n" \
 		" .previous\n" \
 		"\n" \
 		" .section __ex_table,\"a\"\n" \
 		" .align 4\n" \
 		" .long 1b,10b\n" \
 		" .previous" \
 		: "+d" (res), "=&" #reg (__gu_val) \
 		: "m" (*(ptr)), "i" (err)); \
 	(x) = (__force typeof(*(ptr)))(__force unsigned long)__gu_val; \
 })
 
-#define __get_user_asm8(res, x, ptr) \
+#define __get_user_asm8(inst, res, x, ptr) \
 do { \
 	const void *__gu_ptr = (const void __force *)(ptr); \
 	union { \
 		u64 l; \
 		__typeof__(*(ptr)) t; \
 	} __gu_val; \
 	\
 	asm volatile ("\n" \
-		"1: "MOVES".l (%2)+,%1\n" \
-		"2: "MOVES".l (%2),%R1\n" \
+		"1: "inst".l (%2)+,%1\n" \
+		"2: "inst".l (%2),%R1\n" \
 		"3:\n" \
 		" .section .fixup,\"ax\"\n" \
 		" .even\n" \
 		"10: move.l %3,%0\n" \
 		" sub.l %1,%1\n" \
 		" sub.l %R1,%R1\n" \
 		" jra 3b\n" \
 		" .previous\n" \
--- 11 unchanged lines hidden ---
 } while (0)
 
 #define __get_user(x, ptr) \
 ({ \
 	int __gu_err = 0; \
 	__chk_user_ptr(ptr); \
 	switch (sizeof(*(ptr))) { \
 	case 1: \
-		__get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT); \
+		__get_user_asm(MOVES, __gu_err, x, ptr, u8, b, d, -EFAULT); \
 		break; \
 	case 2: \
-		__get_user_asm(__gu_err, x, ptr, u16, w, r, -EFAULT); \
+		__get_user_asm(MOVES, __gu_err, x, ptr, u16, w, r, -EFAULT); \
 		break; \
 	case 4: \
-		__get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT); \
+		__get_user_asm(MOVES, __gu_err, x, ptr, u32, l, r, -EFAULT); \
 		break; \
 	case 8: \
-		__get_user_asm8(__gu_err, x, ptr); \
+		__get_user_asm8(MOVES, __gu_err, x, ptr); \
 		break; \
 	default: \
 		BUILD_BUG(); \
 	} \
 	__gu_err; \
 })
 #define get_user(x, ptr) __get_user(x, ptr)
 
--- 132 unchanged lines hidden ---
 
 static __always_inline unsigned long
 __constant_copy_to_user(void __user *to, const void *from, unsigned long n)
 {
 	unsigned long res = 0, tmp;
 
 	switch (n) {
 	case 1:
-		__put_user_asm(res, *(u8 *)from, (u8 __user *)to, b, d, 1);
+		__put_user_asm(MOVES, res, *(u8 *)from, (u8 __user *)to,
+				b, d, 1);
 		break;
 	case 2:
-		__put_user_asm(res, *(u16 *)from, (u16 __user *)to, w, r, 2);
+		__put_user_asm(MOVES, res, *(u16 *)from, (u16 __user *)to,
+				w, r, 2);
 		break;
 	case 3:
 		__constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
 		break;
 	case 4:
-		__put_user_asm(res, *(u32 *)from, (u32 __user *)to, l, r, 4);
+		__put_user_asm(MOVES, res, *(u32 *)from, (u32 __user *)to,
+				l, r, 4);
 		break;
 	case 5:
 		__constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
 		break;
 	case 6:
 		__constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
 		break;
 	case 7:
--- 32 unchanged lines hidden ---
 {
 	if (__builtin_constant_p(n))
 		return __constant_copy_to_user(to, from, n);
 	return __generic_copy_to_user(to, from, n);
 }
 #define INLINE_COPY_FROM_USER
 #define INLINE_COPY_TO_USER
 
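Aside (illustration, not part of the patch): the constant-size copy path above only has MOVES threaded through; the dispatch itself is unchanged, with __builtin_constant_p(n) at the end of the hunk selecting __constant_copy_to_user() for small compile-time-constant lengths. A hedged sketch of a caller that would land on this path (the struct and function names are hypothetical; copy_to_user() is the normal generic entry point):

```c
#include <linux/uaccess.h>

/* Hypothetical 6-byte payload: sizeof() is a compile-time constant, so the
 * copy should resolve to the "case 6" (long + word) sequence shown above. */
struct example_reply {
	u16 vals[3];
};

static int example_send_reply(void __user *dst, const struct example_reply *src)
{
	/* copy_to_user() falls through to the constant-size unrolled copy. */
	if (copy_to_user(dst, src, sizeof(*src)))
		return -EFAULT;
	return 0;
}
```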
+#define HAVE_GET_KERNEL_NOFAULT
+
+#define __get_kernel_nofault(dst, src, type, err_label) \
+do { \
+	type *__gk_dst = (type *)(dst); \
+	type *__gk_src = (type *)(src); \
+	int __gk_err = 0; \
+	\
+	switch (sizeof(type)) { \
+	case 1: \
+		__get_user_asm("move", __gk_err, *__gk_dst, __gk_src, \
+				u8, b, d, -EFAULT); \
+		break; \
+	case 2: \
+		__get_user_asm("move", __gk_err, *__gk_dst, __gk_src, \
+				u16, w, r, -EFAULT); \
+		break; \
+	case 4: \
+		__get_user_asm("move", __gk_err, *__gk_dst, __gk_src, \
+				u32, l, r, -EFAULT); \
+		break; \
+	case 8: \
+		__get_user_asm8("move", __gk_err, *__gk_dst, __gk_src); \
+		break; \
+	default: \
+		BUILD_BUG(); \
+	} \
+	if (unlikely(__gk_err)) \
+		goto err_label; \
+} while (0)
+
+#define __put_kernel_nofault(dst, src, type, err_label) \
+do { \
+	type __pk_src = *(type *)(src); \
+	type *__pk_dst = (type *)(dst); \
+	int __pk_err = 0; \
+	\
+	switch (sizeof(type)) { \
+	case 1: \
+		__put_user_asm("move", __pk_err, __pk_src, __pk_dst, \
+				b, d, -EFAULT); \
+		break; \
+	case 2: \
+		__put_user_asm("move", __pk_err, __pk_src, __pk_dst, \
+				w, r, -EFAULT); \
+		break; \
+	case 4: \
+		__put_user_asm("move", __pk_err, __pk_src, __pk_dst, \
+				l, r, -EFAULT); \
+		break; \
+	case 8: \
+		__put_user_asm8("move", __pk_err, __pk_src, __pk_dst); \
+		break; \
+	default: \
+		BUILD_BUG(); \
+	} \
+	if (unlikely(__pk_err)) \
+		goto err_label; \
+} while (0)
+
 #define user_addr_max() \
 	(uaccess_kernel() ? ~0UL : TASK_SIZE)
 
 extern long strncpy_from_user(char *dst, const char __user *src, long count);
 extern __must_check long strnlen_user(const char __user *str, long n);
 
 unsigned long __clear_user(void __user *to, unsigned long n);
 
 #define clear_user	__clear_user
 
 #else /* !CONFIG_MMU */
 #include <asm-generic/uaccess.h>
 #endif
 
 #endif /* _M68K_UACCESS_H */
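Note on the added block (illustration, not part of the patch): the new __get_kernel_nofault()/__put_kernel_nofault() hooks reuse the same asm templates but pass the literal "move" string, so the access stays in the normal kernel address space even when CONFIG_CPU_HAS_ADDRESS_SPACES makes MOVES expand to moves. They are consumed by the generic helpers in mm/maccess.c (copy_from_kernel_nofault() and friends) rather than called directly; the sketch below, with a made-up function name, only illustrates the err_label calling convention:

```c
#include <linux/uaccess.h>

/* Hypothetical illustration of the macro contract: on a faulting access the
 * exception fixup sets the error code and the macro branches to the label. */
static bool example_peek_kernel_u32(u32 *dst, const u32 *src)
{
	__get_kernel_nofault(dst, src, u32, efault);
	return true;

efault:
	return false;
}
```

__put_kernel_nofault() follows the same pattern in the store direction.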