/* SPDX-License-Identifier: GPL-2.0 */
/*
 * User space memory access functions
 *
 * Copyright (C) 1999, 2002  Niibe Yutaka
 * Copyright (C) 2003 - 2008  Paul Mundt
 *
 * Based on:
 *     MIPS implementation version 1.15 by
 *         Copyright (C) 1996, 1997, 1998 by Ralf Baechle
 *     and i386 version.
 */
#ifndef __ASM_SH_UACCESS_32_H
#define __ASM_SH_UACCESS_32_H

#define __get_user_size(x,ptr,size,retval)			\
do {								\
	retval = 0;						\
	switch (size) {						\
	case 1:							\
		__get_user_asm(x, ptr, retval, "b");		\
		break;						\
	case 2:							\
		__get_user_asm(x, ptr, retval, "w");		\
		break;						\
	case 4:							\
		__get_user_asm(x, ptr, retval, "l");		\
		break;						\
	default:						\
		__get_user_unknown();				\
		break;						\
	}							\
} while (0)

#ifdef CONFIG_MMU
#define __get_user_asm(x, addr, err, insn)			\
({								\
	__asm__ __volatile__(					\
		"1:\n\t"					\
		"mov." insn "	%2, %1\n\t"			\
		"2:\n"						\
		".section	.fixup,\"ax\"\n"		\
		"3:\n\t"					\
		"mov	#0, %1\n\t"				\
		"mov.l	4f, %0\n\t"				\
		"jmp	@%0\n\t"				\
		" mov	%3, %0\n\t"				\
		".balign	4\n"				\
		"4:	.long	2b\n\t"				\
		".previous\n"					\
		".section	__ex_table,\"a\"\n\t"		\
		".long	1b, 3b\n\t"				\
		".previous"					\
		: "=&r" (err), "=&r" (x)			\
		: "m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#else
#define __get_user_asm(x, addr, err, insn)			\
do {								\
	__asm__ __volatile__ (					\
		"mov." insn "	%1, %0\n\t"			\
		: "=&r" (x)					\
		: "m" (__m(addr))				\
	);							\
} while (0)
#endif /* CONFIG_MMU */

extern void __get_user_unknown(void);

#define __put_user_size(x,ptr,size,retval)			\
do {								\
	retval = 0;						\
	switch (size) {						\
	case 1:							\
		__put_user_asm(x, ptr, retval, "b");		\
		break;						\
	case 2:							\
		__put_user_asm(x, ptr, retval, "w");		\
		break;						\
	case 4:							\
		__put_user_asm(x, ptr, retval, "l");		\
		break;						\
	case 8:							\
		__put_user_u64(x, ptr, retval);			\
		break;						\
	default:						\
		__put_user_unknown();				\
	}							\
} while (0)

#ifdef CONFIG_MMU
#define __put_user_asm(x, addr, err, insn)			\
do {								\
	__asm__ __volatile__ (					\
		"1:\n\t"					\
		"mov." insn "	%1, %2\n\t"			\
		"2:\n"						\
		".section	.fixup,\"ax\"\n"		\
		"3:\n\t"					\
		"mov.l	4f, %0\n\t"				\
		"jmp	@%0\n\t"				\
		" mov	%3, %0\n\t"				\
		".balign	4\n"				\
		"4:	.long	2b\n\t"				\
		".previous\n"					\
		".section	__ex_table,\"a\"\n\t"		\
		".long	1b, 3b\n\t"				\
		".previous"					\
		: "=&r" (err)					\
		: "r" (x), "m" (__m(addr)), "i" (-EFAULT),	\
		  "0" (err)					\
		: "memory"					\
	);							\
} while (0)
#else
#define __put_user_asm(x, addr, err, insn)			\
do {								\
	__asm__ __volatile__ (					\
		"mov." insn "	%0, %1\n\t"			\
		: /* no outputs */				\
		: "r" (x), "m" (__m(addr))			\
		: "memory"					\
	);							\
} while (0)
#endif /* CONFIG_MMU */

#if defined(CONFIG_CPU_LITTLE_ENDIAN)
#define __put_user_u64(val,addr,retval)				\
({								\
	__asm__ __volatile__(					\
		"1:\n\t"					\
		"mov.l	%R1,%2\n\t"				\
		"mov.l	%S1,%T2\n\t"				\
		"2:\n"						\
		".section	.fixup,\"ax\"\n"		\
		"3:\n\t"					\
		"mov.l	4f,%0\n\t"				\
		"jmp	@%0\n\t"				\
		" mov	%3,%0\n\t"				\
		".balign	4\n"				\
		"4:	.long	2b\n\t"				\
		".previous\n"					\
		".section	__ex_table,\"a\"\n\t"		\
		".long	1b, 3b\n\t"				\
		".previous"					\
		: "=r" (retval)					\
		: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
		: "memory"); })
#else
#define __put_user_u64(val,addr,retval)				\
({								\
	__asm__ __volatile__(					\
		"1:\n\t"					\
		"mov.l	%S1,%2\n\t"				\
		"mov.l	%R1,%T2\n\t"				\
		"2:\n"						\
		".section	.fixup,\"ax\"\n"		\
		"3:\n\t"					\
		"mov.l	4f,%0\n\t"				\
		"jmp	@%0\n\t"				\
		" mov	%3,%0\n\t"				\
		".balign	4\n"				\
		"4:	.long	2b\n\t"				\
		".previous\n"					\
		".section	__ex_table,\"a\"\n\t"		\
		".long	1b, 3b\n\t"				\
		".previous"					\
		: "=r" (retval)					\
		: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
		: "memory"); })
#endif

extern void __put_user_unknown(void);

#endif /* __ASM_SH_UACCESS_32_H */