xref: /linux/arch/x86/include/asm/uaccess_64.h (revision 367b8112fe2ea5c39a7bb4d263dcdd9b612fae18)
#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>
#include <linux/lockdep.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len);

__must_check unsigned long
copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);

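/*
 * Typical use of the checking variants above (an illustrative sketch only;
 * the names kbuf, ubuf and count here are hypothetical, not part of this
 * header):
 *
 *	char kbuf[64];
 *
 *	if (count > sizeof(kbuf))
 *		return -EINVAL;
 *	if (copy_from_user(kbuf, ubuf, count))
 *		return -EFAULT;	(a non-zero return means bytes were left uncopied)
 *
 * copy_to_user()/copy_from_user() perform the access_ok() range check
 * themselves.  The double-underscore inline variants below assume the
 * caller has already done that check; for constant sizes of 1, 2, 4, 8,
 * 10 and 16 bytes they compile down to direct mov instructions and return
 * the number of bytes that could not be copied (0 on success).
 */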
static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			      ret, "b", "b", "=q", 1);
		return ret;
	case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			      ret, "w", "w", "=r", 2);
		return ret;
	case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			      ret, "l", "k", "=r", 4);
		return ret;
	case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			      ret, "q", "", "=r", 8);
		return ret;
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}

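/*
 * __copy_to_user() mirrors __copy_from_user(): the caller is responsible
 * for access_ok(), constant sizes are open-coded, everything else falls
 * back to copy_user_generic().  The bare asm("":::"memory") statements in
 * the 10- and 16-byte cases are compiler barriers between the two stores.
 */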
static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			      ret, "b", "b", "iq", 1);
		return ret;
	case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			      ret, "w", "w", "ir", 2);
		return ret;
	case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			      ret, "l", "k", "ir", 4);
		return ret;
	case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			      ret, "q", "", "ir", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 10);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 16);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
			       ret, "q", "", "ir", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}

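/*
 * __copy_in_user() copies between two user-space buffers.  Small constant
 * sizes are bounced through a temporary kernel variable with a get/put
 * pair; anything else goes through copy_user_generic().  No access_ok()
 * checking is done here either.
 */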
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}

	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "ir", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}

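/*
 * String and clearing helpers.  strncpy_from_user() returns the length of
 * the copied string (or -EFAULT on a fault), strnlen_user() returns the
 * string length including the terminating NUL (or 0 on a fault), and
 * clear_user() returns the number of bytes that could not be cleared.
 */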
__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);

__must_check long __copy_from_user_inatomic(void *dst, const void __user *src,
					    unsigned size);

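/*
 * The _inatomic variants are for callers that cannot sleep (for example
 * while a kmap_atomic() mapping is held): they skip might_sleep() and,
 * like the other double-underscore helpers, do no access_ok() checking.
 */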
static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}

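/*
 * Non-caching copies: __copy_user_nocache() uses non-temporal stores so
 * that large one-off copies do not evict useful data from the CPU caches.
 * 'zerorest' requests that the remainder of the destination be zeroed if
 * the copy faults part-way through.
 */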
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int __copy_from_user_nocache(void *dst, const void __user *src,
					   unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int __copy_from_user_inatomic_nocache(void *dst,
						    const void __user *src,
						    unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}

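/*
 * Fix-up path used by the copy routines when a fault is hit: retry the
 * remaining bytes one at a time and, if 'zerorest' is set, zero whatever
 * could not be copied.  Returns the number of bytes left uncopied.
 */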
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);

#endif /* _ASM_X86_UACCESS_64_H */