xref: /linux/arch/s390/lib/uaccess.c (revision 3932b9ca55b0be314a36d3e84faff3e823c081f5)
/*
 *  Standard user space access functions based on mvcp/mvcs and doing
 *  interesting things in the secondary space mode.
 *
 *    Copyright IBM Corp. 2006,2014
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 *		 Gerald Schaefer (gerald.schaefer@de.ibm.com)
 */

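/*
 * Two sets of functions are provided: a fast variant based on the MVCOS
 * instruction (move with optional specifications, facility bit 27), which
 * can address user space as the secondary address space without changing
 * the address space control, and a fallback variant based on mvcp/mvcs
 * (plus mvc/xc/srst) that switches the address space control with sacf.
 * The have_mvcos static key below selects between the two; it is set up
 * once at boot in uaccess_init() at the end of this file.
 */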
#include <linux/jump_label.h>
#include <linux/uaccess.h>
#include <linux/export.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <asm/mmu_context.h>
#include <asm/facility.h>

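/*
 * The inline assembly below is shared between the 31-bit and the 64-bit
 * kernel; pick the matching add/compare/load/subtract mnemonics.
 */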
#ifndef CONFIG_64BIT
#define AHI	"ahi"
#define ALR	"alr"
#define CLR	"clr"
#define LHI	"lhi"
#define SLR	"slr"
#else
#define AHI	"aghi"
#define ALR	"algr"
#define CLR	"clgr"
#define LHI	"lghi"
#define SLR	"slgr"
#endif

static struct static_key have_mvcos = STATIC_KEY_INIT_FALSE;

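/*
 * Copy from user space with MVCOS.  Register 0 holds the operand access
 * control: 0x81 selects the secondary address space (the user address
 * space here) for the source operand, while the destination is accessed
 * in the current (kernel) mode.  MVCOS moves at most 4096 bytes per
 * execution, so the main loop advances in 4K steps.  On a fault the
 * fixup code copies up to the next source page boundary, clears the
 * uncopied tail of the kernel buffer with the xc/ex sequence and returns
 * the number of bytes not copied.
 */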
static inline unsigned long copy_from_user_mvcos(void *x, const void __user *ptr,
						 unsigned long size)
{
	register unsigned long reg0 asm("0") = 0x81UL;
	unsigned long tmp1, tmp2;

	tmp1 = -4096UL;
	asm volatile(
		"0: .insn ss,0xc80000000000,0(%0,%2),0(%1),0\n"
		"9: jz    7f\n"
		"1:"ALR"  %0,%3\n"
		"  "SLR"  %1,%3\n"
		"  "SLR"  %2,%3\n"
		"   j     0b\n"
		"2: la    %4,4095(%1)\n"/* %4 = ptr + 4095 */
		"   nr    %4,%3\n"	/* %4 = (ptr + 4095) & -4096 */
		"  "SLR"  %4,%1\n"
		"  "CLR"  %0,%4\n"	/* copy crosses next page boundary? */
		"   jnh   4f\n"
		"3: .insn ss,0xc80000000000,0(%4,%2),0(%1),0\n"
		"10:"SLR"  %0,%4\n"
		"  "ALR"  %2,%4\n"
		"4:"LHI"  %4,-1\n"
		"  "ALR"  %4,%0\n"	/* copy remaining size, subtract 1 */
		"   bras  %3,6f\n"	/* memset loop */
		"   xc    0(1,%2),0(%2)\n"
		"5: xc    0(256,%2),0(%2)\n"
		"   la    %2,256(%2)\n"
		"6:"AHI"  %4,-256\n"
		"   jnm   5b\n"
		"   ex    %4,0(%3)\n"
		"   j     8f\n"
		"7:"SLR"  %0,%0\n"
		"8:\n"
		EX_TABLE(0b,2b) EX_TABLE(3b,4b) EX_TABLE(9b,2b) EX_TABLE(10b,4b)
		: "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)
		: "d" (reg0) : "cc", "memory");
	return size;
}

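/*
 * Fallback copy from user space with MVCP (move to primary); the user
 * address space is the secondary space here.  load_kernel_asce() makes
 * sure the primary space is the kernel address space, "sacf 0" switches
 * to primary-space mode and "sacf 768" switches to home-space mode when
 * done.  MVCP moves at most 256 bytes per execution, hence the 256-byte
 * loop.  On a fault the fixup copies up to the faulting page boundary,
 * zero-fills the rest of the kernel buffer and returns the number of
 * bytes not copied.
 */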
static inline unsigned long copy_from_user_mvcp(void *x, const void __user *ptr,
						unsigned long size)
{
	unsigned long tmp1, tmp2;

	load_kernel_asce();
	tmp1 = -256UL;
	asm volatile(
		"   sacf  0\n"
		"0: mvcp  0(%0,%2),0(%1),%3\n"
		"10:jz    8f\n"
		"1:"ALR"  %0,%3\n"
		"   la    %1,256(%1)\n"
		"   la    %2,256(%2)\n"
		"2: mvcp  0(%0,%2),0(%1),%3\n"
		"11:jnz   1b\n"
		"   j     8f\n"
		"3: la    %4,255(%1)\n"	/* %4 = ptr + 255 */
		"  "LHI"  %3,-4096\n"
		"   nr    %4,%3\n"	/* %4 = (ptr + 255) & -4096 */
		"  "SLR"  %4,%1\n"
		"  "CLR"  %0,%4\n"	/* copy crosses next page boundary? */
		"   jnh   5f\n"
		"4: mvcp  0(%4,%2),0(%1),%3\n"
		"12:"SLR"  %0,%4\n"
		"  "ALR"  %2,%4\n"
		"5:"LHI"  %4,-1\n"
		"  "ALR"  %4,%0\n"	/* copy remaining size, subtract 1 */
		"   bras  %3,7f\n"	/* memset loop */
		"   xc    0(1,%2),0(%2)\n"
		"6: xc    0(256,%2),0(%2)\n"
		"   la    %2,256(%2)\n"
		"7:"AHI"  %4,-256\n"
		"   jnm   6b\n"
		"   ex    %4,0(%3)\n"
		"   j     9f\n"
		"8:"SLR"  %0,%0\n"
		"9: sacf  768\n"
		EX_TABLE(0b,3b) EX_TABLE(2b,3b) EX_TABLE(4b,5b)
		EX_TABLE(10b,3b) EX_TABLE(11b,3b) EX_TABLE(12b,5b)
		: "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)
		: : "cc", "memory");
	return size;
}

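/*
 * __copy_from_user() is the low-level part of copy_from_user().  It
 * returns the number of bytes that could NOT be copied; on a partial
 * copy the uncopied tail of the kernel buffer is cleared.  Typical
 * caller pattern, as a sketch (kbuf and ubuf are illustrative only):
 *
 *	if (copy_from_user(&kbuf, ubuf, sizeof(kbuf)))
 *		return -EFAULT;
 */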
unsigned long __copy_from_user(void *to, const void __user *from, unsigned long n)
{
	if (static_key_false(&have_mvcos))
		return copy_from_user_mvcos(to, from, n);
	return copy_from_user_mvcp(to, from, n);
}
EXPORT_SYMBOL(__copy_from_user);

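/*
 * Copy to user space with MVCOS.  Register 0 contains 0x810000, which
 * selects the secondary (user) address space for the destination operand
 * only.  As in copy_from_user_mvcos() the copy proceeds in 4K steps; on
 * a fault the fixup retries up to the next destination page boundary and
 * then returns the number of bytes not copied (user memory is not
 * zero-filled).
 */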
static inline unsigned long copy_to_user_mvcos(void __user *ptr, const void *x,
					       unsigned long size)
{
	register unsigned long reg0 asm("0") = 0x810000UL;
	unsigned long tmp1, tmp2;

	tmp1 = -4096UL;
	asm volatile(
		"0: .insn ss,0xc80000000000,0(%0,%1),0(%2),0\n"
		"6: jz    4f\n"
		"1:"ALR"  %0,%3\n"
		"  "SLR"  %1,%3\n"
		"  "SLR"  %2,%3\n"
		"   j     0b\n"
		"2: la    %4,4095(%1)\n"/* %4 = ptr + 4095 */
		"   nr    %4,%3\n"	/* %4 = (ptr + 4095) & -4096 */
		"  "SLR"  %4,%1\n"
		"  "CLR"  %0,%4\n"	/* copy crosses next page boundary? */
		"   jnh   5f\n"
		"3: .insn ss,0xc80000000000,0(%4,%1),0(%2),0\n"
		"7:"SLR"  %0,%4\n"
		"   j     5f\n"
		"4:"SLR"  %0,%0\n"
		"5:\n"
		EX_TABLE(0b,2b) EX_TABLE(3b,5b) EX_TABLE(6b,2b) EX_TABLE(7b,5b)
		: "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)
		: "d" (reg0) : "cc", "memory");
	return size;
}

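/*
 * Fallback copy to user space with MVCS (move to secondary), the mirror
 * image of copy_from_user_mvcp(): primary-space mode while copying,
 * 256 bytes per move, and a fixup that retries up to the destination
 * page boundary before returning the number of bytes not copied.
 */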
static inline unsigned long copy_to_user_mvcs(void __user *ptr, const void *x,
					      unsigned long size)
{
	unsigned long tmp1, tmp2;

	load_kernel_asce();
	tmp1 = -256UL;
	asm volatile(
		"   sacf  0\n"
		"0: mvcs  0(%0,%1),0(%2),%3\n"
		"7: jz    5f\n"
		"1:"ALR"  %0,%3\n"
		"   la    %1,256(%1)\n"
		"   la    %2,256(%2)\n"
		"2: mvcs  0(%0,%1),0(%2),%3\n"
		"8: jnz   1b\n"
		"   j     5f\n"
		"3: la    %4,255(%1)\n" /* %4 = ptr + 255 */
		"  "LHI"  %3,-4096\n"
		"   nr    %4,%3\n"	/* %4 = (ptr + 255) & -4096 */
		"  "SLR"  %4,%1\n"
		"  "CLR"  %0,%4\n"	/* copy crosses next page boundary? */
		"   jnh   6f\n"
		"4: mvcs  0(%4,%1),0(%2),%3\n"
		"9:"SLR"  %0,%4\n"
		"   j     6f\n"
		"5:"SLR"  %0,%0\n"
		"6: sacf  768\n"
		EX_TABLE(0b,3b) EX_TABLE(2b,3b) EX_TABLE(4b,6b)
		EX_TABLE(7b,3b) EX_TABLE(8b,3b) EX_TABLE(9b,6b)
		: "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)
		: : "cc", "memory");
	return size;
}

unsigned long __copy_to_user(void __user *to, const void *from, unsigned long n)
{
	if (static_key_false(&have_mvcos))
		return copy_to_user_mvcos(to, from, n);
	return copy_to_user_mvcs(to, from, n);
}
EXPORT_SYMBOL(__copy_to_user);

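/*
 * Copy within user space with MVCOS.  Register 0 contains 0x810081, so
 * both operands are accessed in the secondary (user) address space.  As
 * the FIXME below notes, a fault is not retried with a reduced length;
 * the whole remaining size is reported as not copied.
 */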
static inline unsigned long copy_in_user_mvcos(void __user *to, const void __user *from,
					       unsigned long size)
{
	register unsigned long reg0 asm("0") = 0x810081UL;
	unsigned long tmp1, tmp2;

	tmp1 = -4096UL;
	/* FIXME: copy with reduced length. */
	asm volatile(
		"0: .insn ss,0xc80000000000,0(%0,%1),0(%2),0\n"
		"   jz	  2f\n"
		"1:"ALR"  %0,%3\n"
		"  "SLR"  %1,%3\n"
		"  "SLR"  %2,%3\n"
		"   j	  0b\n"
		"2:"SLR"  %0,%0\n"
		"3: \n"
		EX_TABLE(0b,3b)
		: "+a" (size), "+a" (to), "+a" (from), "+a" (tmp1), "=a" (tmp2)
		: "d" (reg0) : "cc", "memory");
	return size;
}

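/*
 * Fallback copy within user space: "sacf 256" switches to secondary-space
 * mode so that a plain MVC accesses user memory for both operands.  The
 * copy is done in 256-byte blocks; the bras/ex pair executes the one-byte
 * MVC template at label 1 with the residual length.  If a block move
 * faults, the copy is redone byte-wise to determine the exact number of
 * bytes not copied.
 */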
static inline unsigned long copy_in_user_mvc(void __user *to, const void __user *from,
					     unsigned long size)
{
	unsigned long tmp1;

	load_kernel_asce();
	asm volatile(
		"   sacf  256\n"
		"  "AHI"  %0,-1\n"
		"   jo	  5f\n"
		"   bras  %3,3f\n"
		"0:"AHI"  %0,257\n"
		"1: mvc	  0(1,%1),0(%2)\n"
		"   la	  %1,1(%1)\n"
		"   la	  %2,1(%2)\n"
		"  "AHI"  %0,-1\n"
		"   jnz	  1b\n"
		"   j	  5f\n"
		"2: mvc	  0(256,%1),0(%2)\n"
		"   la	  %1,256(%1)\n"
		"   la	  %2,256(%2)\n"
		"3:"AHI"  %0,-256\n"
		"   jnm	  2b\n"
		"4: ex	  %0,1b-0b(%3)\n"
		"5: "SLR"  %0,%0\n"
		"6: sacf  768\n"
		EX_TABLE(1b,6b) EX_TABLE(2b,0b) EX_TABLE(4b,0b)
		: "+a" (size), "+a" (to), "+a" (from), "=a" (tmp1)
		: : "cc", "memory");
	return size;
}

unsigned long __copy_in_user(void __user *to, const void __user *from, unsigned long n)
{
	if (static_key_false(&have_mvcos))
		return copy_in_user_mvcos(to, from, n);
	return copy_in_user_mvc(to, from, n);
}
EXPORT_SYMBOL(__copy_in_user);

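/*
 * Clear user memory with MVCOS, using empty_zero_page as the source and
 * 0x810000 in register 0 so that only the destination is accessed in the
 * secondary (user) address space.  The structure mirrors
 * copy_to_user_mvcos(); the return value is the number of bytes not
 * cleared.
 */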
static inline unsigned long clear_user_mvcos(void __user *to, unsigned long size)
{
	register unsigned long reg0 asm("0") = 0x810000UL;
	unsigned long tmp1, tmp2;

	tmp1 = -4096UL;
	asm volatile(
		"0: .insn ss,0xc80000000000,0(%0,%1),0(%4),0\n"
		"   jz	  4f\n"
		"1:"ALR"  %0,%2\n"
		"  "SLR"  %1,%2\n"
		"   j	  0b\n"
		"2: la	  %3,4095(%1)\n"/* %3 = to + 4095 */
		"   nr	  %3,%2\n"	/* %3 = (to + 4095) & -4096 */
		"  "SLR"  %3,%1\n"
		"  "CLR"  %0,%3\n"	/* copy crosses next page boundary? */
		"   jnh	  5f\n"
		"3: .insn ss,0xc80000000000,0(%3,%1),0(%4),0\n"
		"  "SLR"  %0,%3\n"
		"   j	  5f\n"
		"4:"SLR"  %0,%0\n"
		"5:\n"
		EX_TABLE(0b,2b) EX_TABLE(3b,5b)
		: "+a" (size), "+a" (to), "+a" (tmp1), "=a" (tmp2)
		: "a" (empty_zero_page), "d" (reg0) : "cc", "memory");
	return size;
}

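/*
 * Fallback for clearing user memory: in secondary-space mode an XC of a
 * range with itself zeroes it.  The clear proceeds in 256-byte blocks,
 * with an EXECUTE of the one-byte XC template for the remainder.  After
 * a fault the fixup code retries the clear up to the next page boundary.
 */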
static inline unsigned long clear_user_xc(void __user *to, unsigned long size)
{
	unsigned long tmp1, tmp2;

	load_kernel_asce();
	asm volatile(
		"   sacf  256\n"
		"  "AHI"  %0,-1\n"
		"   jo    5f\n"
		"   bras  %3,3f\n"
		"   xc    0(1,%1),0(%1)\n"
		"0:"AHI"  %0,257\n"
		"   la    %2,255(%1)\n" /* %2 = ptr + 255 */
		"   srl   %2,12\n"
		"   sll   %2,12\n"	/* %2 = (ptr + 255) & -4096 */
		"  "SLR"  %2,%1\n"
		"  "CLR"  %0,%2\n"	/* clear crosses next page boundary? */
		"   jnh   5f\n"
		"  "AHI"  %2,-1\n"
		"1: ex    %2,0(%3)\n"
		"  "AHI"  %2,1\n"
		"  "SLR"  %0,%2\n"
		"   j     5f\n"
		"2: xc    0(256,%1),0(%1)\n"
		"   la    %1,256(%1)\n"
		"3:"AHI"  %0,-256\n"
		"   jnm   2b\n"
		"4: ex    %0,0(%3)\n"
		"5: "SLR"  %0,%0\n"
		"6: sacf  768\n"
		EX_TABLE(1b,6b) EX_TABLE(2b,0b) EX_TABLE(4b,0b)
		: "+a" (size), "+a" (to), "=a" (tmp1), "=a" (tmp2)
		: : "cc", "memory");
	return size;
}

unsigned long __clear_user(void __user *to, unsigned long size)
{
	if (static_key_false(&have_mvcos))
		return clear_user_mvcos(to, size);
	return clear_user_xc(to, size);
}
EXPORT_SYMBOL(__clear_user);

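/*
 * strnlen in user space using SRST (search string).  Register 0 holds
 * the termination character (0), the operands are set up so that the
 * search runs from src to src + size, and the search is resumed while
 * the instruction ends early with cc 3.  The returned length includes
 * the terminating '\0'.
 */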
static inline unsigned long strnlen_user_srst(const char __user *src,
					      unsigned long size)
{
	register unsigned long reg0 asm("0") = 0;
	unsigned long tmp1, tmp2;

	asm volatile(
		"   la    %2,0(%1)\n"
		"   la    %3,0(%0,%1)\n"
		"  "SLR"  %0,%0\n"
		"   sacf  256\n"
		"0: srst  %3,%2\n"
		"   jo    0b\n"
		"   la    %0,1(%3)\n"	/* strnlen_user result includes the \0 */
		"  "SLR"  %0,%1\n"
		"1: sacf  768\n"
		EX_TABLE(0b,1b)
		: "+a" (size), "+a" (src), "=a" (tmp1), "=a" (tmp2)
		: "d" (reg0) : "cc", "memory");
	return size;
}

unsigned long __strnlen_user(const char __user *src, unsigned long size)
{
	if (unlikely(!size))
		return 0;
	load_kernel_asce();
	return strnlen_user_srst(src, size);
}
EXPORT_SYMBOL(__strnlen_user);

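/*
 * __strncpy_from_user() copies the string in chunks that do not cross a
 * source page boundary, using copy_from_user(), and runs strnlen() on
 * the copied data to find the terminating '\0'.  It returns the length
 * of the copied string (not counting the '\0'), or -EFAULT if a chunk
 * could not be copied.
 */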
long __strncpy_from_user(char *dst, const char __user *src, long size)
{
	size_t done, len, offset, len_str;

	if (unlikely(size <= 0))
		return 0;
	done = 0;
	do {
		offset = (size_t)src & ~PAGE_MASK;
		len = min(size - done, PAGE_SIZE - offset);
		if (copy_from_user(dst, src, len))
			return -EFAULT;
		len_str = strnlen(dst, len);
		done += len_str;
		src += len_str;
		dst += len_str;
	} while ((len_str == len) && (done < size));
	return done;
}
EXPORT_SYMBOL(__strncpy_from_user);

/*
 * The "old" uaccess variant without mvcos can be enforced with the
 * uaccess_primary kernel parameter. This is mainly for debugging purposes.
 */
static int uaccess_primary __initdata;

static int __init parse_uaccess_pt(char *__unused)
{
	uaccess_primary = 1;
	return 0;
}
early_param("uaccess_primary", parse_uaccess_pt);

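/*
 * Enable the MVCOS based variants if the machine provides the
 * move-with-optional-specifications facility (facility bit 27) and the
 * fallback was not requested on the command line.  The static key is
 * flipped once at early boot, so the branches in the wrappers above are
 * patched rather than tested at run time.
 */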
static int __init uaccess_init(void)
{
	if (IS_ENABLED(CONFIG_64BIT) && !uaccess_primary && test_facility(27))
		static_key_slow_inc(&have_mvcos);
	return 0;
}
early_initcall(uaccess_init);