xref: /linux/arch/x86/lib/getuser.S (revision 02680c23d7b3febe45ea3d4f9818c2b2dc89020a)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:	%[r|e]ax contains the address.
 *
 * Outputs:	%[r|e]ax is error code (0 or -EFAULT)
 *		%[r|e]dx contains zero-extended value
 *		%ecx contains the high half for 32-bit __get_user_8
 *
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */

#include <linux/linkage.h>
#include <asm/page_types.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>
#include <asm/export.h>

/* Speculation barrier for the no-check variants, patched in via ALTERNATIVE. */
#define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC

/*
 * Load (TASK_SIZE_MAX - n) into %[r|e]dx for the user-address range check.
 * With 5-level paging the limit depends on whether LA57 is enabled, so the
 * constant is patched at boot via ALTERNATIVE (47- vs 56-bit address space,
 * each minus the 4096-byte guard gap).
 */
#ifdef CONFIG_X86_5LEVEL
#define LOAD_TASK_SIZE_MINUS_N(n) \
	ALTERNATIVE __stringify(mov $((1 << 47) - 4096 - (n)),%rdx), \
		    __stringify(mov $((1 << 56) - 4096 - (n)),%rdx), X86_FEATURE_LA57
#else
#define LOAD_TASK_SIZE_MINUS_N(n) \
	mov $(TASK_SIZE_MAX - (n)),%_ASM_DX
#endif

.text
/*
 * __get_user_1 - fetch one byte from user space, with range check.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 and %edx = zero-extended byte, or -EFAULT on failure
 */
SYM_FUNC_START(__get_user_1)
	LOAD_TASK_SIZE_MINUS_N(0)
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address under speculation */
	ASM_STAC
1:	movzbl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_1)
EXPORT_SYMBOL(__get_user_1)

/*
 * __get_user_2 - fetch two bytes from user space, with range check.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 and %edx = zero-extended word, or -EFAULT on failure
 */
SYM_FUNC_START(__get_user_2)
	LOAD_TASK_SIZE_MINUS_N(1)	/* last valid start = limit - 1 */
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address under speculation */
	ASM_STAC
2:	movzwl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_2)
EXPORT_SYMBOL(__get_user_2)

/*
 * __get_user_4 - fetch four bytes from user space, with range check.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 and %edx = zero-extended dword, or -EFAULT on failure
 */
SYM_FUNC_START(__get_user_4)
	LOAD_TASK_SIZE_MINUS_N(3)	/* last valid start = limit - 3 */
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address under speculation */
	ASM_STAC
3:	movl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_4)
EXPORT_SYMBOL(__get_user_4)

/*
 * __get_user_8 - fetch eight bytes from user space, with range check.
 * In:  %[r|e]ax = user address
 * Out: 64-bit: %rax = 0 and %rdx = value, or -EFAULT on failure.
 *      32-bit: %eax = 0, %edx = low half, %ecx = high half, or -EFAULT
 *      (two loads, so faults go through bad_get_user_8 to clear %ecx too).
 */
SYM_FUNC_START(__get_user_8)
#ifdef CONFIG_X86_64
	LOAD_TASK_SIZE_MINUS_N(7)	/* last valid start = limit - 7 */
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address under speculation */
	ASM_STAC
4:	movq (%_ASM_AX),%rdx
	xor %eax,%eax
	ASM_CLAC
	ret
#else
	LOAD_TASK_SIZE_MINUS_N(7)
	cmp %_ASM_DX,%_ASM_AX
	jae bad_get_user_8
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movl (%_ASM_AX),%edx
5:	movl 4(%_ASM_AX),%ecx
	xor %eax,%eax
	ASM_CLAC
	ret
#endif
SYM_FUNC_END(__get_user_8)
EXPORT_SYMBOL(__get_user_8)

/* .. and the same for __get_user, just without the range checks */
/*
 * __get_user_nocheck_1 - fetch one byte; caller has validated the address.
 * The LFENCE (ASM_BARRIER_NOSPEC) replaces the address-clamp as the
 * speculation barrier.
 */
SYM_FUNC_START(__get_user_nocheck_1)
	ASM_STAC
	ASM_BARRIER_NOSPEC
6:	movzbl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_1)
EXPORT_SYMBOL(__get_user_nocheck_1)

/*
 * __get_user_nocheck_2 - fetch two bytes; caller has validated the address.
 */
SYM_FUNC_START(__get_user_nocheck_2)
	ASM_STAC
	ASM_BARRIER_NOSPEC
7:	movzwl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_2)
EXPORT_SYMBOL(__get_user_nocheck_2)

/*
 * __get_user_nocheck_4 - fetch four bytes; caller has validated the address.
 */
SYM_FUNC_START(__get_user_nocheck_4)
	ASM_STAC
	ASM_BARRIER_NOSPEC
8:	movl (%_ASM_AX),%edx
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_4)
EXPORT_SYMBOL(__get_user_nocheck_4)

/*
 * __get_user_nocheck_8 - fetch eight bytes; caller has validated the address.
 * On 32-bit this takes two loads: low half in %edx, high half in %ecx.
 */
SYM_FUNC_START(__get_user_nocheck_8)
	ASM_STAC
	ASM_BARRIER_NOSPEC
#ifdef CONFIG_X86_64
9:	movq (%_ASM_AX),%rdx
#else
9:	movl (%_ASM_AX),%edx
10:	movl 4(%_ASM_AX),%ecx
#endif
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_8)
EXPORT_SYMBOL(__get_user_nocheck_8)


/*
 * Common fault/range-check-failure exit: drop SMAP override, zero the
 * value register and return -EFAULT. Faulting loads land on the
 * .Lbad_get_user_clac entry (via the exception table) so CLAC runs;
 * the range checks jump straight to bad_get_user (CLAC not yet needed).
 */
SYM_CODE_START_LOCAL(.Lbad_get_user_clac)
	ASM_CLAC
bad_get_user:
	xor %edx,%edx
	mov $(-EFAULT),%_ASM_AX
	ret
SYM_CODE_END(.Lbad_get_user_clac)

#ifdef CONFIG_X86_32
/*
 * 32-bit 8-byte failure exit: same as above, but also zeroes %ecx,
 * which would otherwise hold a stale high half of the value.
 */
SYM_CODE_START_LOCAL(.Lbad_get_user_8_clac)
	ASM_CLAC
bad_get_user_8:
	xor %edx,%edx
	xor %ecx,%ecx
	mov $(-EFAULT),%_ASM_AX
	ret
SYM_CODE_END(.Lbad_get_user_8_clac)
#endif

/*
 * Exception-table entries: route a fault at each numbered load label to
 * the matching fixup. On 32-bit the 8-byte loads use the variant that
 * also clears %ecx.
 */
/* get_user */
	_ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
#else
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
	_ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
#endif

/* __get_user */
	_ASM_EXTABLE_UA(6b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(7b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(8b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(9b, .Lbad_get_user_clac)
#else
	_ASM_EXTABLE_UA(9b, .Lbad_get_user_8_clac)
	_ASM_EXTABLE_UA(10b, .Lbad_get_user_8_clac)
#endif
