xref: /linux/arch/x86/lib/getuser.S (revision c01044cc819160323f3ca4acd44fca487c4432e6)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:	%[r|e]ax contains the address.
 *
 * Outputs:	%[r|e]ax is error code (0 or -EFAULT)
 *		%[r|e]dx contains zero-extended value
 *		%ecx contains the high half for 32-bit __get_user_8
 *
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */

#include <linux/linkage.h>
#include <asm/page_types.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>
#include <asm/export.h>

#define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC

	.text
/*
 * Fetch one byte from user space.
 * In:  %[r|e]ax = user address.  Out: %[r|e]ax = 0 or -EFAULT, %edx = value.
 * A single byte needs no end-of-range adjustment before the limit check.
 */
SYM_FUNC_START(__get_user_1)
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX	/* access_ok() range check */
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address under speculation */
	ASM_STAC			/* open SMAP window for user access */
1:	movzbl (%_ASM_AX),%edx		/* may fault; fixed up via extable */
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_1)
EXPORT_SYMBOL(__get_user_1)

/*
 * Fetch two bytes from user space.
 * In:  %[r|e]ax = user address.  Out: %[r|e]ax = 0 or -EFAULT, %edx = value.
 * The limit is checked against the address of the LAST byte, so the
 * actual load uses a -1 displacement to get back to the start.
 */
SYM_FUNC_START(__get_user_2)
	add $1,%_ASM_AX			/* address of last byte; CF on wrap */
	jc bad_get_user
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX	/* access_ok() range check */
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address under speculation */
	ASM_STAC			/* open SMAP window for user access */
2:	movzwl -1(%_ASM_AX),%edx	/* may fault; fixed up via extable */
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_2)
EXPORT_SYMBOL(__get_user_2)

/*
 * Fetch four bytes from user space.
 * In:  %[r|e]ax = user address.  Out: %[r|e]ax = 0 or -EFAULT, %edx = value.
 * The limit is checked against the address of the LAST byte, so the
 * actual load uses a -3 displacement to get back to the start.
 */
SYM_FUNC_START(__get_user_4)
	add $3,%_ASM_AX			/* address of last byte; CF on wrap */
	jc bad_get_user
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX	/* access_ok() range check */
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address under speculation */
	ASM_STAC			/* open SMAP window for user access */
3:	movl -3(%_ASM_AX),%edx		/* may fault; fixed up via extable */
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_4)
EXPORT_SYMBOL(__get_user_4)

/*
 * Fetch eight bytes from user space.
 * In:  %[r|e]ax = user address.
 * Out: 64-bit: %rax = 0 or -EFAULT, %rdx = value.
 *      32-bit: %eax = 0 or -EFAULT, %edx = low half, %ecx = high half,
 *      done as two 32-bit loads with a dedicated fixup that also zeroes %ecx.
 */
SYM_FUNC_START(__get_user_8)
#ifdef CONFIG_X86_64
	add $7,%_ASM_AX			/* address of last byte; CF on wrap */
	jc bad_get_user
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX	/* access_ok() range check */
	jae bad_get_user
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address under speculation */
	ASM_STAC			/* open SMAP window for user access */
4:	movq -7(%_ASM_AX),%rdx		/* may fault; fixed up via extable */
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
#else
	add $7,%_ASM_AX			/* address of last byte; CF on wrap */
	jc bad_get_user_8
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX	/* access_ok() range check */
	jae bad_get_user_8
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX		/* clamp address under speculation */
	ASM_STAC			/* open SMAP window for user access */
4:	movl -7(%_ASM_AX),%edx		/* low half; may fault */
5:	movl -3(%_ASM_AX),%ecx		/* high half; may fault */
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
#endif
SYM_FUNC_END(__get_user_8)
EXPORT_SYMBOL(__get_user_8)

/* .. and the same for __get_user, just without the range checks */

/*
 * Fetch one byte from user space, no access_ok() check.
 * The LFENCE barrier stops speculative loads past the caller's own
 * (already performed) access check, since no address masking is done here.
 */
SYM_FUNC_START(__get_user_nocheck_1)
	ASM_STAC			/* open SMAP window for user access */
	ASM_BARRIER_NOSPEC		/* no masking: fence off speculation */
6:	movzbl (%_ASM_AX),%edx		/* may fault; fixed up via extable */
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_1)
EXPORT_SYMBOL(__get_user_nocheck_1)

/*
 * Fetch two bytes from user space, no access_ok() check.
 * Caller is responsible for having validated the address range.
 */
SYM_FUNC_START(__get_user_nocheck_2)
	ASM_STAC			/* open SMAP window for user access */
	ASM_BARRIER_NOSPEC		/* no masking: fence off speculation */
7:	movzwl (%_ASM_AX),%edx		/* may fault; fixed up via extable */
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_2)
EXPORT_SYMBOL(__get_user_nocheck_2)

/*
 * Fetch four bytes from user space, no access_ok() check.
 * Caller is responsible for having validated the address range.
 */
SYM_FUNC_START(__get_user_nocheck_4)
	ASM_STAC			/* open SMAP window for user access */
	ASM_BARRIER_NOSPEC		/* no masking: fence off speculation */
8:	movl (%_ASM_AX),%edx		/* may fault; fixed up via extable */
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_4)
EXPORT_SYMBOL(__get_user_nocheck_4)

/*
 * Fetch eight bytes from user space, no access_ok() check.
 * 64-bit: one movq into %rdx.  32-bit: two movl's, low half in %edx,
 * high half in %ecx.  Caller has validated the address range.
 */
SYM_FUNC_START(__get_user_nocheck_8)
	ASM_STAC			/* open SMAP window for user access */
	ASM_BARRIER_NOSPEC		/* no masking: fence off speculation */
#ifdef CONFIG_X86_64
9:	movq (%_ASM_AX),%rdx		/* may fault; fixed up via extable */
#else
9:	movl (%_ASM_AX),%edx		/* low half; may fault */
10:	movl 4(%_ASM_AX),%ecx		/* high half; may fault */
#endif
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_nocheck_8)
EXPORT_SYMBOL(__get_user_nocheck_8)


/*
 * Common fault/failure exit: close the SMAP window, zero the value
 * register and return -EFAULT.  Entered either via the exception
 * table (.Lbad_get_user_clac, needs CLAC) or by a direct jump from
 * a failed limit check (bad_get_user, SMAP window never opened —
 * the extra CLAC before the label is skipped in that case).
 */
SYM_CODE_START_LOCAL(.Lbad_get_user_clac)
	ASM_CLAC
bad_get_user:
	xor %edx,%edx			/* don't leak stale data */
	mov $(-EFAULT),%_ASM_AX
	ret
SYM_CODE_END(.Lbad_get_user_clac)

#ifdef CONFIG_X86_32
/*
 * 32-bit __get_user_8 failure exit: same as above, but the value is
 * split across %edx (low) and %ecx (high), so both must be zeroed.
 */
SYM_CODE_START_LOCAL(.Lbad_get_user_8_clac)
	ASM_CLAC
bad_get_user_8:
	xor %edx,%edx			/* don't leak stale data */
	xor %ecx,%ecx			/* ... in either half */
	mov $(-EFAULT),%_ASM_AX
	ret
SYM_CODE_END(.Lbad_get_user_8_clac)
#endif

/*
 * Exception-table entries: map each faulting user-access instruction
 * (by its numeric label) to the fixup code that returns -EFAULT.
 * The 32-bit __get_user_8 loads use the dedicated fixup that also
 * clears %ecx.
 */

/* get_user */
	_ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
#else
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
	_ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
#endif

/* __get_user */
	_ASM_EXTABLE_UA(6b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(7b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(8b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(9b, .Lbad_get_user_clac)
#else
	_ASM_EXTABLE_UA(9b, .Lbad_get_user_8_clac)
	_ASM_EXTABLE_UA(10b, .Lbad_get_user_8_clac)
#endif