// SPDX-License-Identifier: GPL-2.0
/*
 * User address space access functions.
 * The non inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */
#include <linux/export.h>
#include <linux/uaccess.h>
#include <asm/asm.h>
1244f0257fSThomas Gleixner
#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Alignment at which movsl is preferred for bulk memory copies.
 * Consulted by __movsl_is_ok() below: large copies whose source and
 * destination disagree in the bits selected by movsl_mask.mask fall
 * back to the unrolled Intel copy routine.
 * NOTE(review): the mask is not initialized in this file -- presumably
 * set during CPU setup; confirm against the Intel cpu init code.
 */
struct movsl_mask movsl_mask __read_mostly;
#endif
198bfcb396SThomas Petazzoni
/*
 * Decide whether a plain rep;movsl bulk copy is acceptable for this
 * source/destination pair.  When CONFIG_X86_INTEL_USERCOPY is enabled,
 * copies of 64 bytes or more are rejected if the two addresses differ
 * in the bits selected by movsl_mask.mask (relative misalignment);
 * everything else -- and all copies on other configs -- is fine.
 * Returns 1 when movsl may be used, 0 otherwise.
 */
static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
	int misaligned = (n >= 64) && (((a1 ^ a2) & movsl_mask.mask) != 0);

	return !misaligned;
#else
	return 1;
#endif
}
/* Pointer-friendly wrapper: casts the addresses for __movsl_is_ok(). */
#define movsl_is_ok(a1, a2, n) \
	__movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))
3044f0257fSThomas Gleixner
3144f0257fSThomas Gleixner /*
3244f0257fSThomas Gleixner * Zero Userspace
3344f0257fSThomas Gleixner */
3444f0257fSThomas Gleixner
/*
 * __do_clear_user - zero @size bytes of user memory at @addr.
 *
 * The bulk is cleared a word at a time with "rep; stosl" (%0 preloaded
 * with size/4), then the 0-3 leftover bytes with "rep; stosb" (%2 holds
 * size & 3).  ASM_STAC/ASM_CLAC bracket the user-access window.
 *
 * On a fault both exception-table entries resume at label 2; the
 * EX_TYPE_UCOPY_LEN4 entry presumably reconstructs the remaining byte
 * count from the word count plus the tail in %2 -- confirm against the
 * extable fixup type definitions.
 *
 * @size is in/out: it ends up 0 on success, or the number of bytes
 * that could not be cleared.
 */
#define __do_clear_user(addr,size)					\
do {									\
	int __d0;							\
	might_fault();							\
	__asm__ __volatile__(						\
		ASM_STAC "\n"						\
		"0:	rep; stosl\n"					\
		"	movl %2,%0\n"					\
		"1:	rep; stosb\n"					\
		"2: " ASM_CLAC "\n"					\
		_ASM_EXTABLE_TYPE_REG(0b, 2b, EX_TYPE_UCOPY_LEN4, %2)	\
		_ASM_EXTABLE_UA(1b, 2b)					\
		: "=&c"(size), "=&D" (__d0)				\
		: "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0));	\
} while (0)
5044f0257fSThomas Gleixner
5144f0257fSThomas Gleixner /**
52bc8ff3caSMike Rapoport * clear_user - Zero a block of memory in user space.
5344f0257fSThomas Gleixner * @to: Destination address, in user space.
5444f0257fSThomas Gleixner * @n: Number of bytes to zero.
5544f0257fSThomas Gleixner *
5644f0257fSThomas Gleixner * Zero a block of memory in user space.
5744f0257fSThomas Gleixner *
58bc8ff3caSMike Rapoport * Return: number of bytes that could not be cleared.
5944f0257fSThomas Gleixner * On success, this will be zero.
6044f0257fSThomas Gleixner */
6144f0257fSThomas Gleixner unsigned long
clear_user(void __user * to,unsigned long n)6244f0257fSThomas Gleixner clear_user(void __user *to, unsigned long n)
6344f0257fSThomas Gleixner {
641d18ef48SIngo Molnar might_fault();
6596d4f267SLinus Torvalds if (access_ok(to, n))
6644f0257fSThomas Gleixner __do_clear_user(to, n);
6744f0257fSThomas Gleixner return n;
6844f0257fSThomas Gleixner }
6944f0257fSThomas Gleixner EXPORT_SYMBOL(clear_user);
7044f0257fSThomas Gleixner
/**
 * __clear_user - Zero a block of memory in user space, with less checking.
 * @to: Destination address, in user space.
 * @n: Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Unlike clear_user(), no
 * access_ok() range check is performed here; the caller must have
 * already validated the specified block with access_ok().
 *
 * Return: number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
	/* __do_clear_user() updates @n in place to the uncleared remainder. */
	__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(__clear_user);
8944f0257fSThomas Gleixner
9044f0257fSThomas Gleixner #ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * __copy_user_intel - unrolled copy to user space tuned for Intel CPUs.
 * @to:   destination address, in user space
 * @from: source address, in kernel space
 * @size: number of bytes to copy
 *
 * Main loop (labels 3..34) moves 64 bytes per iteration as eight
 * load/load/store/store quadruples through %eax/%edx.  Labels 1 and 2
 * read 32(%4) and 64(%4) into %%eax and discard the value, touching
 * the upcoming cachelines before the stores.  Once fewer than 64 bytes
 * remain, the tail is finished with rep;movsl (label 99) for whole
 * words and rep;movsb (label 37) for the last 0-3 bytes.
 *
 * Return: number of bytes that could NOT be copied; 0 on success.
 * Every faultable access has an exception-table entry resuming at
 * label 100 with the remaining count in %0; the EX_TYPE_UCOPY_LEN4
 * entry presumably rebuilds the byte count from the rep;movsl word
 * count -- confirm against the extable fixup types.  The user-access
 * window (STAC/CLAC) is opened by the caller (see __copy_user_ll()).
 */
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
	int d0, d1;
	__asm__ __volatile__(
		       "       .align 2,0x90\n"
		       "1:     movl 32(%4), %%eax\n"
		       "       cmpl $67, %0\n"
		       "       jbe 3f\n"
		       "2:     movl 64(%4), %%eax\n"
		       "       .align 2,0x90\n"
		       "3:     movl 0(%4), %%eax\n"
		       "4:     movl 4(%4), %%edx\n"
		       "5:     movl %%eax, 0(%3)\n"
		       "6:     movl %%edx, 4(%3)\n"
		       "7:     movl 8(%4), %%eax\n"
		       "8:     movl 12(%4),%%edx\n"
		       "9:     movl %%eax, 8(%3)\n"
		       "10:    movl %%edx, 12(%3)\n"
		       "11:    movl 16(%4), %%eax\n"
		       "12:    movl 20(%4), %%edx\n"
		       "13:    movl %%eax, 16(%3)\n"
		       "14:    movl %%edx, 20(%3)\n"
		       "15:    movl 24(%4), %%eax\n"
		       "16:    movl 28(%4), %%edx\n"
		       "17:    movl %%eax, 24(%3)\n"
		       "18:    movl %%edx, 28(%3)\n"
		       "19:    movl 32(%4), %%eax\n"
		       "20:    movl 36(%4), %%edx\n"
		       "21:    movl %%eax, 32(%3)\n"
		       "22:    movl %%edx, 36(%3)\n"
		       "23:    movl 40(%4), %%eax\n"
		       "24:    movl 44(%4), %%edx\n"
		       "25:    movl %%eax, 40(%3)\n"
		       "26:    movl %%edx, 44(%3)\n"
		       "27:    movl 48(%4), %%eax\n"
		       "28:    movl 52(%4), %%edx\n"
		       "29:    movl %%eax, 48(%3)\n"
		       "30:    movl %%edx, 52(%3)\n"
		       "31:    movl 56(%4), %%eax\n"
		       "32:    movl 60(%4), %%edx\n"
		       "33:    movl %%eax, 56(%3)\n"
		       "34:    movl %%edx, 60(%3)\n"
		       "       addl $-64, %0\n"
		       "       addl $64, %4\n"
		       "       addl $64, %3\n"
		       "       cmpl $63, %0\n"
		       "       ja  1b\n"
		       /* tail: %0 = word count, %%eax = byte remainder */
		       "35:    movl  %0, %%eax\n"
		       "       shrl  $2, %0\n"
		       "       andl  $3, %%eax\n"
		       "       cld\n"
		       "99:    rep; movsl\n"
		       "36:    movl %%eax, %0\n"
		       "37:    rep; movsb\n"
		       "100:\n"
		       _ASM_EXTABLE_UA(1b, 100b)
		       _ASM_EXTABLE_UA(2b, 100b)
		       _ASM_EXTABLE_UA(3b, 100b)
		       _ASM_EXTABLE_UA(4b, 100b)
		       _ASM_EXTABLE_UA(5b, 100b)
		       _ASM_EXTABLE_UA(6b, 100b)
		       _ASM_EXTABLE_UA(7b, 100b)
		       _ASM_EXTABLE_UA(8b, 100b)
		       _ASM_EXTABLE_UA(9b, 100b)
		       _ASM_EXTABLE_UA(10b, 100b)
		       _ASM_EXTABLE_UA(11b, 100b)
		       _ASM_EXTABLE_UA(12b, 100b)
		       _ASM_EXTABLE_UA(13b, 100b)
		       _ASM_EXTABLE_UA(14b, 100b)
		       _ASM_EXTABLE_UA(15b, 100b)
		       _ASM_EXTABLE_UA(16b, 100b)
		       _ASM_EXTABLE_UA(17b, 100b)
		       _ASM_EXTABLE_UA(18b, 100b)
		       _ASM_EXTABLE_UA(19b, 100b)
		       _ASM_EXTABLE_UA(20b, 100b)
		       _ASM_EXTABLE_UA(21b, 100b)
		       _ASM_EXTABLE_UA(22b, 100b)
		       _ASM_EXTABLE_UA(23b, 100b)
		       _ASM_EXTABLE_UA(24b, 100b)
		       _ASM_EXTABLE_UA(25b, 100b)
		       _ASM_EXTABLE_UA(26b, 100b)
		       _ASM_EXTABLE_UA(27b, 100b)
		       _ASM_EXTABLE_UA(28b, 100b)
		       _ASM_EXTABLE_UA(29b, 100b)
		       _ASM_EXTABLE_UA(30b, 100b)
		       _ASM_EXTABLE_UA(31b, 100b)
		       _ASM_EXTABLE_UA(32b, 100b)
		       _ASM_EXTABLE_UA(33b, 100b)
		       _ASM_EXTABLE_UA(34b, 100b)
		       _ASM_EXTABLE_UA(35b, 100b)
		       _ASM_EXTABLE_UA(36b, 100b)
		       _ASM_EXTABLE_UA(37b, 100b)
		       _ASM_EXTABLE_TYPE_REG(99b, 100b, EX_TYPE_UCOPY_LEN4, %%eax)
		       : "=&c"(size), "=&D" (d0), "=&S" (d1)
		       :  "1"(to), "2"(from), "0"(size)
		       : "eax", "edx", "memory");
	return size;
}
19044f0257fSThomas Gleixner
/*
 * __copy_user_intel_nocache - copy from user space with non-temporal
 * (cache-bypassing) stores.
 * @to:   destination address, in kernel space
 * @from: source address, in user space
 * @size: number of bytes to copy
 *
 * Same 64-byte unrolled structure as __copy_user_intel(), but the
 * destination is written with movnti so the copied data does not
 * pollute the cache.  An sfence after the main loop orders the
 * non-temporal stores before the cached rep;movsl/movsb tail.
 * Labels 0 and 1 read 32(%4)/64(%4) into %%eax and discard the value,
 * touching the upcoming source cachelines.
 *
 * Return: number of bytes that could NOT be copied; 0 on success.
 * Faulting loads resume at label 8 via the exception table; the
 * EX_TYPE_UCOPY_LEN4 entry presumably rebuilds the byte count from the
 * rep;movsl word count -- confirm against the extable fixup types.
 * The user-access window is opened by the caller.
 */
static unsigned long __copy_user_intel_nocache(void *to,
				const void __user *from, unsigned long size)
{
	int d0, d1;

	__asm__ __volatile__(
	       "        .align 2,0x90\n"
	       "0:      movl 32(%4), %%eax\n"
	       "        cmpl $67, %0\n"
	       "        jbe 2f\n"
	       "1:      movl 64(%4), %%eax\n"
	       "        .align 2,0x90\n"
	       "2:      movl 0(%4), %%eax\n"
	       "21:     movl 4(%4), %%edx\n"
	       "        movnti %%eax, 0(%3)\n"
	       "        movnti %%edx, 4(%3)\n"
	       "3:      movl 8(%4), %%eax\n"
	       "31:     movl 12(%4),%%edx\n"
	       "        movnti %%eax, 8(%3)\n"
	       "        movnti %%edx, 12(%3)\n"
	       "4:      movl 16(%4), %%eax\n"
	       "41:     movl 20(%4), %%edx\n"
	       "        movnti %%eax, 16(%3)\n"
	       "        movnti %%edx, 20(%3)\n"
	       "10:     movl 24(%4), %%eax\n"
	       "51:     movl 28(%4), %%edx\n"
	       "        movnti %%eax, 24(%3)\n"
	       "        movnti %%edx, 28(%3)\n"
	       "11:     movl 32(%4), %%eax\n"
	       "61:     movl 36(%4), %%edx\n"
	       "        movnti %%eax, 32(%3)\n"
	       "        movnti %%edx, 36(%3)\n"
	       "12:     movl 40(%4), %%eax\n"
	       "71:     movl 44(%4), %%edx\n"
	       "        movnti %%eax, 40(%3)\n"
	       "        movnti %%edx, 44(%3)\n"
	       "13:     movl 48(%4), %%eax\n"
	       "81:     movl 52(%4), %%edx\n"
	       "        movnti %%eax, 48(%3)\n"
	       "        movnti %%edx, 52(%3)\n"
	       "14:     movl 56(%4), %%eax\n"
	       "91:     movl 60(%4), %%edx\n"
	       "        movnti %%eax, 56(%3)\n"
	       "        movnti %%edx, 60(%3)\n"
	       "        addl $-64, %0\n"
	       "        addl $64, %4\n"
	       "        addl $64, %3\n"
	       "        cmpl $63, %0\n"
	       "        ja  0b\n"
	       /* order the movnti stores before the cached tail copy */
	       "        sfence \n"
	       "5:      movl  %0, %%eax\n"
	       "        shrl  $2, %0\n"
	       "        andl $3, %%eax\n"
	       "        cld\n"
	       "6:      rep; movsl\n"
	       "        movl %%eax,%0\n"
	       "7:      rep; movsb\n"
	       "8:\n"
	       _ASM_EXTABLE_UA(0b, 8b)
	       _ASM_EXTABLE_UA(1b, 8b)
	       _ASM_EXTABLE_UA(2b, 8b)
	       _ASM_EXTABLE_UA(21b, 8b)
	       _ASM_EXTABLE_UA(3b, 8b)
	       _ASM_EXTABLE_UA(31b, 8b)
	       _ASM_EXTABLE_UA(4b, 8b)
	       _ASM_EXTABLE_UA(41b, 8b)
	       _ASM_EXTABLE_UA(10b, 8b)
	       _ASM_EXTABLE_UA(51b, 8b)
	       _ASM_EXTABLE_UA(11b, 8b)
	       _ASM_EXTABLE_UA(61b, 8b)
	       _ASM_EXTABLE_UA(12b, 8b)
	       _ASM_EXTABLE_UA(71b, 8b)
	       _ASM_EXTABLE_UA(13b, 8b)
	       _ASM_EXTABLE_UA(81b, 8b)
	       _ASM_EXTABLE_UA(14b, 8b)
	       _ASM_EXTABLE_UA(91b, 8b)
	       _ASM_EXTABLE_TYPE_REG(6b, 8b, EX_TYPE_UCOPY_LEN4, %%eax)
	       _ASM_EXTABLE_UA(7b, 8b)
	       : "=&c"(size), "=&D" (d0), "=&S" (d1)
	       :  "1"(to), "2"(from), "0"(size)
	       : "eax", "edx", "memory");
	return size;
}
27444f0257fSThomas Gleixner
27544f0257fSThomas Gleixner #else
27644f0257fSThomas Gleixner
/*
 * Declared but intentionally left undefined: without
 * CONFIG_X86_INTEL_USERCOPY, movsl_is_ok() always returns 1, so
 * __copy_user_ll() never takes the branch that calls this.  The
 * declaration only keeps the compiler happy; a stray call would fail
 * at link time.
 */
unsigned long __copy_user_intel(void __user *to, const void *from,
					unsigned long size);
28344f0257fSThomas Gleixner #endif /* CONFIG_X86_INTEL_USERCOPY */
28444f0257fSThomas Gleixner
/*
 * __copy_user - generic arbitrary sized copy.
 *
 * For copies larger than 7 bytes, first aligns the destination to an
 * 8-byte boundary: "movl %1,%0; negl %0; andl $7,%0" computes
 * (-dest) & 7, those head bytes are copied with rep;movsb (label 4)
 * and subtracted from the total in %3.  The remainder is then copied
 * as words with rep;movsl (label 0) and a 0-3 byte rep;movsb tail
 * (label 1).
 *
 * @size is in/out: 0 on success, otherwise the number of bytes not
 * copied.  On a fault the exception-table entries resume at label 2;
 * the EX_TYPE_UCOPY_LEN1/LEN4 entries presumably fold the remaining
 * head/word counts back into a byte count via %3 -- confirm against
 * the extable fixup types.  Caller must have opened the user-access
 * window (see __copy_user_ll()).
 */
#define __copy_user(to, from, size)					\
do {									\
	int __d0, __d1, __d2;						\
	__asm__ __volatile__(						\
		"	cmp  $7,%0\n"					\
		"	jbe  1f\n"					\
		"	movl %1,%0\n"					\
		"	negl %0\n"					\
		"	andl $7,%0\n"					\
		"	subl %0,%3\n"					\
		"4:	rep; movsb\n"					\
		"	movl %3,%0\n"					\
		"	shrl $2,%0\n"					\
		"	andl $3,%3\n"					\
		"	.align 2,0x90\n"				\
		"0:	rep; movsl\n"					\
		"	movl %3,%0\n"					\
		"1:	rep; movsb\n"					\
		"2:\n"							\
		_ASM_EXTABLE_TYPE_REG(4b, 2b, EX_TYPE_UCOPY_LEN1, %3)	\
		_ASM_EXTABLE_TYPE_REG(0b, 2b, EX_TYPE_UCOPY_LEN4, %3)	\
		_ASM_EXTABLE_UA(1b, 2b)					\
		: "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)	\
		: "3"(size), "0"(size), "1"(to), "2"(from)		\
		: "memory");						\
} while (0)
31244f0257fSThomas Gleixner
/*
 * __copy_user_ll - low-level copy between kernel and user memory.
 * @to:   destination address
 * @from: source address
 * @n:    number of bytes to copy
 *
 * Opens the user-access window (with speculation barrier), then picks
 * the copy strategy: the generic rep;movs based __copy_user() when the
 * addresses are movsl-friendly, otherwise the unrolled
 * __copy_user_intel() routine.
 *
 * Returns the number of bytes that could not be copied; 0 on success.
 */
unsigned long __copy_user_ll(void *to, const void *from, unsigned long n)
{
	__uaccess_begin_nospec();
	if (!movsl_is_ok(to, from, n))
		n = __copy_user_intel(to, from, n);
	else
		__copy_user(to, from, n);	/* updates @n in place */
	__uaccess_end();
	return n;
}
EXPORT_SYMBOL(__copy_user_ll);
32444f0257fSThomas Gleixner
/*
 * __copy_from_user_ll_nocache_nozero - copy from user space, bypassing
 * the cache for large copies, without zeroing the tail on fault.
 * @to:   destination address, in kernel space
 * @from: source address, in user space
 * @n:    number of bytes to copy
 *
 * When CONFIG_X86_INTEL_USERCOPY is enabled and the CPU has SSE2
 * (X86_FEATURE_XMM2, needed for movnti/sfence), copies larger than 64
 * bytes use the non-temporal __copy_user_intel_nocache(); everything
 * else falls back to the generic __copy_user().  Unlike the "zeroing"
 * variants, destination bytes past a fault are left untouched.
 *
 * Returns the number of bytes that could not be copied; 0 on success.
 */
unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
					unsigned long n)
{
	__uaccess_begin_nospec();
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n > 64 && static_cpu_has(X86_FEATURE_XMM2))
		n = __copy_user_intel_nocache(to, from, n);
	else
		__copy_user(to, from, n);
#else
	__copy_user(to, from, n);
#endif
	__uaccess_end();
	return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);
341