xref: /linux/arch/x86/include/asm/futex.h (revision 7f71507851fc7764b36a3221839607d3a45c2025)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_X86_FUTEX_H
3 #define _ASM_X86_FUTEX_H
4 
5 #ifdef __KERNEL__
6 
7 #include <linux/futex.h>
8 #include <linux/uaccess.h>
9 
10 #include <asm/asm.h>
11 #include <asm/errno.h>
12 #include <asm/processor.h>
13 #include <asm/smap.h>
14 
/*
 * Run a single-instruction atomic futex operation on the user word at
 * @uaddr.  Caller must have opened a user access section.
 *
 * @insn:  one instruction (e.g. xchgl / LOCK xaddl) operating on %0 (a
 *         register preloaded with @oparg) and %2 (*@uaddr).
 * @oval:  int *; receives the pre-operation value on success.
 * @label: jumped to when the user access faults.
 *
 * ret starts at 0 ("1" (0)).  A fault at 1: is fixed up at 2: with the
 * tied register %1 (ret) set to -EFAULT by the EX_TYPE_EFAULT_REG
 * exception-table entry, so a nonzero ret means "faulted".
 */
#define unsafe_atomic_op1(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret;					\
	asm volatile("1:\t" insn "\n"				\
		     "2:\n"					\
		     _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %1) \
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "0" (oparg), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
27 
28 
/*
 * Run a read-modify-write futex operation that has no single-instruction
 * form, as a LOCK CMPXCHG retry loop.  Caller must have opened a user
 * access section.
 *
 * @insn:  computes the new value in %3 (tem) from %4 (@oparg), e.g.
 *         "orl %4, %3".
 * @oval:  int *; receives the pre-operation value on success.
 * @label: jumped to when either user access faults.
 *
 * 1: loads the current value into %0 (eax), 2:+insn build the candidate
 * new value in %3, 3: attempts the LOCK CMPXCHG.  On contention the loop
 * resumes at 2: (not 1:) because a failed cmpxchg reloads the fresh
 * memory value into eax.  Faults at 1: or 3: are fixed up at 4: with
 * %1 (ret) set to -EFAULT via EX_TYPE_EFAULT_REG; ret is otherwise 0.
 */
#define unsafe_atomic_op2(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret, tem;				\
	asm volatile("1:\tmovl	%2, %0\n"			\
		     "2:\tmovl\t%0, %3\n"			\
		     "\t" insn "\n"				\
		     "3:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t2b\n"				\
		     "4:\n"					\
		     _ASM_EXTABLE_TYPE_REG(1b, 4b, EX_TYPE_EFAULT_REG, %1) \
		     _ASM_EXTABLE_TYPE_REG(3b, 4b, EX_TYPE_EFAULT_REG, %1) \
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "1" (0));			\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
47 
/*
 * arch_futex_atomic_op_inuser - run a FUTEX_OP_* operation on a user word.
 * @op:    FUTEX_OP_{SET,ADD,OR,ANDN,XOR} opcode.
 * @oparg: operand for the operation.
 * @oval:  receives the previous value of *@uaddr on success.
 * @uaddr: user-space address of the futex word.
 *
 * Returns 0 on success, -EFAULT if @uaddr faults, -ENOSYS for an unknown
 * @op.  The unsafe_atomic_op*() macros jump to the Efault label on a
 * fault, so every exit path closes the user access section exactly once.
 */
static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
		u32 __user *uaddr)
{
	/*
	 * Open the user access section; the masked variant substitutes
	 * address masking for the explicit access_ok() check.
	 */
	if (can_do_masked_user_access())
		uaddr = masked_user_access_begin(uaddr);
	else if (!user_access_begin(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		/* xchg with a memory operand is implicitly locked. */
		unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
		break;
	case FUTEX_OP_ADD:
		unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
				   uaddr, oparg, Efault);
		break;
	case FUTEX_OP_OR:
		unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
		break;
	case FUTEX_OP_ANDN:
		/* ANDN clears the @oparg bits, hence the complement. */
		unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
		break;
	case FUTEX_OP_XOR:
		unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
		break;
	default:
		user_access_end();
		return -ENOSYS;
	}
	user_access_end();
	return 0;
Efault:
	user_access_end();
	return -EFAULT;
}
83 
84 static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
85 						u32 oldval, u32 newval)
86 {
87 	int ret = 0;
88 
89 	if (can_do_masked_user_access())
90 		uaddr = masked_user_access_begin(uaddr);
91 	else if (!user_access_begin(uaddr, sizeof(u32)))
92 		return -EFAULT;
93 	asm volatile("\n"
94 		"1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"
95 		"2:\n"
96 		_ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %0) \
97 		: "+r" (ret), "=a" (oldval), "+m" (*uaddr)
98 		: "r" (newval), "1" (oldval)
99 		: "memory"
100 	);
101 	user_access_end();
102 	*uval = oldval;
103 	return ret;
104 }
105 
106 #endif
107 #endif /* _ASM_X86_FUTEX_H */
108