#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

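/*
 * __futex_atomic_ex_table() emits the exception-table entries for the two
 * user-space accesses at labels 1 and 2.  If either access faults, the
 * fixup at label 4 loads the error value (-EFAULT at the use sites below)
 * into the result register and branches back to label 3, i.e. to the
 * instruction following the access sequence.
 */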
#define __futex_atomic_ex_table(err_reg)			\
	"3:\n"							\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.popsection\n"					\
	"	.pushsection .text.fixup,\"ax\"\n"		\
	"	.align	2\n"					\
	"4:	mov	%0, " err_reg "\n"			\
	"	b	3b\n"					\
	"	.popsection"

#ifdef CONFIG_SMP

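/*
 * SMP: perform the read-modify-write with an exclusive ldrex/strex retry
 * loop.  "insn" computes the new value in %0 from the old value in %1 and
 * the operand in %4; the strex status in %2 forces a retry if the
 * exclusive monitor was lost.  User access is enabled only around the
 * loop via uaccess_save_and_enable()/uaccess_restore().
 */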
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
({								\
	unsigned int __ua_flags;				\
	smp_mb();						\
	prefetchw(uaddr);					\
	__ua_flags = uaccess_save_and_enable();			\
	__asm__ __volatile__(					\
	"1:	ldrex	%1, [%3]\n"				\
	"	" insn "\n"					\
	"2:	strex	%2, %0, [%3]\n"				\
	"	teq	%2, #0\n"				\
	"	bne	1b\n"					\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory");					\
	uaccess_restore(__ua_flags);				\
})

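/*
 * SMP: compare-and-exchange the user word with ldrex/strexeq.  The store
 * is only attempted when the loaded value matches oldval; a failed strex
 * (monitor lost) restarts the loop.  ret is 0 in all non-faulting cases
 * and -EFAULT via the fixup on a faulting access; the caller detects a
 * value mismatch by comparing *uval against oldval.
 */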
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	unsigned int __ua_flags;
	int ret;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	smp_mb();
	/* Prefetching cannot fault */
	prefetchw(uaddr);
	__ua_flags = uaccess_save_and_enable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	ldrex	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	ite	eq	@ explicit IT needed for the 2b label\n"
	"2:	strexeq	%0, %3, [%4]\n"
	"	movne	%0, #0\n"
	"	teq	%0, #0\n"
	"	bne	1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	uaccess_restore(__ua_flags);
	smp_mb();

	*uval = val;
	return ret;
}

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/preempt.h>
#include <asm/domain.h>

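/*
 * UP: a plain user-mode load/store pair (TUSER) is sufficient; the
 * sequence is made effectively atomic by the caller disabling preemption
 * and page faults around the operation (see futex_atomic_op_inuser()).
 */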
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
({								\
	unsigned int __ua_flags = uaccess_save_and_enable();	\
	__asm__ __volatile__(					\
	"1:	" TUSER(ldr) "	%1, [%3]\n"			\
	"	" insn "\n"					\
	"2:	" TUSER(str) "	%0, [%3]\n"			\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory");					\
	uaccess_restore(__ua_flags);				\
})

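/*
 * UP compare-and-exchange: load the user word and store newval only when
 * it equals oldval (conditional TUSER store under an explicit IT block
 * for Thumb-2).  preempt_disable() makes the sequence atomic with respect
 * to other tasks; the fixup path returns -EFAULT on a faulting access.
 */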
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	unsigned int __ua_flags;
	int ret = 0;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	preempt_disable();
	__ua_flags = uaccess_save_and_enable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	" TUSER(ldr) "	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	it	eq	@ explicit IT needed for the 2b label\n"
	"2:	" TUSER(streq) "	%3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	uaccess_restore(__ua_flags);

	*uval = val;
	preempt_enable();

	return ret;
}

#endif /* !SMP */

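/*
 * futex_atomic_op_inuser() decodes a FUTEX_OP encoded_op word: bit 31 is
 * the FUTEX_OP_OPARG_SHIFT flag, op sits in bits 30-28, cmp in bits 27-24,
 * oparg in bits 23-12 and cmparg in bits 11-0 (both sign-extended).  With
 * the shift flag set, oparg becomes 1 << oparg.  The selected op is
 * applied atomically to *uaddr, the old value is compared against cmparg
 * according to cmp, and the boolean result is returned; faults give
 * -EFAULT and unknown op/cmp values give -ENOSYS.  For example, op =
 * FUTEX_OP_ADD, oparg = 1, cmp = FUTEX_OP_CMP_GT, cmparg = 0 atomically
 * increments *uaddr and reports whether the previous value was positive.
 */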
static inline int
futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret, tmp;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

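	/*
	 * On UP, the plain load/store in __futex_atomic_op() is only atomic
	 * if this task cannot be preempted in between, so disable preemption
	 * here; on SMP the ldrex/strex loop already guarantees atomicity.
	 */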
#ifndef CONFIG_SMP
	preempt_disable();
#endif
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%0, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();
#ifndef CONFIG_SMP
	preempt_enable();
#endif

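	/*
	 * The atomic operation succeeded: return the result of comparing
	 * the old value against cmparg with the requested comparison.
	 */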
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}

#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */