xref: /linux/arch/xtensa/include/asm/futex.h (revision c4ee0af3fa0dc65f690fc908f02b8355f9576ea0)
/*
 * Atomic futex routines
 *
 * Based on the PowerPC implementation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Copyright (C) 2013 TangoTec Ltd.
 *
 * Baruch Siach <baruch@tkos.co.il>
 */

#ifndef _ASM_XTENSA_FUTEX_H
#define _ASM_XTENSA_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>

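/*
 * __futex_atomic_op() performs a read-modify-write on a user-space word
 * using the S32C1I (store-compare-conditional) instruction:
 *
 *   1. l32i loads the current value of *uaddr into oldval,
 *   2. "insn" computes the new value from oldval and oparg into ret,
 *   3. the loaded value is placed in SCOMPARE1 and s32c1i stores the new
 *      value only if *uaddr still matches it, retrying from the load
 *      otherwise,
 *   4. ret is cleared to 0 on success.
 *
 * A fault on either user access is routed through the __ex_table entries
 * to the .fixup code, which sets ret to -EFAULT and jumps past the loop.
 */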
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
	__asm__ __volatile__(				\
	"1:	l32i	%0, %2, 0\n"			\
		insn "\n"				\
	"	wsr	%0, scompare1\n"		\
	"2:	s32c1i	%1, %2, 0\n"			\
	"	bne	%1, %0, 1b\n"			\
	"	movi	%1, 0\n"			\
	"3:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.align 4\n"				\
	"4:	.long	3b\n"				\
	"5:	l32r	%0, 4b\n"			\
	"	movi	%1, %3\n"			\
	"	jx	%0\n"				\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.long 1b,5b,2b,5b\n"			\
	"	.previous\n"				\
	: "=&r" (oldval), "=&r" (ret)			\
	: "r" (uaddr), "I" (-EFAULT), "r" (oparg)	\
	: "memory")

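/*
 * futex_atomic_op_inuser() decodes the FUTEX_WAKE_OP argument word:
 * bits 28-31 hold the operation (with bit 31 selecting "oparg is a shift
 * count"), bits 24-27 the comparison, bits 12-23 the sign-extended operand
 * and bits 0-11 the sign-extended comparison argument.  For example,
 * FUTEX_OP(FUTEX_OP_ADD, 1, FUTEX_OP_CMP_EQ, 0) atomically adds 1 to
 * *uaddr and reports whether the old value was zero.
 *
 * Returns the (boolean) result of comparing the pre-operation value
 * against cmparg, or a negative error code.
 */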
static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

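	/*
	 * Without the S32C1I option there is no conditional-store
	 * instruction to build the atomic update on.
	 */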
#if !XCHAL_HAVE_S32C1I
	return -ENOSYS;
#endif

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %1, %4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add %1, %0, %4", ret, oldval, uaddr,
				oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %1, %0, %4", ret, oldval, uaddr,
				oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and %1, %0, %4", ret, oldval, uaddr,
				~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %1, %0, %4", ret, oldval, uaddr,
				oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (ret)
		return ret;

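	/* Report how the pre-operation value compares against cmparg. */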
	switch (cmp) {
	case FUTEX_OP_CMP_EQ: return (oldval == cmparg);
	case FUTEX_OP_CMP_NE: return (oldval != cmparg);
	case FUTEX_OP_CMP_LT: return (oldval < cmparg);
	case FUTEX_OP_CMP_GE: return (oldval >= cmparg);
	case FUTEX_OP_CMP_LE: return (oldval <= cmparg);
	case FUTEX_OP_CMP_GT: return (oldval > cmparg);
	}

	return -ENOSYS;
}

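/*
 * Atomically set *uaddr to newval if it currently holds oldval, using
 * S32C1I with SCOMPARE1 loaded with oldval.  The value observed at *uaddr
 * is reported through *uval; the return value is 0, or -EFAULT if the
 * user access faulted.
 */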
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 prev;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

#if !XCHAL_HAVE_S32C1I
	return -ENOSYS;
#endif

	__asm__ __volatile__ (
	"	# futex_atomic_cmpxchg_inatomic\n"
	"	wsr	%4, scompare1\n"	/* compare against the caller's oldval */
	"	mov	%1, %5\n"		/* s32c1i takes newval in the store register */
	"1:	s32c1i	%1, %3, 0\n"	/* conditional store; %1 becomes old *uaddr */
	"2:\n"
	"	.section .fixup,\"ax\"\n"
	"	.align 4\n"
	"3:	.long	2b\n"
	"4:	l32r	%1, 3b\n"
	"	movi	%0, %6\n"	/* fault: return -EFAULT */
	"	jx	%1\n"
	"	.previous\n"
	"	.section __ex_table,\"a\"\n"
	"	.long 1b,4b\n"
	"	.previous\n"
	: "+r" (ret), "=&r" (prev), "+m" (*uaddr)
	: "r" (uaddr), "r" (oldval), "r" (newval), "I" (-EFAULT)
	: "memory");

	*uval = prev;
	return ret;
}

#endif /* __KERNEL__ */
#endif /* _ASM_XTENSA_FUTEX_H */