xref: /linux/arch/sparc/include/asm/futex_64.h (revision 0d456bad36d42d16022be045c8a53ddbb59ee478)
1 #ifndef _SPARC64_FUTEX_H
2 #define _SPARC64_FUTEX_H
3 
4 #include <linux/futex.h>
5 #include <linux/uaccess.h>
6 #include <asm/errno.h>
7 
/*
 * __futex_cas_op - atomically apply @insn to the user word at @uaddr.
 *
 * Expands to an inline-asm compare-and-swap retry loop:
 *
 *   1: lduwa  loads the current 32-bit value from [@uaddr] into %2.
 *      The access goes through %asi (user address space), so a fault
 *      is recoverable via the __ex_table entry below.
 *      @insn then computes the new value into %1 from the old value
 *      (%2) and/or @oparg (%4).
 *   2: casa   stores %1 to [@uaddr] iff memory still holds %2; in
 *      either case %1 receives the value that was in memory.  If
 *      %2 != %1 afterwards, another CPU changed the word between the
 *      load and the casa, so branch back to 1: and retry.  The
 *      delay-slot "mov 0, %0" sets @ret to 0 on every pass; it is
 *      only overridden by the fault fixup.
 *
 * On a fault in either user access (labels 1: and 2: are both listed
 * in __ex_table) the fixup at 4: jumps past the asm to 3: with
 * @ret = -EFAULT loaded in the jmpl delay slot.
 *
 * On success @oldval ends up holding the word's pre-update value.
 *
 * NOTE(review): the macro also uses a caller-scope variable named
 * 'tem' (output operand %2) without taking it as a parameter; every
 * caller must declare an int named 'tem'.
 */
#define __futex_cas_op(insn, ret, oldval, uaddr, oparg)	\
	__asm__ __volatile__(				\
	"\n1:	lduwa	[%3] %%asi, %2\n"		\
	"	" insn "\n"				\
	"2:	casa	[%3] %%asi, %2, %1\n"		\
	"	cmp	%2, %1\n"			\
	"	bne,pn	%%icc, 1b\n"			\
	"	 mov	0, %0\n"			\
	"3:\n"						\
	"	.section .fixup,#alloc,#execinstr\n"	\
	"	.align	4\n"				\
	"4:	sethi	%%hi(3b), %0\n"			\
	"	jmpl	%0 + %%lo(3b), %%g0\n"		\
	"	 mov	%5, %0\n"			\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.align	4\n"				\
	"	.word	1b, 4b\n"			\
	"	.word	2b, 4b\n"			\
	"	.previous\n"				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tem)	\
	: "r" (uaddr), "r" (oparg), "i" (-EFAULT)	\
	: "memory")
31 
/*
 * futex_atomic_op_inuser - perform an encoded FUTEX_WAKE_OP operation
 * on the user word at @uaddr, then evaluate the encoded comparison
 * against the word's previous value.
 *
 * @encoded_op packs four fields:
 *   bits 28-31: op     (masked with 7 below; bit 31 is OPARG_SHIFT)
 *   bits 24-27: cmp
 *   bits 12-23: oparg  (sign-extended by the shift pair)
 *   bits  0-11: cmparg (sign-extended by the shift pair)
 *
 * Returns the boolean result of "<old value> <cmp> cmparg" on
 * success, -EFAULT if @uaddr is not writable user memory or the
 * access faults, -EINVAL if @uaddr is not 4-byte aligned, or
 * -ENOSYS for an unrecognized op or cmp code.
 */
static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	/* The left-then-right shifts rely on arithmetic right shift of
	 * signed int to sign-extend the 12-bit oparg/cmparg fields. */
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	/* 'tem' is referenced by name inside __futex_cas_op (operand %2). */
	int oldval = 0, ret, tem;

	if (unlikely(!access_ok(VERIFY_WRITE, uaddr, sizeof(u32))))
		return -EFAULT;
	if (unlikely((((unsigned long) uaddr) & 0x3UL)))
		return -EINVAL;

	/* FUTEX_OP_OPARG_SHIFT: treat oparg as a shift count, 1 << oparg. */
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	/* Faults inside the asm must not be serviced here; the fixup
	 * converts them into a -EFAULT return instead. */
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_cas_op("mov\t%4, %1", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_cas_op("add\t%2, %4, %1", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_cas_op("or\t%2, %4, %1", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_cas_op("andn\t%2, %4, %1", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_cas_op("xor\t%2, %4, %1", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	/* On success, compare the word's pre-update value (left in
	 * 'oldval' by the CAS loop) against cmparg. */
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}
85 
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange the user word
 * at @uaddr: if it currently equals @oldval, replace it with @newval.
 *
 * *@uval receives the value casa observed in memory (it equals
 * @oldval iff the exchange took place); the caller compares it
 * against @oldval to detect success.  Returns 0 normally, or
 * -EFAULT if the user access faulted (in which case *@uval holds
 * the unmodified @newval and is not meaningful).
 *
 * NOTE(review): no access_ok()/alignment check here -- presumably the
 * generic futex code validates @uaddr before calling; confirm at the
 * callers.  A fault in the casa (through %asi, user address space) is
 * caught by the __ex_table entry: the fixup at 3: jumps back past the
 * asm with -EFAULT loaded into ret in the jmpl delay slot.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;

	/* casa [%4] %asi, %3, %1: store %1 (newval) iff *uaddr == %3
	 * (oldval); %1 receives the value that was in memory. */
	__asm__ __volatile__(
	"\n1:	casa	[%4] %%asi, %3, %1\n"
	"2:\n"
	"	.section .fixup,#alloc,#execinstr\n"
	"	.align	4\n"
	"3:	sethi	%%hi(2b), %0\n"
	"	jmpl	%0 + %%lo(2b), %%g0\n"
	"	mov	%5, %0\n"
	"	.previous\n"
	"	.section __ex_table,\"a\"\n"
	"	.align	4\n"
	"	.word	1b, 3b\n"
	"	.previous\n"
	: "+r" (ret), "=r" (newval)
	: "1" (newval), "r" (oldval), "r" (uaddr), "i" (-EFAULT)
	: "memory");

	/* 'newval' now holds the value casa read from *uaddr. */
	*uval = newval;
	return ret;
}
112 
113 #endif /* !(_SPARC64_FUTEX_H) */
114