/* xref: /linux/arch/microblaze/include/asm/futex.h (revision 26b0d14106954ae46d2f4f7eec3481828a210f7d) */
#ifndef _ASM_MICROBLAZE_FUTEX_H
#define _ASM_MICROBLAZE_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

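/*
 * Run one atomic read-modify-write on the user word at "uaddr".
 *
 * lwx opens an exclusive reservation and loads the old value into %0
 * (oldval); "insn" computes the new value into %1 from %0 and/or the
 * operand %4 (oparg); swx stores %1 back only if the reservation still
 * holds, setting the carry flag on failure, which addic copies into %1
 * so that bnei can retry from label 1.  If either user access faults,
 * the __ex_table entries redirect to the fixup at label 4, which
 * branches back to label 3 with %1 (ret) set to -EFAULT.
 */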
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
({									\
	__asm__ __volatile__ (						\
			"1:	lwx	%0, %2, r0; "			\
				insn					\
			"2:	swx	%1, %2, r0;			\
				addic	%1, r0, 0;			\
				bnei	%1, 1b;				\
			3:						\
			.section .fixup,\"ax\";				\
			4:	brid	3b;				\
				addik	%1, r0, %3;			\
			.previous;					\
			.section __ex_table,\"a\";			\
			.word	1b,4b,2b,4b;				\
			.previous;"					\
	: "=&r" (oldval), "=&r" (ret)					\
	: "r" (uaddr), "i" (-EFAULT), "r" (oparg)			\
	);								\
})

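/*
 * Decode and run a FUTEX_WAKE_OP operation word:
 *
 *	bits 28-31: arithmetic op (bit 31 is FUTEX_OP_OPARG_SHIFT,
 *		    "treat oparg as a shift count")
 *	bits 24-27: comparison op
 *	bits 12-23: oparg (sign-extended)
 *	bits  0-11: cmparg (sign-extended)
 *
 * For example, FUTEX_OP(FUTEX_OP_ADD, 1, FUTEX_OP_CMP_GT, 0) from
 * <linux/futex.h> asks us to atomically add 1 to *uaddr and report
 * whether the old value was greater than 0.
 */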
static inline int
futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;	/* sign-extend bits 12-23 */
	int cmparg = (encoded_op << 20) >> 20;	/* sign-extend bits 0-11 */
	int oldval = 0, ret;
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	/*
	 * Faults must not sleep here; with page faults disabled, a bad
	 * uaddr is handled by the fixup code and reported as -EFAULT.
	 */
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		/* "or %1,%4,%4" copies oparg into %1: new = oparg */
		__futex_atomic_op("or %1,%4,%4;", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add %1,%0,%4;", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %1,%0,%4;", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* new = old & ~oparg */
		__futex_atomic_op("andn %1,%0,%4;", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %1,%0,%4;", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret) {
		/* report the result of "oldval <cmp> cmparg" */
		switch (cmp) {
		case FUTEX_OP_CMP_EQ:
			ret = (oldval == cmparg);
			break;
		case FUTEX_OP_CMP_NE:
			ret = (oldval != cmparg);
			break;
		case FUTEX_OP_CMP_LT:
			ret = (oldval < cmparg);
			break;
		case FUTEX_OP_CMP_GE:
			ret = (oldval >= cmparg);
			break;
		case FUTEX_OP_CMP_LE:
			ret = (oldval <= cmparg);
			break;
		case FUTEX_OP_CMP_GT:
			ret = (oldval > cmparg);
			break;
		default:
			ret = -ENOSYS;
		}
	}
	return ret;
}

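/*
 * Atomic compare-and-exchange on the user word at "uaddr": if it still
 * contains "oldval", replace it with "newval".  The value actually
 * found is returned through *uval; the return value is 0 on success or
 * -EFAULT if a user access faulted (via the same fixup/__ex_table
 * pattern as above).  "cmp %2, %1, %4" subtracts the loaded value from
 * oldval, so bnei skips the store when they differ, and the
 * swx/addic/bnei triple retries the loop if the reservation was lost.
 */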
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0, cmp;
	u32 prev;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ ("1:	lwx	%1, %3, r0;		\
					cmp	%2, %1, %4;		\
					bnei	%2, 3f;			\
				2:	swx	%5, %3, r0;		\
					addic	%2, r0, 0;		\
					bnei	%2, 1b;			\
				3:					\
				.section .fixup,\"ax\";			\
				4:	brid	3b;			\
					addik	%0, r0, %6;		\
				.previous;				\
				.section __ex_table,\"a\";		\
				.word	1b,4b,2b,4b;			\
				.previous;"				\
		: "+r" (ret), "=&r" (prev), "=&r"(cmp)	\
		: "r" (uaddr), "r" (oldval), "r" (newval), "i" (-EFAULT));

	*uval = prev;
	return ret;
}
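
/*
 * Usage sketch (hypothetical caller, mirroring the pattern in the
 * generic futex code, which invokes this with the hash-bucket lock
 * held):
 *
 *	u32 curval;
 *
 *	if (futex_atomic_cmpxchg_inatomic(&curval, uaddr, uval, newval))
 *		return -EFAULT;
 *	if (curval != uval)
 *		return -EAGAIN;
 */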

#endif /* __KERNEL__ */

#endif