xref: /linux/arch/arc/include/asm/futex.h (revision 93d90ad708b8da6efc0e487b66111aa9db7f70c7)
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Vineetg: August 2010: From Android kernel work
 */

#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/preempt.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

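/*
 * Generate one futex "op" on a user-space word, with exception-table
 * based fault handling.
 *
 * Operands of the "insn" strings passed in:
 *	%0 = ret (doubles as the computed new value), %1 = oldval,
 *	%2 = uaddr, %3 = oparg, %4 = -EFAULT
 *
 * 1: load the old value from *uaddr; "insn" computes the new value
 * into %0; 2: store it back and set ret to 0.  If either user access
 * faults, the __ex_table entries redirect to the fixup at 4:, which
 * returns -EFAULT in ret.
 *
 * No LLOCK/SCOND is used here (see the TBD comment further down); the
 * callers only wrap these ops in pagefault_disable()/preempt_disable().
 */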
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
							\
	__asm__ __volatile__(				\
	"1:	ld  %1, [%2]			\n"	\
		insn				"\n"	\
	"2:	st  %0, [%2]			\n"	\
	"	mov %0, 0			\n"	\
	"3:					\n"	\
	"	.section .fixup,\"ax\"		\n"	\
	"	.align  4			\n"	\
	"4:	mov %0, %4			\n"	\
	"	b   3b				\n"	\
	"	.previous			\n"	\
	"	.section __ex_table,\"a\"	\n"	\
	"	.align  4			\n"	\
	"	.word   1b, 4b			\n"	\
	"	.word   2b, 4b			\n"	\
	"	.previous			\n"	\
							\
	: "=&r" (ret), "=&r" (oldval)			\
	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)	\
	: "cc", "memory")

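/*
 * Futex op + compare, as used by FUTEX_WAKE_OP.
 *
 * encoded_op layout, as decoded below:
 *	bit  31		FUTEX_OP_OPARG_SHIFT flag (oparg is a shift count)
 *	bits 30..28	op     (FUTEX_OP_SET/ADD/OR/ANDN/XOR)
 *	bits 27..24	cmp    (FUTEX_OP_CMP_*)
 *	bits 23..12	oparg  (12 bits, sign extended)
 *	bits 11..0	cmparg (12 bits, sign extended)
 *
 * The op is applied to *uaddr with pagefaults (and hence preemption)
 * disabled; the old value is then compared against cmparg and the
 * boolean result, or a negative error code, is returned.
 */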
static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	pagefault_disable();	/* implies preempt_disable() */

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %0, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or  %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("bic %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();	/* subsumes preempt_enable() */

	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ:
			ret = (oldval == cmparg);
			break;
		case FUTEX_OP_CMP_NE:
			ret = (oldval != cmparg);
			break;
		case FUTEX_OP_CMP_LT:
			ret = (oldval < cmparg);
			break;
		case FUTEX_OP_CMP_GE:
			ret = (oldval >= cmparg);
			break;
		case FUTEX_OP_CMP_LE:
			ret = (oldval <= cmparg);
			break;
		case FUTEX_OP_CMP_GT:
			ret = (oldval > cmparg);
			break;
		default:
			ret = -ENOSYS;
		}
	}
	return ret;
}

/* Compare-exchange with preemption disabled.
 *  Notes:
 *      -Best-effort: the exchange happens only if the compare succeeds;
 *          if the compare fails we simply return, leaving any retry /
 *          looping to the upper layers
 *      -Successful cmp-xchg: return the original value at @uaddr (which
 *          equals the compare value)
 *      -Compare fails: return the original value at @uaddr
 *      -User access (read/write) fails: return -EFAULT
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval,
					u32 newval)
{
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	pagefault_disable();	/* implies preempt_disable() */

	/* TBD : can use llock/scond */
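	/*
	 * 1: load the current value; if it doesn't match @oldval, skip
	 *    the store and fall through to 3:.  2: store @newval.  A
	 *    fault on either user access branches to the fixup at 4:,
	 *    which puts -EFAULT in %0.
	 *
	 * Operands: %0 = val, %1 = oldval, %2 = newval, %3 = uaddr,
	 *           %4 = -EFAULT
	 */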
	__asm__ __volatile__(
	"1:	ld    %0, [%3]	\n"
	"	brne  %0, %1, 3f	\n"
	"2:	st    %2, [%3]	\n"
	"3:	\n"
	"	.section .fixup,\"ax\"	\n"
	"4:	mov %0, %4	\n"
	"	b   3b	\n"
	"	.previous	\n"
	"	.section __ex_table,\"a\"	\n"
	"	.align  4	\n"
	"	.word   1b, 4b	\n"
	"	.word   2b, 4b	\n"
	"	.previous\n"
	: "=&r"(val)
	: "r"(oldval), "r"(newval), "r"(uaddr), "ir"(-EFAULT)
	: "cc", "memory");

	pagefault_enable();	/* subsumes preempt_enable() */

	*uval = val;
	return val;
}

#endif