/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 2006 Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/asm-eva.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/errno.h>
#include <asm/war.h>

#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)		\
{									\
	if (cpu_has_llsc && R10000_LLSC_WAR) {				\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	arch=r4000			\n"	\
		"1:	ll	%1, %4	# __futex_atomic_op	\n"	\
		"	.set	pop				\n"	\
		"	" insn	"				\n"	\
		"	.set	arch=r4000			\n"	\
		"2:	sc	$1, %2				\n"	\
		"	beqzl	$1, 1b				\n"	\
		__WEAK_LLSC_MB						\
		"3:						\n"	\
		"	.insn					\n"	\
		"	.set	pop				\n"	\
		"	.section .fixup,\"ax\"			\n"	\
		"4:	li	%0, %6				\n"	\
		"	j	3b				\n"	\
		"	.previous				\n"	\
		"	.section __ex_table,\"a\"		\n"	\
		"	"__UA_ADDR "\t1b, 4b			\n"	\
		"	"__UA_ADDR "\t2b, 4b			\n"	\
		"	.previous				\n"	\
		: "=r" (ret), "=&r" (oldval),				\
		  "=" GCC_OFF_SMALL_ASM() (*uaddr)			\
		: "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg),	\
		  "i" (-EFAULT)						\
		: "memory");						\
	} else if (cpu_has_llsc) {					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	"user_ll("%1", "%4")" # __futex_atomic_op\n"	\
		"	.set	pop				\n"	\
		"	" insn	"				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"2:	"user_sc("$1", "%2")"			\n"	\
		"	beqz	$1, 1b				\n"	\
		__WEAK_LLSC_MB						\
		"3:						\n"	\
		"	.insn					\n"	\
		"	.set	pop				\n"	\
		"	.section .fixup,\"ax\"			\n"	\
		"4:	li	%0, %6				\n"	\
		"	j	3b				\n"	\
		"	.previous				\n"	\
		"	.section __ex_table,\"a\"		\n"	\
		"	"__UA_ADDR "\t1b, 4b			\n"	\
		"	"__UA_ADDR "\t2b, 4b			\n"	\
		"	.previous				\n"	\
		: "=r" (ret), "=&r" (oldval),				\
		  "=" GCC_OFF_SMALL_ASM() (*uaddr)			\
		: "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg),	\
		  "i" (-EFAULT)						\
		: "memory");						\
	} else								\
		ret = -ENOSYS;						\
}

static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("move $1, %z5", ret, oldval, uaddr, oparg);
		break;

	case FUTEX_OP_ADD:
		__futex_atomic_op("addu $1, %1, %z5",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or $1, %1, %z5",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and $1, %1, %z5",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor $1, %1, %z5",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret)
		*oval = oldval;

	return ret;
}

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic		\n"
		"	.set	push				\n"
		"	.set	noat				\n"
		"	.set	push				\n"
		"	.set	arch=r4000			\n"
		"1:	ll	%1, %3				\n"
		"	bne	%1, %z4, 3f			\n"
		"	.set	pop				\n"
		"	move	$1, %z5				\n"
		"	.set	arch=r4000			\n"
		"2:	sc	$1, %2				\n"
		"	beqzl	$1, 1b				\n"
		__WEAK_LLSC_MB
		"3:						\n"
		"	.insn					\n"
		"	.set	pop				\n"
		"	.section .fixup,\"ax\"			\n"
		"4:	li	%0, %6				\n"
		"	j	3b				\n"
		"	.previous				\n"
		"	.section __ex_table,\"a\"		\n"
		"	"__UA_ADDR "\t1b, 4b			\n"
		"	"__UA_ADDR "\t2b, 4b			\n"
		"	.previous				\n"
		: "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
		  "i" (-EFAULT)
		: "memory");
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic		\n"
		"	.set	push				\n"
		"	.set	noat				\n"
		"	.set	push				\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
		"1:	"user_ll("%1", "%3")"			\n"
		"	bne	%1, %z4, 3f			\n"
		"	.set	pop				\n"
		"	move	$1, %z5				\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
		"2:	"user_sc("$1", "%2")"			\n"
		"	beqz	$1, 1b				\n"
		__WEAK_LLSC_MB
		"3:						\n"
		"	.insn					\n"
		"	.set	pop				\n"
		"	.section .fixup,\"ax\"			\n"
		"4:	li	%0, %6				\n"
		"	j	3b				\n"
		"	.previous				\n"
		"	.section __ex_table,\"a\"		\n"
		"	"__UA_ADDR "\t1b, 4b			\n"
		"	"__UA_ADDR "\t2b, 4b			\n"
		"	.previous				\n"
		: "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
		  "i" (-EFAULT)
		: "memory");
	} else
		return -ENOSYS;

	*uval = val;
	return ret;
}

#endif
#endif /* _ASM_FUTEX_H */