14baa9922SRussell King /* 24baa9922SRussell King * arch/arm/include/asm/atomic.h 34baa9922SRussell King * 44baa9922SRussell King * Copyright (C) 1996 Russell King. 54baa9922SRussell King * Copyright (C) 2002 Deep Blue Solutions Ltd. 64baa9922SRussell King * 74baa9922SRussell King * This program is free software; you can redistribute it and/or modify 84baa9922SRussell King * it under the terms of the GNU General Public License version 2 as 94baa9922SRussell King * published by the Free Software Foundation. 104baa9922SRussell King */ 114baa9922SRussell King #ifndef __ASM_ARM_ATOMIC_H 124baa9922SRussell King #define __ASM_ARM_ATOMIC_H 134baa9922SRussell King 144baa9922SRussell King #include <linux/compiler.h> 15f38d999cSWill Deacon #include <linux/prefetch.h> 16ea435467SMatthew Wilcox #include <linux/types.h> 179f97da78SDavid Howells #include <linux/irqflags.h> 189f97da78SDavid Howells #include <asm/barrier.h> 199f97da78SDavid Howells #include <asm/cmpxchg.h> 204baa9922SRussell King 214baa9922SRussell King #define ATOMIC_INIT(i) { (i) } 224baa9922SRussell King 234baa9922SRussell King #ifdef __KERNEL__ 244baa9922SRussell King 25200b812dSCatalin Marinas /* 26200b812dSCatalin Marinas * On ARM, ordinary assignment (str instruction) doesn't clear the local 27200b812dSCatalin Marinas * strex/ldrex monitor on some implementations. The reason we can use it for 28200b812dSCatalin Marinas * atomic_set() is the clrex or dummy strex done on every exception return. 29200b812dSCatalin Marinas */ 3062e8a325SPeter Zijlstra #define atomic_read(v) READ_ONCE((v)->counter) 3162e8a325SPeter Zijlstra #define atomic_set(v,i) WRITE_ONCE(((v)->counter), (i)) 324baa9922SRussell King 334baa9922SRussell King #if __LINUX_ARM_ARCH__ >= 6 344baa9922SRussell King 354baa9922SRussell King /* 364baa9922SRussell King * ARMv6 UP and SMP safe atomic ops. We use load exclusive and 374baa9922SRussell King * store exclusive to ensure that these are atomic. We may loop 38200b812dSCatalin Marinas * to ensure that the update happens. 394baa9922SRussell King */ 40bac4e960SRussell King 41aee9a554SPeter Zijlstra #define ATOMIC_OP(op, c_op, asm_op) \ 42aee9a554SPeter Zijlstra static inline void atomic_##op(int i, atomic_t *v) \ 43aee9a554SPeter Zijlstra { \ 44aee9a554SPeter Zijlstra unsigned long tmp; \ 45aee9a554SPeter Zijlstra int result; \ 46aee9a554SPeter Zijlstra \ 47aee9a554SPeter Zijlstra prefetchw(&v->counter); \ 48aee9a554SPeter Zijlstra __asm__ __volatile__("@ atomic_" #op "\n" \ 49aee9a554SPeter Zijlstra "1: ldrex %0, [%3]\n" \ 50aee9a554SPeter Zijlstra " " #asm_op " %0, %0, %4\n" \ 51aee9a554SPeter Zijlstra " strex %1, %0, [%3]\n" \ 52aee9a554SPeter Zijlstra " teq %1, #0\n" \ 53aee9a554SPeter Zijlstra " bne 1b" \ 54aee9a554SPeter Zijlstra : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \ 55aee9a554SPeter Zijlstra : "r" (&v->counter), "Ir" (i) \ 56aee9a554SPeter Zijlstra : "cc"); \ 57aee9a554SPeter Zijlstra } \ 58bac4e960SRussell King 59aee9a554SPeter Zijlstra #define ATOMIC_OP_RETURN(op, c_op, asm_op) \ 600ca326deSWill Deacon static inline int atomic_##op##_return_relaxed(int i, atomic_t *v) \ 61aee9a554SPeter Zijlstra { \ 62aee9a554SPeter Zijlstra unsigned long tmp; \ 63aee9a554SPeter Zijlstra int result; \ 64aee9a554SPeter Zijlstra \ 65aee9a554SPeter Zijlstra prefetchw(&v->counter); \ 66aee9a554SPeter Zijlstra \ 67aee9a554SPeter Zijlstra __asm__ __volatile__("@ atomic_" #op "_return\n" \ 68aee9a554SPeter Zijlstra "1: ldrex %0, [%3]\n" \ 69aee9a554SPeter Zijlstra " " #asm_op " %0, %0, %4\n" \ 70aee9a554SPeter Zijlstra " strex %1, %0, [%3]\n" \ 71aee9a554SPeter Zijlstra " teq %1, #0\n" \ 72aee9a554SPeter Zijlstra " bne 1b" \ 73aee9a554SPeter Zijlstra : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \ 74aee9a554SPeter Zijlstra : "r" (&v->counter), "Ir" (i) \ 75aee9a554SPeter Zijlstra : "cc"); \ 76aee9a554SPeter Zijlstra \ 77aee9a554SPeter Zijlstra return result; \ 784baa9922SRussell King } 794baa9922SRussell King 806da068c1SPeter Zijlstra #define ATOMIC_FETCH_OP(op, c_op, asm_op) \ 816da068c1SPeter Zijlstra static inline int atomic_fetch_##op##_relaxed(int i, atomic_t *v) \ 826da068c1SPeter Zijlstra { \ 836da068c1SPeter Zijlstra unsigned long tmp; \ 846da068c1SPeter Zijlstra int result, val; \ 856da068c1SPeter Zijlstra \ 866da068c1SPeter Zijlstra prefetchw(&v->counter); \ 876da068c1SPeter Zijlstra \ 886da068c1SPeter Zijlstra __asm__ __volatile__("@ atomic_fetch_" #op "\n" \ 896da068c1SPeter Zijlstra "1: ldrex %0, [%4]\n" \ 906da068c1SPeter Zijlstra " " #asm_op " %1, %0, %5\n" \ 916da068c1SPeter Zijlstra " strex %2, %1, [%4]\n" \ 926da068c1SPeter Zijlstra " teq %2, #0\n" \ 936da068c1SPeter Zijlstra " bne 1b" \ 946da068c1SPeter Zijlstra : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \ 956da068c1SPeter Zijlstra : "r" (&v->counter), "Ir" (i) \ 966da068c1SPeter Zijlstra : "cc"); \ 976da068c1SPeter Zijlstra \ 986da068c1SPeter Zijlstra return result; \ 996da068c1SPeter Zijlstra } 1006da068c1SPeter Zijlstra 1010ca326deSWill Deacon #define atomic_add_return_relaxed atomic_add_return_relaxed 1020ca326deSWill Deacon #define atomic_sub_return_relaxed atomic_sub_return_relaxed 1036da068c1SPeter Zijlstra #define atomic_fetch_add_relaxed atomic_fetch_add_relaxed 1046da068c1SPeter Zijlstra #define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed 1056da068c1SPeter Zijlstra 1066da068c1SPeter Zijlstra #define atomic_fetch_and_relaxed atomic_fetch_and_relaxed 1076da068c1SPeter Zijlstra #define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed 1086da068c1SPeter Zijlstra #define atomic_fetch_or_relaxed atomic_fetch_or_relaxed 1096da068c1SPeter Zijlstra #define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed 1100ca326deSWill Deacon 1110ca326deSWill Deacon static inline int atomic_cmpxchg_relaxed(atomic_t *ptr, int old, int new) 1124baa9922SRussell King { 1134dcc1cf7SChen Gang int oldval; 1144dcc1cf7SChen Gang unsigned long res; 1154baa9922SRussell King 116c32ffce0SWill Deacon prefetchw(&ptr->counter); 117bac4e960SRussell King 1184baa9922SRussell King do { 1194baa9922SRussell King __asm__ __volatile__("@ atomic_cmpxchg\n" 120398aa668SWill Deacon "ldrex %1, [%3]\n" 1214baa9922SRussell King "mov %0, #0\n" 122398aa668SWill Deacon "teq %1, %4\n" 123398aa668SWill Deacon "strexeq %0, %5, [%3]\n" 124398aa668SWill Deacon : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter) 1254baa9922SRussell King : "r" (&ptr->counter), "Ir" (old), "r" (new) 1264baa9922SRussell King : "cc"); 1274baa9922SRussell King } while (res); 1284baa9922SRussell King 1294baa9922SRussell King return oldval; 1304baa9922SRussell King } 1310ca326deSWill Deacon #define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed 1324baa9922SRussell King 133*bfc18e38SMark Rutland static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u) 134db38ee87SWill Deacon { 135db38ee87SWill Deacon int oldval, newval; 136db38ee87SWill Deacon unsigned long tmp; 137db38ee87SWill Deacon 138db38ee87SWill Deacon smp_mb(); 139db38ee87SWill Deacon prefetchw(&v->counter); 140db38ee87SWill Deacon 141db38ee87SWill Deacon __asm__ __volatile__ ("@ atomic_add_unless\n" 142db38ee87SWill Deacon "1: ldrex %0, [%4]\n" 143db38ee87SWill Deacon " teq %0, %5\n" 144db38ee87SWill Deacon " beq 2f\n" 145db38ee87SWill Deacon " add %1, %0, %6\n" 146db38ee87SWill Deacon " strex %2, %1, [%4]\n" 147db38ee87SWill Deacon " teq %2, #0\n" 148db38ee87SWill Deacon " bne 1b\n" 149db38ee87SWill Deacon "2:" 150db38ee87SWill Deacon : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter) 151db38ee87SWill Deacon : "r" (&v->counter), "r" (u), "r" (a) 152db38ee87SWill Deacon : "cc"); 153db38ee87SWill Deacon 154db38ee87SWill Deacon if (oldval != u) 155db38ee87SWill Deacon smp_mb(); 156db38ee87SWill Deacon 157db38ee87SWill Deacon return oldval; 158db38ee87SWill Deacon } 159db38ee87SWill Deacon 1604baa9922SRussell King #else /* ARM_ARCH_6 */ 1614baa9922SRussell King 1624baa9922SRussell King #ifdef CONFIG_SMP 1634baa9922SRussell King #error SMP not supported on pre-ARMv6 CPUs 1644baa9922SRussell King #endif 1654baa9922SRussell King 166aee9a554SPeter Zijlstra #define ATOMIC_OP(op, c_op, asm_op) \ 167aee9a554SPeter Zijlstra static inline void atomic_##op(int i, atomic_t *v) \ 168aee9a554SPeter Zijlstra { \ 169aee9a554SPeter Zijlstra unsigned long flags; \ 170aee9a554SPeter Zijlstra \ 171aee9a554SPeter Zijlstra raw_local_irq_save(flags); \ 172aee9a554SPeter Zijlstra v->counter c_op i; \ 173aee9a554SPeter Zijlstra raw_local_irq_restore(flags); \ 174aee9a554SPeter Zijlstra } \ 1754baa9922SRussell King 176aee9a554SPeter Zijlstra #define ATOMIC_OP_RETURN(op, c_op, asm_op) \ 177aee9a554SPeter Zijlstra static inline int atomic_##op##_return(int i, atomic_t *v) \ 178aee9a554SPeter Zijlstra { \ 179aee9a554SPeter Zijlstra unsigned long flags; \ 180aee9a554SPeter Zijlstra int val; \ 181aee9a554SPeter Zijlstra \ 182aee9a554SPeter Zijlstra raw_local_irq_save(flags); \ 183aee9a554SPeter Zijlstra v->counter c_op i; \ 184aee9a554SPeter Zijlstra val = v->counter; \ 185aee9a554SPeter Zijlstra raw_local_irq_restore(flags); \ 186aee9a554SPeter Zijlstra \ 187aee9a554SPeter Zijlstra return val; \ 1884baa9922SRussell King } 1894baa9922SRussell King 1906da068c1SPeter Zijlstra #define ATOMIC_FETCH_OP(op, c_op, asm_op) \ 1916da068c1SPeter Zijlstra static inline int atomic_fetch_##op(int i, atomic_t *v) \ 1926da068c1SPeter Zijlstra { \ 1936da068c1SPeter Zijlstra unsigned long flags; \ 1946da068c1SPeter Zijlstra int val; \ 1956da068c1SPeter Zijlstra \ 1966da068c1SPeter Zijlstra raw_local_irq_save(flags); \ 1976da068c1SPeter Zijlstra val = v->counter; \ 1986da068c1SPeter Zijlstra v->counter c_op i; \ 1996da068c1SPeter Zijlstra raw_local_irq_restore(flags); \ 2006da068c1SPeter Zijlstra \ 2016da068c1SPeter Zijlstra return val; \ 2026da068c1SPeter Zijlstra } 2036da068c1SPeter Zijlstra 2044baa9922SRussell King static inline int atomic_cmpxchg(atomic_t *v, int old, int new) 2054baa9922SRussell King { 2064baa9922SRussell King int ret; 2074baa9922SRussell King unsigned long flags; 2084baa9922SRussell King 2094baa9922SRussell King raw_local_irq_save(flags); 2104baa9922SRussell King ret = v->counter; 2114baa9922SRussell King if (likely(ret == old)) 2124baa9922SRussell King v->counter = new; 2134baa9922SRussell King raw_local_irq_restore(flags); 2144baa9922SRussell King 2154baa9922SRussell King return ret; 2164baa9922SRussell King } 2174baa9922SRussell King 218*bfc18e38SMark Rutland static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u) 2194baa9922SRussell King { 2204baa9922SRussell King int c, old; 2214baa9922SRussell King 2224baa9922SRussell King c = atomic_read(v); 2234baa9922SRussell King while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c) 2244baa9922SRussell King c = old; 225f24219b4SArun Sharma return c; 2264baa9922SRussell King } 2274baa9922SRussell King 228db38ee87SWill Deacon #endif /* __LINUX_ARM_ARCH__ */ 229db38ee87SWill Deacon 230aee9a554SPeter Zijlstra #define ATOMIC_OPS(op, c_op, asm_op) \ 231aee9a554SPeter Zijlstra ATOMIC_OP(op, c_op, asm_op) \ 2326da068c1SPeter Zijlstra ATOMIC_OP_RETURN(op, c_op, asm_op) \ 2336da068c1SPeter Zijlstra ATOMIC_FETCH_OP(op, c_op, asm_op) 234aee9a554SPeter Zijlstra 235aee9a554SPeter Zijlstra ATOMIC_OPS(add, +=, add) 236aee9a554SPeter Zijlstra ATOMIC_OPS(sub, -=, sub) 237aee9a554SPeter Zijlstra 23812589790SPeter Zijlstra #define atomic_andnot atomic_andnot 23912589790SPeter Zijlstra 2406da068c1SPeter Zijlstra #undef ATOMIC_OPS 2416da068c1SPeter Zijlstra #define ATOMIC_OPS(op, c_op, asm_op) \ 2426da068c1SPeter Zijlstra ATOMIC_OP(op, c_op, asm_op) \ 2436da068c1SPeter Zijlstra ATOMIC_FETCH_OP(op, c_op, asm_op) 2446da068c1SPeter Zijlstra 2456da068c1SPeter Zijlstra ATOMIC_OPS(and, &=, and) 2466da068c1SPeter Zijlstra ATOMIC_OPS(andnot, &= ~, bic) 2476da068c1SPeter Zijlstra ATOMIC_OPS(or, |=, orr) 2486da068c1SPeter Zijlstra ATOMIC_OPS(xor, ^=, eor) 24912589790SPeter Zijlstra 250aee9a554SPeter Zijlstra #undef ATOMIC_OPS 2516da068c1SPeter Zijlstra #undef ATOMIC_FETCH_OP 252aee9a554SPeter Zijlstra #undef ATOMIC_OP_RETURN 253aee9a554SPeter Zijlstra #undef ATOMIC_OP 254aee9a554SPeter Zijlstra 255db38ee87SWill Deacon #define atomic_xchg(v, new) (xchg(&((v)->counter), new)) 256db38ee87SWill Deacon 257bac4e960SRussell King #define atomic_inc(v) atomic_add(1, v) 258bac4e960SRussell King #define atomic_dec(v) atomic_sub(1, v) 2594baa9922SRussell King 2604baa9922SRussell King #define atomic_inc_and_test(v) (atomic_add_return(1, v) == 0) 2614baa9922SRussell King #define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0) 2626e490b01SWill Deacon #define atomic_inc_return_relaxed(v) (atomic_add_return_relaxed(1, v)) 2636e490b01SWill Deacon #define atomic_dec_return_relaxed(v) (atomic_sub_return_relaxed(1, v)) 2644baa9922SRussell King #define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0) 2654baa9922SRussell King 2664baa9922SRussell King #define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0) 2674baa9922SRussell King 26824b44a66SWill Deacon #ifndef CONFIG_GENERIC_ATOMIC64 26924b44a66SWill Deacon typedef struct { 270237f1233SChen Gang long long counter; 27124b44a66SWill Deacon } atomic64_t; 27224b44a66SWill Deacon 27324b44a66SWill Deacon #define ATOMIC64_INIT(i) { (i) } 27424b44a66SWill Deacon 2754fd75911SWill Deacon #ifdef CONFIG_ARM_LPAE 276237f1233SChen Gang static inline long long atomic64_read(const atomic64_t *v) 2774fd75911SWill Deacon { 278237f1233SChen Gang long long result; 2794fd75911SWill Deacon 2804fd75911SWill Deacon __asm__ __volatile__("@ atomic64_read\n" 2814fd75911SWill Deacon " ldrd %0, %H0, [%1]" 2824fd75911SWill Deacon : "=&r" (result) 2834fd75911SWill Deacon : "r" (&v->counter), "Qo" (v->counter) 2844fd75911SWill Deacon ); 2854fd75911SWill Deacon 2864fd75911SWill Deacon return result; 2874fd75911SWill Deacon } 2884fd75911SWill Deacon 289237f1233SChen Gang static inline void atomic64_set(atomic64_t *v, long long i) 2904fd75911SWill Deacon { 2914fd75911SWill Deacon __asm__ __volatile__("@ atomic64_set\n" 2924fd75911SWill Deacon " strd %2, %H2, [%1]" 2934fd75911SWill Deacon : "=Qo" (v->counter) 2944fd75911SWill Deacon : "r" (&v->counter), "r" (i) 2954fd75911SWill Deacon ); 2964fd75911SWill Deacon } 2974fd75911SWill Deacon #else 298237f1233SChen Gang static inline long long atomic64_read(const atomic64_t *v) 29924b44a66SWill Deacon { 300237f1233SChen Gang long long result; 30124b44a66SWill Deacon 30224b44a66SWill Deacon __asm__ __volatile__("@ atomic64_read\n" 30324b44a66SWill Deacon " ldrexd %0, %H0, [%1]" 30424b44a66SWill Deacon : "=&r" (result) 305398aa668SWill Deacon : "r" (&v->counter), "Qo" (v->counter) 30624b44a66SWill Deacon ); 30724b44a66SWill Deacon 30824b44a66SWill Deacon return result; 30924b44a66SWill Deacon } 31024b44a66SWill Deacon 311237f1233SChen Gang static inline void atomic64_set(atomic64_t *v, long long i) 31224b44a66SWill Deacon { 313237f1233SChen Gang long long tmp; 31424b44a66SWill Deacon 315f38d999cSWill Deacon prefetchw(&v->counter); 31624b44a66SWill Deacon __asm__ __volatile__("@ atomic64_set\n" 317398aa668SWill Deacon "1: ldrexd %0, %H0, [%2]\n" 318398aa668SWill Deacon " strexd %0, %3, %H3, [%2]\n" 31924b44a66SWill Deacon " teq %0, #0\n" 32024b44a66SWill Deacon " bne 1b" 321398aa668SWill Deacon : "=&r" (tmp), "=Qo" (v->counter) 32224b44a66SWill Deacon : "r" (&v->counter), "r" (i) 32324b44a66SWill Deacon : "cc"); 32424b44a66SWill Deacon } 3254fd75911SWill Deacon #endif 32624b44a66SWill Deacon 327aee9a554SPeter Zijlstra #define ATOMIC64_OP(op, op1, op2) \ 328aee9a554SPeter Zijlstra static inline void atomic64_##op(long long i, atomic64_t *v) \ 329aee9a554SPeter Zijlstra { \ 330aee9a554SPeter Zijlstra long long result; \ 331aee9a554SPeter Zijlstra unsigned long tmp; \ 332aee9a554SPeter Zijlstra \ 333aee9a554SPeter Zijlstra prefetchw(&v->counter); \ 334aee9a554SPeter Zijlstra __asm__ __volatile__("@ atomic64_" #op "\n" \ 335aee9a554SPeter Zijlstra "1: ldrexd %0, %H0, [%3]\n" \ 336aee9a554SPeter Zijlstra " " #op1 " %Q0, %Q0, %Q4\n" \ 337aee9a554SPeter Zijlstra " " #op2 " %R0, %R0, %R4\n" \ 338aee9a554SPeter Zijlstra " strexd %1, %0, %H0, [%3]\n" \ 339aee9a554SPeter Zijlstra " teq %1, #0\n" \ 340aee9a554SPeter Zijlstra " bne 1b" \ 341aee9a554SPeter Zijlstra : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \ 342aee9a554SPeter Zijlstra : "r" (&v->counter), "r" (i) \ 343aee9a554SPeter Zijlstra : "cc"); \ 344aee9a554SPeter Zijlstra } \ 34524b44a66SWill Deacon 346aee9a554SPeter Zijlstra #define ATOMIC64_OP_RETURN(op, op1, op2) \ 3470ca326deSWill Deacon static inline long long \ 3480ca326deSWill Deacon atomic64_##op##_return_relaxed(long long i, atomic64_t *v) \ 349aee9a554SPeter Zijlstra { \ 350aee9a554SPeter Zijlstra long long result; \ 351aee9a554SPeter Zijlstra unsigned long tmp; \ 352aee9a554SPeter Zijlstra \ 353aee9a554SPeter Zijlstra prefetchw(&v->counter); \ 354aee9a554SPeter Zijlstra \ 355aee9a554SPeter Zijlstra __asm__ __volatile__("@ atomic64_" #op "_return\n" \ 356aee9a554SPeter Zijlstra "1: ldrexd %0, %H0, [%3]\n" \ 357aee9a554SPeter Zijlstra " " #op1 " %Q0, %Q0, %Q4\n" \ 358aee9a554SPeter Zijlstra " " #op2 " %R0, %R0, %R4\n" \ 359aee9a554SPeter Zijlstra " strexd %1, %0, %H0, [%3]\n" \ 360aee9a554SPeter Zijlstra " teq %1, #0\n" \ 361aee9a554SPeter Zijlstra " bne 1b" \ 362aee9a554SPeter Zijlstra : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \ 363aee9a554SPeter Zijlstra : "r" (&v->counter), "r" (i) \ 364aee9a554SPeter Zijlstra : "cc"); \ 365aee9a554SPeter Zijlstra \ 366aee9a554SPeter Zijlstra return result; \ 36724b44a66SWill Deacon } 36824b44a66SWill Deacon 3696da068c1SPeter Zijlstra #define ATOMIC64_FETCH_OP(op, op1, op2) \ 3706da068c1SPeter Zijlstra static inline long long \ 3716da068c1SPeter Zijlstra atomic64_fetch_##op##_relaxed(long long i, atomic64_t *v) \ 3726da068c1SPeter Zijlstra { \ 3736da068c1SPeter Zijlstra long long result, val; \ 3746da068c1SPeter Zijlstra unsigned long tmp; \ 3756da068c1SPeter Zijlstra \ 3766da068c1SPeter Zijlstra prefetchw(&v->counter); \ 3776da068c1SPeter Zijlstra \ 3786da068c1SPeter Zijlstra __asm__ __volatile__("@ atomic64_fetch_" #op "\n" \ 3796da068c1SPeter Zijlstra "1: ldrexd %0, %H0, [%4]\n" \ 3806da068c1SPeter Zijlstra " " #op1 " %Q1, %Q0, %Q5\n" \ 3816da068c1SPeter Zijlstra " " #op2 " %R1, %R0, %R5\n" \ 3826da068c1SPeter Zijlstra " strexd %2, %1, %H1, [%4]\n" \ 3836da068c1SPeter Zijlstra " teq %2, #0\n" \ 3846da068c1SPeter Zijlstra " bne 1b" \ 3856da068c1SPeter Zijlstra : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \ 3866da068c1SPeter Zijlstra : "r" (&v->counter), "r" (i) \ 3876da068c1SPeter Zijlstra : "cc"); \ 3886da068c1SPeter Zijlstra \ 3896da068c1SPeter Zijlstra return result; \ 3906da068c1SPeter Zijlstra } 3916da068c1SPeter Zijlstra 392aee9a554SPeter Zijlstra #define ATOMIC64_OPS(op, op1, op2) \ 393aee9a554SPeter Zijlstra ATOMIC64_OP(op, op1, op2) \ 3946da068c1SPeter Zijlstra ATOMIC64_OP_RETURN(op, op1, op2) \ 3956da068c1SPeter Zijlstra ATOMIC64_FETCH_OP(op, op1, op2) 39624b44a66SWill Deacon 397aee9a554SPeter Zijlstra ATOMIC64_OPS(add, adds, adc) 398aee9a554SPeter Zijlstra ATOMIC64_OPS(sub, subs, sbc) 39924b44a66SWill Deacon 4000ca326deSWill Deacon #define atomic64_add_return_relaxed atomic64_add_return_relaxed 4010ca326deSWill Deacon #define atomic64_sub_return_relaxed atomic64_sub_return_relaxed 4026da068c1SPeter Zijlstra #define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed 4036da068c1SPeter Zijlstra #define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed 4046da068c1SPeter Zijlstra 4056da068c1SPeter Zijlstra #undef ATOMIC64_OPS 4066da068c1SPeter Zijlstra #define ATOMIC64_OPS(op, op1, op2) \ 4076da068c1SPeter Zijlstra ATOMIC64_OP(op, op1, op2) \ 4086da068c1SPeter Zijlstra ATOMIC64_FETCH_OP(op, op1, op2) 4090ca326deSWill Deacon 41012589790SPeter Zijlstra #define atomic64_andnot atomic64_andnot 41112589790SPeter Zijlstra 4126da068c1SPeter Zijlstra ATOMIC64_OPS(and, and, and) 4136da068c1SPeter Zijlstra ATOMIC64_OPS(andnot, bic, bic) 4146da068c1SPeter Zijlstra ATOMIC64_OPS(or, orr, orr) 4156da068c1SPeter Zijlstra ATOMIC64_OPS(xor, eor, eor) 4166da068c1SPeter Zijlstra 4176da068c1SPeter Zijlstra #define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed 4186da068c1SPeter Zijlstra #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed 4196da068c1SPeter Zijlstra #define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed 4206da068c1SPeter Zijlstra #define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed 42112589790SPeter Zijlstra 422aee9a554SPeter Zijlstra #undef ATOMIC64_OPS 4236da068c1SPeter Zijlstra #undef ATOMIC64_FETCH_OP 424aee9a554SPeter Zijlstra #undef ATOMIC64_OP_RETURN 425aee9a554SPeter Zijlstra #undef ATOMIC64_OP 42624b44a66SWill Deacon 4270ca326deSWill Deacon static inline long long 4280ca326deSWill Deacon atomic64_cmpxchg_relaxed(atomic64_t *ptr, long long old, long long new) 42924b44a66SWill Deacon { 430237f1233SChen Gang long long oldval; 43124b44a66SWill Deacon unsigned long res; 43224b44a66SWill Deacon 433c32ffce0SWill Deacon prefetchw(&ptr->counter); 43424b44a66SWill Deacon 43524b44a66SWill Deacon do { 43624b44a66SWill Deacon __asm__ __volatile__("@ atomic64_cmpxchg\n" 437398aa668SWill Deacon "ldrexd %1, %H1, [%3]\n" 43824b44a66SWill Deacon "mov %0, #0\n" 439398aa668SWill Deacon "teq %1, %4\n" 440398aa668SWill Deacon "teqeq %H1, %H4\n" 441398aa668SWill Deacon "strexdeq %0, %5, %H5, [%3]" 442398aa668SWill Deacon : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter) 44324b44a66SWill Deacon : "r" (&ptr->counter), "r" (old), "r" (new) 44424b44a66SWill Deacon : "cc"); 44524b44a66SWill Deacon } while (res); 44624b44a66SWill Deacon 44724b44a66SWill Deacon return oldval; 44824b44a66SWill Deacon } 4490ca326deSWill Deacon #define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed 45024b44a66SWill Deacon 4510ca326deSWill Deacon static inline long long atomic64_xchg_relaxed(atomic64_t *ptr, long long new) 45224b44a66SWill Deacon { 453237f1233SChen Gang long long result; 45424b44a66SWill Deacon unsigned long tmp; 45524b44a66SWill Deacon 456c32ffce0SWill Deacon prefetchw(&ptr->counter); 45724b44a66SWill Deacon 45824b44a66SWill Deacon __asm__ __volatile__("@ atomic64_xchg\n" 459398aa668SWill Deacon "1: ldrexd %0, %H0, [%3]\n" 460398aa668SWill Deacon " strexd %1, %4, %H4, [%3]\n" 46124b44a66SWill Deacon " teq %1, #0\n" 46224b44a66SWill Deacon " bne 1b" 463398aa668SWill Deacon : "=&r" (result), "=&r" (tmp), "+Qo" (ptr->counter) 46424b44a66SWill Deacon : "r" (&ptr->counter), "r" (new) 46524b44a66SWill Deacon : "cc"); 46624b44a66SWill Deacon 46724b44a66SWill Deacon return result; 46824b44a66SWill Deacon } 4690ca326deSWill Deacon #define atomic64_xchg_relaxed atomic64_xchg_relaxed 47024b44a66SWill Deacon 471237f1233SChen Gang static inline long long atomic64_dec_if_positive(atomic64_t *v) 47224b44a66SWill Deacon { 473237f1233SChen Gang long long result; 47424b44a66SWill Deacon unsigned long tmp; 47524b44a66SWill Deacon 47624b44a66SWill Deacon smp_mb(); 477c32ffce0SWill Deacon prefetchw(&v->counter); 47824b44a66SWill Deacon 47924b44a66SWill Deacon __asm__ __volatile__("@ atomic64_dec_if_positive\n" 480398aa668SWill Deacon "1: ldrexd %0, %H0, [%3]\n" 4812245f924SVictor Kamensky " subs %Q0, %Q0, #1\n" 4822245f924SVictor Kamensky " sbc %R0, %R0, #0\n" 4832245f924SVictor Kamensky " teq %R0, #0\n" 48424b44a66SWill Deacon " bmi 2f\n" 485398aa668SWill Deacon " strexd %1, %0, %H0, [%3]\n" 48624b44a66SWill Deacon " teq %1, #0\n" 48724b44a66SWill Deacon " bne 1b\n" 48824b44a66SWill Deacon "2:" 489398aa668SWill Deacon : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) 49024b44a66SWill Deacon : "r" (&v->counter) 49124b44a66SWill Deacon : "cc"); 49224b44a66SWill Deacon 49324b44a66SWill Deacon smp_mb(); 49424b44a66SWill Deacon 49524b44a66SWill Deacon return result; 49624b44a66SWill Deacon } 49724b44a66SWill Deacon 498237f1233SChen Gang static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u) 49924b44a66SWill Deacon { 500237f1233SChen Gang long long val; 50124b44a66SWill Deacon unsigned long tmp; 50224b44a66SWill Deacon int ret = 1; 50324b44a66SWill Deacon 50424b44a66SWill Deacon smp_mb(); 505c32ffce0SWill Deacon prefetchw(&v->counter); 50624b44a66SWill Deacon 50724b44a66SWill Deacon __asm__ __volatile__("@ atomic64_add_unless\n" 508398aa668SWill Deacon "1: ldrexd %0, %H0, [%4]\n" 509398aa668SWill Deacon " teq %0, %5\n" 510398aa668SWill Deacon " teqeq %H0, %H5\n" 51124b44a66SWill Deacon " moveq %1, #0\n" 51224b44a66SWill Deacon " beq 2f\n" 5132245f924SVictor Kamensky " adds %Q0, %Q0, %Q6\n" 5142245f924SVictor Kamensky " adc %R0, %R0, %R6\n" 515398aa668SWill Deacon " strexd %2, %0, %H0, [%4]\n" 51624b44a66SWill Deacon " teq %2, #0\n" 51724b44a66SWill Deacon " bne 1b\n" 51824b44a66SWill Deacon "2:" 519398aa668SWill Deacon : "=&r" (val), "+r" (ret), "=&r" (tmp), "+Qo" (v->counter) 52024b44a66SWill Deacon : "r" (&v->counter), "r" (u), "r" (a) 52124b44a66SWill Deacon : "cc"); 52224b44a66SWill Deacon 52324b44a66SWill Deacon if (ret) 52424b44a66SWill Deacon smp_mb(); 52524b44a66SWill Deacon 52624b44a66SWill Deacon return ret; 52724b44a66SWill Deacon } 52824b44a66SWill Deacon 52924b44a66SWill Deacon #define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0) 53024b44a66SWill Deacon #define atomic64_inc(v) atomic64_add(1LL, (v)) 5316e490b01SWill Deacon #define atomic64_inc_return_relaxed(v) atomic64_add_return_relaxed(1LL, (v)) 53224b44a66SWill Deacon #define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0) 53324b44a66SWill Deacon #define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0) 53424b44a66SWill Deacon #define atomic64_dec(v) atomic64_sub(1LL, (v)) 5356e490b01SWill Deacon #define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1LL, (v)) 53624b44a66SWill Deacon #define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0) 53724b44a66SWill Deacon #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL) 53824b44a66SWill Deacon 5397847777aSArun Sharma #endif /* !CONFIG_GENERIC_ATOMIC64 */ 5404baa9922SRussell King #endif 5414baa9922SRussell King #endif 542