/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Generic implementation of 64-bit atomics using spinlocks,
 * useful on processors that don't have 64-bit atomic instructions.
 *
 * Copyright © 2009 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
 */
#ifndef _ASM_GENERIC_ATOMIC64_H
#define _ASM_GENERIC_ATOMIC64_H
#include <linux/types.h>

typedef struct {
	s64 counter;
} atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

extern s64 generic_atomic64_read(const atomic64_t *v);
extern void generic_atomic64_set(atomic64_t *v, s64 i);

#define ATOMIC64_OP(op)							\
extern void generic_atomic64_##op(s64 a, atomic64_t *v);

#define ATOMIC64_OP_RETURN(op)						\
extern s64 generic_atomic64_##op##_return(s64 a, atomic64_t *v);

#define ATOMIC64_FETCH_OP(op)						\
extern s64 generic_atomic64_fetch_##op(s64 a, atomic64_t *v);

#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

extern s64 generic_atomic64_dec_if_positive(atomic64_t *v);
extern s64 generic_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n);
extern s64 generic_atomic64_xchg(atomic64_t *v, s64 new);
extern s64 generic_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u);

#define arch_atomic64_read		generic_atomic64_read
#define arch_atomic64_set		generic_atomic64_set
#define arch_atomic64_set_release	generic_atomic64_set

#define arch_atomic64_add		generic_atomic64_add
#define arch_atomic64_add_return	generic_atomic64_add_return
#define arch_atomic64_fetch_add		generic_atomic64_fetch_add
#define arch_atomic64_sub		generic_atomic64_sub
#define arch_atomic64_sub_return	generic_atomic64_sub_return
#define arch_atomic64_fetch_sub		generic_atomic64_fetch_sub

#define arch_atomic64_and		generic_atomic64_and
#define arch_atomic64_fetch_and		generic_atomic64_fetch_and
#define arch_atomic64_or		generic_atomic64_or
#define arch_atomic64_fetch_or		generic_atomic64_fetch_or
#define arch_atomic64_xor		generic_atomic64_xor
#define arch_atomic64_fetch_xor		generic_atomic64_fetch_xor

#define arch_atomic64_dec_if_positive	generic_atomic64_dec_if_positive
#define arch_atomic64_cmpxchg		generic_atomic64_cmpxchg
#define arch_atomic64_xchg		generic_atomic64_xchg
#define arch_atomic64_fetch_add_unless	generic_atomic64_fetch_add_unless

#endif	/* _ASM_GENERIC_ATOMIC64_H */
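
/*
 * Illustrative sketch only: the generic_atomic64_*() helpers declared
 * above are expected to be implemented out of line by protecting the
 * plain 64-bit counter with a spinlock, as the header comment notes.
 * The snippet below assumes a single hypothetical global lock
 * (atomic64_example_lock) for brevity; the in-tree implementation
 * (lib/atomic64.c) instead hashes the atomic64_t address into a small
 * pool of locks to reduce contention.
 *
 *	static DEFINE_RAW_SPINLOCK(atomic64_example_lock);
 *
 *	s64 generic_atomic64_add_return(s64 a, atomic64_t *v)
 *	{
 *		unsigned long flags;
 *		s64 val;
 *
 *		raw_spin_lock_irqsave(&atomic64_example_lock, flags);
 *		val = (v->counter += a);
 *		raw_spin_unlock_irqrestore(&atomic64_example_lock, flags);
 *
 *		return val;
 *	}
 */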