1b2441318SGreg Kroah-Hartman /* SPDX-License-Identifier: GPL-2.0 */
21a3b1d89SBrian Gerst #ifndef _ASM_X86_ATOMIC64_32_H
31a3b1d89SBrian Gerst #define _ASM_X86_ATOMIC64_32_H
41a3b1d89SBrian Gerst
51a3b1d89SBrian Gerst #include <linux/compiler.h>
61a3b1d89SBrian Gerst #include <linux/types.h>
71a3b1d89SBrian Gerst //#include <asm/cmpxchg.h>
81a3b1d89SBrian Gerst
/* A 64-bit atomic type */
101a3b1d89SBrian Gerst
typedef struct {
	/*
	 * 8-byte alignment so the whole counter can be operated on as one
	 * unit by the 64-bit primitives (NOTE(review): presumably required
	 * by cmpxchg8b's atomicity guarantees — confirm against the SDM).
	 */
	s64 __aligned(8) counter;
} atomic64_t;

/* Static initializer: ATOMIC64_INIT(5) -> counter == 5. */
#define ATOMIC64_INIT(val)	{ (val) }
161a3b1d89SBrian Gerst
17e73c4e34SUros Bizjak /*
18e73c4e34SUros Bizjak * Read an atomic64_t non-atomically.
19e73c4e34SUros Bizjak *
20e73c4e34SUros Bizjak * This is intended to be used in cases where a subsequent atomic operation
21e73c4e34SUros Bizjak * will handle the torn value, and can be used to prime the first iteration
22e73c4e34SUros Bizjak * of unconditional try_cmpxchg() loops, e.g.:
23e73c4e34SUros Bizjak *
24e73c4e34SUros Bizjak * s64 val = arch_atomic64_read_nonatomic(v);
25e73c4e34SUros Bizjak * do { } while (!arch_atomic64_try_cmpxchg(v, &val, val OP i);
26e73c4e34SUros Bizjak *
27e73c4e34SUros Bizjak * This is NOT safe to use where the value is not always checked by a
28e73c4e34SUros Bizjak * subsequent atomic operation, such as in conditional try_cmpxchg() loops
29e73c4e34SUros Bizjak * that can break before the atomic operation, e.g.:
30e73c4e34SUros Bizjak *
31e73c4e34SUros Bizjak * s64 val = arch_atomic64_read_nonatomic(v);
32e73c4e34SUros Bizjak * do {
33e73c4e34SUros Bizjak * if (condition(val))
34e73c4e34SUros Bizjak * break;
35e73c4e34SUros Bizjak * } while (!arch_atomic64_try_cmpxchg(v, &val, val OP i);
36e73c4e34SUros Bizjak */
/* Plain (possibly torn) 64-bit load; usage rules in the comment block above. */
static __always_inline s64 arch_atomic64_read_nonatomic(const atomic64_t *v)
{
	/* See comment in arch_atomic_read(). */
	return __READ_ONCE(v->counter);
}
42e73c4e34SUros Bizjak
/*
 * The operations are implemented by out-of-line helpers (defined
 * elsewhere; NOTE(review): presumably arch/x86/lib/atomic64_*.S —
 * confirm).  The prototype is variadic so the compiler makes no
 * assumptions about how arguments are passed; the real convention is
 * register based, as the asm constraints at the call sites below show.
 */
#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
/* When the includer defines ATOMIC64_EXPORT, also export each helper. */
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif

#ifdef CONFIG_X86_CMPXCHG64
/* CMPXCHG8B is guaranteed present: call the _cx8 helper directly. */
#define __alternative_atomic64(f, g, out, in...) \
	asm volatile("call %c[func]" \
		     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
/*
 * CMPXCHG8B may be missing: alternatives-patch between the _386
 * fallback (f) and the _cx8 helper (g), keyed on X86_FEATURE_CX8.
 */
#define __alternative_atomic64(f, g, out, in...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
	ATOMIC64_DECL_ONE(sym##_386)

/* Helpers that only exist in the _386 fallback flavour. */
ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif

/* Common case: the fallback and the cx8 helper share one base name. */
#define alternative_atomic64(f, out, in...) \
	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)

ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

/* The declaration machinery is private to this header. */
#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT
89a7e926abSLuca Barbieri
/*
 * Atomically compare v->counter with @old and, if equal, store @new.
 * Returns the value v->counter held before the operation.
 */
static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg64(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
951a3b1d89SBrian Gerst
/*
 * Atomically compare v->counter with *old and, if equal, store @new;
 * on failure *old is updated with the current value.  Returns true on
 * success, which makes it the natural primitive for retry loops.
 */
static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	return arch_try_cmpxchg64(&v->counter, old, new);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
101276b8930SUros Bizjak
/*
 * Atomically exchange v->counter with @n, returning the old value.
 *
 * The new 64-bit value is split into halves for the out-of-line helper:
 * %ebx holds the low word, %ecx the high word, %esi points at @v, and
 * the previous value comes back in %edx:%eax (the "=&A" constraint).
 */
static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n)
{
	s64 o;
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;
	alternative_atomic64(xchg, "=&A" (o),
			     "S" (v), "b" (low), "c" (high)
			     : "memory");
	return o;
}
#define arch_atomic64_xchg arch_atomic64_xchg
1131a3b1d89SBrian Gerst
/*
 * Atomically set v->counter to @i.  Inputs mirror xchg (low in %ebx,
 * high in %ecx, @v in %esi); the helper clobbers %eax/%edx, hence the
 * explicit clobber list.
 */
static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	alternative_atomic64(set, /* no output */,
			     "S" (v), "b" (low), "c" (high)
			     : "eax", "edx", "memory");
}
1221a3b1d89SBrian Gerst
/*
 * Atomically read v->counter.  @v is passed in %ecx and the 64-bit
 * result is returned in %edx:%eax ("=&A").
 */
static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
{
	s64 r;
	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
	return r;
}
1291a3b1d89SBrian Gerst
/*
 * Atomically add @i to *v and return the new value.  The "+A"
 * constraint carries @i in and the result back out in %edx:%eax.
 */
static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(add_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
#define arch_atomic64_add_return arch_atomic64_add_return
1381a3b1d89SBrian Gerst
/* Atomically subtract @i from *v and return the new value (see add_return). */
static __always_inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(sub_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return
147a7e926abSLuca Barbieri
/*
 * Atomically increment *v and return the new value in %edx:%eax.
 * The helper clobbers %ecx.
 */
static __always_inline s64 arch_atomic64_inc_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(inc_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
156a7e926abSLuca Barbieri
/* Atomically decrement *v and return the new value (see inc_return). */
static __always_inline s64 arch_atomic64_dec_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(dec_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
1651a3b1d89SBrian Gerst
/*
 * Atomically add @i to *v, discarding the result.  There is no
 * dedicated _cx8 "add" helper, so the cx8 path reuses add_return
 * (second macro argument) and simply ignores the returned value.
 */
static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
}
1721a3b1d89SBrian Gerst
/* Atomically subtract @i from *v, discarding the result (see arch_atomic64_add). */
static __always_inline void arch_atomic64_sub(s64 i, atomic64_t *v)
{
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
}
1791a3b1d89SBrian Gerst
/*
 * Atomically increment *v, discarding the result; the cx8 path reuses
 * inc_return.  The helper clobbers %eax/%ecx/%edx.
 */
static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
	__alternative_atomic64(inc, inc_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_inc arch_atomic64_inc
1861a3b1d89SBrian Gerst
/* Atomically decrement *v, discarding the result (see arch_atomic64_inc). */
static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
	__alternative_atomic64(dec, dec_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_dec arch_atomic64_dec
1931a3b1d89SBrian Gerst
/*
 * Atomically add @a to *v, unless *v == @u.  @u is split into
 * %ecx (low) / %edi (high); @a travels in and out via "+A".
 * Returns non-zero if the addition was performed, zero otherwise
 * (NOTE(review): return convention inferred from the generic
 * atomic64_add_unless contract — confirm against the asm helper).
 */
static __always_inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	alternative_atomic64(add_unless,
			     ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
			     "S" (v) : "memory");
	return (int)a;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless
204a7e926abSLuca Barbieri
/*
 * Atomically increment *v unless it is zero.  Returns non-zero (in
 * %eax, "=&a") if the increment happened (NOTE(review): inferred from
 * the generic atomic64_inc_not_zero contract — confirm against the
 * asm helper).  Clobbers %ecx/%edx.
 */
static __always_inline int arch_atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	alternative_atomic64(inc_not_zero, "=&a" (r),
			     "S" (v) : "ecx", "edx", "memory");
	return r;
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
213a7e926abSLuca Barbieri
/*
 * Atomically decrement *v only if the result stays non-negative.
 * Returns the decremented value; a negative return means *v was left
 * unchanged (NOTE(review): matches the generic
 * atomic64_dec_if_positive contract — confirm against the asm helper).
 */
static __always_inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 r;
	alternative_atomic64(dec_if_positive, "=&A" (r),
			     "S" (v) : "ecx", "memory");
	return r;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive

/* The call-helper plumbing is private to this header. */
#undef alternative_atomic64
#undef __alternative_atomic64
2251a3b1d89SBrian Gerst
arch_atomic64_and(s64 i,atomic64_t * v)2267aab7aa4SPeter Zijlstra static __always_inline void arch_atomic64_and(s64 i, atomic64_t *v)
227ba1c9f83SDmitry Vyukov {
22895ece481SUros Bizjak s64 val = arch_atomic64_read_nonatomic(v);
229ba1c9f83SDmitry Vyukov
23095ece481SUros Bizjak do { } while (!arch_atomic64_try_cmpxchg(v, &val, val & i));
2317fc1845dSPeter Zijlstra }
2327fc1845dSPeter Zijlstra
arch_atomic64_fetch_and(s64 i,atomic64_t * v)2337aab7aa4SPeter Zijlstra static __always_inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
234ba1c9f83SDmitry Vyukov {
23595ece481SUros Bizjak s64 val = arch_atomic64_read_nonatomic(v);
236ba1c9f83SDmitry Vyukov
23795ece481SUros Bizjak do { } while (!arch_atomic64_try_cmpxchg(v, &val, val & i));
238ba1c9f83SDmitry Vyukov
23995ece481SUros Bizjak return val;
240a8bcccabSPeter Zijlstra }
24137f8173dSPeter Zijlstra #define arch_atomic64_fetch_and arch_atomic64_fetch_and
2427fc1845dSPeter Zijlstra
arch_atomic64_or(s64 i,atomic64_t * v)2437aab7aa4SPeter Zijlstra static __always_inline void arch_atomic64_or(s64 i, atomic64_t *v)
244ba1c9f83SDmitry Vyukov {
24595ece481SUros Bizjak s64 val = arch_atomic64_read_nonatomic(v);
246ba1c9f83SDmitry Vyukov
24795ece481SUros Bizjak do { } while (!arch_atomic64_try_cmpxchg(v, &val, val | i));
248ba1c9f83SDmitry Vyukov }
249ba1c9f83SDmitry Vyukov
arch_atomic64_fetch_or(s64 i,atomic64_t * v)2507aab7aa4SPeter Zijlstra static __always_inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
251ba1c9f83SDmitry Vyukov {
25295ece481SUros Bizjak s64 val = arch_atomic64_read_nonatomic(v);
253ba1c9f83SDmitry Vyukov
25495ece481SUros Bizjak do { } while (!arch_atomic64_try_cmpxchg(v, &val, val | i));
255ba1c9f83SDmitry Vyukov
25695ece481SUros Bizjak return val;
257ba1c9f83SDmitry Vyukov }
25837f8173dSPeter Zijlstra #define arch_atomic64_fetch_or arch_atomic64_fetch_or
259ba1c9f83SDmitry Vyukov
arch_atomic64_xor(s64 i,atomic64_t * v)2607aab7aa4SPeter Zijlstra static __always_inline void arch_atomic64_xor(s64 i, atomic64_t *v)
261ba1c9f83SDmitry Vyukov {
26295ece481SUros Bizjak s64 val = arch_atomic64_read_nonatomic(v);
263ba1c9f83SDmitry Vyukov
26495ece481SUros Bizjak do { } while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));
265ba1c9f83SDmitry Vyukov }
266ba1c9f83SDmitry Vyukov
arch_atomic64_fetch_xor(s64 i,atomic64_t * v)2677aab7aa4SPeter Zijlstra static __always_inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
268ba1c9f83SDmitry Vyukov {
26995ece481SUros Bizjak s64 val = arch_atomic64_read_nonatomic(v);
270ba1c9f83SDmitry Vyukov
27195ece481SUros Bizjak do { } while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));
272ba1c9f83SDmitry Vyukov
27395ece481SUros Bizjak return val;
274ba1c9f83SDmitry Vyukov }
27537f8173dSPeter Zijlstra #define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
276ba1c9f83SDmitry Vyukov
arch_atomic64_fetch_add(s64 i,atomic64_t * v)2777aab7aa4SPeter Zijlstra static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
278ba1c9f83SDmitry Vyukov {
27995ece481SUros Bizjak s64 val = arch_atomic64_read_nonatomic(v);
280ba1c9f83SDmitry Vyukov
28195ece481SUros Bizjak do { } while (!arch_atomic64_try_cmpxchg(v, &val, val + i));
282ba1c9f83SDmitry Vyukov
28395ece481SUros Bizjak return val;
284ba1c9f83SDmitry Vyukov }
28537f8173dSPeter Zijlstra #define arch_atomic64_fetch_add arch_atomic64_fetch_add
286a8bcccabSPeter Zijlstra
/* fetch_sub is fetch_add with the increment negated. */
#define arch_atomic64_fetch_sub(i, v)	arch_atomic64_fetch_add(-(i), (v))
288a8bcccabSPeter Zijlstra
2891a3b1d89SBrian Gerst #endif /* _ASM_X86_ATOMIC64_32_H */
290