/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_CMPXCHG_32_H
#define _ASM_X86_CMPXCHG_32_H

/*
 * Note: if you use __cmpxchg64() or its variants,
 *       you need to test for the feature in boot_cpu_data.
 */
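/*
 * Illustrative sketch, not part of this header: a caller that wants
 * the raw (non-emulated) helpers would guard them like
 *
 *	if (boot_cpu_has(X86_FEATURE_CX8))
 *		old = __cmpxchg64(&v, old, new);
 */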
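/*
 * cmpxchg8b works on the 64-bit value as two 32-bit halves: the
 * expected value in edx:eax and the replacement in ecx:ebx. This
 * union gives named access to both views of the same 64 bits.
 */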
union __u64_halves {
	u64 full;
	struct {
		u32 low, high;
	};
};

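/*
 * Emit an (optionally locked) cmpxchg8b. The expected value is passed
 * in edx:eax and the replacement in ecx:ebx; the value actually found
 * at *_ptr comes back in edx:eax, which is what the statement
 * expression evaluates to.
 */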
#define __arch_cmpxchg64(_ptr, _old, _new, _lock)			\
({									\
	union __u64_halves o = { .full = (_old), },			\
			   n = { .full = (_new), };			\
									\
	asm volatile(_lock "cmpxchg8b %[ptr]"				\
		     : [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	o.full;								\
})


static __always_inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64(ptr, old, new, LOCK_PREFIX);
}

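/*
 * The _local variant passes an empty _lock argument, so the cmpxchg8b
 * is not locked: it is atomic only against the current CPU (e.g.
 * against interrupts), not against other processors.
 */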
static __always_inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64(ptr, old, new,);
}

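/*
 * Try-variant: cmpxchg8b sets ZF on success, and CC_SET()/CC_OUT()
 * turn that flag into a boolean return value. On failure the value
 * observed in memory is written back through _oldp so the caller can
 * retry without an extra load.
 */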
#define __arch_try_cmpxchg64(_ptr, _oldp, _new, _lock)			\
({									\
	union __u64_halves o = { .full = *(_oldp), },			\
			   n = { .full = (_new), };			\
	bool ret;							\
									\
	asm volatile(_lock "cmpxchg8b %[ptr]"				\
		     CC_SET(e)						\
		     : CC_OUT(e) (ret),					\
		       [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64(ptr, oldp, new, LOCK_PREFIX);
}

static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64(ptr, oldp, new,);
}
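
/*
 * Illustrative sketch, not part of this header: the try variants are
 * built for retry loops of the form
 *
 *	u64 old = READ_ONCE(*p), new;
 *	do {
 *		new = old + 1;
 *	} while (!__try_cmpxchg64(p, &old, new));
 */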

#ifdef CONFIG_X86_CMPXCHG64

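/*
 * CONFIG_X86_CMPXCHG64 guarantees cmpxchg8b at build time, so the
 * arch_* entry points map straight to the raw helpers above.
 */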
#define arch_cmpxchg64 __cmpxchg64

#define arch_cmpxchg64_local __cmpxchg64_local

#define arch_try_cmpxchg64 __try_cmpxchg64

#define arch_try_cmpxchg64_local __try_cmpxchg64_local

#else

/*
 * Building a kernel capable of running on an 80386 or 80486: those
 * CPUs lack cmpxchg8b, so the instruction may have to be emulated.
 */
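/*
 * ALTERNATIVE() patches the code at boot: CPUs with X86_FEATURE_CX8
 * execute the real (optionally locked) cmpxchg8b, anything older
 * calls cmpxchg8b_emu instead. The emulation routine expects the
 * pointer in %esi, which is why it is passed via the "S" constraint
 * and referenced as the %a[ptr] address operand.
 */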

#define __arch_cmpxchg64_emu(_ptr, _old, _new, _lock_loc, _lock)	\
({									\
	union __u64_halves o = { .full = (_old), },			\
			   n = { .full = (_new), };			\
									\
	asm volatile(ALTERNATIVE(_lock_loc				\
				 "call cmpxchg8b_emu",			\
				 _lock "cmpxchg8b %a[ptr]", X86_FEATURE_CX8) \
		     : "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high), [ptr] "S" (_ptr)	\
		     : "memory");					\
									\
	o.full;								\
})

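/*
 * LOCK_PREFIX_HERE only records where the lock prefix will end up (so
 * that UP kernels can patch it out), while the literal "lock; " in
 * the CX8 alternative supplies the prefix itself; cmpxchg8b_emu is
 * called without one.
 */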
static __always_inline u64 arch_cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64_emu(ptr, old, new, LOCK_PREFIX_HERE, "lock; ");
}
#define arch_cmpxchg64 arch_cmpxchg64

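/*
 * The local variants pass empty _lock_loc/_lock arguments: neither
 * the patched-in cmpxchg8b nor its emulation gets a lock prefix.
 */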
static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64_emu(ptr, old, new, ,);
}
#define arch_cmpxchg64_local arch_cmpxchg64_local

#define __arch_try_cmpxchg64_emu(_ptr, _oldp, _new, _lock_loc, _lock)	\
({									\
	union __u64_halves o = { .full = *(_oldp), },			\
			   n = { .full = (_new), };			\
	bool ret;							\
									\
	asm volatile(ALTERNATIVE(_lock_loc				\
				 "call cmpxchg8b_emu",			\
				 _lock "cmpxchg8b %a[ptr]", X86_FEATURE_CX8) \
		     CC_SET(e)						\
		     : CC_OUT(e) (ret),					\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high), [ptr] "S" (_ptr)	\
		     : "memory");					\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool arch_try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64_emu(ptr, oldp, new, LOCK_PREFIX_HERE, "lock; ");
}
#define arch_try_cmpxchg64 arch_try_cmpxchg64

static __always_inline bool arch_try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64_emu(ptr, oldp, new, ,);
}
#define arch_try_cmpxchg64_local arch_try_cmpxchg64_local

#endif

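/*
 * Lets generic code ask at runtime whether this CPU really implements
 * cmpxchg8b, or whether the 80386/80486 emulation would be used.
 */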
#define system_has_cmpxchg64()		boot_cpu_has(X86_FEATURE_CX8)

#endif /* _ASM_X86_CMPXCHG_32_H */