/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

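/*
 * On x86-64 a u64 is the native word size, so these wrappers simply
 * forward to the generic cmpxchg operations; the BUILD_BUG_ON() only
 * enforces at compile time that the operand really is 8 bytes wide.
 */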
#define arch_cmpxchg64(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
})

#define arch_cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_local((ptr), (o), (n));				\
})

#define arch_try_cmpxchg64(ptr, po, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_try_cmpxchg((ptr), (po), (n));				\
})

#define arch_try_cmpxchg64_local(ptr, po, n)				\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_try_cmpxchg_local((ptr), (po), (n));			\
})

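/*
 * CMPXCHG16B operates on a 128-bit value split across two register
 * pairs: the expected value in rdx:rax and the replacement in rcx:rbx.
 * This union gives named access to the two 64-bit halves of a u128 so
 * they can be wired to those registers individually.
 */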
union __u128_halves {
	u128 full;
	struct {
		u64 low, high;
	};
};

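/*
 * Compare *_ptr against _old; if they match, store _new, otherwise
 * leave *_ptr untouched.  Either way rdx:rax ends up holding the
 * previous memory contents, so the macro always evaluates to the old
 * value.  _lock is either LOCK_PREFIX (atomic across CPUs) or empty.
 */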
#define __arch_cmpxchg128(_ptr, _old, _new, _lock)			\
({									\
	union __u128_halves o = { .full = (_old), },			\
			    n = { .full = (_new), };			\
									\
	asm_inline volatile(_lock "cmpxchg16b %[ptr]"			\
		     : [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	o.full;								\
})

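/*
 * arch_cmpxchg128() is atomic against other CPUs; the _local variant
 * omits the lock prefix and is therefore only atomic with respect to
 * the current CPU (e.g. against interrupts).
 */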
static __always_inline u128 arch_cmpxchg128(volatile u128 *ptr, u128 old, u128 new)
{
	return __arch_cmpxchg128(ptr, old, new, LOCK_PREFIX);
}
#define arch_cmpxchg128 arch_cmpxchg128

static __always_inline u128 arch_cmpxchg128_local(volatile u128 *ptr, u128 old, u128 new)
{
	return __arch_cmpxchg128(ptr, old, new,);
}
#define arch_cmpxchg128_local arch_cmpxchg128_local

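/*
 * try_cmpxchg flavour: returns true on success.  On failure the
 * current memory contents are written back through _oldp so the
 * caller can retry without re-reading.  The "=@ccz" output constraint
 * captures the ZF that cmpxchg16b sets, avoiding a separate compare.
 */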
#define __arch_try_cmpxchg128(_ptr, _oldp, _new, _lock)			\
({									\
	union __u128_halves o = { .full = *(_oldp), },			\
			    n = { .full = (_new), };			\
	bool ret;							\
									\
	asm_inline volatile(_lock "cmpxchg16b %[ptr]"			\
		     : "=@ccz" (ret),					\
		       [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool arch_try_cmpxchg128(volatile u128 *ptr, u128 *oldp, u128 new)
{
	return __arch_try_cmpxchg128(ptr, oldp, new, LOCK_PREFIX);
}
#define arch_try_cmpxchg128 arch_try_cmpxchg128

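/*
 * Illustrative retry loop (not part of this header); compute_next()
 * stands in for whatever derives the new value:
 *
 *	u128 old = *ptr;
 *	u128 new;
 *
 *	do {
 *		new = compute_next(old);
 *	} while (!arch_try_cmpxchg128(ptr, &old, new));
 *
 * A torn initial read is harmless here: the first failed cmpxchg
 * rewrites 'old' with the actual memory contents.
 */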
static __always_inline bool arch_try_cmpxchg128_local(volatile u128 *ptr, u128 *oldp, u128 new)
{
	return __arch_try_cmpxchg128(ptr, oldp, new,);
}
#define arch_try_cmpxchg128_local arch_try_cmpxchg128_local

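/*
 * CMPXCHG16B is not part of the baseline x86-64 ISA (the earliest
 * 64-bit CPUs lack it), so callers must check this before using the
 * 128-bit operations above.
 */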
#define system_has_cmpxchg128()		boot_cpu_has(X86_FEATURE_CX16)

#endif /* _ASM_X86_CMPXCHG_64_H */