/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_CMPXCHG_32_H
#define _ASM_X86_CMPXCHG_32_H

/*
 * Note: if you use __cmpxchg64() or its variants, you need to test for
 * the CX8 feature in boot_cpu_data.
 */

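/*
 * cmpxchg8b compares and exchanges a 64-bit value held as two 32-bit
 * halves in edx:eax (expected/old) and ecx:ebx (new).  This union lets
 * the helpers below split and rejoin a u64 without casts.
 */
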
union __u64_halves {
	u64 full;
	struct {
		u32 low, high;
	};
};

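/*
 * Core 64-bit cmpxchg: the expected value goes in (and the old memory
 * value comes back in) edx:eax, the replacement in ecx:ebx.  Whether or
 * not the exchange happened, cmpxchg8b leaves the value that was in
 * memory in edx:eax, which is what o.full hands back to the caller.
 */
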
#define __arch_cmpxchg64(_ptr, _old, _new, _lock)			\
({									\
	union __u64_halves o = { .full = (_old), },			\
			   n = { .full = (_new), };			\
									\
	asm_inline volatile(_lock "cmpxchg8b %[ptr]"			\
			    : [ptr] "+m" (*(_ptr)),			\
			      "+a" (o.low), "+d" (o.high)		\
			    : "b" (n.low), "c" (n.high)			\
			    : "memory");				\
									\
	o.full;								\
})


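/*
 * __cmpxchg64() issues the SMP-safe, lock-prefixed form; the _local
 * variant omits the lock prefix and is only atomic with respect to the
 * local CPU (e.g. for per-CPU data).
 */
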
static __always_inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64(ptr, old, new, LOCK_PREFIX);
}

static __always_inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64(ptr, old, new,);
}

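/*
 * try_cmpxchg flavour: success is reported through ZF ("=@ccz") instead
 * of comparing return values, and on failure the value found in memory
 * is written back through @_oldp so the caller can retry directly.
 */
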
#define __arch_try_cmpxchg64(_ptr, _oldp, _new, _lock)			\
({									\
	union __u64_halves o = { .full = *(_oldp), },			\
			   n = { .full = (_new), };			\
	bool ret;							\
									\
	asm_inline volatile(_lock "cmpxchg8b %[ptr]"			\
			    : "=@ccz" (ret),				\
			      [ptr] "+m" (*(_ptr)),			\
			      "+a" (o.low), "+d" (o.high)		\
			    : "b" (n.low), "c" (n.high)			\
			    : "memory");				\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64(ptr, oldp, new, LOCK_PREFIX);
}

static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64(ptr, oldp, new,);
}

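/*
 * If the kernel is built only for CX8-capable CPUs (CONFIG_X86_CX8),
 * the helpers above are used as-is.  Otherwise the ALTERNATIVE-based
 * variants below fall back to cmpxchg8b_emu at runtime.
 */
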
#ifdef CONFIG_X86_CX8

#define arch_cmpxchg64 __cmpxchg64

#define arch_cmpxchg64_local __cmpxchg64_local

#define arch_try_cmpxchg64 __try_cmpxchg64

#define arch_try_cmpxchg64_local __try_cmpxchg64_local

#else

/*
 * When building a kernel capable of running on the 80386 and 80486,
 * cmpxchg8b may need to be emulated, since neither CPU implements the
 * instruction.
 */

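/*
 * At boot, ALTERNATIVE() patches the "call cmpxchg8b_emu" into a real
 * (optionally lock-prefixed) cmpxchg8b when X86_FEATURE_CX8 is present.
 * The pointer is passed in %esi ("S"), which is what cmpxchg8b_emu
 * expects, and ALT_OUTPUT_SP() adds the stack-pointer constraint needed
 * for inline asm that contains a call.  The lock prefix is split into
 * _lock_loc/_lock so the locked and local variants can share this macro.
 */
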
#define __arch_cmpxchg64_emu(_ptr, _old, _new, _lock_loc, _lock)	\
({									\
	union __u64_halves o = { .full = (_old), },			\
			   n = { .full = (_new), };			\
									\
	asm_inline volatile(						\
		ALTERNATIVE(_lock_loc					\
			    "call cmpxchg8b_emu",			\
			    _lock "cmpxchg8b %a[ptr]", X86_FEATURE_CX8)	\
		: ALT_OUTPUT_SP("+a" (o.low), "+d" (o.high))		\
		: "b" (n.low), "c" (n.high),				\
		  [ptr] "S" (_ptr)					\
		: "memory");						\
									\
	o.full;								\
})

static __always_inline u64 arch_cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64_emu(ptr, old, new, LOCK_PREFIX_HERE, "lock ");
}
#define arch_cmpxchg64 arch_cmpxchg64

static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64_emu(ptr, old, new, ,);
}
#define arch_cmpxchg64_local arch_cmpxchg64_local

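/*
 * try_cmpxchg flavour of the emulated path: same patching scheme as
 * __arch_cmpxchg64_emu(), with ZF reporting success and *_oldp updated
 * on failure, as in __arch_try_cmpxchg64() above.
 */
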
#define __arch_try_cmpxchg64_emu(_ptr, _oldp, _new, _lock_loc, _lock)	\
({									\
	union __u64_halves o = { .full = *(_oldp), },			\
			   n = { .full = (_new), };			\
	bool ret;							\
									\
	asm_inline volatile(						\
		ALTERNATIVE(_lock_loc					\
			    "call cmpxchg8b_emu",			\
			    _lock "cmpxchg8b %a[ptr]", X86_FEATURE_CX8)	\
		: ALT_OUTPUT_SP("=@ccz" (ret),				\
				"+a" (o.low), "+d" (o.high))		\
		: "b" (n.low), "c" (n.high),				\
		  [ptr] "S" (_ptr)					\
		: "memory");						\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool arch_try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64_emu(ptr, oldp, new, LOCK_PREFIX_HERE, "lock ");
}
#define arch_try_cmpxchg64 arch_try_cmpxchg64

static __always_inline bool arch_try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64_emu(ptr, oldp, new, ,);
}
#define arch_try_cmpxchg64_local arch_try_cmpxchg64_local

#endif

#define system_has_cmpxchg64()		boot_cpu_has(X86_FEATURE_CX8)
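
/*
 * Illustrative sketch only (example_add64() is made up, not part of this
 * header): a caller that cannot assume CX8 tests the feature first and
 * loops on the try variant; on failure arch_try_cmpxchg64() has already
 * refreshed @old with the current memory value.
 *
 *	static bool example_add64(u64 *counter, u64 delta)
 *	{
 *		u64 old, new;
 *
 *		if (!system_has_cmpxchg64())
 *			return false;
 *
 *		old = READ_ONCE(*counter);
 *		do {
 *			new = old + delta;
 *		} while (!arch_try_cmpxchg64(counter, &old, new));
 *		return true;
 *	}
 *
 * Most code should use the generic cmpxchg64()/try_cmpxchg64() wrappers
 * rather than the arch_ helpers directly.
 */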

#endif /* _ASM_X86_CMPXCHG_32_H */