/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Generic UP xchg and cmpxchg using interrupt disablement.  Does not
 * support SMP.
 */

#ifndef __ASM_GENERIC_CMPXCHG_H
#define __ASM_GENERIC_CMPXCHG_H

#ifdef CONFIG_SMP
#error "Cannot use generic cmpxchg on SMP"
#endif

#include <linux/types.h>
#include <linux/irqflags.h>

/*
 * This function doesn't exist, so you'll get a linker error if
 * something tries to do an invalidly-sized xchg().
 */
extern void __generic_xchg_called_with_bad_pointer(void);
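
/*
 * Illustration (not part of the original header): on a 32-bit build
 * without CONFIG_64BIT the 8-byte case below is not compiled, so a
 * hypothetical call such as
 *
 *	u64 val = 0;
 *	generic_xchg(&val, 1);
 *
 * falls through to the default case, references the undefined symbol
 * above, and fails at link time instead of silently being non-atomic.
 */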

static inline
unsigned long __generic_xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long ret, flags;

	switch (size) {
	case 1:
#ifdef __xchg_u8
		return __xchg_u8(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u8 *)ptr;
		*(volatile u8 *)ptr = (x & 0xffu);
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u8 */

	case 2:
#ifdef __xchg_u16
		return __xchg_u16(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u16 *)ptr;
		*(volatile u16 *)ptr = (x & 0xffffu);
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u16 */

	case 4:
#ifdef __xchg_u32
		return __xchg_u32(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u32 *)ptr;
		*(volatile u32 *)ptr = (x & 0xffffffffu);
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u32 */

#ifdef CONFIG_64BIT
	case 8:
#ifdef __xchg_u64
		return __xchg_u64(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u64 *)ptr;
		*(volatile u64 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u64 */
#endif /* CONFIG_64BIT */

	default:
		__generic_xchg_called_with_bad_pointer();
		return x;
	}
}

#define generic_xchg(ptr, x) ({							\
	((__typeof__(*(ptr)))							\
		__generic_xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));	\
})
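
/*
 * Usage sketch (illustrative only, not taken from any caller):
 * generic_xchg() stores the new value and returns the previous one,
 * atomically with respect to interrupts on this (UP) CPU.  The
 * 'pending' variable below is hypothetical.
 *
 *	static unsigned long pending;
 *
 *	static void example_consume_pending(void)
 *	{
 *		unsigned long old = generic_xchg(&pending, 0);
 *
 *		if (old)
 *			pr_info("handled pending bits %#lx\n", old);
 *	}
 */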

/*
 * Atomic compare and exchange.
 */
#include <asm-generic/cmpxchg-local.h>

#define generic_cmpxchg_local(ptr, o, n) ({					\
	((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr), (unsigned long)(o),	\
			(unsigned long)(n), sizeof(*(ptr))));			\
})

#define generic_cmpxchg64_local(ptr, o, n) \
	__generic_cmpxchg64_local((ptr), (o), (n))
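
/*
 * Usage sketch (illustrative only): generic_cmpxchg_local() writes 'n'
 * only if the current value still equals 'o', and always returns the
 * value that was found, so callers typically loop until the returned
 * value matches the one their update was based on.  'counter' is a
 * hypothetical variable.
 *
 *	static unsigned long counter;
 *
 *	static void example_increment(void)
 *	{
 *		unsigned long old, seen;
 *
 *		do {
 *			old = counter;
 *			seen = generic_cmpxchg_local(&counter, old, old + 1);
 *		} while (seen != old);
 *	}
 */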


#ifndef arch_xchg
#define arch_xchg		generic_xchg
#endif

#ifndef arch_cmpxchg_local
#define arch_cmpxchg_local	generic_cmpxchg_local
#endif

#ifndef arch_cmpxchg64_local
#define arch_cmpxchg64_local	generic_cmpxchg64_local
#endif

#define arch_cmpxchg		arch_cmpxchg_local
#define arch_cmpxchg64		arch_cmpxchg64_local
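
/*
 * Sketch (hypothetical architecture code, not from this file): thanks to
 * the #ifndef guards above, an architecture with a native atomic swap can
 * define its own arch_xchg before this header is included and still
 * inherit the remaining fallbacks.  'my_arch_xchg' is an invented name.
 *
 *	#define arch_xchg(ptr, x)	my_arch_xchg((ptr), (x))
 *	#include <asm-generic/cmpxchg.h>
 */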

#endif /* __ASM_GENERIC_CMPXCHG_H */