#ifndef __ARCH_M68K_CMPXCHG__
#define __ARCH_M68K_CMPXCHG__

#include <linux/irqflags.h>

/*
 * Casting through __xg() makes the "m" asm operands below refer to an
 * object of unspecified (large) size, so the compiler does not assume
 * that only a few bytes at the pointer are accessed.
 */
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))

extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);

#ifndef CONFIG_RMW_INSNS
/*
 * Without the CAS instruction (CONFIG_RMW_INSNS unset), exchange falls
 * back to a plain load/store with local interrupts disabled around it.
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	unsigned long flags, tmp;

	local_irq_save(flags);

	switch (size) {
	case 1:
		tmp = *(u8 *)ptr;
		*(u8 *)ptr = x;
		x = tmp;
		break;
	case 2:
		tmp = *(u16 *)ptr;
		*(u16 *)ptr = x;
		x = tmp;
		break;
	case 4:
		tmp = *(u32 *)ptr;
		*(u32 *)ptr = x;
		x = tmp;
		break;
	default:
		tmp = __invalid_xchg_size(x, ptr, size);
		break;
	}

	local_irq_restore(flags);
	return x;
}
#else
/*
 * With CAS available, load the current value and retry the
 * compare-and-swap until the store of the new value succeeds.
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__
			("moveb %2,%0\n\t"
			 "1:\n\t"
			 "casb %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("movew %2,%0\n\t"
			 "1:\n\t"
			 "casw %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("movel %2,%0\n\t"
			 "1:\n\t"
			 "casl %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	default:
		x = __invalid_xchg_size(x, ptr, size);
		break;
	}
	return x;
}
#endif

#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

#include <asm-generic/cmpxchg-local.h>

#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

extern unsigned long __invalid_cmpxchg_size(volatile void *,
					    unsigned long, unsigned long, int);

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#ifdef CONFIG_RMW_INSNS

static inline unsigned long __cmpxchg(volatile void *p, unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__ ("casb %0,%2,%1"
				      : "=d" (old), "=m" (*(char *)p)
				      : "d" (new), "0" (old), "m" (*(char *)p));
		break;
	case 2:
		__asm__ __volatile__ ("casw %0,%2,%1"
				      : "=d" (old), "=m" (*(short *)p)
				      : "d" (new), "0" (old), "m" (*(short *)p));
		break;
	case 4:
		__asm__ __volatile__ ("casl %0,%2,%1"
				      : "=d" (old), "=m" (*(int *)p)
				      : "d" (new), "0" (old), "m" (*(int *)p));
		break;
	default:
		old = __invalid_cmpxchg_size(p, old, new, size);
		break;
	}
	return old;
}

#define cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
			(unsigned long)(n), sizeof(*(ptr))))

#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))

#else

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))

#include <asm-generic/cmpxchg.h>

#endif

#endif /* __ARCH_M68K_CMPXCHG__ */
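
/*
 * Usage sketch: one common caller pattern for the cmpxchg() defined above
 * is a read-modify-write retry loop.  The names example_counter and
 * example_add are hypothetical and only illustrate the success check
 * described in the comment above __cmpxchg(); the block is compiled out.
 */
#if 0
static unsigned int example_counter;

static inline void example_add(unsigned int delta)
{
	unsigned int old, new;

	do {
		old = example_counter;	/* snapshot the current value */
		new = old + delta;	/* compute the updated value  */
		/*
		 * cmpxchg() stores 'new' only if the location still holds
		 * 'old' and returns the prior value, so success is detected
		 * by comparing the return value with 'old'.
		 */
	} while (cmpxchg(&example_counter, old, new) != old);
}
#endif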