#ifndef __ASM_SH_CMPXCHG_LLSC_H
#define __ASM_SH_CMPXCHG_LLSC_H

static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;
	unsigned long tmp;

	/*
	 * LL/SC exchange loop: movli.l loads *m and sets the reservation,
	 * movco.l stores val back only if the reservation still holds, and
	 * the loop retries on failure. synco orders the successful store
	 * against subsequent memory accesses.
	 */
	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l	@%2, %0	! xchg_u32	\n\t"
		"mov		%0, %1			\n\t"
		"mov		%3, %0			\n\t"
		"movco.l	%0, @%2			\n\t"
		"bf		1b			\n\t"
		"synco					\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (val)
		: "t", "memory"
	);

	return retval;
}

static inline unsigned long
__cmpxchg_u32(volatile u32 *m, unsigned long old, unsigned long new)
{
	unsigned long retval;
	unsigned long tmp;

	/*
	 * Compare-and-exchange: load *m, compare it with old, and only on a
	 * match replace the value written back with new; on a mismatch the
	 * conditional store rewrites the value just loaded. Either way the
	 * value observed in *m is returned in retval.
	 */
	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! __cmpxchg_u32		\n\t"
		"mov		%0, %1				\n\t"
		"cmp/eq		%1, %3				\n\t"
		"bf		2f				\n\t"
		"mov		%4, %0				\n\t"
		"2:						\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (old), "r" (new)
		: "t", "memory"
	);

	return retval;
}

#include <asm/cmpxchg-xchg.h>

#endif /* __ASM_SH_CMPXCHG_LLSC_H */
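
/*
 * Illustrative use of __cmpxchg_u32(): a minimal sketch, not part of this
 * header, assuming a u32 counter and the usual kernel types. Real code
 * should go through the generic cmpxchg()/atomic_*() interfaces rather
 * than calling this helper directly; the hypothetical function name below
 * is only for illustration.
 *
 *	static inline u32 example_add_u32(volatile u32 *p, u32 delta)
 *	{
 *		u32 old, seen;
 *
 *		do {
 *			old = *p;
 *			seen = __cmpxchg_u32(p, old, old + delta);
 *		} while (seen != old);	// retry if another CPU raced us
 *
 *		return old + delta;
 *	}
 */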