#ifndef __ASM_SH_CMPXCHG_GRB_H
#define __ASM_SH_CMPXCHG_GRB_H

/*
 * SuperH atomic exchange / compare-and-exchange primitives using the
 * gUSA-style rollback convention ("LOGIN"/"LOGOUT" below):
 *
 *   - r0 is loaded (via mova) with the address of the terminating
 *     label "1:" — the end point of the critical sequence.
 *   - r1 saves the real stack pointer.
 *   - r15 (sp) is loaded with the NEGATIVE byte length of the critical
 *     sequence, i.e. the instructions between LOGIN and "1:".  Count
 *     them: 2 insns (-4) in xchg_u32, 3 insns (-6) in xchg_u8,
 *     4 insns (-8) in __cmpxchg_u32 — each SH insn is 2 bytes.
 *   - "1:" restores r15 from r1 (LOGOUT), ending the sequence.
 *
 * NOTE(review): while r15 holds this small negative value, the kernel's
 * rollback handler (outside this file) treats the task as being inside
 * an atomic sequence and restarts it from the beginning if it is
 * interrupted before reaching "1:", which is what makes the load/store
 * pair atomic with respect to preemption — confirm against the gUSA
 * handling elsewhere in arch/sh.
 *
 * The "+r" constraints on the operands also keep the compiler from
 * allocating r15 for them (see "inhibit r15 overloading").
 */

/*
 * Atomically exchange the 32-bit word at *m with val.
 * Returns the previous value of *m.
 */
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova     1f,   r0     \n\t"	/* r0 = end point */
		"   nop                   \n\t"	/* pad so "1:" stays 4-byte aligned for mova */
		"   mov    r15,   r1      \n\t"	/* r1 = saved sp */
		"   mov    #-4,   r15     \n\t"	/* LOGIN: 2 insns * 2 bytes */
		"   mov.l  @%1,   %0      \n\t"	/* load  old value */
		"   mov.l   %2,   @%1     \n\t"	/* store new value */
		"1: mov     r1,   r15     \n\t"	/* LOGOUT: restore sp */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory", "r0", "r1");

	return retval;
}

/*
 * Atomically exchange the byte at *m with val.
 * Returns the previous value of *m, zero-extended (extu.b) to
 * unsigned long.  No nop needed: mova + 4 insns already leaves
 * "1:" 4-byte aligned.
 */
static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova     1f,   r0     \n\t"	/* r0 = end point */
		"   mov    r15,   r1      \n\t"	/* r1 = saved sp */
		"   mov    #-6,   r15     \n\t"	/* LOGIN: 3 insns * 2 bytes */
		"   mov.b  @%1,   %0      \n\t"	/* load  old value */
		"   extu.b  %0,   %0      \n\t"	/* extend as unsigned */
		"   mov.b   %2,   @%1     \n\t"	/* store new value */
		"1: mov     r1,   r15     \n\t"	/* LOGOUT: restore sp */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory" , "r0", "r1");

	return retval;
}

/*
 * Atomically compare *m with old and, if equal, store new into *m.
 * Returns the value read from *m (== old on success).  The store is
 * skipped via "bf 1f" when the comparison fails; the branch stays
 * inside the LOGIN/LOGOUT window so the whole read-compare-write is
 * covered by the rollback sequence.  "t" is clobbered by cmp/eq.
 */
static inline unsigned long __cmpxchg_u32(volatile int *m, unsigned long old,
					  unsigned long new)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova     1f,   r0     \n\t"	/* r0 = end point */
		"   nop                   \n\t"	/* pad so "1:" stays 4-byte aligned for mova */
		"   mov    r15,   r1      \n\t"	/* r1 = saved sp */
		"   mov    #-8,   r15     \n\t"	/* LOGIN: 4 insns * 2 bytes */
		"   mov.l  @%3,   %0      \n\t"	/* load  old value */
		"   cmp/eq  %0,   %1      \n\t"
		"   bf            1f      \n\t"	/* if not equal */
		"   mov.l   %2,   @%3     \n\t"	/* store new value */
		"1: mov     r1,   r15     \n\t"	/* LOGOUT: restore sp */
		: "=&r" (retval),
		  "+r"  (old), "+r"  (new)	/* old or new can be r15 */
		:  "r"  (m)
		: "memory" , "r0", "r1", "t");

	return retval;
}

#endif /* __ASM_SH_CMPXCHG_GRB_H */