/* xref: /linux/arch/sh/include/asm/atomic-grb.h (revision 7d9794e7523798e1b9422ad9f4e4d808ae5d5932) */
1f15cbe6fSPaul Mundt #ifndef __ASM_SH_ATOMIC_GRB_H
2f15cbe6fSPaul Mundt #define __ASM_SH_ATOMIC_GRB_H
3f15cbe6fSPaul Mundt 
/*
 * SuperH GRB atomics: these parts have no ll/sc instructions, so the
 * read-modify-write is made restartable instead of locked.  r0 is
 * loaded with the address of the end label and r15 (the stack pointer)
 * with the negative byte size of the critical region; while r15 is
 * negative, an interrupt causes the kernel to roll the PC back to the
 * start of the sequence (r0 + r15) rather than resuming mid-way, so
 * the load/op/store triple is atomic w.r.t. interrupts.
 * NOTE(review): rollback mechanics assumed from the gUSA convention --
 * confirm against the arch/sh interrupt entry code.
 *
 * ATOMIC_OP(op) emits: void atomic_##op(int i, atomic_t *v)
 * (no return value; -6 covers the three 2-byte insns of the region).
 */
#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
		"   mov.l  @%1,   %0      \n\t" /* load  old value */	\
		" " #op "   %2,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%1     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp),						\
		  "+r"  (v)						\
		: "r"   (i)						\
		: "memory" , "r0", "r1");				\
}
23f15cbe6fSPaul Mundt 
/*
 * ATOMIC_OP_RETURN(op) emits: int atomic_##op##_return(int i, atomic_t *v)
 *
 * Same restartable (gUSA-style) sequence as ATOMIC_OP, but the updated
 * value is kept in %0 and returned to the caller.  The "memory" clobber
 * keeps the compiler from caching *v across the sequence.
 */
#define ATOMIC_OP_RETURN(op)						\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
		"   mov.l  @%1,   %0      \n\t" /* load  old value */	\
		" " #op "   %2,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%1     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp),						\
		  "+r"  (v)						\
		: "r"   (i)						\
		: "memory" , "r0", "r1");				\
									\
	return tmp;	/* new value of *v */				\
}
45f15cbe6fSPaul Mundt 
/*
 * ATOMIC_FETCH_OP(op) emits: int atomic_fetch_##op(int i, atomic_t *v)
 *
 * Like ATOMIC_OP_RETURN, but returns the OLD value of *v: the loaded
 * value is copied into %1 (res) before the op is applied, and the
 * critical region grows to four insns accordingly (-8 region size).
 */
#define ATOMIC_FETCH_OP(op)						\
static inline int atomic_fetch_##op(int i, atomic_t *v)			\
{									\
	int res, tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
		"   mov.l  @%2,   %0      \n\t" /* load old value */	\
		"   mov     %0,   %1      \n\t" /* save old value */	\
		" " #op "   %3,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%2     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp), "=&r" (res), "+r"  (v)			\
		: "r"   (i)						\
		: "memory" , "r0", "r1");				\
									\
	return res;	/* value of *v before the op */			\
}
67*7d9794e7SPeter Zijlstra 
68*7d9794e7SPeter Zijlstra #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)
69f15cbe6fSPaul Mundt 
70c6470150SPeter Zijlstra ATOMIC_OPS(add)
71c6470150SPeter Zijlstra ATOMIC_OPS(sub)
72f15cbe6fSPaul Mundt 
73*7d9794e7SPeter Zijlstra #undef ATOMIC_OPS
74*7d9794e7SPeter Zijlstra #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)
75*7d9794e7SPeter Zijlstra 
76*7d9794e7SPeter Zijlstra ATOMIC_OPS(and)
77*7d9794e7SPeter Zijlstra ATOMIC_OPS(or)
78*7d9794e7SPeter Zijlstra ATOMIC_OPS(xor)
79658aa514SPeter Zijlstra 
80c6470150SPeter Zijlstra #undef ATOMIC_OPS
81*7d9794e7SPeter Zijlstra #undef ATOMIC_FETCH_OP
82c6470150SPeter Zijlstra #undef ATOMIC_OP_RETURN
83c6470150SPeter Zijlstra #undef ATOMIC_OP
84f15cbe6fSPaul Mundt 
85f15cbe6fSPaul Mundt #endif /* __ASM_SH_ATOMIC_GRB_H */
86