/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_ATOMIC_GRB_H
#define __ASM_SH_ATOMIC_GRB_H

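/*
 * Atomic operations for SH parts using gUSA ("g" User Space Atomicity)
 * restartable sequences with register banks (CONFIG_GUSA_RB).
 *
 * Each operation below brackets a short load-modify-store region between
 * a LOGIN and a LOGOUT:
 *
 *	r0  - address of the end of the region (the "1:" label)
 *	r1  - saved stack pointer
 *	r15 - negative byte length of the region while inside it
 *
 * While r15 holds a small negative value the kernel considers the task
 * to be inside an atomic region: if the task is interrupted or preempted
 * there, it is resumed at r0 + r15, i.e. the start of the region, so the
 * whole load-modify-store sequence re-runs from a fresh load.  The LOGOUT
 * restores r15 and closes the region.  This yields atomicity without
 * locked bus cycles, but is only valid on uniprocessor configurations.
 */
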
#define ATOMIC_OP(op)							\
static inline void arch_atomic_##op(int i, atomic_t *v)		\
{									\
	int tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
		"   mov.l  @%1,   %0      \n\t" /* load  old value */	\
		" " #op "   %2,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%1     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp),						\
		  "+r"  (v)						\
		: "r"   (i)						\
		: "memory", "r0", "r1");				\
}
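
/*
 * ATOMIC_OP(op) generates the void form: e.g. ATOMIC_OP(add) yields
 * arch_atomic_add(i, v), which atomically performs v->counter += i and
 * returns nothing.  Note that "op" is stringized straight into the asm
 * body, so it must be an SH mnemonic of the "op src, dst" form (add,
 * sub, and, or, xor).
 */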

#define ATOMIC_OP_RETURN(op)						\
static inline int arch_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	int tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
		"   mov.l  @%1,   %0      \n\t" /* load  old value */	\
		" " #op "   %2,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%1     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp),						\
		  "+r"  (v)						\
		: "r"   (i)						\
		: "memory", "r0", "r1");				\
									\
	return tmp;							\
}
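
/*
 * ATOMIC_OP_RETURN(op) runs the same LOGIN/LOGOUT sequence but returns
 * tmp, the value just stored: arch_atomic_add_return(i, v) hands back
 * the new value of the counter.
 */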

#define ATOMIC_FETCH_OP(op)						\
static inline int arch_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int res, tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
56 		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
57 		"   mov.l  @%2,   %0      \n\t" /* load old value */	\
58 		"   mov     %0,   %1      \n\t" /* save old value */	\
59 		" " #op "   %3,   %0      \n\t" /* $op */		\
60 		"   mov.l   %0,   @%2     \n\t" /* store new value */	\
61 		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
62 		: "=&r" (tmp), "=&r" (res), "+r"  (v)			\
63 		: "r"   (i)						\
64 		: "memory" , "r0", "r1");				\
65 									\
66 	return res;							\
67 }
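
/*
 * ATOMIC_FETCH_OP(op) copies the loaded value into res before applying
 * the operation, so arch_atomic_fetch_add(i, v) returns the value the
 * counter held before the addition.  The region is one 2-byte
 * instruction longer than in the two macros above, hence the LOGIN size
 * of -8 rather than -6.
 */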

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

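/*
 * Rough usage sketch; callers normally reach these through the generic
 * atomic_add(), atomic_add_return() and atomic_fetch_add() wrappers
 * rather than the arch_* entry points directly:
 *
 *	atomic_t v = ATOMIC_INIT(0);
 *
 *	arch_atomic_add(1, &v);				// v.counter == 1
 *	int new = arch_atomic_add_return(2, &v);	// new == 3
 *	int old = arch_atomic_fetch_sub(1, &v);		// old == 3, v.counter == 2
 */
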
#define arch_atomic_add_return	arch_atomic_add_return
#define arch_atomic_sub_return	arch_atomic_sub_return
#define arch_atomic_fetch_add	arch_atomic_fetch_add
#define arch_atomic_fetch_sub	arch_atomic_fetch_sub

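/*
 * The self-referencing defines above tell the generic atomic code that
 * these operations are provided here, so no fallback versions are
 * generated for them.  The bitwise ops need only the void and fetch
 * forms, so ATOMIC_OPS is redefined below without ATOMIC_OP_RETURN.
 */
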
#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#define arch_atomic_fetch_and	arch_atomic_fetch_and
#define arch_atomic_fetch_or	arch_atomic_fetch_or
#define arch_atomic_fetch_xor	arch_atomic_fetch_xor

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* __ASM_SH_ATOMIC_GRB_H */