/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_ATOMIC_GRB_H
#define __ASM_SH_ATOMIC_GRB_H

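/*
 * Added commentary (not in the original file): the sequences below
 * appear to use the SH "gUSA" software atomicity convention with
 * register banks (CONFIG_GUSA_RB).  mova loads r0 with the address
 * of the terminating label (the "end point"), r1 preserves the real
 * stack pointer, and r15 is set to minus the byte length of the
 * critical section (three 16-bit instructions, hence #-6).  While
 * r15 is negative the CPU is "logged in": if an interrupt or
 * exception lands inside the region, the kernel rolls the PC back to
 * r0 + r15 so the load/modify/store triplet restarts from scratch.
 * Restoring r15 at label 1 "logs out" of the region.
 */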
#define ATOMIC_OP(op)							\
static inline void arch_atomic_##op(int i, atomic_t *v)		\
{									\
	int tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
		"   mov.l  @%1,   %0      \n\t" /* load  old value */	\
		" " #op "   %2,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%1     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp),						\
		  "+r"  (v)						\
		: "r"   (i)						\
		: "memory" , "r0", "r1");				\
}									\

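/*
 * ATOMIC_OP_RETURN(op) is the same gUSA sequence as ATOMIC_OP(), but
 * it also hands back the new value left in the scratch register
 * after the store.
 */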
#define ATOMIC_OP_RETURN(op)						\
static inline int arch_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	int tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
		"   mov.l  @%1,   %0      \n\t" /* load  old value */	\
		" " #op "   %2,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%1     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp),						\
		  "+r"  (v)						\
		: "r"   (i)						\
		: "memory" , "r0", "r1");				\
									\
	return tmp;							\
}

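/*
 * ATOMIC_FETCH_OP(op) copies the loaded value aside before applying
 * <op>, so arch_atomic_fetch_<op>() returns the value the counter
 * held before the operation.
 */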
#define ATOMIC_FETCH_OP(op)						\
static inline int arch_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int res, tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
		"   mov.l  @%2,   %0      \n\t" /* load old value */	\
		"   mov     %0,   %1      \n\t" /* save old value */	\
		" " #op "   %3,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%2     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp), "=&r" (res), "+r"  (v)			\
		: "r"   (i)						\
		: "memory" , "r0", "r1");				\
									\
	return res;							\
}

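/* add/sub get all three variants: void, *_return and fetch_*. */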
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#define arch_atomic_add_return	arch_atomic_add_return
#define arch_atomic_sub_return	arch_atomic_sub_return
#define arch_atomic_fetch_add	arch_atomic_fetch_add
#define arch_atomic_fetch_sub	arch_atomic_fetch_sub

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#define arch_atomic_fetch_and	arch_atomic_fetch_and
#define arch_atomic_fetch_or	arch_atomic_fetch_or
#define arch_atomic_fetch_xor	arch_atomic_fetch_xor

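/*
 * Illustrative expansion (added commentary, not in the original
 * file): ATOMIC_OPS(add) above provides, roughly,
 *
 *	static inline void arch_atomic_add(int i, atomic_t *v);
 *	static inline int  arch_atomic_add_return(int i, atomic_t *v);
 *	static inline int  arch_atomic_fetch_add(int i, atomic_t *v);
 *
 * so arch_atomic_add_return(1, &v) atomically increments v and
 * returns the new value, while arch_atomic_fetch_add(1, &v) returns
 * the value v held beforehand.  The bitwise ops (and, or, xor) get
 * only the void and fetch_* variants.  The self-referential #defines
 * above tell the generic atomic layer which operations this
 * architecture implements directly.
 */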
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* __ASM_SH_ATOMIC_GRB_H */