#ifndef __ASM_SH_ATOMIC_IRQ_H
#define __ASM_SH_ATOMIC_IRQ_H

#include <linux/irqflags.h>

/*
 * Atomic operations implemented by briefly masking local interrupts
 * around a plain read-modify-write of v->counter.  This is used when
 * the CPU provides no lighter-weight primitive (no LL/SC, no gUSA);
 * on a uniprocessor, masking interrupts is enough to keep the update
 * atomic with respect to interrupt handlers.
 */

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
}
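
/*
 * For illustration only (not part of the file's interface): the
 * ATOMIC_OP(add, +=) instantiation further down expands to roughly
 *
 *	static inline void atomic_add(int i, atomic_t *v)
 *	{
 *		unsigned long flags;
 *
 *		raw_local_irq_save(flags);
 *		v->counter += i;
 *		raw_local_irq_restore(flags);
 *	}
 */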

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	temp c_op i;							\
	v->counter = temp;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}
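
/*
 * Likewise for illustration: ATOMIC_OP_RETURN(add, +=) used below
 * expands to roughly
 *
 *	static inline int atomic_add_return(int i, atomic_t *v)
 *	{
 *		unsigned long temp, flags;
 *
 *		raw_local_irq_save(flags);
 *		temp = v->counter;
 *		temp += i;
 *		v->counter = temp;
 *		raw_local_irq_restore(flags);
 *
 *		return temp;
 *	}
 *
 * i.e. the whole read-modify-write, including the value handed back
 * to the caller, happens with local interrupts masked.
 */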

#define ATOMIC_OPS(op, c_op) ATOMIC_OP(op, c_op) ATOMIC_OP_RETURN(op, c_op)

ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)
ATOMIC_OP(and, &=)
ATOMIC_OP(or, |=)
ATOMIC_OP(xor, ^=)
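
/*
 * The instantiations above generate atomic_add(), atomic_sub(),
 * atomic_add_return(), atomic_sub_return(), atomic_and(), atomic_or()
 * and atomic_xor(); only the arithmetic ops get a *_return variant here.
 */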

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* __ASM_SH_ATOMIC_IRQ_H */