xref: /linux/arch/sh/include/asm/atomic-irq.h (revision e5c86679d5e864947a52fb31e45a425dea3e7fa9)
1 #ifndef __ASM_SH_ATOMIC_IRQ_H
2 #define __ASM_SH_ATOMIC_IRQ_H
3 
4 #include <linux/irqflags.h>
5 
/*
 * Generic, interrupt-masking implementation of the atomic ops: each
 * operation simply disables local interrupts around the read-modify-write
 * of v->counter.  This provides atomicity with respect to interrupt
 * handlers on the local CPU only (i.e. it is a UP fallback, not an
 * SMP-safe implementation).
 */
11 
/*
 * ATOMIC_OP(op, c_op) - emit "static inline void atomic_<op>(i, v)".
 *
 * The generated function applies "v->counter c_op i" inside a local
 * IRQ-disabled critical section; no value is returned.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long irqflags;						\
									\
	raw_local_irq_save(irqflags);					\
	v->counter c_op i;						\
	raw_local_irq_restore(irqflags);				\
}
21 
/*
 * ATOMIC_OP_RETURN(op, c_op) - emit "static inline int atomic_<op>_return()".
 *
 * The generated function performs the update with local interrupts
 * masked and returns the *new* value of the counter.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long ret, irqflags;					\
									\
	raw_local_irq_save(irqflags);					\
	ret = v->counter;						\
	ret c_op i;							\
	v->counter = ret;						\
	raw_local_irq_restore(irqflags);				\
									\
	return ret;							\
}
35 
/*
 * ATOMIC_FETCH_OP(op, c_op) - emit "static inline int atomic_fetch_<op>()".
 *
 * The generated function performs the update with local interrupts
 * masked and returns the value the counter held *before* the operation.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)			\
{									\
	unsigned long old, irqflags;					\
									\
	raw_local_irq_save(irqflags);					\
	old = v->counter;						\
	v->counter c_op i;						\
	raw_local_irq_restore(irqflags);				\
									\
	return old;							\
}
48 
49 #define ATOMIC_OPS(op, c_op)						\
50 	ATOMIC_OP(op, c_op)						\
51 	ATOMIC_OP_RETURN(op, c_op)					\
52 	ATOMIC_FETCH_OP(op, c_op)
53 
54 ATOMIC_OPS(add, +=)
55 ATOMIC_OPS(sub, -=)
56 
57 #undef ATOMIC_OPS
58 #define ATOMIC_OPS(op, c_op)						\
59 	ATOMIC_OP(op, c_op)						\
60 	ATOMIC_FETCH_OP(op, c_op)
61 
62 ATOMIC_OPS(and, &=)
63 ATOMIC_OPS(or, |=)
64 ATOMIC_OPS(xor, ^=)
65 
66 #undef ATOMIC_OPS
67 #undef ATOMIC_FETCH_OP
68 #undef ATOMIC_OP_RETURN
69 #undef ATOMIC_OP
70 
71 #endif /* __ASM_SH_ATOMIC_IRQ_H */
72