/*
 * Generic C implementation of atomic counter operations. Usable on
 * UP systems only. Do not include in machine independent code.
 *
 * Originally implemented for MN10300.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

/*
 * atomic_$op() - $op integer to atomic variable
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v. Does not imply a memory barrier; use
 * smp_mb__{before,after}_atomic() where ordering is required.
 */

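/*
 * A minimal caller-side usage sketch (FLAG_BUSY, obj and its fields are
 * hypothetical, for illustration only).  Since atomic_$op() has no
 * implied ordering, the caller brackets it with explicit barriers:
 *
 *	smp_mb__before_atomic();
 *	atomic_and(~FLAG_BUSY, &obj->flags);
 *	smp_mb__after_atomic();
 */
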
/*
 * atomic_$op_return() - $op integer to atomic variable and return the result
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v and returns the result. Implies a full memory
 * barrier.
 */

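/*
 * Sketch (obj and free_obj() are hypothetical, for illustration only):
 * the full barrier implied by atomic_$op_return() makes the usual
 * "last reference frees the object" pattern safe without extra fences:
 *
 *	if (atomic_sub_return(1, &obj->refcnt) == 0)
 *		free_obj(obj);
 */
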
#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */

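/*
 * cmpxchg(ptr, old, new) atomically compares *ptr with old, stores new
 * only if they match, and always returns the value *ptr held before the
 * operation.  The loops below therefore retry with the freshly observed
 * value until the update succeeds without interference from another CPU.
 */
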
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c c_op i;						\
}

#else

#include <linux/irqflags.h>

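/*
 * On a uniprocessor kernel the only context that can race with these
 * read-modify-write sequences is an interrupt handler on the same CPU,
 * so disabling local interrupts around the update is sufficient to make
 * it atomic.
 */
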
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = (v->counter = v->counter c_op i);				\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#endif /* CONFIG_SMP */

#ifndef atomic_add_return
ATOMIC_OP_RETURN(add, +)
#endif
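
/*
 * For illustration, on SMP the instantiation above expands to roughly:
 *
 *	static inline int atomic_add_return(int i, atomic_t *v)
 *	{
 *		int c, old;
 *
 *		c = v->counter;
 *		while ((old = cmpxchg(&v->counter, c, c + i)) != c)
 *			c = old;
 *		return c + i;
 *	}
 */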

#ifndef atomic_sub_return
ATOMIC_OP_RETURN(sub, -)
#endif

#ifndef atomic_clear_mask
ATOMIC_OP(and, &)
#define atomic_clear_mask(i, v) atomic_and(~(i), (v))
#endif

#ifndef atomic_set_mask
#define CONFIG_ARCH_HAS_ATOMIC_OR
ATOMIC_OP(or, |)
#define atomic_set_mask(i, v)	atomic_or((i), (v))
#endif
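
/*
 * atomic_clear_mask()/atomic_set_mask() are legacy wrappers around
 * atomic_and()/atomic_or(); defining CONFIG_ARCH_HAS_ATOMIC_OR here is
 * how generic code is told that atomic_or() is available.
 */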

#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  The ACCESS_ONCE() below forces a
 * volatile access so the compiler cannot cache or refetch the value.
 */
#ifndef atomic_read
#define atomic_read(v)	ACCESS_ONCE((v)->counter)
#endif

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) (((v)->counter) = (i))

#include <linux/irqflags.h>

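/*
 * The arithmetic helpers below are thin wrappers around
 * atomic_add_return()/atomic_sub_return() and so inherit the full
 * memory barrier those imply.
 */
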
static inline int atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}

static inline void atomic_inc(atomic_t *v)
{
	atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
	atomic_sub_return(1, v);
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)

#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))

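/**
 * __atomic_add_unless - add to atomic variable unless it has a given value
 * @v: pointer of type atomic_t
 * @a: amount to add
 * @u: value @v must not hold for the add to take place
 *
 * Atomically adds @a to @v so long as @v was not @u, and returns the old
 * value of @v either way.  (The generic atomic_add_unless() and
 * atomic_inc_not_zero() are typically built on this helper.)
 */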
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
		c = old;
	return c;
}

#endif /* __ASM_GENERIC_ATOMIC_H */