/*
 * atomic32.c: 32-bit atomic_t implementation
 *
 * Copyright (C) 2004 Keith M Wesolowski
 * Copyright (C) 2007 Kyle McMartin
 *
 * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf
 */

#include <linux/atomic.h>
#include <linux/spinlock.h>
#include <linux/module.h>

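/*
 * sparc32 has no atomic read-modify-write instructions beyond ldstub/swap,
 * so atomic_t and the bitops below are emulated with spinlocks.  To reduce
 * contention on SMP, the lock is picked from a small hash table indexed by
 * the variable's address: ATOMIC_HASH() shifts the address down 8 bits and
 * masks it, so variables in the same 256-byte region share one lock.
 */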
#ifdef CONFIG_SMP
#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	(&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

#else /* SMP */

static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE	1
#define ATOMIC_HASH(a)		(&dummy)

#endif /* SMP */

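/*
 * Generate int atomic_<op>_return(int i, atomic_t *v): apply <c_op> to
 * v->counter under the hash lock and return the resulting value.
 * Interrupts are disabled while the lock is held, so the sequence is
 * atomic with respect to other CPUs and to local interrupt handlers.
 */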
#define ATOMIC_OP_RETURN(op, c_op)					\
int atomic_##op##_return(int i, atomic_t *v)				\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	ret = (v->counter c_op i);					\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(atomic_##op##_return);

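/*
 * Generate void atomic_<op>(int i, atomic_t *v): same locking as above,
 * for operations whose callers do not need the new value.
 */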
#define ATOMIC_OP(op, c_op)						\
void atomic_##op(int i, atomic_t *v)					\
{									\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	v->counter c_op i;						\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
}									\
EXPORT_SYMBOL(atomic_##op);

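/*
 * For illustration, ATOMIC_OP_RETURN(add, +=) below expands to roughly:
 *
 *	int atomic_add_return(int i, atomic_t *v)
 *	{
 *		int ret;
 *		unsigned long flags;
 *		spin_lock_irqsave(ATOMIC_HASH(v), flags);
 *		ret = (v->counter += i);
 *		spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
 *		return ret;
 *	}
 *	EXPORT_SYMBOL(atomic_add_return);
 */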
ATOMIC_OP_RETURN(add, +=)
ATOMIC_OP(and, &=)
ATOMIC_OP(or, |=)
ATOMIC_OP(xor, ^=)

#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

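/* Swap in a new counter value and return the old one, under the hash lock. */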
int atomic_xchg(atomic_t *v, int new)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	v->counter = new;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(atomic_xchg);

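/*
 * Store @new in *v only if the counter currently equals @old.  The value
 * observed before the store is returned either way, so callers detect
 * success by comparing the return value with @old.
 */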
int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;

	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(atomic_cmpxchg);

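/*
 * Add @a to @v unless the counter already equals @u, returning the old
 * value.  The generic atomic_add_unless()/atomic_inc_not_zero() wrappers
 * build on this, treating a return value of @u as "no add performed".
 */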
int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(__atomic_add_unless);

/*
 * Even a plain store must take the hash lock: an unlocked atomic_set()
 * could land between the read and the write of a concurrent locked
 * read-modify-write operation and be silently overwritten, losing the
 * set entirely.
 */
void atomic_set(atomic_t *v, int i)
{
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	v->counter = i;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
}
EXPORT_SYMBOL(atomic_set);

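/*
 * Lock-based helpers behind the sparc32 atomic bitops.  Each applies
 * @mask to the word at @addr under the hash lock and returns the old
 * bits selected by @mask; a nonzero return is exactly the case where a
 * test_and_* variant should report the bit as previously set.
 */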
unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old | mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___set_bit);

unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old & ~mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___clear_bit);

unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old ^ mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___change_bit);

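/*
 * Generic 32-bit compare-and-exchange on an arbitrary u32, backing the
 * cmpxchg() macro on sparc32.  It uses the same hash locks as the
 * atomic_t operations, so cmpxchg() and atomic_*() on the same address
 * serialize against each other.
 */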
unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new)
{
	unsigned long flags;
	u32 prev;

	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);

	return (unsigned long)prev;
}
EXPORT_SYMBOL(__cmpxchg_u32);

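/* Unconditional 32-bit exchange, backing the xchg() macro on sparc32. */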
unsigned long __xchg_u32(volatile u32 *ptr, u32 new)
{
	unsigned long flags;
	u32 prev;

	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	prev = *ptr;
	*ptr = new;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);

	return (unsigned long)prev;
}
EXPORT_SYMBOL(__xchg_u32);