/*
 * atomic32.c: 32-bit atomic_t implementation
 *
 * Copyright (C) 2004 Keith M Wesolowski
 *
 * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf
 */

#include <asm/atomic.h>
#include <linux/spinlock.h>
#include <linux/module.h>

#ifdef CONFIG_SMP
#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	(&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = SPIN_LOCK_UNLOCKED
};

#else /* SMP */

static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE	1
#define ATOMIC_HASH(a)		(&dummy)

#endif /* SMP */

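/*
 * Illustration only, not part of the original file: on SMP, ATOMIC_HASH()
 * spreads atomic_t objects across a small table of spinlocks using bits 8
 * and 9 of the object's address.  A minimal sketch of the index computation
 * (hypothetical helper, assuming the 4-entry table defined above):
 *
 *	static inline unsigned int atomic_hash_index(const atomic_t *v)
 *	{
 *		return (((unsigned long)v) >> 8) & (ATOMIC_HASH_SIZE - 1);
 *	}
 *
 * Objects within the same 256-byte region share a lock; objects 256 bytes
 * apart fall on neighbouring table entries, so unrelated counters usually
 * contend on different locks.
 */
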
/* Add i to v under the hash lock covering v and return the new value. */
int __atomic_add_return(int i, atomic_t *v)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = (v->counter += i);
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);

	return ret;
}
EXPORT_SYMBOL(__atomic_add_return);

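/*
 * Illustration only, not part of the original file: the double-underscore
 * name exists because the public atomic_add(), atomic_inc(), atomic_dec()
 * and friends are thin wrappers around this one primitive.  A sketch of the
 * sort of wrappers involved (the definitive versions live in the sparc
 * asm/atomic.h header, not here):
 *
 *	#define atomic_add(i, v)	((void)__atomic_add_return((int)(i), (v)))
 *	#define atomic_inc(v)		((void)__atomic_add_return(1, (v)))
 *	#define atomic_dec_return(v)	__atomic_add_return(-1, (v))
 *	#define atomic_dec_and_test(v)	(__atomic_add_return(-1, (v)) == 0)
 */
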
/*
 * Compare-and-exchange: if v currently holds old, store new.  In all cases
 * return the value v held before the operation.
 */
int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);

	return ret;
}

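/*
 * Illustration only, not part of the original file: callers normally use
 * atomic_cmpxchg() in a retry loop, re-reading the counter until their
 * compare-and-exchange wins the race.  A sketch of a bounded increment
 * written that way (hypothetical helper, shown purely as a usage pattern):
 *
 *	static int atomic_inc_below(atomic_t *v, int limit)
 *	{
 *		int old, seen;
 *
 *		for (old = atomic_read(v); old < limit; old = seen) {
 *			seen = atomic_cmpxchg(v, old, old + 1);
 *			if (seen == old)
 *				return 1;
 *		}
 *		return 0;
 *	}
 *
 * The helper returns non-zero when it managed to increment v while the value
 * was still below limit, and zero once the limit has been reached.
 */
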
/*
 * Add a to v unless v currently holds u.  Returns non-zero if the addition
 * was performed, zero if v already held u.
 */
int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);

	return ret != u;
}

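/*
 * Illustration only, not part of the original file: the classic consumer of
 * atomic_add_unless() is the atomic_inc_not_zero() pattern used for
 * reference counts that may already have dropped to zero.  A sketch of the
 * usual wrapper and a typical call site (the wrapper belongs in the atomic.h
 * headers, not in this file; obj->refcnt is a made-up field):
 *
 *	#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;
 *
 * The call site takes a new reference only while the count is still non-zero,
 * which is what makes lookup-versus-final-put races safe.
 */
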
/*
 * Atomic operations are already serializing.  atomic_set() nevertheless has
 * to take the hash lock too: a plain store could otherwise be overwritten by
 * a concurrent locked read-modify-write on another CPU, losing the newly
 * set value.
 */
void atomic_set(atomic_t *v, int i)
{
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	v->counter = i;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
}
EXPORT_SYMBOL(atomic_set);
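
/*
 * Illustration only, not part of the original file: there is no atomic_read()
 * here because an aligned 32-bit load is already atomic on sparc32, so the
 * header can read the counter directly without taking any hash lock.  The
 * conventional definition (in asm/atomic.h, not this file) is simply:
 *
 *	#define atomic_read(v)	((v)->counter)
 */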