/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_SPINLOCK_H
#define _ALPHA_SPINLOCK_H

#include <linux/kernel.h>
#include <asm/current.h>
#include <asm/barrier.h>
#include <asm/processor.h>

/*
 * Simple spin lock operations.  There are two variants: one clears
 * IRQs on the local processor, the other does not.
 *
 * We make no fairness assumptions; fairness has a cost.
 */

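/*
 * The lock word is 0 when free and nonzero (1) when held.  These two
 * helpers only inspect the value; they imply no memory ordering.
 */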
#define arch_spin_is_locked(x)	((x)->lock != 0)

static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
{
	return lock.lock == 0;
}

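/*
 * Release: the mb() keeps all memory accesses inside the critical
 * section from being reordered past the plain store that frees the
 * lock.
 */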
static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	mb();
	lock->lock = 0;
}

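/*
 * Acquire with a load-locked/store-conditional (ldl_l/stl_c) loop.
 * On contention, or when the stl_c fails, we branch to the
 * out-of-line code in .subsection 2, which spins on a plain ldl
 * until the lock looks free before retrying the LL/SC sequence;
 * this avoids hammering the cache line with locked accesses while
 * waiting.  The trailing mb orders the critical section after the
 * acquisition.
 */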
static inline void arch_spin_lock(arch_spinlock_t *lock)
{
	long tmp;

	__asm__ __volatile__(
	"1:	ldl_l	%0,%1\n"
	"	bne	%0,2f\n"
	"	lda	%0,1\n"
	"	stl_c	%0,%1\n"
	"	beq	%0,2f\n"
	"	mb\n"
	".subsection 2\n"
	"2:	ldl	%0,%1\n"
	"	bne	%0,2b\n"
	"	br	1b\n"
	".previous"
	: "=&r" (tmp), "=m" (lock->lock)
	: "m" (lock->lock) : "memory");
}

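/*
 * test_and_set_bit() is atomic and implies a full memory barrier
 * around the operation, which provides the ordering trylock needs
 * on success.
 */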
static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	return !test_and_set_bit(0, &lock->lock);
}

/***********************************************************/

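/*
 * rwlock encoding: the low bit of the lock word is the writer bit,
 * and each reader subtracts 2, so readers are counted in the bits
 * above it.  A reader may enter unless a writer holds the lock,
 * which blbs (branch if low bit set) detects; waiting again happens
 * out of line on a plain ldl.
 */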
static inline void arch_read_lock(arch_rwlock_t *lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	blbs	%1,6f\n"
	"	subl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	blbs	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

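/*
 * A writer needs the whole word to be 0 (no readers, no writer)
 * before storing 1.  As in arch_spin_lock(), contention spins out
 * of line on a plain ldl.
 */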
static inline void arch_write_lock(arch_rwlock_t *lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	bne	%1,6f\n"
	"	lda	%1,1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	bne	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

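/*
 * "success" is preset to 0 and only becomes nonzero via the stl_c,
 * which writes 1 back into its source register when the store
 * succeeds.  A failed stl_c branches back and retries, since that
 * is LL/SC contention rather than the lock being busy; a held
 * writer bit returns 0 immediately.
 */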
static inline int arch_read_trylock(arch_rwlock_t *lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"
	"	blbs	%1,2f\n"
	"	subl	%1,2,%2\n"
	"	stl_c	%2,%0\n"
	"	beq	%2,6f\n"
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}

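/*
 * Same pattern as arch_read_trylock(): fail immediately if the lock
 * word is nonzero, and retry only when the stl_c itself fails.
 */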
static inline int arch_write_trylock(arch_rwlock_t *lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"
	"	bne	%1,2f\n"
	"	lda	%2,1\n"
	"	stl_c	%2,%0\n"
	"	beq	%2,6f\n"
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}

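/*
 * Reader release: the leading mb orders the critical section before
 * the update, and adding 2 undoes the subtraction done in
 * arch_read_lock().  The LL/SC loop retries until the update lands.
 */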
static inline void arch_read_unlock(arch_rwlock_t *lock)
{
	long regx;
	__asm__ __volatile__(
	"	mb\n"
	"1:	ldl_l	%1,%0\n"
	"	addl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

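/*
 * Writer release mirrors arch_spin_unlock(): a full barrier followed
 * by a plain store of 0.
 */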
static inline void arch_write_unlock(arch_rwlock_t *lock)
{
	mb();
	lock->lock = 0;
}

#endif /* _ALPHA_SPINLOCK_H */