#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#include <linux/types.h>

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
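
/*
 * atomic_read()/atomic_set() are ordinary loads and stores; the inline asm
 * just forces a single word-sized lwz/stw so the compiler can neither tear
 * nor elide the access.  Neither routine implies a memory barrier.
 */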

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
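
/*
 * atomic_add() above and the other read-modify-write operations in this file
 * all follow the same pattern: lwarx loads the word and establishes a
 * reservation, the new value is computed, and stwcx. stores it back only if
 * the reservation still holds; the bne- retries the whole sequence otherwise.
 * PPC405_ERR77() expands to a workaround for a PPC405 core erratum and to
 * nothing on other CPUs.
 */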

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
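
/*
 * The value-returning variants (atomic_add_return() and friends) are
 * bracketed by PPC_ATOMIC_ENTRY_BARRIER/PPC_ATOMIC_EXIT_BARRIER from
 * asm/synch.h so that they order memory accesses on both sides, as the
 * generic atomic API requires of operations that return a value.  The plain
 * atomic_add()/atomic_sub()/atomic_inc()/atomic_dec() provide no ordering.
 */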

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
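
/*
 * Typical use (illustrative sketch only; "pending" and first_waiter_arrived()
 * are made-up names, not part of this API):
 *
 *	static atomic_t pending = ATOMIC_INIT(-1);
 *
 *	if (atomic_inc_and_test(&pending))
 *		first_waiter_arrived();
 */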

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
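
/*
 * atomic_cmpxchg() returns the value *v held before the exchange attempt, so
 * an open-coded compare-and-swap loop looks like the sketch below; if the
 * returned value differs from the expected one, another CPU won the race and
 * the loop retries with the freshly returned value.  (bounded_inc() is a
 * made-up, illustrative helper, not part of this API.)
 *
 *	static int bounded_inc(atomic_t *v, int limit)
 *	{
 *		int cur = atomic_read(v);
 *
 *		while (cur < limit) {
 *			int old = atomic_cmpxchg(v, cur, cur + 1);
 *			if (old == cur)
 *				return 1;
 *			cur = old;
 *		}
 *		return 0;
 *	}
 */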

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}
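
/*
 * Note that __atomic_add_unless() returns the value @v held *before* the
 * (possible) addition; the generic atomic_add_unless() wrapper in
 * linux/atomic.h is expected to turn that into a "did the add happen?"
 * boolean by comparing the returned value against @u.
 */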

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
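
/*
 * atomic_inc_not_zero() is the usual building block for "take a reference
 * only while the object is still live" schemes, along the lines of the
 * sketch below (obj, lookup_object() and refcnt are made-up names):
 *
 *	obj = lookup_object(id);
 *	if (obj && !atomic_inc_not_zero(&obj->refcnt))
 *		obj = NULL;
 */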

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
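
/*
 * Put differently: the decremented value is only stored back when the old
 * value was strictly positive; a zero or negative counter is left untouched,
 * but the returned (old - 1) still goes negative so the caller can tell that
 * no decrement took place.
 */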

#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
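
/*
 * These pair with the non-value-returning atomic_inc()/atomic_dec(), which
 * imply no ordering of their own; callers needing a full barrier before or
 * after such an operation use the macros above.
 */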

#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }
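
/*
 * The 64-bit operations below mirror the 32-bit ones, using ldarx/stdcx. and
 * doubleword compares in place of the word-sized instructions.  No
 * PPC405_ERR77() workaround is needed here: the PPC405 is a 32-bit-only core
 * and never builds this code.
 */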

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add happened (i.e. @v was not @u), and zero
 * otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */