xref: /freebsd/sys/powerpc/include/atomic.h (revision ec4deee4e4f2aef1b97d9424f25d04e91fd7dc10)
/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#include <sys/atomic_common.h>

#ifndef __powerpc64__
#include <sys/_atomic64e.h>
#endif

/*
 * The __ATOMIC_REL/ACQ() macros provide memory barriers only in conjunction
 * with the atomic lXarx/stXcx. sequences below. They are not exposed outside
 * of this file. See also Appendix B.2 of Book II of the architecture manual.
 *
 * Note that not all Book-E processors accept the light-weight sync variant.
 * In particular, early models of E500 cores are known to wedge. Bank on all
 * 64-bit capable CPUs to accept lwsync properly and pessimize 32-bit CPUs
 * to use the heavier-weight sync.
 */

#ifdef __powerpc64__
#define mb()		__asm __volatile("sync" : : : "memory")
#define rmb()		__asm __volatile("lwsync" : : : "memory")
#define wmb()		__asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_REL()	__asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#else
#define mb()		__asm __volatile("sync" : : : "memory")
#define rmb()		__asm __volatile("sync" : : : "memory")
#define wmb()		__asm __volatile("sync" : : : "memory")
#define __ATOMIC_REL()	__asm __volatile("sync" : : : "memory")
#define __ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#endif

static __inline void
powerpc_lwsync(void)
{

#ifdef __powerpc64__
	__asm __volatile("lwsync" : : : "memory");
#else
	__asm __volatile("sync" : : : "memory");
#endif
}
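
/*
 * Illustrative sketch only, not compiled and not part of this header's API:
 * how the plain barriers above pair up to publish data between CPUs.  The
 * variables and functions ("example_data", "example_flag", etc.) are
 * hypothetical.
 */
#if 0
static volatile int example_flag;
static volatile int example_data;

static void
example_producer(int v)
{

	example_data = v;
	wmb();				/* order data store before flag store */
	example_flag = 1;
}

static int
example_consumer(void)
{

	while (example_flag == 0)
		;			/* spin until the producer raises the flag */
	rmb();				/* order flag load before data load */
	return (example_data);
}
#endif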

/*
 * atomic_add(p, v)
 * { *p += v; }
 */

#define __atomic_add_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_int */

#ifdef __powerpc64__
#define __atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_long */
#else
#define	__atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_long */
#endif

#define	_ATOMIC_ADD(type)					\
    static __inline void					\
    atomic_add_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_add_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_add_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_add_##type(p, v, t);				\
    }								\
    /* _ATOMIC_ADD */

_ATOMIC_ADD(int)
_ATOMIC_ADD(long)

#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int

#ifdef __powerpc64__
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long

#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#else
#define	atomic_add_ptr		atomic_add_int
#define	atomic_add_acq_ptr	atomic_add_acq_int
#define	atomic_add_rel_ptr	atomic_add_rel_int
#endif
#undef _ATOMIC_ADD
#undef __atomic_add_long
#undef __atomic_add_int
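
/*
 * Illustrative sketch only, not compiled: bumping a shared counter with
 * atomic_add_int() so concurrent increments from other CPUs are not lost.
 * "example_refs" is hypothetical.
 */
#if 0
static volatile u_int example_refs;

static void
example_ref_acquire(void)
{

	atomic_add_int(&example_refs, 1);	/* atomic read-modify-write */
}
#endif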

/*
 * atomic_clear(p, v)
 * { *p &= ~v; }
 */

#define __atomic_clear_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_int */

#ifdef __powerpc64__
#define __atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_long */
#else
#define	__atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_long */
#endif

#define	_ATOMIC_CLEAR(type)					\
    static __inline void					\
    atomic_clear_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_clear_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_clear_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_clear_##type(p, v, t);				\
    }								\
    /* _ATOMIC_CLEAR */

_ATOMIC_CLEAR(int)
_ATOMIC_CLEAR(long)

#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int

#ifdef __powerpc64__
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long

#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#else
#define	atomic_clear_ptr	atomic_clear_int
#define	atomic_clear_acq_ptr	atomic_clear_acq_int
#define	atomic_clear_rel_ptr	atomic_clear_rel_int
#endif
#undef _ATOMIC_CLEAR
#undef __atomic_clear_long
#undef __atomic_clear_int

/*
 * atomic_cmpset(p, o, n)
 */
/* TODO -- see below */

/*
 * atomic_load_acq(p)
 */
/* TODO -- see below */

/*
 * atomic_readandclear(p)
 */
/* TODO -- see below */

/*
 * atomic_set(p, v)
 * { *p |= v; }
 */

#define __atomic_set_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_int */

#ifdef __powerpc64__
#define __atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_long */
#else
#define	__atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_long */
#endif

#define	_ATOMIC_SET(type)					\
    static __inline void					\
    atomic_set_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_set_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_set_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_set_##type(p, v, t);				\
    }								\
    /* _ATOMIC_SET */

_ATOMIC_SET(int)
_ATOMIC_SET(long)

#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int

#ifdef __powerpc64__
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long

#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#else
#define	atomic_set_ptr		atomic_set_int
#define	atomic_set_acq_ptr	atomic_set_acq_int
#define	atomic_set_rel_ptr	atomic_set_rel_int
#endif
#undef _ATOMIC_SET
#undef __atomic_set_long
#undef __atomic_set_int
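
/*
 * Illustrative sketch only, not compiled: atomic_set_int() and
 * atomic_clear_int() flip individual bits in a shared flags word without
 * losing concurrent updates to other bits.  "EXAMPLE_BUSY" and
 * "example_flags" are hypothetical.
 */
#if 0
#define	EXAMPLE_BUSY	0x0001

static volatile u_int example_flags;

static void
example_mark_busy(void)
{

	atomic_set_int(&example_flags, EXAMPLE_BUSY);	/* *p |= v */
}

static void
example_clear_busy(void)
{

	atomic_clear_int(&example_flags, EXAMPLE_BUSY);	/* *p &= ~v */
}
#endif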

/*
 * atomic_subtract(p, v)
 * { *p -= v; }
 */

#define __atomic_subtract_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_int */

#ifdef __powerpc64__
#define __atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_long */
#else
#define	__atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_long */
#endif

#define	_ATOMIC_SUBTRACT(type)						\
    static __inline void						\
    atomic_subtract_##type(volatile u_##type *p, u_##type v) {		\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
									\
    static __inline void						\
    atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
	__ATOMIC_ACQ();							\
    }									\
									\
    static __inline void						\
    atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__ATOMIC_REL();							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
    /* _ATOMIC_SUBTRACT */

_ATOMIC_SUBTRACT(int)
_ATOMIC_SUBTRACT(long)

#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int

#ifdef __powerpc64__
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long

#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#else
#define	atomic_subtract_ptr	atomic_subtract_int
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_int
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_int
#endif
#undef _ATOMIC_SUBTRACT
#undef __atomic_subtract_long
#undef __atomic_subtract_int

/*
 * atomic_store_rel(p, v)
 */
/* TODO -- see below */

/*
 * Old/original implementations that still need revisiting.
 */

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result, temp;

	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tlwarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stwcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cr0", "memory");

	return (result);
}

#ifdef __powerpc64__
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result, temp;

	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tldarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stdcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cr0", "memory");

	return (result);
}
#endif

#define	atomic_readandclear_32		atomic_readandclear_int

#ifdef __powerpc64__
#define	atomic_readandclear_64		atomic_readandclear_long

#define	atomic_readandclear_ptr		atomic_readandclear_long
#else
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{

	return ((u_long)atomic_readandclear_int((volatile u_int *)addr));
}

#define	atomic_readandclear_ptr		atomic_readandclear_int
#endif
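
/*
 * Illustrative sketch only, not compiled: atomic_readandclear_int() drains
 * a word of pending event bits in one step, so events posted by other CPUs
 * are either consumed now or left for the next pass.  Names are
 * hypothetical.
 */
#if 0
static volatile u_int example_pending;

static void
example_drain_events(void (*handle)(u_int))
{
	u_int events;

	events = atomic_readandclear_int(&example_pending);
	if (events != 0)
		handle(events);
}
#endif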

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define	ATOMIC_STORE_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	powerpc_lwsync();					\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	powerpc_lwsync();					\
	*p = v;							\
}

ATOMIC_STORE_LOAD(int)

#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int

#ifdef __powerpc64__
ATOMIC_STORE_LOAD(long)

#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long

#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#else
static __inline u_long
atomic_load_acq_long(volatile u_long *addr)
{

	return ((u_long)atomic_load_acq_int((volatile u_int *)addr));
}

static __inline void
atomic_store_rel_long(volatile u_long *addr, u_long val)
{

	atomic_store_rel_int((volatile u_int *)addr, (u_int)val);
}

#define	atomic_load_acq_ptr	atomic_load_acq_int
#define	atomic_store_rel_ptr	atomic_store_rel_int
#endif
#undef ATOMIC_STORE_LOAD
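
/*
 * Illustrative sketch only, not compiled: a consumer that observes the
 * flag via atomic_load_acq_int() also observes every store the producer
 * made before its atomic_store_rel_int().  Names are hypothetical.
 */
#if 0
static volatile u_int example_ready;
static int example_payload;

static void
example_publish(int v)
{

	example_payload = v;
	atomic_store_rel_int(&example_ready, 1);	/* release */
}

static int
example_wait(void)
{

	while (atomic_load_acq_int(&example_ready) == 0)
		;					/* acquire on each load */
	return (example_payload);
}
#endif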

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
#ifdef ISA_206_ATOMICS
static __inline int
atomic_cmpset_char(volatile u_char *p, u_char cmpval, u_char newval)
{
	int	ret;

	__asm __volatile (
		"1:\tlbarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"stbcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stbcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_cmpset_short(volatile u_short *p, u_short cmpval, u_short newval)
{
	int	ret;

	__asm __volatile (
		"1:\tlharx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"sthcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"sthcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}
#else
static __inline int
atomic_cmpset_masked(uint32_t *p, uint32_t cmpval, uint32_t newval,
    uint32_t mask)
{
	int		ret;
	uint32_t	tmp;

	__asm __volatile (
		"1:\tlwarx %2, 0, %3\n\t"	/* load old value */
		"and %0, %2, %7\n\t"
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"andc %2, %2, %7\n\t"
		"or %2, %2, %5\n\t"
		"stwcx. %2, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %2, 0, %3\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p), "+&r" (tmp)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p),
		  "r" (mask)
		: "cr0", "memory");

	return (ret);
}

#define	_atomic_cmpset_masked_word(a,o,v,m) atomic_cmpset_masked(a, o, v, m)
#endif

static __inline int
atomic_cmpset_int(volatile u_int *p, u_int cmpval, u_int newval)
{
	int	ret;

	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_cmpset_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	int ret;

	__asm __volatile (
	    #ifdef __powerpc64__
		"1:\tldarx %0, 0, %2\n\t"	/* load old value */
		"cmpld %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"stdcx. %4, 0, %2\n\t"		/* attempt to store */
	    #else
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
	    #endif
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
	    #ifdef __powerpc64__
		"stdcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #else
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #endif
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}

#define	ATOMIC_CMPSET_ACQ_REL(type) \
    static __inline int \
    atomic_cmpset_acq_##type(volatile u_##type *p, \
	    u_##type cmpval, u_##type newval)\
    {\
	u_##type retval; \
	retval = atomic_cmpset_##type(p, cmpval, newval);\
	__ATOMIC_ACQ();\
	return (retval);\
    }\
    static __inline int \
    atomic_cmpset_rel_##type(volatile u_##type *p, \
	    u_##type cmpval, u_##type newval)\
    {\
	__ATOMIC_REL();\
	return (atomic_cmpset_##type(p, cmpval, newval));\
    }\
    struct hack

ATOMIC_CMPSET_ACQ_REL(int);
ATOMIC_CMPSET_ACQ_REL(long);

#ifdef ISA_206_ATOMICS
#define	atomic_cmpset_8		atomic_cmpset_char
#endif
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char

#ifdef ISA_206_ATOMICS
#define	atomic_cmpset_16	atomic_cmpset_short
#endif
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short

#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int

#ifdef __powerpc64__
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long

#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#else
#define	atomic_cmpset_ptr	atomic_cmpset_int
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_int
#endif
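
/*
 * Illustrative sketch only, not compiled: a minimal test-and-set spin
 * lock built from atomic_cmpset_acq_int() and atomic_store_rel_int().
 * Real kernel code should use mtx(9) instead; "example_lock" is
 * hypothetical.
 */
#if 0
static volatile u_int example_lock;

static void
example_lock_acquire(void)
{

	while (!atomic_cmpset_acq_int(&example_lock, 0, 1))
		;		/* spin until the 0 -> 1 transition wins */
}

static void
example_lock_release(void)
{

	atomic_store_rel_int(&example_lock, 0);
}
#endif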

/*
 * Atomically compare the value stored at *p with *cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, in which case *cmpval is set to the
 * value read from *p; nonzero otherwise.
 */
#ifdef ISA_206_ATOMICS
static __inline int
atomic_fcmpset_char(volatile u_char *p, u_char *cmpval, u_char newval)
{
	int	ret;

	__asm __volatile (
		"lbarx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"stbcx. %5, 0, %3\n\t"		/* attempt to store */
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
		"stbcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stbx %0, 0, %7\n\t"		/* update *cmpval (byte) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_fcmpset_short(volatile u_short *p, u_short *cmpval, u_short newval)
{
	int	ret;

	__asm __volatile (
		"lharx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"sthcx. %5, 0, %3\n\t"		/* attempt to store */
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
		"sthcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"sthx %0, 0, %7\n\t"		/* update *cmpval (halfword) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
		: "cr0", "memory");

	return (ret);
}
#endif	/* ISA_206_ATOMICS */

static __inline int
atomic_fcmpset_int(volatile u_int *p, u_int *cmpval, u_int newval)
{
	int	ret;

	__asm __volatile (
		"lwarx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"stwcx. %5, 0, %3\n\t"		/* attempt to store */
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
		"stwcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stwx %0, 0, %7\n\t"		/* update *cmpval */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_fcmpset_long(volatile u_long *p, u_long *cmpval, u_long newval)
{
	int ret;

	__asm __volatile (
	    #ifdef __powerpc64__
		"ldarx %0, 0, %3\n\t"		/* load old value */
		"cmpld %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"stdcx. %5, 0, %3\n\t"		/* attempt to store */
	    #else
		"lwarx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"stwcx. %5, 0, %3\n\t"		/* attempt to store */
	    #endif
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
	    #ifdef __powerpc64__
		"stdcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stdx %0, 0, %7\n\t"		/* update *cmpval */
	    #else
		"stwcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stwx %0, 0, %7\n\t"		/* update *cmpval */
	    #endif
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
		: "cr0", "memory");

	return (ret);
}

#define	ATOMIC_FCMPSET_ACQ_REL(type) \
    static __inline int \
    atomic_fcmpset_acq_##type(volatile u_##type *p, \
	    u_##type *cmpval, u_##type newval)\
    {\
	u_##type retval; \
	retval = atomic_fcmpset_##type(p, cmpval, newval);\
	__ATOMIC_ACQ();\
	return (retval);\
    }\
    static __inline int \
    atomic_fcmpset_rel_##type(volatile u_##type *p, \
	    u_##type *cmpval, u_##type newval)\
    {\
	__ATOMIC_REL();\
	return (atomic_fcmpset_##type(p, cmpval, newval));\
    }\
    struct hack

ATOMIC_FCMPSET_ACQ_REL(int);
ATOMIC_FCMPSET_ACQ_REL(long);

#ifdef ISA_206_ATOMICS
#define	atomic_fcmpset_8	atomic_fcmpset_char
#endif
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char

#ifdef ISA_206_ATOMICS
#define	atomic_fcmpset_16	atomic_fcmpset_short
#endif
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short

#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int

#ifdef __powerpc64__
#define	atomic_fcmpset_64	atomic_fcmpset_long
#define	atomic_fcmpset_acq_64	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_64	atomic_fcmpset_rel_long

#define	atomic_fcmpset_ptr	atomic_fcmpset_long
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_long
#else
#define	atomic_fcmpset_ptr	atomic_fcmpset_int
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_int
#endif
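
/*
 * Illustrative sketch only, not compiled: unlike atomic_cmpset_*(), the
 * fcmpset variants write the value actually found in *p back into *cmpval
 * on failure, so a retry loop does not need to reload by hand.  The
 * function is hypothetical.
 */
#if 0
static void
example_saturating_increment(volatile u_int *p, u_int limit)
{
	u_int old;

	old = *p;
	do {
		if (old == limit)
			return;		/* already saturated */
	} while (!atomic_fcmpset_int(p, &old, old + 1));
}
#endif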

static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{
	u_int value;

	do {
		value = *p;
	} while (!atomic_cmpset_int(p, value, value + v));
	return (value);
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{
	u_long value;

	do {
		value = *p;
	} while (!atomic_cmpset_long(p, value, value + v));
	return (value);
}
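
/*
 * Illustrative sketch only, not compiled: atomic_fetchadd_int() hands out
 * unique, monotonically increasing ticket numbers; each caller sees the
 * value from before its own increment.  "example_ticket" is hypothetical.
 */
#if 0
static volatile u_int example_ticket;

static u_int
example_next_ticket(void)
{

	return (atomic_fetchadd_int(&example_ticket, 1));
}
#endif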

static __inline u_int
atomic_swap_32(volatile u_int *p, u_int v)
{
	u_int prev;

	__asm __volatile(
	"1:	lwarx	%0,0,%2\n"
	"	stwcx.	%3,0,%2\n"
	"	bne-	1b\n"
	: "=&r" (prev), "+m" (*(volatile u_int *)p)
	: "r" (p), "r" (v)
	: "cr0", "memory");

	return (prev);
}

#ifdef __powerpc64__
static __inline u_long
atomic_swap_64(volatile u_long *p, u_long v)
{
	u_long prev;

	__asm __volatile(
	"1:	ldarx	%0,0,%2\n"
	"	stdcx.	%3,0,%2\n"
	"	bne-	1b\n"
	: "=&r" (prev), "+m" (*(volatile u_long *)p)
	: "r" (p), "r" (v)
	: "cr0", "memory");

	return (prev);
}
#endif

#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_swap_int		atomic_swap_32

#ifdef __powerpc64__
#define	atomic_fetchadd_64	atomic_fetchadd_long
#define	atomic_swap_long	atomic_swap_64
#define	atomic_swap_ptr		atomic_swap_64
#else
#define	atomic_swap_long(p,v)	atomic_swap_32((volatile u_int *)(p), v)
#define	atomic_swap_ptr(p,v)	atomic_swap_32((volatile u_int *)(p), v)
#endif
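
/*
 * Illustrative sketch only, not compiled: atomic_swap_ptr() detaches a
 * shared pointer-sized word in one step, e.g. taking over a list head
 * that other CPUs may be updating concurrently.  The function and
 * parameters are hypothetical.
 */
#if 0
static void
example_take_list(volatile u_long *headp, void (*consume)(u_long))
{
	u_long head;

	head = atomic_swap_ptr(headp, 0);	/* old value, *headp now 0 */
	if (head != 0)
		consume(head);
}
#endif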

static __inline void
atomic_thread_fence_acq(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_rel(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__asm __volatile("sync" : : : "memory");
}
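
/*
 * Illustrative sketch only, not compiled: atomic_thread_fence_seq_cst()
 * provides the full store-before-load ordering that the cheaper acq/rel
 * fences do not, as in a two-flag (Dekker-style) handshake where the
 * peer performs the mirror-image sequence on example_b/example_a.
 * Names are hypothetical.
 */
#if 0
static volatile u_int example_a, example_b;

static int
example_try_enter_a(void)
{

	example_a = 1;
	atomic_thread_fence_seq_cst();	/* store to a ordered before load of b */
	return (example_b == 0);
}
#endif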

#ifndef ISA_206_ATOMICS
#include <sys/_atomic_subword.h>
#define	atomic_cmpset_char	atomic_cmpset_8
#define	atomic_cmpset_short	atomic_cmpset_16
#define	atomic_fcmpset_char	atomic_fcmpset_8
#define	atomic_fcmpset_short	atomic_fcmpset_16
#endif

/* These need sys/_atomic_subword.h on non-ISA-2.06-atomic platforms. */
ATOMIC_CMPSET_ACQ_REL(char);
ATOMIC_CMPSET_ACQ_REL(short);

ATOMIC_FCMPSET_ACQ_REL(char);
ATOMIC_FCMPSET_ACQ_REL(short);

#undef __ATOMIC_REL
#undef __ATOMIC_ACQ

#endif /* ! _MACHINE_ATOMIC_H_ */