/*-
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

/*
 * The __ATOMIC_REL/ACQ() macros provide memory barriers only in conjunction
 * with the atomic lXarx/stXcx. sequences below. They are not exposed outside
 * of this file. See also Appendix B.2 of Book II of the architecture manual.
 *
 * Note that not all Book-E processors accept the light-weight sync variant.
 * In particular, early models of E500 cores are known to wedge. Bank on all
 * 64-bit capable CPUs to accept lwsync properly and pessimize the 32-bit
 * ones to use the heavier-weight sync.
 */

#ifdef __powerpc64__
#define mb()		__asm __volatile("sync" : : : "memory")
#define rmb()		__asm __volatile("lwsync" : : : "memory")
#define wmb()		__asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_REL()	__asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#else
#define mb()		__asm __volatile("sync" : : : "memory")
#define rmb()		__asm __volatile("sync" : : : "memory")
#define wmb()		__asm __volatile("sync" : : : "memory")
#define __ATOMIC_REL()	__asm __volatile("sync" : : : "memory")
#define __ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#endif
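
/*
 * Usage sketch (illustrative only, not part of this header): pairing
 * wmb()/rmb() for the classic message-passing pattern the barriers above
 * enable. The __example_* names are hypothetical.
 */
static __inline void
__example_publish_flag(volatile u_int *data, volatile u_int *flag)
{

	*data = 42;
	wmb();		/* order the data store before the flag store */
	*flag = 1;
}

static __inline u_int
__example_read_flagged(volatile u_int *data, volatile u_int *flag)
{

	while (*flag == 0)
		;	/* wait for the producer */
	rmb();		/* order the flag load before the data load */
	return (*data);
}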

static __inline void
powerpc_lwsync(void)
{

#ifdef __powerpc64__
	__asm __volatile("lwsync" : : : "memory");
#else
	__asm __volatile("sync" : : : "memory");
#endif
}

/*
 * atomic_add(p, v)
 * { *p += v; }
 */

#define __atomic_add_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_int */

#ifdef __powerpc64__
#define __atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_long */
#else
#define	__atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_long */
#endif

#define	_ATOMIC_ADD(type)					\
    static __inline void					\
    atomic_add_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_add_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_add_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_add_##type(p, v, t);				\
    }								\
    /* _ATOMIC_ADD */

_ATOMIC_ADD(int)
_ATOMIC_ADD(long)

#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int

#ifdef __powerpc64__
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long

#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#else
#define	atomic_add_ptr		atomic_add_int
#define	atomic_add_acq_ptr	atomic_add_acq_int
#define	atomic_add_rel_ptr	atomic_add_rel_int
#endif
#undef _ATOMIC_ADD
#undef __atomic_add_long
#undef __atomic_add_int
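
/*
 * Usage sketch (illustrative only, not part of this header): bumping a
 * statistics counter with atomic_add_long. The name and parameters are
 * hypothetical.
 */
static __inline void
__example_count_bytes(volatile u_long *counter, u_long nbytes)
{

	/* Equivalent to *counter += nbytes, safe against concurrent adds. */
	atomic_add_long(counter, nbytes);
}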

/*
 * atomic_clear(p, v)
 * { *p &= ~v; }
 */

#define __atomic_clear_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_int */

#ifdef __powerpc64__
#define __atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_long */
#else
#define	__atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_long */
#endif

#define	_ATOMIC_CLEAR(type)					\
    static __inline void					\
    atomic_clear_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_clear_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_clear_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_clear_##type(p, v, t);				\
    }								\
    /* _ATOMIC_CLEAR */

_ATOMIC_CLEAR(int)
_ATOMIC_CLEAR(long)

#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int

#ifdef __powerpc64__
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long

#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#else
#define	atomic_clear_ptr	atomic_clear_int
#define	atomic_clear_acq_ptr	atomic_clear_acq_int
#define	atomic_clear_rel_ptr	atomic_clear_rel_int
#endif
#undef _ATOMIC_CLEAR
#undef __atomic_clear_long
#undef __atomic_clear_int

/*
 * atomic_cmpset(p, o, n)
 */
/* TODO -- see below */

/*
 * atomic_load_acq(p)
 */
/* TODO -- see below */

/*
 * atomic_readandclear(p)
 */
/* TODO -- see below */

/*
 * atomic_set(p, v)
 * { *p |= v; }
 */

#define __atomic_set_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_int */

#ifdef __powerpc64__
#define __atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_long */
#else
#define	__atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_long */
#endif

#define	_ATOMIC_SET(type)					\
    static __inline void					\
    atomic_set_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_set_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_set_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_set_##type(p, v, t);				\
    }								\
    /* _ATOMIC_SET */

_ATOMIC_SET(int)
_ATOMIC_SET(long)

#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int

#ifdef __powerpc64__
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long

#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#else
#define	atomic_set_ptr		atomic_set_int
#define	atomic_set_acq_ptr	atomic_set_acq_int
#define	atomic_set_rel_ptr	atomic_set_rel_int
#endif
#undef _ATOMIC_SET
#undef __atomic_set_long
#undef __atomic_set_int
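
/*
 * Usage sketch (illustrative only, not part of this header): manipulating
 * a flags word with atomic_set/atomic_clear. The __example_* names and the
 * busy bit are hypothetical.
 */
static __inline void
__example_mark_busy(volatile u_int *flags, u_int busybit)
{

	atomic_set_int(flags, busybit);		/* *flags |= busybit */
}

static __inline void
__example_clear_busy(volatile u_int *flags, u_int busybit)
{

	/* Release variant: prior stores are visible before the bit clears. */
	atomic_clear_rel_int(flags, busybit);	/* *flags &= ~busybit */
}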

/*
 * atomic_subtract(p, v)
 * { *p -= v; }
 */

#define __atomic_subtract_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_int */

#ifdef __powerpc64__
#define __atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_long */
#else
#define	__atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_long */
#endif

#define	_ATOMIC_SUBTRACT(type)						\
    static __inline void						\
    atomic_subtract_##type(volatile u_##type *p, u_##type v) {		\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
									\
    static __inline void						\
    atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
	__ATOMIC_ACQ();							\
    }									\
									\
    static __inline void						\
    atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__ATOMIC_REL();							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
    /* _ATOMIC_SUBTRACT */

_ATOMIC_SUBTRACT(int)
_ATOMIC_SUBTRACT(long)

#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int

#ifdef __powerpc64__
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long

#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#else
#define	atomic_subtract_ptr	atomic_subtract_int
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_int
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_int
#endif
#undef _ATOMIC_SUBTRACT
#undef __atomic_subtract_long
#undef __atomic_subtract_int

/*
 * atomic_store_rel(p, v)
 */
/* TODO -- see below */

/*
 * Old/original implementations that still need revisiting.
 */

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result, temp;

	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tlwarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stwcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r" (result), "=&r" (temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cr0", "memory");

	return (result);
}

#ifdef __powerpc64__
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result, temp;

	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tldarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stdcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r" (result), "=&r" (temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cr0", "memory");

	return (result);
}
#endif

#define	atomic_readandclear_32		atomic_readandclear_int

#ifdef __powerpc64__
#define	atomic_readandclear_64		atomic_readandclear_long

#define	atomic_readandclear_ptr		atomic_readandclear_long
#else
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{

	return ((u_long)atomic_readandclear_int((volatile u_int *)addr));
}

#define	atomic_readandclear_ptr		atomic_readandclear_int
#endif
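
/*
 * Usage sketch (illustrative only, not part of this header): draining a
 * pending-event bitmask with atomic_readandclear. The name is
 * hypothetical.
 */
static __inline u_int
__example_take_pending(volatile u_int *pending)
{

	/* Atomically fetch the current bits and reset the word to zero. */
	return (atomic_readandclear_int(pending));
}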

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define	ATOMIC_STORE_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	mb();							\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	powerpc_lwsync();					\
	*p = v;							\
}

ATOMIC_STORE_LOAD(int)

#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int

#ifdef __powerpc64__
ATOMIC_STORE_LOAD(long)

#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long

#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#else
static __inline u_long
atomic_load_acq_long(volatile u_long *addr)
{

	return ((u_long)atomic_load_acq_int((volatile u_int *)addr));
}

static __inline void
atomic_store_rel_long(volatile u_long *addr, u_long val)
{

	atomic_store_rel_int((volatile u_int *)addr, (u_int)val);
}

#define	atomic_load_acq_ptr	atomic_load_acq_int
#define	atomic_store_rel_ptr	atomic_store_rel_int
#endif
#undef ATOMIC_STORE_LOAD
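
/*
 * Usage sketch (illustrative only, not part of this header): the classic
 * publish/consume pairing of atomic_store_rel with atomic_load_acq. The
 * __example_* names are hypothetical.
 */
static __inline void
__example_publish(volatile u_int *data, volatile u_int *ready, u_int v)
{

	*data = v;
	atomic_store_rel_int(ready, 1);	/* data store ordered before flag */
}

static __inline u_int
__example_consume(volatile u_int *data, volatile u_int *ready)
{

	while (atomic_load_acq_int(ready) == 0)
		;	/* flag load ordered before the data load below */
	return (*data);
}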

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline int
atomic_cmpset_int(volatile u_int* p, u_int cmpval, u_int newval)
{
	int	ret;

	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
{
	int ret;

	__asm __volatile (
	    #ifdef __powerpc64__
		"1:\tldarx %0, 0, %2\n\t"	/* load old value */
		"cmpld %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stdcx. %4, 0, %2\n\t"		/* attempt to store */
	    #else
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
	    #endif
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
	    #ifdef __powerpc64__
		"stdcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #else
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #endif
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_cmpset_acq_int(volatile u_int *p, u_int cmpval, u_int newval)
{
	int retval;

	retval = atomic_cmpset_int(p, cmpval, newval);
	__ATOMIC_ACQ();
	return (retval);
}

static __inline int
atomic_cmpset_rel_int(volatile u_int *p, u_int cmpval, u_int newval)
{
	__ATOMIC_REL();
	return (atomic_cmpset_int(p, cmpval, newval));
}

static __inline int
atomic_cmpset_acq_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	int retval;

	retval = atomic_cmpset_long(p, cmpval, newval);
	__ATOMIC_ACQ();
	return (retval);
}

static __inline int
atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	__ATOMIC_REL();
	return (atomic_cmpset_long(p, cmpval, newval));
}

#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int

#ifdef __powerpc64__
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long

#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#else
#define	atomic_cmpset_ptr	atomic_cmpset_int
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_int
#endif
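
/*
 * Usage sketch (illustrative only, not part of this header): a minimal
 * test-and-set spin lock built from atomic_cmpset_acq and
 * atomic_store_rel. Real kernel locks add backoff and diagnostics; the
 * __example_* names are hypothetical.
 */
static __inline void
__example_spin_lock(volatile u_int *lk)
{

	while (!atomic_cmpset_acq_int(lk, 0, 1))
		;	/* spin until the 0 -> 1 transition succeeds */
}

static __inline void
__example_spin_unlock(volatile u_int *lk)
{

	atomic_store_rel_int(lk, 0);	/* critical section drains first */
}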

/*
 * Atomically compare the value stored at *p with *cmpval. If the two
 * values are equal, update *p with newval and return nonzero. Otherwise
 * return zero and store the value read from *p back into *cmpval.
 */
static __inline int
atomic_fcmpset_int(volatile u_int *p, u_int *cmpval, u_int newval)
{
	int	ret;

	__asm __volatile (
		"lwarx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne 1f\n\t"			/* exit if not equal */
		"stwcx. %5, 0, %3\n\t"		/* attempt to store */
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
		"stwcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stwx %0, 0, %7\n\t"		/* write back observed value */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r" (cmpval)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_fcmpset_long(volatile u_long *p, u_long *cmpval, u_long newval)
{
	int ret;

	__asm __volatile (
	    #ifdef __powerpc64__
		"ldarx %0, 0, %3\n\t"		/* load old value */
		"cmpld %4, %0\n\t"		/* compare */
		"bne 1f\n\t"			/* exit if not equal */
		"stdcx. %5, 0, %3\n\t"		/* attempt to store */
	    #else
		"lwarx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne 1f\n\t"			/* exit if not equal */
		"stwcx. %5, 0, %3\n\t"		/* attempt to store */
	    #endif
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
	    #ifdef __powerpc64__
		"stdcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stdx %0, 0, %7\n\t"		/* write back observed value */
	    #else
		"stwcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stwx %0, 0, %7\n\t"		/* write back observed value */
	    #endif
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r" (cmpval)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_fcmpset_acq_int(volatile u_int *p, u_int *cmpval, u_int newval)
{
	int retval;

	retval = atomic_fcmpset_int(p, cmpval, newval);
	__ATOMIC_ACQ();
	return (retval);
}

static __inline int
atomic_fcmpset_rel_int(volatile u_int *p, u_int *cmpval, u_int newval)
{
	__ATOMIC_REL();
	return (atomic_fcmpset_int(p, cmpval, newval));
}

static __inline int
atomic_fcmpset_acq_long(volatile u_long *p, u_long *cmpval, u_long newval)
{
	int retval;

	retval = atomic_fcmpset_long(p, cmpval, newval);
	__ATOMIC_ACQ();
	return (retval);
}

static __inline int
atomic_fcmpset_rel_long(volatile u_long *p, u_long *cmpval, u_long newval)
{
	__ATOMIC_REL();
	return (atomic_fcmpset_long(p, cmpval, newval));
}

#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int

#ifdef __powerpc64__
#define	atomic_fcmpset_64	atomic_fcmpset_long
#define	atomic_fcmpset_acq_64	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_64	atomic_fcmpset_rel_long

#define	atomic_fcmpset_ptr	atomic_fcmpset_long
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_long
#else
#define	atomic_fcmpset_ptr	atomic_fcmpset_int
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_int
#endif
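
/*
 * Usage sketch (illustrative only, not part of this header): a lock-free
 * read-modify-write loop using atomic_fcmpset. On failure, fcmpset
 * refreshes "old" with the observed value, so no explicit reload is
 * needed. The __example_* name is hypothetical.
 */
static __inline u_int
__example_fetch_or(volatile u_int *p, u_int bits)
{
	u_int old;

	old = *p;
	while (!atomic_fcmpset_int(p, &old, old | bits))
		;	/* "old" was updated from *p; retry with it */
	return (old);
}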

static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{
	u_int value;

	do {
		value = *p;
	} while (!atomic_cmpset_int(p, value, value + v));
	return (value);
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{
	u_long value;

	do {
		value = *p;
	} while (!atomic_cmpset_long(p, value, value + v));
	return (value);
}
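
/*
 * Usage sketch (illustrative only, not part of this header): handing out
 * unique tickets with atomic_fetchadd, which returns the value as it was
 * before the addition. The name is hypothetical.
 */
static __inline u_int
__example_next_ticket(volatile u_int *counter)
{

	return (atomic_fetchadd_int(counter, 1));	/* old value is ours */
}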

static __inline u_int
atomic_swap_32(volatile u_int *p, u_int v)
{
	u_int prev;

	__asm __volatile(
	"1:	lwarx	%0,0,%2\n"
	"	stwcx.	%3,0,%2\n"
	"	bne-	1b\n"
	: "=&r" (prev), "+m" (*(volatile u_int *)p)
	: "r" (p), "r" (v)
	: "cr0", "memory");

	return (prev);
}

#ifdef __powerpc64__
static __inline u_long
atomic_swap_64(volatile u_long *p, u_long v)
{
	u_long prev;

	__asm __volatile(
	"1:	ldarx	%0,0,%2\n"
	"	stdcx.	%3,0,%2\n"
	"	bne-	1b\n"
	: "=&r" (prev), "+m" (*(volatile u_long *)p)
	: "r" (p), "r" (v)
	: "cr0", "memory");

	return (prev);
}
#endif

#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_swap_int		atomic_swap_32

#ifdef __powerpc64__
#define	atomic_fetchadd_64	atomic_fetchadd_long
#define	atomic_swap_long	atomic_swap_64
#define	atomic_swap_ptr		atomic_swap_64
#endif
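
/*
 * Usage sketch (illustrative only, not part of this header): atomically
 * installing a new state word while retrieving the previous one with
 * atomic_swap. The name is hypothetical.
 */
static __inline u_int
__example_set_state(volatile u_int *state, u_int newstate)
{

	return (atomic_swap_32(state, newstate));	/* returns old state */
}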

#undef __ATOMIC_REL
#undef __ATOMIC_ACQ

static __inline void
atomic_thread_fence_acq(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_rel(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__asm __volatile("sync" : : : "memory");
}
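
/*
 * Usage sketch (illustrative only, not part of this header): the
 * store-then-load handshake (Dekker-style) that needs the full
 * sequentially consistent fence; the lighter acq/rel fences do not order
 * a store before a later load. The __example_* names are hypothetical.
 */
static __inline int
__example_dekker_try(volatile u_int *me, volatile u_int *other)
{

	*me = 1;
	atomic_thread_fence_seq_cst();	/* order our store before the load */
	return (*other == 0);		/* safe to proceed if peer not trying */
}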

#endif /* ! _MACHINE_ATOMIC_H_ */