xref: /freebsd/sys/powerpc/include/atomic.h (revision 2b15cb3d0922bd70ea592f0da9b4a5b167f4d53f)
/*-
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

/*
 * The __ATOMIC_REL/ACQ() macros provide memory barriers only in conjunction
 * with the atomic lXarx/stXcx. sequences below. They are not exposed outside
 * of this file. See also Appendix B.2 of Book II of the architecture manual.
 *
 * Note that not all Book-E processors accept the light-weight sync variant.
 * In particular, early models of E500 cores are known to wedge. Assume that
 * all 64-bit capable CPUs handle lwsync properly, and pessimize 32-bit CPUs
 * to use the heavier-weight sync.
 */

#ifdef __powerpc64__
#define mb()		__asm __volatile("lwsync" : : : "memory")
#define rmb()		__asm __volatile("lwsync" : : : "memory")
#define wmb()		__asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_REL()	__asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#else
#define mb()		__asm __volatile("sync" : : : "memory")
#define rmb()		__asm __volatile("sync" : : : "memory")
#define wmb()		__asm __volatile("sync" : : : "memory")
#define __ATOMIC_REL()	__asm __volatile("sync" : : : "memory")
#define __ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#endif
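
/*
 * Editor's illustration (not in the original header; names hypothetical):
 * mb() is a full barrier, while rmb() and wmb() need only order loads and
 * stores respectively. A writer can publish data before raising a flag:
 *
 *	data = compute();
 *	wmb();			(stores to data ordered before flag)
 *	flag = 1;
 *
 * and a reader pairs this with rmb() between loading the flag and data.
 */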

/*
 * atomic_add(p, v)
 * { *p += v; }
 */

#define __atomic_add_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_int */

#ifdef __powerpc64__
#define __atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_long */
#else
#define	__atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_long */
#endif

#define	_ATOMIC_ADD(type)					\
    static __inline void					\
    atomic_add_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_add_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_add_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_add_##type(p, v, t);				\
    }								\
    /* _ATOMIC_ADD */

_ATOMIC_ADD(int)
_ATOMIC_ADD(long)

#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int

#ifdef __powerpc64__
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long

#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#else
#define	atomic_add_ptr		atomic_add_int
#define	atomic_add_acq_ptr	atomic_add_acq_int
#define	atomic_add_rel_ptr	atomic_add_rel_int
#endif
#undef _ATOMIC_ADD
#undef __atomic_add_long
#undef __atomic_add_int
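
/*
 * Usage sketch (illustrative, not in the original header): bump a
 * hypothetical statistics counter. The plain form imposes no ordering;
 * the _acq/_rel forms add the barrier after or before the update:
 *
 *	static volatile u_int pkt_count;
 *	atomic_add_int(&pkt_count, 1);
 */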

/*
 * atomic_clear(p, v)
 * { *p &= ~v; }
 */

#define __atomic_clear_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_int */

#ifdef __powerpc64__
#define __atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_long */
#else
#define	__atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_long */
#endif

#define	_ATOMIC_CLEAR(type)					\
    static __inline void					\
    atomic_clear_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_clear_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_clear_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_clear_##type(p, v, t);				\
    }								\
    /* _ATOMIC_CLEAR */

_ATOMIC_CLEAR(int)
_ATOMIC_CLEAR(long)

#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int

#ifdef __powerpc64__
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long

#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#else
#define	atomic_clear_ptr	atomic_clear_int
#define	atomic_clear_acq_ptr	atomic_clear_acq_int
#define	atomic_clear_rel_ptr	atomic_clear_rel_int
#endif
#undef _ATOMIC_CLEAR
#undef __atomic_clear_long
#undef __atomic_clear_int

/*
 * atomic_cmpset(p, o, n)
 */
/* TODO -- see below */

/*
 * atomic_load_acq(p)
 */
/* TODO -- see below */

/*
 * atomic_readandclear(p)
 */
/* TODO -- see below */

/*
 * atomic_set(p, v)
 * { *p |= v; }
 */

#define __atomic_set_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_int */

#ifdef __powerpc64__
#define __atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_long */
#else
#define	__atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_long */
#endif

#define	_ATOMIC_SET(type)					\
    static __inline void					\
    atomic_set_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_set_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_set_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_set_##type(p, v, t);				\
    }								\
    /* _ATOMIC_SET */

_ATOMIC_SET(int)
_ATOMIC_SET(long)

#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int

#ifdef __powerpc64__
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long

#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#else
#define	atomic_set_ptr		atomic_set_int
#define	atomic_set_acq_ptr	atomic_set_acq_int
#define	atomic_set_rel_ptr	atomic_set_rel_int
#endif
#undef _ATOMIC_SET
#undef __atomic_set_long
#undef __atomic_set_int
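
/*
 * Usage sketch (illustrative; "flags" and F_BUSY are hypothetical): set
 * and clear manipulate bit masks without a lock. The _acq form orders the
 * guarded work after the flag is raised, and the _rel form makes that
 * work visible before the flag is dropped:
 *
 *	atomic_set_acq_int(&flags, F_BUSY);
 *	... guarded work ...
 *	atomic_clear_rel_int(&flags, F_BUSY);
 */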

/*
 * atomic_subtract(p, v)
 * { *p -= v; }
 */

#define __atomic_subtract_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_int */

#ifdef __powerpc64__
#define __atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_long */
#else
#define	__atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_long */
#endif

#define	_ATOMIC_SUBTRACT(type)						\
    static __inline void						\
    atomic_subtract_##type(volatile u_##type *p, u_##type v) {		\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
									\
    static __inline void						\
    atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
	__ATOMIC_ACQ();							\
    }									\
									\
    static __inline void						\
    atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__ATOMIC_REL();							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
    /* _ATOMIC_SUBTRACT */

_ATOMIC_SUBTRACT(int)
_ATOMIC_SUBTRACT(long)

#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int

#ifdef __powerpc64__
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long

#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#else
#define	atomic_subtract_ptr	atomic_subtract_int
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_int
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_int
#endif
#undef _ATOMIC_SUBTRACT
#undef __atomic_subtract_long
#undef __atomic_subtract_int
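
/*
 * Usage sketch (illustrative; "slots_in_use" is hypothetical): subtract
 * is the inverse of add, e.g. returning n reserved slots to a pool:
 *
 *	atomic_subtract_int(&slots_in_use, n);
 */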

/*
 * atomic_store_rel(p, v)
 */
/* TODO -- see below */

/*
 * Old/original implementations that still need revisiting.
 */

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result, temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tlwarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stwcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cr0", "memory");
#endif

	return (result);
}

#ifdef __powerpc64__
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result, temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tldarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stdcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cr0", "memory");
#endif

	return (result);
}
#endif

#define	atomic_readandclear_32		atomic_readandclear_int

#ifdef __powerpc64__
#define	atomic_readandclear_64		atomic_readandclear_long

#define	atomic_readandclear_ptr		atomic_readandclear_long
#else
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{

	return ((u_long)atomic_readandclear_int((volatile u_int *)addr));
}

#define	atomic_readandclear_ptr		atomic_readandclear_int
#endif
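
/*
 * Usage sketch (illustrative; names hypothetical): readandclear takes
 * ownership of a whole word in one step, e.g. draining a mask of pending
 * events so that each bit is handled exactly once:
 *
 *	pending = atomic_readandclear_int(&pending_mask);
 *	while (pending != 0)
 *		... dispatch and strip the lowest set bit ...
 */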

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define	ATOMIC_STORE_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	mb();							\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
	mb();							\
	*p = v;							\
}

ATOMIC_STORE_LOAD(int)

#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int

#ifdef __powerpc64__
ATOMIC_STORE_LOAD(long)

#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long

#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#else
static __inline u_long
atomic_load_acq_long(volatile u_long *addr)
{

	return ((u_long)atomic_load_acq_int((volatile u_int *)addr));
}

static __inline void
atomic_store_rel_long(volatile u_long *addr, u_long val)
{

	atomic_store_rel_int((volatile u_int *)addr, (u_int)val);
}

#define	atomic_load_acq_ptr	atomic_load_acq_int
#define	atomic_store_rel_ptr	atomic_store_rel_int
#endif
#undef ATOMIC_STORE_LOAD
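
/*
 * Usage sketch (illustrative; "msg" and "ready" are hypothetical): a
 * producer/consumer handoff built from the primitives above:
 *
 *	msg = build_message();				(producer)
 *	atomic_store_rel_int(&ready, 1);
 *
 *	while (atomic_load_acq_int(&ready) == 0)	(consumer)
 *		;
 *	consume(msg);
 */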

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline int
atomic_cmpset_int(volatile u_int* p, u_int cmpval, u_int newval)
{
	int	ret;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");
#endif

	return (ret);
}

static __inline int
atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
{
	int ret;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
	    #ifdef __powerpc64__
		"1:\tldarx %0, 0, %2\n\t"	/* load old value */
		"cmpld %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stdcx. %4, 0, %2\n\t"		/* attempt to store */
	    #else
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
	    #endif
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
	    #ifdef __powerpc64__
		"stdcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #else
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #endif
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");
#endif

	return (ret);
}

static __inline int
atomic_cmpset_acq_int(volatile u_int *p, u_int cmpval, u_int newval)
{
	int retval;

	retval = atomic_cmpset_int(p, cmpval, newval);
	__ATOMIC_ACQ();
	return (retval);
}

static __inline int
atomic_cmpset_rel_int(volatile u_int *p, u_int cmpval, u_int newval)
{
	__ATOMIC_REL();
	return (atomic_cmpset_int(p, cmpval, newval));
}

static __inline int
atomic_cmpset_acq_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	int retval;

	retval = atomic_cmpset_long(p, cmpval, newval);
	__ATOMIC_ACQ();
	return (retval);
}

static __inline int
atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	__ATOMIC_REL();
	return (atomic_cmpset_long(p, cmpval, newval));
}

#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int

#ifdef __powerpc64__
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long

#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#else
#define	atomic_cmpset_ptr	atomic_cmpset_int
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_int
#endif
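
/*
 * Usage sketch (illustrative; "old", "p" and F_LOCKED are hypothetical):
 * the classic compare-and-swap retry loop, setting a bit only if it was
 * not already set and acquiring on success. cmpset returns nonzero on
 * success, so the loop retries only on reservation loss:
 *
 *	do {
 *		old = *p;
 *		if (old & F_LOCKED)
 *			return (0);		(someone else holds it)
 *	} while (!atomic_cmpset_acq_int(p, old, old | F_LOCKED));
 */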

static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{
	u_int value;

	do {
		value = *p;
	} while (!atomic_cmpset_int(p, value, value + v));
	return (value);
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{
	u_long value;

	do {
		value = *p;
	} while (!atomic_cmpset_long(p, value, value + v));
	return (value);
}
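
/*
 * Usage sketch (illustrative; "next_ticket" is hypothetical): fetchadd
 * returns the value *before* the addition, which yields a simple ticket
 * dispenser where every caller gets a distinct number:
 *
 *	my_ticket = atomic_fetchadd_int(&next_ticket, 1);
 */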

static __inline u_int
atomic_swap_32(volatile u_int *p, u_int v)
{
	u_int prev;

	__asm __volatile(
	"1:	lwarx	%0,0,%2\n"
	"	stwcx.	%3,0,%2\n"
	"	bne-	1b\n"
	: "=&r" (prev), "+m" (*(volatile u_int *)p)
	: "r" (p), "r" (v)
	: "cr0", "memory");

	return (prev);
}

#ifdef __powerpc64__
static __inline u_long
atomic_swap_64(volatile u_long *p, u_long v)
{
	u_long prev;

	__asm __volatile(
	"1:	ldarx	%0,0,%2\n"
	"	stdcx.	%3,0,%2\n"
	"	bne-	1b\n"
	: "=&r" (prev), "+m" (*(volatile u_long *)p)
	: "r" (p), "r" (v)
	: "cr0", "memory");

	return (prev);
}
#endif

#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_swap_int		atomic_swap_32

#ifdef __powerpc64__
#define	atomic_fetchadd_64	atomic_fetchadd_long
#define	atomic_swap_long	atomic_swap_64
#define	atomic_swap_ptr		atomic_swap_64
#endif
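
/*
 * Usage sketch (illustrative; "word" is hypothetical): swap
 * unconditionally installs a new value and returns the old one, e.g.
 * claiming a token so exactly one caller sees the previous nonzero value:
 *
 *	old = atomic_swap_32(&word, 0);
 */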

#undef __ATOMIC_REL
#undef __ATOMIC_ACQ

#endif /* ! _MACHINE_ATOMIC_H_ */