xref: /freebsd/sys/powerpc/include/atomic.h (revision 6486b015fc84e96725fef22b0e3363351399ae83)
/*-
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

/* NOTE: lwsync is equivalent to sync on systems without lwsync */
#define mb()		__asm __volatile("lwsync" : : : "memory")
#ifdef __powerpc64__
#define rmb()		__asm __volatile("lwsync" : : : "memory")
#define wmb()		__asm __volatile("lwsync" : : : "memory")
#else
#define rmb()		__asm __volatile("lwsync" : : : "memory")
#define wmb()		__asm __volatile("eieio" : : : "memory")
#endif
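
/*
 * Illustrative sketch (not part of this header; the flag and data names
 * are hypothetical) of the classic producer/consumer pairing of
 * wmb()/rmb():
 *
 *	producer:			consumer:
 *		data = 42;			while (flag == 0)
 *		wmb();					;
 *		flag = 1;			rmb();
 *						use(data);
 *
 * wmb() orders the store to data before the store to flag; rmb() orders
 * the load of flag before the load of data.
 */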

/*
 * The __ATOMIC_REL/ACQ() macros provide memory barriers only in conjunction
 * with the atomic lXarx/stXcx. sequences below. See Appendix B.2 of Book II
 * of the architecture manual.
 */
#ifdef __powerpc64__
#define __ATOMIC_REL()	__asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_ACQ()	__asm __volatile("lwsync" : : : "memory")
#else
#define __ATOMIC_REL()	__asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#endif
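
/*
 * Placement convention used throughout this file: a release variant
 * issues __ATOMIC_REL() before its lXarx/stXcx. loop, and an acquire
 * variant issues __ATOMIC_ACQ() after it, e.g.:
 *
 *	release:			acquire:
 *		__ATOMIC_REL();			__atomic_add_int(p, v, t);
 *		__atomic_add_int(p, v, t);	__ATOMIC_ACQ();
 */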

/*
 * atomic_add(p, v)
 * { *p += v; }
 */

#define __atomic_add_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_add_int */

#ifdef __powerpc64__
#define __atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_add_long */
#else
#define	__atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_add_long */
#endif

#define	_ATOMIC_ADD(type)					\
    static __inline void					\
    atomic_add_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_add_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_add_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_add_##type(p, v, t);				\
    }								\
    /* _ATOMIC_ADD */

_ATOMIC_ADD(int)
_ATOMIC_ADD(long)

#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int

#ifdef __powerpc64__
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long

#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#else
#define	atomic_add_ptr		atomic_add_int
#define	atomic_add_acq_ptr	atomic_add_acq_int
#define	atomic_add_rel_ptr	atomic_add_rel_int
#endif
#undef _ATOMIC_ADD
#undef __atomic_add_long
#undef __atomic_add_int
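
/*
 * Usage sketch (hypothetical caller, not part of this header):
 *
 *	static volatile u_int pkt_count;
 *	...
 *	atomic_add_int(&pkt_count, 1);
 *
 * The lwarx/stwcx. pair above retries until no other CPU has written
 * the word between the load and the store, making the add atomic.
 */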

/*
 * atomic_clear(p, v)
 * { *p &= ~v; }
 */

#define __atomic_clear_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_clear_int */

#ifdef __powerpc64__
#define __atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_clear_long */
#else
#define	__atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_clear_long */
#endif

#define	_ATOMIC_CLEAR(type)					\
    static __inline void					\
    atomic_clear_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_clear_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_clear_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_clear_##type(p, v, t);				\
    }								\
    /* _ATOMIC_CLEAR */

_ATOMIC_CLEAR(int)
_ATOMIC_CLEAR(long)

#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int

#ifdef __powerpc64__
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long

#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#else
#define	atomic_clear_ptr	atomic_clear_int
#define	atomic_clear_acq_ptr	atomic_clear_acq_int
#define	atomic_clear_rel_ptr	atomic_clear_rel_int
#endif
#undef _ATOMIC_CLEAR
#undef __atomic_clear_long
#undef __atomic_clear_int

/*
 * atomic_cmpset(p, o, n)
 */
/* TODO -- see below */

/*
 * atomic_load_acq(p)
 */
/* TODO -- see below */

/*
 * atomic_readandclear(p)
 */
/* TODO -- see below */

/*
 * atomic_set(p, v)
 * { *p |= v; }
 */

#define __atomic_set_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_set_int */

#ifdef __powerpc64__
#define __atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_set_long */
#else
#define	__atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_set_long */
#endif

#define	_ATOMIC_SET(type)					\
    static __inline void					\
    atomic_set_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_set_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_set_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_set_##type(p, v, t);				\
    }								\
    /* _ATOMIC_SET */

_ATOMIC_SET(int)
_ATOMIC_SET(long)

#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int

#ifdef __powerpc64__
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long

#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#else
#define	atomic_set_ptr		atomic_set_int
#define	atomic_set_acq_ptr	atomic_set_acq_int
#define	atomic_set_rel_ptr	atomic_set_rel_int
#endif
#undef _ATOMIC_SET
#undef __atomic_set_long
#undef __atomic_set_int
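
/*
 * Usage sketch (hypothetical flag word, not part of this header):
 *
 *	#define	F_BUSY	0x0001
 *	static volatile u_int flags;
 *	...
 *	atomic_set_int(&flags, F_BUSY);		// flags |= F_BUSY (or)
 *	atomic_clear_int(&flags, F_BUSY);	// flags &= ~F_BUSY (andc)
 */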

/*
 * atomic_subtract(p, v)
 * { *p -= v; }
 */

#define __atomic_subtract_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_subtract_int */

#ifdef __powerpc64__
#define __atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_subtract_long */
#else
#define	__atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_subtract_long */
#endif

#define	_ATOMIC_SUBTRACT(type)						\
    static __inline void						\
    atomic_subtract_##type(volatile u_##type *p, u_##type v) {		\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
									\
    static __inline void						\
    atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
	__ATOMIC_ACQ();							\
    }									\
									\
    static __inline void						\
    atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__ATOMIC_REL();							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
    /* _ATOMIC_SUBTRACT */

_ATOMIC_SUBTRACT(int)
_ATOMIC_SUBTRACT(long)

#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int

#ifdef __powerpc64__
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long

#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#else
#define	atomic_subtract_ptr	atomic_subtract_int
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_int
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_int
#endif
#undef _ATOMIC_SUBTRACT
#undef __atomic_subtract_long
#undef __atomic_subtract_int
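
/*
 * Usage sketch (hypothetical counter, not part of this header):
 *
 *	atomic_subtract_rel_int(&inflight, 1);
 *
 * The release variant orders all prior writes before the decrement
 * becomes visible to other CPUs.
 */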

/*
 * atomic_store_rel(p, v)
 */
/* TODO -- see below */

/*
 * Old/original implementations that still need revisiting.
 */

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result, temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tlwarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stwcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cc", "memory");
#endif

	return (result);
}

#ifdef __powerpc64__
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result, temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tldarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stdcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cc", "memory");
#endif

	return (result);
}
#endif

#define	atomic_readandclear_32		atomic_readandclear_int

#ifdef __powerpc64__
#define	atomic_readandclear_64		atomic_readandclear_long

#define	atomic_readandclear_ptr		atomic_readandclear_long
#else
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{

	return ((u_long)atomic_readandclear_int((volatile u_int *)addr));
}

#define	atomic_readandclear_ptr		atomic_readandclear_int
#endif
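
/*
 * Usage sketch (hypothetical pending-event word, not part of this header):
 *
 *	u_int pending;
 *
 *	pending = atomic_readandclear_int(&event_bits);
 *	// 'pending' holds the old value; event_bits is now 0.
 */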

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define	ATOMIC_STORE_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	mb();							\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
	mb();							\
	*p = v;							\
}

ATOMIC_STORE_LOAD(int)

#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int

#ifdef __powerpc64__
ATOMIC_STORE_LOAD(long)

#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long

#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#else
static __inline u_long
atomic_load_acq_long(volatile u_long *addr)
{

	return ((u_long)atomic_load_acq_int((volatile u_int *)addr));
}

static __inline void
atomic_store_rel_long(volatile u_long *addr, u_long val)
{

	atomic_store_rel_int((volatile u_int *)addr, (u_int)val);
}

#define	atomic_load_acq_ptr	atomic_load_acq_int
#define	atomic_store_rel_ptr	atomic_store_rel_int
#endif
#undef ATOMIC_STORE_LOAD
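
/*
 * Usage sketch (hypothetical handoff, not part of this header):
 *
 *	writer:				reader:
 *		msg = m;			while (atomic_load_acq_int(&ready) == 0)
 *		atomic_store_rel_int(			;
 *		    &ready, 1);			consume(msg);
 *
 * The release store (mb() before the store) keeps the write to 'msg'
 * ordered before 'ready'; the acquire load (mb() after the load) keeps
 * the read of 'msg' ordered after it.
 */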

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline int
atomic_cmpset_int(volatile u_int* p, u_int cmpval, u_int newval)
{
	int	ret;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cc", "memory");
#endif

	return (ret);
}

static __inline int
atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
{
	int ret;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
	    #ifdef __powerpc64__
		"1:\tldarx %0, 0, %2\n\t"	/* load old value */
		"cmpld %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stdcx. %4, 0, %2\n\t"		/* attempt to store */
	    #else
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
	    #endif
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
	    #ifdef __powerpc64__
		"stdcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #else
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #endif
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cc", "memory");
#endif

	return (ret);
}

static __inline int
atomic_cmpset_acq_int(volatile u_int *p, u_int cmpval, u_int newval)
{
	int retval;

	retval = atomic_cmpset_int(p, cmpval, newval);
	__ATOMIC_ACQ();
	return (retval);
}

static __inline int
atomic_cmpset_rel_int(volatile u_int *p, u_int cmpval, u_int newval)
{
	__ATOMIC_REL();
	return (atomic_cmpset_int(p, cmpval, newval));
}

static __inline int
atomic_cmpset_acq_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	int retval;

	retval = atomic_cmpset_long(p, cmpval, newval);
	__ATOMIC_ACQ();
	return (retval);
}

static __inline int
atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	__ATOMIC_REL();
	return (atomic_cmpset_long(p, cmpval, newval));
}

#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int

#ifdef __powerpc64__
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long

#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#else
#define	atomic_cmpset_ptr	atomic_cmpset_int
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_int
#endif
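
/*
 * Usage sketch (hypothetical lock-free update, not part of this header):
 *
 *	u_int oldval, newval;
 *
 *	do {
 *		oldval = counter;
 *		newval = oldval * 2;	// any pure function of oldval
 *	} while (atomic_cmpset_int(&counter, oldval, newval) == 0);
 *
 * The loop retries whenever another CPU changed 'counter' between the
 * read and the cmpset; atomic_fetchadd_*() below is built the same way.
 */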

static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{
	u_int value;

	do {
		value = *p;
	} while (!atomic_cmpset_int(p, value, value + v));
	return (value);
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{
	u_long value;

	do {
		value = *p;
	} while (!atomic_cmpset_long(p, value, value + v));
	return (value);
}

#define	atomic_fetchadd_32	atomic_fetchadd_int

#ifdef __powerpc64__
#define	atomic_fetchadd_64	atomic_fetchadd_long
#endif
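
/*
 * Usage sketch (hypothetical ticket counter, not part of this header):
 *
 *	u_int my_ticket;
 *
 *	my_ticket = atomic_fetchadd_int(&next_ticket, 1);
 *	// returns the value *before* the add, unlike atomic_add_int()
 */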

#endif /* ! _MACHINE_ATOMIC_H_ */