/*-
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#define	__ATOMIC_BARRIER					\
    __asm __volatile("sync" : : : "memory")

#define	mb()	__ATOMIC_BARRIER
#define	wmb()	mb()
#define	rmb()	mb()
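
/*
 * Illustrative sketch (not part of the original header): mb(), wmb()
 * and rmb() all expand to "sync".  "data", "flag" (assumed volatile)
 * and compute()/use() are hypothetical.
 *
 * Producer:
 *	data = compute();
 *	wmb();			order the data write before the flag write
 *	flag = 1;
 *
 * Consumer:
 *	while (flag == 0)
 *		;
 *	rmb();			order the flag read before the data reads
 *	use(data);
 */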

/*
 * atomic_add(p, v)
 * { *p += v; }
 *
 * Each read-modify-write primitive below is a load-reserved/
 * store-conditional (lwarx/stwcx., or ldarx/stdcx. on 64-bit) loop
 * that retries until the conditional store succeeds.
 */

#define __atomic_add_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_add_int */

#ifdef __powerpc64__
#define __atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_add_long */
#else
#define	__atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_add_long */
#endif

#define	_ATOMIC_ADD(type)					\
    static __inline void					\
    atomic_add_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_add_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
	__ATOMIC_BARRIER;					\
    }								\
								\
    static __inline void					\
    atomic_add_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_BARRIER;					\
	__atomic_add_##type(p, v, t);				\
    }								\
    /* _ATOMIC_ADD */

_ATOMIC_ADD(int)
_ATOMIC_ADD(long)

#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int

#ifdef __powerpc64__
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long

#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#else
#define	atomic_add_ptr		atomic_add_int
#define	atomic_add_acq_ptr	atomic_add_acq_int
#define	atomic_add_rel_ptr	atomic_add_rel_int
#endif
#undef _ATOMIC_ADD
#undef __atomic_add_long
#undef __atomic_add_int
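
/*
 * Illustrative sketch (not part of the original header); "counter" is
 * a hypothetical statistics counter:
 *
 *	static volatile u_int counter;
 *
 *	atomic_add_int(&counter, 1);		add, no ordering implied
 *	atomic_add_rel_int(&counter, 1);	barrier first, then add
 */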

/*
 * atomic_clear(p, v)
 * { *p &= ~v; }
 */

#define __atomic_clear_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_clear_int */

#ifdef __powerpc64__
#define __atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_clear_long */
#else
#define	__atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_clear_long */
#endif

#define	_ATOMIC_CLEAR(type)					\
    static __inline void					\
    atomic_clear_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_clear_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
	__ATOMIC_BARRIER;					\
    }								\
								\
    static __inline void					\
    atomic_clear_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_BARRIER;					\
	__atomic_clear_##type(p, v, t);				\
    }								\
    /* _ATOMIC_CLEAR */

_ATOMIC_CLEAR(int)
_ATOMIC_CLEAR(long)

#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int

#ifdef __powerpc64__
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long

#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#else
#define	atomic_clear_ptr	atomic_clear_int
#define	atomic_clear_acq_ptr	atomic_clear_acq_int
#define	atomic_clear_rel_ptr	atomic_clear_rel_int
#endif
#undef _ATOMIC_CLEAR
#undef __atomic_clear_long
#undef __atomic_clear_int
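
/*
 * Illustrative sketch (not part of the original header); F_BUSY is a
 * hypothetical flag bit in a hypothetical "flags" word:
 *
 *	#define F_BUSY	0x0001
 *	static volatile u_int flags;
 *
 *	atomic_clear_rel_int(&flags, F_BUSY);	barrier, then flags &= ~F_BUSY
 */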

/*
 * atomic_cmpset(p, o, n)
 */
/* TODO -- see below */

/*
 * atomic_load_acq(p)
 */
/* TODO -- see below */

/*
 * atomic_readandclear(p)
 */
/* TODO -- see below */

/*
 * atomic_set(p, v)
 * { *p |= v; }
 */

#define __atomic_set_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_set_int */

#ifdef __powerpc64__
#define __atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_set_long */
#else
#define	__atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_set_long */
#endif

#define	_ATOMIC_SET(type)					\
    static __inline void					\
    atomic_set_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_set_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
	__ATOMIC_BARRIER;					\
    }								\
								\
    static __inline void					\
    atomic_set_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_BARRIER;					\
	__atomic_set_##type(p, v, t);				\
    }								\
    /* _ATOMIC_SET */

_ATOMIC_SET(int)
_ATOMIC_SET(long)

#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int

#ifdef __powerpc64__
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long

#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#else
#define	atomic_set_ptr		atomic_set_int
#define	atomic_set_acq_ptr	atomic_set_acq_int
#define	atomic_set_rel_ptr	atomic_set_rel_int
#endif
#undef _ATOMIC_SET
#undef __atomic_set_long
#undef __atomic_set_int
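
/*
 * Illustrative sketch (not part of the original header), the
 * counterpart of the atomic_clear() example above; F_BUSY and "flags"
 * are again hypothetical:
 *
 *	atomic_set_acq_int(&flags, F_BUSY);	flags |= F_BUSY, then barrier
 */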

/*
 * atomic_subtract(p, v)
 * { *p -= v; }
 */

#define __atomic_subtract_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_subtract_int */

#ifdef __powerpc64__
#define __atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_subtract_long */
#else
#define	__atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cc", "memory")					\
    /* __atomic_subtract_long */
#endif

#define	_ATOMIC_SUBTRACT(type)						\
    static __inline void						\
    atomic_subtract_##type(volatile u_##type *p, u_##type v) {		\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
									\
    static __inline void						\
    atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
	__ATOMIC_BARRIER;						\
    }									\
									\
    static __inline void						\
    atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__ATOMIC_BARRIER;						\
	__atomic_subtract_##type(p, v, t);				\
    }									\
    /* _ATOMIC_SUBTRACT */

_ATOMIC_SUBTRACT(int)
_ATOMIC_SUBTRACT(long)

#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int

#ifdef __powerpc64__
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long

#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#else
#define	atomic_subtract_ptr	atomic_subtract_int
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_int
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_int
#endif
#undef _ATOMIC_SUBTRACT
#undef __atomic_subtract_long
#undef __atomic_subtract_int
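
/*
 * Illustrative sketch (not part of the original header); "inflight" is
 * a hypothetical counter.  These primitives return void, so a caller
 * that must act on the new value should use atomic_fetchadd_*() (see
 * below) or a cmpset loop instead:
 *
 *	static volatile u_int inflight;
 *
 *	atomic_subtract_int(&inflight, 1);
 */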

/*
 * atomic_store_rel(p, v)
 */
/* TODO -- see below */

/*
 * Old/original implementations that still need revisiting.
 */

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result, temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tlwarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stwcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r" (result), "=&r" (temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cc", "memory");
#endif

	return (result);
}

#ifdef __powerpc64__
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result, temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tldarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stdcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r" (result), "=&r" (temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cc", "memory");
#endif

	return (result);
}
#endif

#define	atomic_readandclear_32		atomic_readandclear_int

#ifdef __powerpc64__
#define	atomic_readandclear_64		atomic_readandclear_long

#define	atomic_readandclear_ptr		atomic_readandclear_long
#else
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{

	return ((u_long)atomic_readandclear_int((volatile u_int *)addr));
}

#define	atomic_readandclear_ptr		atomic_readandclear_int
#endif
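
/*
 * Illustrative sketch (not part of the original header); "pending" is
 * a hypothetical event mask drained by a single swap, so bits set
 * concurrently by other CPUs are either returned here or left for the
 * next call, never lost:
 *
 *	static volatile u_int pending;
 *	u_int events;
 *
 *	events = atomic_readandclear_int(&pending);
 */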

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define	ATOMIC_STORE_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	__ATOMIC_BARRIER;					\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
	__ATOMIC_BARRIER;					\
	*p = v;							\
}

ATOMIC_STORE_LOAD(int)

#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int

#ifdef __powerpc64__
ATOMIC_STORE_LOAD(long)

#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long

#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#else
static __inline u_long
atomic_load_acq_long(volatile u_long *addr)
{

	return ((u_long)atomic_load_acq_int((volatile u_int *)addr));
}

static __inline void
atomic_store_rel_long(volatile u_long *addr, u_long val)
{

	atomic_store_rel_int((volatile u_int *)addr, (u_int)val);
}

#define	atomic_load_acq_ptr	atomic_load_acq_int
#define	atomic_store_rel_ptr	atomic_store_rel_int
#endif
#undef ATOMIC_STORE_LOAD
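
/*
 * Illustrative sketch (not part of the original header): a release
 * store publishes data that an acquire load can safely consume.
 * "ready", "data", compute() and use() are hypothetical.
 *
 * Writer:
 *	data = compute();
 *	atomic_store_rel_int(&ready, 1);
 *
 * Reader:
 *	while (atomic_load_acq_int(&ready) == 0)
 *		;
 *	use(data);
 */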

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline int
atomic_cmpset_int(volatile u_int *p, u_int cmpval, u_int newval)
{
	int	ret;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cc", "memory");
#endif

	return (ret);
}

static __inline int
atomic_cmpset_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	int ret;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
	    #ifdef __powerpc64__
		"1:\tldarx %0, 0, %2\n\t"	/* load old value */
		"cmpld %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stdcx. %4, 0, %2\n\t"		/* attempt to store */
	    #else
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
	    #endif
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
	    #ifdef __powerpc64__
		"stdcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #else
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #endif
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cc", "memory");
#endif

	return (ret);
}

static __inline int
atomic_cmpset_acq_int(volatile u_int *p, u_int cmpval, u_int newval)
{
	int retval;

	retval = atomic_cmpset_int(p, cmpval, newval);
	__ATOMIC_BARRIER;
	return (retval);
}

static __inline int
atomic_cmpset_rel_int(volatile u_int *p, u_int cmpval, u_int newval)
{
	__ATOMIC_BARRIER;
	return (atomic_cmpset_int(p, cmpval, newval));
}

static __inline int
atomic_cmpset_acq_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	int retval;

	retval = atomic_cmpset_long(p, cmpval, newval);
	__ATOMIC_BARRIER;
	return (retval);
}

static __inline int
atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	__ATOMIC_BARRIER;
	return (atomic_cmpset_long(p, cmpval, newval));
}

#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int

#ifdef __powerpc64__
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long

#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#else
#define	atomic_cmpset_ptr	atomic_cmpset_int
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_int
#endif
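
/*
 * Illustrative sketch (not part of the original header): a minimal
 * try-lock built on compare-and-set; "lock" is a hypothetical word
 * that holds 0 when free:
 *
 *	static volatile u_int lock;
 *
 *	if (atomic_cmpset_acq_int(&lock, 0, 1)) {
 *		... critical section ...
 *		atomic_store_rel_int(&lock, 0);
 *	}
 */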

static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{
	u_int value;

	do {
		value = *p;
	} while (!atomic_cmpset_int(p, value, value + v));
	return (value);
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{
	u_long value;

	do {
		value = *p;
	} while (!atomic_cmpset_long(p, value, value + v));
	return (value);
}

#define	atomic_fetchadd_32	atomic_fetchadd_int

#ifdef __powerpc64__
#define	atomic_fetchadd_64	atomic_fetchadd_long
#endif
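
/*
 * Illustrative sketch (not part of the original header): fetchadd
 * returns the value *p held before the addition, so concurrent callers
 * each draw a unique ticket; "next" is hypothetical:
 *
 *	static volatile u_int next;
 *	u_int ticket;
 *
 *	ticket = atomic_fetchadd_int(&next, 1);
 */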

#endif /* ! _MACHINE_ATOMIC_H_ */