/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#include <sys/atomic_common.h>

#ifndef __powerpc64__
#include <sys/_atomic64e.h>
#endif

/*
 * The __ATOMIC_REL/ACQ() macros provide memory barriers only in conjunction
 * with the atomic lXarx/stXcx. sequences below. They are not exposed outside
 * of this file. See also Appendix B.2 of Book II of the architecture manual.
 *
 * Note that not all Book-E processors accept the light-weight sync variant.
 * In particular, early models of E500 cores are known to wedge. Assume that
 * all 64-bit-capable CPUs handle lwsync properly and pessimize 32-bit CPUs
 * to use the heavier-weight sync.
 */

#ifdef __powerpc64__
#define mb()		__asm __volatile("sync" : : : "memory")
#define rmb()		__asm __volatile("lwsync" : : : "memory")
#define wmb()		__asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_REL()	__asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#else
#define mb()		__asm __volatile("sync" : : : "memory")
#define rmb()		__asm __volatile("sync" : : : "memory")
#define wmb()		__asm __volatile("sync" : : : "memory")
#define __ATOMIC_REL()	__asm __volatile("sync" : : : "memory")
#define __ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#endif
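
/*
 * Illustrative sketch (hypothetical, not part of this header): a classic
 * producer/consumer pairing of wmb()/rmb(). The variables `msg_data',
 * `msg_ready' and the function use() are made up for the example.
 *
 *	volatile u_int msg_data, msg_ready;
 *
 *	// producer
 *	msg_data = 42;
 *	wmb();			// order the data store before the flag store
 *	msg_ready = 1;
 *
 *	// consumer
 *	while (msg_ready == 0)
 *		;
 *	rmb();			// order the flag load before the data load
 *	use(msg_data);
 */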

static __inline void
powerpc_lwsync(void)
{

#ifdef __powerpc64__
	__asm __volatile("lwsync" : : : "memory");
#else
	__asm __volatile("sync" : : : "memory");
#endif
}

/*
 * atomic_add(p, v)
 * { *p += v; }
 */

#define __atomic_add_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_int */

#ifdef __powerpc64__
#define __atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_long */
#else
#define	__atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_long */
#endif

#define	_ATOMIC_ADD(type)					\
    static __inline void					\
    atomic_add_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_add_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_add_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_add_##type(p, v, t);				\
    }								\
    /* _ATOMIC_ADD */

_ATOMIC_ADD(int)
_ATOMIC_ADD(long)

#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int

#ifdef __powerpc64__
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long

#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#else
#define	atomic_add_ptr		atomic_add_int
#define	atomic_add_acq_ptr	atomic_add_acq_int
#define	atomic_add_rel_ptr	atomic_add_rel_int
#endif
#undef _ATOMIC_ADD
#undef __atomic_add_long
#undef __atomic_add_int
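
/*
 * Illustrative sketch (hypothetical, not part of this header): taking a
 * reference on an object with atomic_add_int(), i.e. performing *p += v as
 * one indivisible update. The `struct obj' type is made up for the example.
 *
 *	struct obj { volatile u_int refcnt; };
 *
 *	static __inline void
 *	obj_hold(struct obj *o)
 *	{
 *		atomic_add_int(&o->refcnt, 1);
 *	}
 */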

/*
 * atomic_clear(p, v)
 * { *p &= ~v; }
 */

#ifdef	ISA_206_ATOMICS
#define __atomic_clear_char(p, v, t)				\
    __asm __volatile(						\
	"1:	lbarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stbcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_char */
#define __atomic_clear_short(p, v, t)				\
    __asm __volatile(						\
	"1:	lharx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	sthcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_short */
#endif
#define __atomic_clear_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_int */

#ifdef __powerpc64__
#define __atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_long */
#else
#define	__atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_long */
#endif

#define	_ATOMIC_CLEAR(type)					\
    static __inline void					\
    atomic_clear_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_clear_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_clear_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_clear_##type(p, v, t);				\
    }								\
    /* _ATOMIC_CLEAR */

#ifdef	ISA_206_ATOMICS
_ATOMIC_CLEAR(char)
_ATOMIC_CLEAR(short)
#endif

_ATOMIC_CLEAR(int)
_ATOMIC_CLEAR(long)

#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int

#ifdef __powerpc64__
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long

#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#else
#define	atomic_clear_ptr	atomic_clear_int
#define	atomic_clear_acq_ptr	atomic_clear_acq_int
#define	atomic_clear_rel_ptr	atomic_clear_rel_int
#endif
#undef _ATOMIC_CLEAR
#undef __atomic_clear_long
#undef __atomic_clear_int

/*
 * atomic_cmpset(p, o, n)
 */
/* TODO -- see below */

/*
 * atomic_load_acq(p)
 */
/* TODO -- see below */

/*
 * atomic_readandclear(p)
 */
/* TODO -- see below */

/*
 * atomic_set(p, v)
 * { *p |= v; }
 */
#ifdef	ISA_206_ATOMICS
#define __atomic_set_char(p, v, t)				\
    __asm __volatile(						\
	"1:	lbarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stbcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_char */
#define __atomic_set_short(p, v, t)				\
    __asm __volatile(						\
	"1:	lharx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	sthcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_short */
#endif

#define __atomic_set_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_int */

#ifdef __powerpc64__
#define __atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_long */
#else
#define	__atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_long */
#endif

#define	_ATOMIC_SET(type)					\
    static __inline void					\
    atomic_set_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_set_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_set_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_set_##type(p, v, t);				\
    }								\
    /* _ATOMIC_SET */

#ifdef	ISA_206_ATOMICS
_ATOMIC_SET(char)
_ATOMIC_SET(short)
#endif

_ATOMIC_SET(int)
_ATOMIC_SET(long)

#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int

#ifdef __powerpc64__
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long

#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#else
#define	atomic_set_ptr		atomic_set_int
#define	atomic_set_acq_ptr	atomic_set_acq_int
#define	atomic_set_rel_ptr	atomic_set_rel_int
#endif
#undef _ATOMIC_SET
#undef __atomic_set_long
#undef __atomic_set_int
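
/*
 * Illustrative sketch (hypothetical, not part of this header): setting and
 * clearing flag bits with atomic_set_int()/atomic_clear_int(), i.e.
 * *p |= v and *p &= ~v as indivisible updates. F_BUSY and `flags' are made
 * up for the example.
 *
 *	#define	F_BUSY	0x0001
 *
 *	volatile u_int flags;
 *
 *	atomic_set_int(&flags, F_BUSY);		// mark busy
 *	...
 *	atomic_clear_int(&flags, F_BUSY);	// mark idle
 */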

/*
 * atomic_subtract(p, v)
 * { *p -= v; }
 */

#define __atomic_subtract_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_int */

#ifdef __powerpc64__
#define __atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_long */
#else
#define	__atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_long */
#endif

#define	_ATOMIC_SUBTRACT(type)						\
    static __inline void						\
    atomic_subtract_##type(volatile u_##type *p, u_##type v) {		\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
									\
    static __inline void						\
    atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
	__ATOMIC_ACQ();							\
    }									\
									\
    static __inline void						\
    atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__ATOMIC_REL();							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
    /* _ATOMIC_SUBTRACT */

_ATOMIC_SUBTRACT(int)
_ATOMIC_SUBTRACT(long)

#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int

#ifdef __powerpc64__
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long

#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#else
#define	atomic_subtract_ptr	atomic_subtract_int
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_int
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_int
#endif
#undef _ATOMIC_SUBTRACT
#undef __atomic_subtract_long
#undef __atomic_subtract_int

/*
 * atomic_store_rel(p, v)
 */
/* TODO -- see below */

/*
 * Old/original implementations that still need revisiting.
 */

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result, temp;

	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tlwarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stwcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cr0", "memory");

	return (result);
}

#ifdef __powerpc64__
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result, temp;

	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tldarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stdcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cr0", "memory");

	return (result);
}
#endif

#define	atomic_readandclear_32		atomic_readandclear_int

#ifdef __powerpc64__
#define	atomic_readandclear_64		atomic_readandclear_long

#define	atomic_readandclear_ptr		atomic_readandclear_long
#else
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{

	return ((u_long)atomic_readandclear_int((volatile u_int *)addr));
}

#define	atomic_readandclear_ptr		atomic_readandclear_int
#endif
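
/*
 * Illustrative sketch (hypothetical, not part of this header): draining a
 * pending-event word with atomic_readandclear_int(), which returns the old
 * value and leaves zero behind in one step. `pending' and process() are
 * made up for the example.
 *
 *	volatile u_int pending;
 *
 *	u_int events = atomic_readandclear_int(&pending);
 *	if (events != 0)
 *		process(events);
 */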

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define	ATOMIC_STORE_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(const volatile u_##TYPE *p)		\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	powerpc_lwsync();					\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	powerpc_lwsync();					\
	*p = v;							\
}

ATOMIC_STORE_LOAD(int)

#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int

#ifdef __powerpc64__
ATOMIC_STORE_LOAD(long)

#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long

#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#else
static __inline u_long
atomic_load_acq_long(const volatile u_long *addr)
{

	return ((u_long)atomic_load_acq_int((const volatile u_int *)addr));
}

static __inline void
atomic_store_rel_long(volatile u_long *addr, u_long val)
{

	atomic_store_rel_int((volatile u_int *)addr, (u_int)val);
}

#define	atomic_load_acq_ptr	atomic_load_acq_int
#define	atomic_store_rel_ptr	atomic_store_rel_int
#endif
#undef ATOMIC_STORE_LOAD
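
/*
 * Illustrative sketch (hypothetical, not part of this header): handing a
 * datum from one thread to another with store-release/load-acquire instead
 * of raw barriers. `shared_val', `shared_flag', compute() and use() are
 * made up for the example.
 *
 *	volatile u_int shared_val, shared_flag;
 *
 *	// publisher
 *	shared_val = compute();
 *	atomic_store_rel_int(&shared_flag, 1);	// release: val visible first
 *
 *	// observer
 *	while (atomic_load_acq_int(&shared_flag) == 0)
 *		;				// acquire: flag seen before val
 *	use(shared_val);
 */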

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
#ifdef ISA_206_ATOMICS
static __inline int
atomic_cmpset_char(volatile u_char *p, u_char cmpval, u_char newval)
{
	int	ret;

	__asm __volatile (
		"1:\tlbarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"stbcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stbcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_cmpset_short(volatile u_short *p, u_short cmpval, u_short newval)
{
	int	ret;

	__asm __volatile (
		"1:\tlharx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"sthcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"sthcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}
#else
static __inline int
atomic_cmpset_masked(uint32_t *p, uint32_t cmpval, uint32_t newval,
    uint32_t mask)
{
	int		ret;
	uint32_t	tmp;

	__asm __volatile (
		"1:\tlwarx %2, 0, %3\n\t"	/* load old value */
		"and %0, %2, %7\n\t"
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"andc %2, %2, %7\n\t"
		"or %2, %2, %5\n\t"
		"stwcx. %2, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %2, 0, %3\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p), "+&r" (tmp)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p),
		  "r" (mask)
		: "cr0", "memory");

	return (ret);
}

#define	_atomic_cmpset_masked_word(a,o,v,m) atomic_cmpset_masked(a, o, v, m)
#endif

static __inline int
atomic_cmpset_int(volatile u_int* p, u_int cmpval, u_int newval)
{
	int	ret;

	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
{
	int ret;

	__asm __volatile (
	    #ifdef __powerpc64__
		"1:\tldarx %0, 0, %2\n\t"	/* load old value */
		"cmpld %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"stdcx. %4, 0, %2\n\t"		/* attempt to store */
	    #else
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
	    #endif
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
	    #ifdef __powerpc64__
		"stdcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #else
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #endif
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}

#define	ATOMIC_CMPSET_ACQ_REL(type) \
    static __inline int \
    atomic_cmpset_acq_##type(volatile u_##type *p, \
	    u_##type cmpval, u_##type newval)\
    {\
	u_##type retval; \
	retval = atomic_cmpset_##type(p, cmpval, newval);\
	__ATOMIC_ACQ();\
	return (retval);\
    }\
    static __inline int \
    atomic_cmpset_rel_##type(volatile u_##type *p, \
	    u_##type cmpval, u_##type newval)\
    {\
	__ATOMIC_REL();\
	return (atomic_cmpset_##type(p, cmpval, newval));\
    }\
    struct hack

ATOMIC_CMPSET_ACQ_REL(int);
ATOMIC_CMPSET_ACQ_REL(long);

#ifdef ISA_206_ATOMICS
#define	atomic_cmpset_8		atomic_cmpset_char
#endif
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char

#ifdef ISA_206_ATOMICS
#define	atomic_cmpset_16	atomic_cmpset_short
#endif
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short

#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int

#ifdef __powerpc64__
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long

#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#else
#define	atomic_cmpset_ptr	atomic_cmpset_int
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_int
#endif
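
/*
 * Illustrative sketch (hypothetical, not part of this header): claiming
 * ownership of a resource exactly once with compare-and-set. Only the
 * caller that observes the 0 -> 1 transition proceeds. `owner' and
 * do_first_time_init() are made up for the example.
 *
 *	volatile u_int owner;		// 0 == unowned
 *
 *	if (atomic_cmpset_acq_int(&owner, 0, 1)) {
 *		// we won the race; acquire semantics order later accesses
 *		do_first_time_init();
 *	}
 */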

/*
 * Atomically compare the value stored at *p with *cmpval and, if the
 * two values are equal, update the value of *p with newval. Returns
 * nonzero on success; on failure, returns zero and writes the value
 * read from *p back into *cmpval.
 */
#ifdef ISA_206_ATOMICS
static __inline int
atomic_fcmpset_char(volatile u_char *p, u_char *cmpval, u_char newval)
{
	int	ret;

	__asm __volatile (
		"lbarx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"stbcx. %5, 0, %3\n\t"		/* attempt to store */
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
		"stbcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stbx %0, 0, %7\n\t"
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_fcmpset_short(volatile u_short *p, u_short *cmpval, u_short newval)
{
	int	ret;

	__asm __volatile (
		"lharx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"sthcx. %5, 0, %3\n\t"		/* attempt to store */
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
		"sthcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"sthx %0, 0, %7\n\t"
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
		: "cr0", "memory");

	return (ret);
}
#endif	/* ISA_206_ATOMICS */

static __inline int
atomic_fcmpset_int(volatile u_int *p, u_int *cmpval, u_int newval)
{
	int	ret;

	__asm __volatile (
		"lwarx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"stwcx. %5, 0, %3\n\t"		/* attempt to store */
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
		"stwcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stwx %0, 0, %7\n\t"
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_fcmpset_long(volatile u_long *p, u_long *cmpval, u_long newval)
{
	int ret;

	__asm __volatile (
	    #ifdef __powerpc64__
		"ldarx %0, 0, %3\n\t"		/* load old value */
		"cmpld %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"stdcx. %5, 0, %3\n\t"		/* attempt to store */
	    #else
		"lwarx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"stwcx. %5, 0, %3\n\t"		/* attempt to store */
	    #endif
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
	    #ifdef __powerpc64__
		"stdcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stdx %0, 0, %7\n\t"
	    #else
		"stwcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stwx %0, 0, %7\n\t"
	    #endif
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
		: "cr0", "memory");

	return (ret);
}

#define	ATOMIC_FCMPSET_ACQ_REL(type) \
    static __inline int \
    atomic_fcmpset_acq_##type(volatile u_##type *p, \
	    u_##type *cmpval, u_##type newval)\
    {\
	u_##type retval; \
	retval = atomic_fcmpset_##type(p, cmpval, newval);\
	__ATOMIC_ACQ();\
	return (retval);\
    }\
    static __inline int \
    atomic_fcmpset_rel_##type(volatile u_##type *p, \
	    u_##type *cmpval, u_##type newval)\
    {\
	__ATOMIC_REL();\
	return (atomic_fcmpset_##type(p, cmpval, newval));\
    }\
    struct hack

ATOMIC_FCMPSET_ACQ_REL(int);
ATOMIC_FCMPSET_ACQ_REL(long);

#ifdef ISA_206_ATOMICS
#define	atomic_fcmpset_8	atomic_fcmpset_char
#endif
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char

#ifdef ISA_206_ATOMICS
#define	atomic_fcmpset_16	atomic_fcmpset_short
#endif
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short

#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int

#ifdef __powerpc64__
#define	atomic_fcmpset_64	atomic_fcmpset_long
#define	atomic_fcmpset_acq_64	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_64	atomic_fcmpset_rel_long

#define	atomic_fcmpset_ptr	atomic_fcmpset_long
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_long
#else
#define	atomic_fcmpset_ptr	atomic_fcmpset_int
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_int
#endif
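
/*
 * Illustrative sketch (hypothetical, not part of this header): a retry loop
 * built on atomic_fcmpset_int(). Unlike cmpset, a failed fcmpset refreshes
 * `old' from *p, so the loop needs no separate reload. `counter' is made up
 * for the example.
 *
 *	volatile u_int counter;
 *
 *	u_int old = counter;
 *	while (!atomic_fcmpset_int(&counter, &old, old * 2))
 *		;	// `old' now holds the freshly read value
 */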

static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{
	u_int value;

	do {
		value = *p;
	} while (!atomic_cmpset_int(p, value, value + v));
	return (value);
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{
	u_long value;

	do {
		value = *p;
	} while (!atomic_cmpset_long(p, value, value + v));
	return (value);
}

static __inline u_int
atomic_swap_32(volatile u_int *p, u_int v)
{
	u_int prev;

	__asm __volatile(
	"1:	lwarx	%0,0,%2\n"
	"	stwcx.	%3,0,%2\n"
	"	bne-	1b\n"
	: "=&r" (prev), "+m" (*(volatile u_int *)p)
	: "r" (p), "r" (v)
	: "cr0", "memory");

	return (prev);
}

#ifdef __powerpc64__
static __inline u_long
atomic_swap_64(volatile u_long *p, u_long v)
{
	u_long prev;

	__asm __volatile(
	"1:	ldarx	%0,0,%2\n"
	"	stdcx.	%3,0,%2\n"
	"	bne-	1b\n"
	: "=&r" (prev), "+m" (*(volatile u_long *)p)
	: "r" (p), "r" (v)
	: "cr0", "memory");

	return (prev);
}
#endif

#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_swap_int		atomic_swap_32

#ifdef __powerpc64__
#define	atomic_fetchadd_64	atomic_fetchadd_long
#define	atomic_swap_long	atomic_swap_64
#define	atomic_swap_ptr		atomic_swap_64
#else
#define	atomic_swap_long(p,v)	atomic_swap_32((volatile u_int *)(p), v)
#define	atomic_swap_ptr(p,v)	atomic_swap_32((volatile u_int *)(p), v)
#endif
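
/*
 * Illustrative sketch (hypothetical, not part of this header): a ticket
 * dispenser built on atomic_fetchadd_int(), which returns the value *p held
 * before the addition. `next_ticket' is made up for the example.
 *
 *	volatile u_int next_ticket;
 *
 *	u_int my_ticket = atomic_fetchadd_int(&next_ticket, 1);
 *	// every caller gets a distinct, monotonically increasing ticket
 */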

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_int m = (1u << (v & 0x1f));
	u_int res;
	u_int tmp;

	__asm __volatile(
	"1:	lwarx	%0,0,%3\n"
	"	and	%1,%0,%4\n"
	"	or	%0,%0,%4\n"
	"	stwcx.	%0,0,%3\n"
	"	bne-	1b\n"
	: "=&r"(tmp), "=&r"(res), "+m"(*p)
	: "r"(p), "r"(m)
	: "cr0", "memory");

	return (res != 0);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_int m = (1u << (v & 0x1f));
	u_int res;
	u_int tmp;

	__asm __volatile(
	"1:	lwarx	%0,0,%3\n"
	"	and	%1,%0,%4\n"
	"	andc	%0,%0,%4\n"
	"	stwcx.	%0,0,%3\n"
	"	bne-	1b\n"
	: "=&r"(tmp), "=&r"(res), "+m"(*p)
	: "r"(p), "r"(m)
	: "cr0", "memory");

	return (res != 0);
}

#ifdef __powerpc64__
static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{
	u_long m = (1ul << (v & 0x3f));
	u_long res;
	u_long tmp;

	__asm __volatile(
	"1:	ldarx	%0,0,%3\n"
	"	and	%1,%0,%4\n"
	"	or	%0,%0,%4\n"
	"	stdcx.	%0,0,%3\n"
	"	bne-	1b\n"
	: "=&r"(tmp), "=&r"(res), "+m"(*(volatile u_long *)p)
	: "r"(p), "r"(m)
	: "cr0", "memory");

	return (res != 0);
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{
	u_long m = (1ul << (v & 0x3f));
	u_long res;
	u_long tmp;

	__asm __volatile(
	"1:	ldarx	%0,0,%3\n"
	"	and	%1,%0,%4\n"
	"	andc	%0,%0,%4\n"
	"	stdcx.	%0,0,%3\n"
	"	bne-	1b\n"
	: "=&r"(tmp), "=&r"(res), "+m"(*p)
	: "r"(p), "r"(m)
	: "cr0", "memory");

	return (res != 0);
}
#else
static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{
	return (atomic_testandset_int((volatile u_int *)p, v));
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{
	return (atomic_testandclear_int((volatile u_int *)p, v));
}
#endif

#define	atomic_testandclear_32	atomic_testandclear_int
#define	atomic_testandset_32	atomic_testandset_int

static __inline int
atomic_testandset_acq_long(volatile u_long *p, u_int v)
{
	u_int a = atomic_testandset_long(p, v);
	__ATOMIC_ACQ();
	return (a);
}

#ifdef __powerpc64__
#define	atomic_testandclear_ptr		atomic_testandclear_long
#define	atomic_testandset_ptr		atomic_testandset_long
#else
#define	atomic_testandclear_ptr(p,v)					\
	atomic_testandclear_32((volatile u_int *)(p), v)
#define	atomic_testandset_ptr(p,v)					\
	atomic_testandset_32((volatile u_int *)(p), v)
#endif
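
/*
 * Illustrative sketch (hypothetical, not part of this header): allocating a
 * free slot from a bitmap with atomic_testandset_int(), which returns the
 * bit's previous state. `slot_map' and slot_alloc() are made up for the
 * example.
 *
 *	volatile u_int slot_map;	// bit i set == slot i in use
 *
 *	static int
 *	slot_alloc(void)
 *	{
 *		int i;
 *
 *		for (i = 0; i < 32; i++)
 *			if (atomic_testandset_int(&slot_map, i) == 0)
 *				return (i);	// bit was clear; slot is ours
 *		return (-1);			// all slots busy
 *	}
 */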

static __inline void
atomic_thread_fence_acq(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_rel(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__asm __volatile("sync" : : : "memory");
}
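
/*
 * Illustrative sketch (hypothetical, not part of this header): pairing a
 * plain store with an explicit release fence instead of using
 * atomic_store_rel_int(). `data', `flag' and prepare() are made up for the
 * example.
 *
 *	data = prepare();
 *	atomic_thread_fence_rel();	// order data before the flag below
 *	flag = 1;
 */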

#ifndef ISA_206_ATOMICS
#include <sys/_atomic_subword.h>
#define	atomic_cmpset_char	atomic_cmpset_8
#define	atomic_cmpset_short	atomic_cmpset_16
#define	atomic_fcmpset_char	atomic_fcmpset_8
#define	atomic_fcmpset_short	atomic_fcmpset_16
#define	atomic_set_short	atomic_set_16
#define	atomic_clear_short	atomic_clear_16
#else
#define	atomic_set_8		atomic_set_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_set_16		atomic_set_short
#define	atomic_clear_16		atomic_clear_short
#endif	/* ISA_206_ATOMICS */

/* These need sys/_atomic_subword.h on non-ISA-2.06-atomic platforms. */
ATOMIC_CMPSET_ACQ_REL(char);
ATOMIC_CMPSET_ACQ_REL(short);

ATOMIC_FCMPSET_ACQ_REL(char);
ATOMIC_FCMPSET_ACQ_REL(short);

#undef __ATOMIC_REL
#undef __ATOMIC_ACQ

#endif /* ! _MACHINE_ATOMIC_H_ */