/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#include <sys/atomic_common.h>

#ifndef __powerpc64__
#include <sys/_atomic64e.h>
#endif

/*
 * The __ATOMIC_REL/ACQ() macros provide memory barriers only in conjunction
 * with the atomic lXarx/stXcx. sequences below. They are not exposed outside
 * of this file. See also Appendix B.2 of Book II of the architecture manual.
 *
 * Note that not all Book-E processors accept the light-weight sync variant.
 * In particular, early models of E500 cores are known to wedge. Bank on all
 * 64-bit capable CPUs to accept lwsync properly and pessimize 32-bit CPUs
 * to use the heavier-weight sync.
 */

#ifdef __powerpc64__
#define mb()		__asm __volatile("sync" : : : "memory")
#define rmb()		__asm __volatile("lwsync" : : : "memory")
#define wmb()		__asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_REL()	__asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#else
#define mb()		__asm __volatile("sync" : : : "memory")
#define rmb()		__asm __volatile("sync" : : : "memory")
#define wmb()		__asm __volatile("sync" : : : "memory")
#define __ATOMIC_REL()	__asm __volatile("sync" : : : "memory")
#define __ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#endif

static __inline void
powerpc_lwsync(void)
{

#ifdef __powerpc64__
	__asm __volatile("lwsync" : : : "memory");
#else
	__asm __volatile("sync" : : : "memory");
#endif
}
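
/*
 * Editorial sketch (illustrative only, not normative): the acquire/release
 * variants defined throughout this file combine the plain ll/sc primitives
 * with the barriers above, roughly as follows.
 *
 *	acquire op:	<lXarx/stXcx. loop>; __ATOMIC_ACQ()	(op, then isync)
 *	release op:	__ATOMIC_REL(); <lXarx/stXcx. loop>	(lwsync/sync, then op)
 *	load_acq:	load; powerpc_lwsync()
 *	store_rel:	powerpc_lwsync(); store
 *
 * The isync after the conditional branch that consumes the stXcx. result
 * keeps later memory accesses from being performed early (acquire); the
 * lwsync (sync on 32-bit) ahead of the operation orders all prior loads and
 * stores before it (release).
 */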

/*
 * atomic_add(p, v)
 * { *p += v; }
 */

#define __atomic_add_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_int */

#ifdef __powerpc64__
#define __atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_long */
#else
#define	__atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	add	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_long */
#endif

#define	_ATOMIC_ADD(type)					\
    static __inline void					\
    atomic_add_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_add_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_add_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_add_##type(p, v, t);				\
    }								\
    /* _ATOMIC_ADD */

_ATOMIC_ADD(int)
_ATOMIC_ADD(long)

#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int

#ifdef __powerpc64__
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long

#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#else
#define	atomic_add_ptr		atomic_add_int
#define	atomic_add_acq_ptr	atomic_add_acq_int
#define	atomic_add_rel_ptr	atomic_add_rel_int
#endif
#undef _ATOMIC_ADD
#undef __atomic_add_long
#undef __atomic_add_int
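
/*
 * Illustrative usage (editorial sketch, not compiled): _ATOMIC_ADD(int)
 * above generates atomic_add_int(), atomic_add_acq_int() and
 * atomic_add_rel_int(); a typical consumer just bumps a shared counter.
 */
#if 0	/* example only */
static void
stats_record(volatile u_int *counter)
{
	/* *counter += 1, atomically with respect to other CPUs. */
	atomic_add_int(counter, 1);
}
#endif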

/*
 * atomic_clear(p, v)
 * { *p &= ~v; }
 */

#define __atomic_clear_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_int */

#ifdef __powerpc64__
#define __atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_long */
#else
#define	__atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	andc	%0, %0, %3\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_long */
#endif

#define	_ATOMIC_CLEAR(type)					\
    static __inline void					\
    atomic_clear_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_clear_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_clear_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_clear_##type(p, v, t);				\
    }								\
    /* _ATOMIC_CLEAR */

_ATOMIC_CLEAR(int)
_ATOMIC_CLEAR(long)

#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int

#ifdef __powerpc64__
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long

#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#else
#define	atomic_clear_ptr	atomic_clear_int
#define	atomic_clear_acq_ptr	atomic_clear_acq_int
#define	atomic_clear_rel_ptr	atomic_clear_rel_int
#endif
#undef _ATOMIC_CLEAR
#undef __atomic_clear_long
#undef __atomic_clear_int

/*
 * atomic_cmpset(p, o, n)
 */
/* TODO -- see below */

/*
 * atomic_load_acq(p)
 */
/* TODO -- see below */

/*
 * atomic_readandclear(p)
 */
/* TODO -- see below */

/*
 * atomic_set(p, v)
 * { *p |= v; }
 */

#define __atomic_set_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_int */

#ifdef __powerpc64__
#define __atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_long */
#else
#define	__atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	or	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_long */
#endif

#define	_ATOMIC_SET(type)					\
    static __inline void					\
    atomic_set_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_set_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_set_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_set_##type(p, v, t);				\
    }								\
    /* _ATOMIC_SET */

_ATOMIC_SET(int)
_ATOMIC_SET(long)

#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int

#ifdef __powerpc64__
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long

#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#else
#define	atomic_set_ptr		atomic_set_int
#define	atomic_set_acq_ptr	atomic_set_acq_int
#define	atomic_set_rel_ptr	atomic_set_rel_int
#endif
#undef _ATOMIC_SET
#undef __atomic_set_long
#undef __atomic_set_int

/*
 * atomic_subtract(p, v)
 * { *p -= v; }
 */

#define __atomic_subtract_int(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_int */

#ifdef __powerpc64__
#define __atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	ldarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stdcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_long */
#else
#define	__atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1:	lwarx	%0, 0, %2\n"				\
	"	subf	%0, %3, %0\n"				\
	"	stwcx.	%0, 0, %2\n"				\
	"	bne-	1b\n"					\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_long */
#endif

#define	_ATOMIC_SUBTRACT(type)						\
    static __inline void						\
    atomic_subtract_##type(volatile u_##type *p, u_##type v) {		\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
									\
    static __inline void						\
    atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
	__ATOMIC_ACQ();							\
    }									\
									\
    static __inline void						\
    atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__ATOMIC_REL();							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
    /* _ATOMIC_SUBTRACT */

_ATOMIC_SUBTRACT(int)
_ATOMIC_SUBTRACT(long)

#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int

#ifdef __powerpc64__
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long

#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#else
#define	atomic_subtract_ptr	atomic_subtract_int
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_int
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_int
#endif
#undef _ATOMIC_SUBTRACT
#undef __atomic_subtract_long
#undef __atomic_subtract_int

/*
 * atomic_store_rel(p, v)
 */
/* TODO -- see below */

/*
 * Old/original implementations that still need revisiting.
 */

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result,temp;

	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tlwarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stwcx. %1, 0, %3\n\t"      	/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cr0", "memory");

	return (result);
}

#ifdef __powerpc64__
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result,temp;

	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tldarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stdcx. %1, 0, %3\n\t"      	/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cr0", "memory");

	return (result);
}
#endif

#define	atomic_readandclear_32		atomic_readandclear_int

#ifdef __powerpc64__
#define	atomic_readandclear_64		atomic_readandclear_long

#define	atomic_readandclear_ptr		atomic_readandclear_long
#else
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{

	return ((u_long)atomic_readandclear_int((volatile u_int *)addr));
}

#define	atomic_readandclear_ptr		atomic_readandclear_int
#endif

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define	ATOMIC_STORE_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(const volatile u_##TYPE *p)		\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	powerpc_lwsync();					\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	powerpc_lwsync();					\
	*p = v;							\
}

ATOMIC_STORE_LOAD(int)

#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int

#ifdef __powerpc64__
ATOMIC_STORE_LOAD(long)

#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long

#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#else
static __inline u_long
atomic_load_acq_long(const volatile u_long *addr)
{

	return ((u_long)atomic_load_acq_int((const volatile u_int *)addr));
}

static __inline void
atomic_store_rel_long(volatile u_long *addr, u_long val)
{

	atomic_store_rel_int((volatile u_int *)addr, (u_int)val);
}

#define	atomic_load_acq_ptr	atomic_load_acq_int
#define	atomic_store_rel_ptr	atomic_store_rel_int
#endif
#undef ATOMIC_STORE_LOAD
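
/*
 * Illustrative usage (editorial sketch, not compiled): the release store
 * generated by ATOMIC_STORE_LOAD() above publishes everything written before
 * it, and the acquire load on another CPU observes that data once it sees
 * the flag; the powerpc_lwsync() calls provide the ordering.
 */
#if 0	/* example only */
static int shared_data;
static volatile u_int shared_flag;

static void
producer(void)
{
	shared_data = 42;
	atomic_store_rel_int(&shared_flag, 1);	/* data is visible first */
}

static int
consumer(void)
{
	while (atomic_load_acq_int(&shared_flag) == 0)
		;				/* data is read only after the flag */
	return (shared_data);
}
#endif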

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
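
/*
 * Illustrative usage (editorial sketch, not compiled): a minimal
 * test-and-set style lock built on the cmpset primitives defined below,
 * using the acquire variant to take the lock and a release store to drop it.
 */
#if 0	/* example only */
static void
example_lock(volatile u_int *lk)
{
	while (atomic_cmpset_acq_int(lk, 0, 1) == 0)
		;	/* spin until the 0 -> 1 transition succeeds */
}

static void
example_unlock(volatile u_int *lk)
{
	atomic_store_rel_int(lk, 0);
}
#endif
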
#ifdef ISA_206_ATOMICS
static __inline int
atomic_cmpset_char(volatile u_char *p, u_char cmpval, u_char newval)
{
	int	ret;

	__asm __volatile (
		"1:\tlbarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"stbcx. %4, 0, %2\n\t"      	/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stbcx. %0, 0, %2\n\t"       	/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_cmpset_short(volatile u_short *p, u_short cmpval, u_short newval)
{
	int	ret;

	__asm __volatile (
		"1:\tlharx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"sthcx. %4, 0, %2\n\t"      	/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"sthcx. %0, 0, %2\n\t"       	/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}
#else
static __inline int
atomic_cmpset_masked(uint32_t *p, uint32_t cmpval, uint32_t newval,
    uint32_t mask)
{
	int		ret;
	uint32_t	tmp;

	__asm __volatile (
		"1:\tlwarx %2, 0, %3\n\t"	/* load old value */
		"and %0, %2, %7\n\t"
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"andc %2, %2, %7\n\t"
		"or %2, %2, %5\n\t"
		"stwcx. %2, 0, %3\n\t"      	/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %2, 0, %3\n\t"       	/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p), "+&r" (tmp)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p),
		  "r" (mask)
		: "cr0", "memory");

	return (ret);
}

#define	_atomic_cmpset_masked_word(a,o,v,m) atomic_cmpset_masked(a, o, v, m)
#endif

static __inline int
atomic_cmpset_int(volatile u_int* p, u_int cmpval, u_int newval)
{
	int	ret;

	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"      	/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %0, 0, %2\n\t"       	/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}
static __inline int
atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
{
	int ret;

	__asm __volatile (
	    #ifdef __powerpc64__
		"1:\tldarx %0, 0, %2\n\t"	/* load old value */
		"cmpld %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"stdcx. %4, 0, %2\n\t"		/* attempt to store */
	    #else
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne- 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
	    #endif
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
	    #ifdef __powerpc64__
		"stdcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #else
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
	    #endif
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");

	return (ret);
}

#define	ATOMIC_CMPSET_ACQ_REL(type) \
    static __inline int \
    atomic_cmpset_acq_##type(volatile u_##type *p, \
	    u_##type cmpval, u_##type newval)\
    {\
	u_##type retval; \
	retval = atomic_cmpset_##type(p, cmpval, newval);\
	__ATOMIC_ACQ();\
	return (retval);\
    }\
    static __inline int \
    atomic_cmpset_rel_##type(volatile u_##type *p, \
	    u_##type cmpval, u_##type newval)\
    {\
	__ATOMIC_REL();\
	return (atomic_cmpset_##type(p, cmpval, newval));\
    }\
    struct hack

ATOMIC_CMPSET_ACQ_REL(int);
ATOMIC_CMPSET_ACQ_REL(long);

#ifdef ISA_206_ATOMICS
#define	atomic_cmpset_8		atomic_cmpset_char
#endif
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char

#ifdef ISA_206_ATOMICS
#define	atomic_cmpset_16	atomic_cmpset_short
#endif
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short

#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int

#ifdef __powerpc64__
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long

#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#else
#define	atomic_cmpset_ptr	atomic_cmpset_int
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_int
#endif

/*
 * Atomically compare the value stored at *p with *cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed and sets *cmpval to the read value from *p,
 * nonzero otherwise.
 */
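
/*
 * Illustrative usage (editorial sketch, not compiled): because a failed
 * fcmpset writes the observed value back into *cmpval, the usual
 * compare-and-swap retry loop needs only one explicit load of the target.
 */
#if 0	/* example only */
static u_int
example_fetch_or(volatile u_int *p, u_int bits)
{
	u_int old;

	old = *p;
	while (!atomic_fcmpset_int(p, &old, old | bits))
		;	/* "old" was refreshed from *p; retry with new value */
	return (old);
}
#endif
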
#ifdef ISA_206_ATOMICS
static __inline int
atomic_fcmpset_char(volatile u_char *p, u_char *cmpval, u_char newval)
{
	int	ret;

	__asm __volatile (
		"lbarx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"stbcx. %5, 0, %3\n\t"      	/* attempt to store */
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
		"stbcx. %0, 0, %3\n\t"       	/* clear reservation (74xx) */
		"stbx %0, 0, %7\n\t"
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
		: "cr0", "memory");

	return (ret);
}

static __inline int
atomic_fcmpset_short(volatile u_short *p, u_short *cmpval, u_short newval)
{
	int	ret;

	__asm __volatile (
		"lharx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"sthcx. %5, 0, %3\n\t"      	/* attempt to store */
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
		"sthcx. %0, 0, %3\n\t"       	/* clear reservation (74xx) */
		"sthx %0, 0, %7\n\t"
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
		: "cr0", "memory");

	return (ret);
}
#endif	/* ISA_206_ATOMICS */

static __inline int
atomic_fcmpset_int(volatile u_int *p, u_int *cmpval, u_int newval)
{
	int	ret;

	__asm __volatile (
		"lwarx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"stwcx. %5, 0, %3\n\t"      	/* attempt to store */
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
		"stwcx. %0, 0, %3\n\t"       	/* clear reservation (74xx) */
		"stwx %0, 0, %7\n\t"
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
		: "cr0", "memory");

	return (ret);
}
static __inline int
atomic_fcmpset_long(volatile u_long *p, u_long *cmpval, u_long newval)
{
	int ret;

	__asm __volatile (
	    #ifdef __powerpc64__
		"ldarx %0, 0, %3\n\t"		/* load old value */
		"cmpld %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"stdcx. %5, 0, %3\n\t"		/* attempt to store */
	    #else
		"lwarx %0, 0, %3\n\t"		/* load old value */
		"cmplw %4, %0\n\t"		/* compare */
		"bne- 1f\n\t"			/* exit if not equal */
		"stwcx. %5, 0, %3\n\t"		/* attempt to store */
	    #endif
		"bne- 1f\n\t"			/* exit if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 2f\n\t"			/* we've succeeded */
		"1:\n\t"
	    #ifdef __powerpc64__
		"stdcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stdx %0, 0, %7\n\t"
	    #else
		"stwcx. %0, 0, %3\n\t"		/* clear reservation (74xx) */
		"stwx %0, 0, %7\n\t"
	    #endif
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"2:\n\t"
		: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
		: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
		: "cr0", "memory");

	return (ret);
}

#define	ATOMIC_FCMPSET_ACQ_REL(type) \
    static __inline int \
    atomic_fcmpset_acq_##type(volatile u_##type *p, \
	    u_##type *cmpval, u_##type newval)\
    {\
	u_##type retval; \
	retval = atomic_fcmpset_##type(p, cmpval, newval);\
	__ATOMIC_ACQ();\
	return (retval);\
    }\
    static __inline int \
    atomic_fcmpset_rel_##type(volatile u_##type *p, \
	    u_##type *cmpval, u_##type newval)\
    {\
	__ATOMIC_REL();\
	return (atomic_fcmpset_##type(p, cmpval, newval));\
    }\
    struct hack

ATOMIC_FCMPSET_ACQ_REL(int);
ATOMIC_FCMPSET_ACQ_REL(long);

#ifdef ISA_206_ATOMICS
#define	atomic_fcmpset_8	atomic_fcmpset_char
#endif
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char

#ifdef ISA_206_ATOMICS
#define	atomic_fcmpset_16	atomic_fcmpset_short
#endif
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short

#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int

#ifdef __powerpc64__
#define	atomic_fcmpset_64	atomic_fcmpset_long
#define	atomic_fcmpset_acq_64	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_64	atomic_fcmpset_rel_long

#define	atomic_fcmpset_ptr	atomic_fcmpset_long
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_long
#else
#define	atomic_fcmpset_ptr	atomic_fcmpset_int
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_int
#endif

static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{
	u_int value;

	do {
		value = *p;
	} while (!atomic_cmpset_int(p, value, value + v));
	return (value);
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{
	u_long value;

	do {
		value = *p;
	} while (!atomic_cmpset_long(p, value, value + v));
	return (value);
}
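
/*
 * Illustrative usage (editorial sketch, not compiled): atomic_fetchadd_int()
 * returns the value the word held before the addition, which is what a
 * ticket dispenser or unique-ID allocator wants.
 */
#if 0	/* example only */
static u_int
example_next_ticket(volatile u_int *tickets)
{
	return (atomic_fetchadd_int(tickets, 1));	/* old value is my ticket */
}
#endif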

static __inline u_int
atomic_swap_32(volatile u_int *p, u_int v)
{
	u_int prev;

	__asm __volatile(
	"1:	lwarx	%0,0,%2\n"
	"	stwcx.	%3,0,%2\n"
	"	bne-	1b\n"
	: "=&r" (prev), "+m" (*(volatile u_int *)p)
	: "r" (p), "r" (v)
	: "cr0", "memory");

	return (prev);
}

#ifdef __powerpc64__
static __inline u_long
atomic_swap_64(volatile u_long *p, u_long v)
{
	u_long prev;

	__asm __volatile(
	"1:	ldarx	%0,0,%2\n"
	"	stdcx.	%3,0,%2\n"
	"	bne-	1b\n"
	: "=&r" (prev), "+m" (*(volatile u_long *)p)
	: "r" (p), "r" (v)
	: "cr0", "memory");

	return (prev);
}
#endif

#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_swap_int		atomic_swap_32

#ifdef __powerpc64__
#define	atomic_fetchadd_64	atomic_fetchadd_long
#define	atomic_swap_long	atomic_swap_64
#define	atomic_swap_ptr		atomic_swap_64
#else
#define	atomic_swap_long(p,v)	atomic_swap_32((volatile u_int *)(p), v)
#define	atomic_swap_ptr(p,v)	atomic_swap_32((volatile u_int *)(p), v)
#endif

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_int m = (1u << (v & 0x1f));
	u_int res;
	u_int tmp;

	__asm __volatile(
	"1:	lwarx	%0,0,%3\n"
	"	and	%1,%0,%4\n"
	"	or	%0,%0,%4\n"
	"	stwcx.	%0,0,%3\n"
	"	bne-	1b\n"
	: "=&r"(tmp), "=&r"(res), "+m"(*p)
	: "r"(p), "r"(m)
	: "cr0", "memory");

	return (res != 0);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_int m = (1u << (v & 0x1f));
	u_int res;
	u_int tmp;

	__asm __volatile(
	"1:	lwarx	%0,0,%3\n"
	"	and	%1,%0,%4\n"
	"	andc	%0,%0,%4\n"
	"	stwcx.	%0,0,%3\n"
	"	bne-	1b\n"
	: "=&r"(tmp), "=&r"(res), "+m"(*p)
	: "r"(p), "r"(m)
	: "cr0", "memory");

	return (res != 0);
}

#ifdef __powerpc64__
static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{
	u_long m = (1ul << (v & 0x3f));
	u_long res;
	u_long tmp;

	__asm __volatile(
	"1:	ldarx	%0,0,%3\n"
	"	and	%1,%0,%4\n"
	"	or	%0,%0,%4\n"
	"	stdcx.	%0,0,%3\n"
	"	bne-	1b\n"
	: "=&r"(tmp), "=&r"(res), "+m"(*(volatile u_long *)p)
	: "r"(p), "r"(m)
	: "cr0", "memory");

	return (res != 0);
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{
	u_long m = (1ul << (v & 0x3f));
	u_long res;
	u_long tmp;

	__asm __volatile(
	"1:	ldarx	%0,0,%3\n"
	"	and	%1,%0,%4\n"
	"	andc	%0,%0,%4\n"
	"	stdcx.	%0,0,%3\n"
	"	bne-	1b\n"
	: "=&r"(tmp), "=&r"(res), "+m"(*p)
	: "r"(p), "r"(m)
	: "cr0", "memory");

	return (res != 0);
}
#else
static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{
	return (atomic_testandset_int((volatile u_int *)p, v));
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{
	return (atomic_testandclear_int((volatile u_int *)p, v));
}
#endif

#define	atomic_testandclear_32	atomic_testandclear_int
#define	atomic_testandset_32	atomic_testandset_int
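
/*
 * Illustrative usage (editorial sketch, not compiled): the bit index is
 * taken modulo the word width (v & 0x1f above, v & 0x3f for the 64-bit
 * variants), and the return value is the previous state of that bit.
 */
#if 0	/* example only */
static int
example_claim_slot(volatile u_int *bitmap, u_int slot)
{
	/* Returns 1 if slot (< 32) was already taken, 0 if we just claimed it. */
	return (atomic_testandset_int(bitmap, slot));
}
#endif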

static __inline int
atomic_testandset_acq_long(volatile u_long *p, u_int v)
{
	u_int a = atomic_testandset_long(p, v);
	__ATOMIC_ACQ();
	return (a);
}

#ifdef __powerpc64__
#define	atomic_testandclear_ptr		atomic_testandclear_long
#define	atomic_testandset_ptr		atomic_testandset_long
#else
#define	atomic_testandclear_ptr(p,v)					\
	atomic_testandclear_32((volatile u_int *)(p), v)
#define	atomic_testandset_ptr(p,v)					\
	atomic_testandset_32((volatile u_int *)(p), v)
#endif

static __inline void
atomic_thread_fence_acq(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_rel(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__asm __volatile("sync" : : : "memory");
}

#ifndef ISA_206_ATOMICS
#include <sys/_atomic_subword.h>
#define	atomic_cmpset_char	atomic_cmpset_8
#define	atomic_cmpset_short	atomic_cmpset_16
#define	atomic_fcmpset_char	atomic_fcmpset_8
#define	atomic_fcmpset_short	atomic_fcmpset_16
#endif

/* These need sys/_atomic_subword.h on non-ISA-2.06-atomic platforms. */
ATOMIC_CMPSET_ACQ_REL(char);
ATOMIC_CMPSET_ACQ_REL(short);

ATOMIC_FCMPSET_ACQ_REL(char);
ATOMIC_FCMPSET_ACQ_REL(short);

#undef __ATOMIC_REL
#undef __ATOMIC_ACQ

#endif /* ! _MACHINE_ATOMIC_H_ */