/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#ifndef	_SYS_ATOMIC_H
#define	_SYS_ATOMIC_H

#include <sys/types.h>
#include <sys/inttypes.h>

#ifdef	__cplusplus
extern "C" {
#endif

#if defined(__STDC__)
/*
 * Increment target.
 */
extern void atomic_inc_8(volatile uint8_t *);
extern void atomic_inc_uchar(volatile uchar_t *);
extern void atomic_inc_16(volatile uint16_t *);
extern void atomic_inc_ushort(volatile ushort_t *);
extern void atomic_inc_32(volatile uint32_t *);
extern void atomic_inc_uint(volatile uint_t *);
extern void atomic_inc_ulong(volatile ulong_t *);
#if defined(_INT64_TYPE)
extern void atomic_inc_64(volatile uint64_t *);
#endif

/*
 * Decrement target
 */
extern void atomic_dec_8(volatile uint8_t *);
extern void atomic_dec_uchar(volatile uchar_t *);
extern void atomic_dec_16(volatile uint16_t *);
extern void atomic_dec_ushort(volatile ushort_t *);
extern void atomic_dec_32(volatile uint32_t *);
extern void atomic_dec_uint(volatile uint_t *);
extern void atomic_dec_ulong(volatile ulong_t *);
#if defined(_INT64_TYPE)
extern void atomic_dec_64(volatile uint64_t *);
#endif

/*
 * Add delta to target
 */
extern void atomic_add_8(volatile uint8_t *, int8_t);
extern void atomic_add_char(volatile uchar_t *, signed char);
extern void atomic_add_16(volatile uint16_t *, int16_t);
extern void atomic_add_short(volatile ushort_t *, short);
extern void atomic_add_32(volatile uint32_t *, int32_t);
extern void atomic_add_int(volatile uint_t *, int);
extern void atomic_add_ptr(volatile void *, ssize_t);
extern void atomic_add_long(volatile ulong_t *, long);
#if defined(_INT64_TYPE)
extern void atomic_add_64(volatile uint64_t *, int64_t);
#endif

/*
 * Subtract delta from target
 */
extern void atomic_sub_8(volatile uint8_t *, int8_t);
extern void atomic_sub_char(volatile uchar_t *, signed char);
extern void atomic_sub_16(volatile uint16_t *, int16_t);
extern void atomic_sub_short(volatile ushort_t *, short);
extern void atomic_sub_32(volatile uint32_t *, int32_t);
extern void atomic_sub_int(volatile uint_t *, int);
extern void atomic_sub_ptr(volatile void *, ssize_t);
extern void atomic_sub_long(volatile ulong_t *, long);
#if defined(_INT64_TYPE)
extern void atomic_sub_64(volatile uint64_t *, int64_t);
#endif

/*
 * logical OR bits with target
 */
extern void atomic_or_8(volatile uint8_t *, uint8_t);
extern void atomic_or_uchar(volatile uchar_t *, uchar_t);
extern void atomic_or_16(volatile uint16_t *, uint16_t);
extern void atomic_or_ushort(volatile ushort_t *, ushort_t);
extern void atomic_or_32(volatile uint32_t *, uint32_t);
extern void atomic_or_uint(volatile uint_t *, uint_t);
extern void atomic_or_ulong(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern void atomic_or_64(volatile uint64_t *, uint64_t);
#endif

/*
 * logical AND bits with target
 */
extern void atomic_and_8(volatile uint8_t *, uint8_t);
extern void atomic_and_uchar(volatile uchar_t *, uchar_t);
extern void atomic_and_16(volatile uint16_t *, uint16_t);
extern void atomic_and_ushort(volatile ushort_t *, ushort_t);
extern void atomic_and_32(volatile uint32_t *, uint32_t);
extern void atomic_and_uint(volatile uint_t *, uint_t);
extern void atomic_and_ulong(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern void atomic_and_64(volatile uint64_t *, uint64_t);
#endif

/*
 * As above, but return the new value.  Note that these _nv() variants are
 * substantially more expensive on some platforms than the no-return-value
 * versions above, so don't use them unless you really need to know the
 * new value *atomically* (e.g. when decrementing a reference count and
 * checking whether it went to zero).
 */
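
/*
 * Illustrative sketch (not part of this header): the _nv() variants matter
 * when the caller must act on the post-update value atomically, the classic
 * case being a reference count that frees its object on the drop to zero.
 * The type and function names below are hypothetical and assume this header
 * has been included.
 *
 *	typedef struct obj {
 *		volatile uint32_t	o_refcnt;
 *		// ... payload ...
 *	} obj_t;
 *
 *	void
 *	obj_rele(obj_t *op)
 *	{
 *		// atomic_dec_32_nv() returns the decremented value, so
 *		// exactly one thread observes the transition to zero.
 *		if (atomic_dec_32_nv(&op->o_refcnt) == 0)
 *			obj_free(op);	// hypothetical destructor
 *	}
 */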

/*
 * Increment target and return new value.
 */
extern uint8_t atomic_inc_8_nv(volatile uint8_t *);
extern uchar_t atomic_inc_uchar_nv(volatile uchar_t *);
extern uint16_t atomic_inc_16_nv(volatile uint16_t *);
extern ushort_t atomic_inc_ushort_nv(volatile ushort_t *);
extern uint32_t atomic_inc_32_nv(volatile uint32_t *);
extern uint_t atomic_inc_uint_nv(volatile uint_t *);
extern ulong_t atomic_inc_ulong_nv(volatile ulong_t *);
#if defined(_INT64_TYPE)
extern uint64_t atomic_inc_64_nv(volatile uint64_t *);
#endif

/*
 * Decrement target and return new value.
 */
extern uint8_t atomic_dec_8_nv(volatile uint8_t *);
extern uchar_t atomic_dec_uchar_nv(volatile uchar_t *);
extern uint16_t atomic_dec_16_nv(volatile uint16_t *);
extern ushort_t atomic_dec_ushort_nv(volatile ushort_t *);
extern uint32_t atomic_dec_32_nv(volatile uint32_t *);
extern uint_t atomic_dec_uint_nv(volatile uint_t *);
extern ulong_t atomic_dec_ulong_nv(volatile ulong_t *);
#if defined(_INT64_TYPE)
extern uint64_t atomic_dec_64_nv(volatile uint64_t *);
#endif

/*
 * Add delta to target and return new value.
 */
extern uint8_t atomic_add_8_nv(volatile uint8_t *, int8_t);
extern uchar_t atomic_add_char_nv(volatile uchar_t *, signed char);
extern uint16_t atomic_add_16_nv(volatile uint16_t *, int16_t);
extern ushort_t atomic_add_short_nv(volatile ushort_t *, short);
extern uint32_t atomic_add_32_nv(volatile uint32_t *, int32_t);
extern uint_t atomic_add_int_nv(volatile uint_t *, int);
extern void *atomic_add_ptr_nv(volatile void *, ssize_t);
extern ulong_t atomic_add_long_nv(volatile ulong_t *, long);
#if defined(_INT64_TYPE)
extern uint64_t atomic_add_64_nv(volatile uint64_t *, int64_t);
#endif

/*
 * Subtract delta from target and return new value.
 */
extern uint8_t atomic_sub_8_nv(volatile uint8_t *, int8_t);
extern uchar_t atomic_sub_char_nv(volatile uchar_t *, signed char);
extern uint16_t atomic_sub_16_nv(volatile uint16_t *, int16_t);
extern ushort_t atomic_sub_short_nv(volatile ushort_t *, short);
extern uint32_t atomic_sub_32_nv(volatile uint32_t *, int32_t);
extern uint_t atomic_sub_int_nv(volatile uint_t *, int);
extern void *atomic_sub_ptr_nv(volatile void *, ssize_t);
extern ulong_t atomic_sub_long_nv(volatile ulong_t *, long);
#if defined(_INT64_TYPE)
extern uint64_t atomic_sub_64_nv(volatile uint64_t *, int64_t);
#endif

/*
 * logical OR bits with target and return new value.
 */
extern uint8_t atomic_or_8_nv(volatile uint8_t *, uint8_t);
extern uchar_t atomic_or_uchar_nv(volatile uchar_t *, uchar_t);
extern uint16_t atomic_or_16_nv(volatile uint16_t *, uint16_t);
extern ushort_t atomic_or_ushort_nv(volatile ushort_t *, ushort_t);
extern uint32_t atomic_or_32_nv(volatile uint32_t *, uint32_t);
extern uint_t atomic_or_uint_nv(volatile uint_t *, uint_t);
extern ulong_t atomic_or_ulong_nv(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern uint64_t atomic_or_64_nv(volatile uint64_t *, uint64_t);
#endif

/*
 * logical AND bits with target and return new value.
 */
extern uint8_t atomic_and_8_nv(volatile uint8_t *, uint8_t);
extern uchar_t atomic_and_uchar_nv(volatile uchar_t *, uchar_t);
extern uint16_t atomic_and_16_nv(volatile uint16_t *, uint16_t);
extern ushort_t atomic_and_ushort_nv(volatile ushort_t *, ushort_t);
extern uint32_t atomic_and_32_nv(volatile uint32_t *, uint32_t);
extern uint_t atomic_and_uint_nv(volatile uint_t *, uint_t);
extern ulong_t atomic_and_ulong_nv(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern uint64_t atomic_and_64_nv(volatile uint64_t *, uint64_t);
#endif

/*
 * If *arg1 == arg2, set *arg1 = arg3; return old value
 */
extern uint8_t atomic_cas_8(volatile uint8_t *, uint8_t, uint8_t);
extern uchar_t atomic_cas_uchar(volatile uchar_t *, uchar_t, uchar_t);
extern uint16_t atomic_cas_16(volatile uint16_t *, uint16_t, uint16_t);
extern ushort_t atomic_cas_ushort(volatile ushort_t *, ushort_t, ushort_t);
extern uint32_t atomic_cas_32(volatile uint32_t *, uint32_t, uint32_t);
extern uint_t atomic_cas_uint(volatile uint_t *, uint_t, uint_t);
extern void *atomic_cas_ptr(volatile void *, void *, void *);
extern ulong_t atomic_cas_ulong(volatile ulong_t *, ulong_t, ulong_t);
#if defined(_INT64_TYPE)
extern uint64_t atomic_cas_64(volatile uint64_t *, uint64_t, uint64_t);
#endif
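
/*
 * Illustrative sketch (not part of this header): atomic_cas_*() is the usual
 * building block for lock-free read-modify-write loops.  The routine below
 * maintains a running maximum; its name and variables are hypothetical and
 * assume this header has been included.
 *
 *	void
 *	update_max(volatile uint32_t *maxp, uint32_t val)
 *	{
 *		uint32_t old;
 *
 *		do {
 *			old = *maxp;
 *			if (val <= old)
 *				return;		// current maximum already larger
 *			// Install val only if *maxp still equals old;
 *			// atomic_cas_32() returns the value it found.
 *		} while (atomic_cas_32(maxp, old, val) != old);
 *	}
 */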

/*
 * Swap target and return old value
 */
extern uint8_t atomic_swap_8(volatile uint8_t *, uint8_t);
extern uchar_t atomic_swap_uchar(volatile uchar_t *, uchar_t);
extern uint16_t atomic_swap_16(volatile uint16_t *, uint16_t);
extern ushort_t atomic_swap_ushort(volatile ushort_t *, ushort_t);
extern uint32_t atomic_swap_32(volatile uint32_t *, uint32_t);
extern uint_t atomic_swap_uint(volatile uint_t *, uint_t);
extern void *atomic_swap_ptr(volatile void *, void *);
extern ulong_t atomic_swap_ulong(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern uint64_t atomic_swap_64(volatile uint64_t *, uint64_t);
#endif

/*
 * Perform an exclusive atomic bit set/clear on a target.
 * Returns 0 if bit was successfully set/cleared, or -1
 * if the bit was already set/cleared.
 */
extern int atomic_set_long_excl(volatile ulong_t *, uint_t);
extern int atomic_clear_long_excl(volatile ulong_t *, uint_t);
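
/*
 * Illustrative sketch (not part of this header): the *_excl() routines suit
 * one-shot claims, e.g. marking bit `idx' of a busy bitmap as owned by the
 * caller.  The helper name below is hypothetical.
 *
 *	// Returns nonzero if this thread won the claim on bit `idx'.
 *	int
 *	claim_slot(volatile ulong_t *busy, uint_t idx)
 *	{
 *		return (atomic_set_long_excl(busy, idx) == 0);
 *	}
 *
 *	// The owner later releases the slot; a -1 return here would
 *	// indicate a double release.
 *	// (void) atomic_clear_long_excl(busy, idx);
 */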

/*
 * Generic memory barrier used during lock entry, placed after the
 * memory operation that acquires the lock to guarantee that the lock
 * protects its data.  No stores from after the memory barrier will
 * reach visibility, and no loads from after the barrier will be
 * resolved, before the lock acquisition reaches global visibility.
 */
extern void membar_enter(void);

/*
 * Generic memory barrier used during lock exit, placed before the
 * memory operation that releases the lock to guarantee that the lock
 * protects its data.  All loads and stores issued before the barrier
 * will be resolved before the subsequent lock update reaches visibility.
 */
extern void membar_exit(void);
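
/*
 * Illustrative sketch (not part of this header): membar_enter() and
 * membar_exit() pair with an atomic operation to form a minimal
 * test-and-set spin lock.  The lock word and helper names are hypothetical.
 *
 *	void
 *	my_lock(volatile uint32_t *lk)
 *	{
 *		// Spin until the swap observes the lock previously free (0).
 *		while (atomic_swap_32(lk, 1) != 0)
 *			continue;
 *		membar_enter();		// acquire: fence before touching data
 *	}
 *
 *	void
 *	my_unlock(volatile uint32_t *lk)
 *	{
 *		membar_exit();		// release: order protected accesses first
 *		*lk = 0;
 *	}
 */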

/*
 * Arrange that all stores issued before this point in the code reach
 * global visibility before any stores that follow; useful in producer
 * modules that update a data item, then set a flag that it is available.
 * The memory barrier guarantees that the available flag is not visible
 * earlier than the updated data, i.e. it imposes store ordering.
 */
extern void membar_producer(void);

/*
 * Arrange that all loads issued before this point in the code are
 * completed before any subsequent loads; useful in consumer modules
 * that check to see if data is available and read the data.
 * The memory barrier guarantees that the data is not sampled until
 * after the available flag has been seen, i.e. it imposes load ordering.
 */
extern void membar_consumer(void);
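
/*
 * Illustrative sketch (not part of this header): membar_producer() and
 * membar_consumer() order a data/flag handoff between two threads.  The
 * shared variables below are hypothetical.
 *
 *	volatile uint32_t ready = 0;
 *	uint32_t data;
 *
 *	// Producer: publish the payload, then raise the ready flag.
 *	data = compute();		// hypothetical producer of the payload
 *	membar_producer();		// data must be visible before the flag
 *	ready = 1;
 *
 *	// Consumer: wait for the flag, then it is safe to read the payload.
 *	while (ready == 0)
 *		continue;
 *	membar_consumer();		// do not sample data before seeing the flag
 *	use(data);			// hypothetical consumer of the payload
 */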
#endif  /* __STDC__ */

#ifdef	__cplusplus
}
#endif

#endif	/* _SYS_ATOMIC_H */