// SPDX-License-Identifier: CDDL-1.0
/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or https://opensource.org/licenses/CDDL-1.0.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#ifndef	_SYS_ATOMIC_H
#define	_SYS_ATOMIC_H

#include <sys/types.h>
#include <sys/inttypes.h>

#ifdef	__cplusplus
extern "C" {
#endif

#if defined(__STDC__)
/*
 * Increment target.
 */
extern void atomic_inc_8(volatile uint8_t *);
extern void atomic_inc_uchar(volatile uchar_t *);
extern void atomic_inc_16(volatile uint16_t *);
extern void atomic_inc_ushort(volatile ushort_t *);
extern void atomic_inc_32(volatile uint32_t *);
extern void atomic_inc_uint(volatile uint_t *);
extern void atomic_inc_ulong(volatile ulong_t *);
#if defined(_INT64_TYPE)
extern void atomic_inc_64(volatile uint64_t *);
#endif

/*
 * Decrement target
 */
extern void atomic_dec_8(volatile uint8_t *);
extern void atomic_dec_uchar(volatile uchar_t *);
extern void atomic_dec_16(volatile uint16_t *);
extern void atomic_dec_ushort(volatile ushort_t *);
extern void atomic_dec_32(volatile uint32_t *);
extern void atomic_dec_uint(volatile uint_t *);
extern void atomic_dec_ulong(volatile ulong_t *);
#if defined(_INT64_TYPE)
extern void atomic_dec_64(volatile uint64_t *);
#endif

/*
 * Add delta to target
 */
extern void atomic_add_8(volatile uint8_t *, int8_t);
extern void atomic_add_char(volatile uchar_t *, signed char);
extern void atomic_add_16(volatile uint16_t *, int16_t);
extern void atomic_add_short(volatile ushort_t *, short);
extern void atomic_add_32(volatile uint32_t *, int32_t);
extern void atomic_add_int(volatile uint_t *, int);
extern void atomic_add_ptr(volatile void *, ssize_t);
extern void atomic_add_long(volatile ulong_t *, long);
#if defined(_INT64_TYPE)
extern void atomic_add_64(volatile uint64_t *, int64_t);
#endif

/*
 * Subtract delta from target
 */
extern void atomic_sub_8(volatile uint8_t *, int8_t);
extern void atomic_sub_char(volatile uchar_t *, signed char);
extern void atomic_sub_16(volatile uint16_t *, int16_t);
extern void atomic_sub_short(volatile ushort_t *, short);
extern void atomic_sub_32(volatile uint32_t *, int32_t);
extern void atomic_sub_int(volatile uint_t *, int);
extern void atomic_sub_ptr(volatile void *, ssize_t);
extern void atomic_sub_long(volatile ulong_t *, long);
#if defined(_INT64_TYPE)
extern void atomic_sub_64(volatile uint64_t *, int64_t);
#endif

/*
 * logical OR bits with target
 */
extern void atomic_or_8(volatile uint8_t *, uint8_t);
extern void atomic_or_uchar(volatile uchar_t *, uchar_t);
extern void atomic_or_16(volatile uint16_t *, uint16_t);
extern void atomic_or_ushort(volatile ushort_t *, ushort_t);
extern void atomic_or_32(volatile uint32_t *, uint32_t);
extern void atomic_or_uint(volatile uint_t *, uint_t);
extern void atomic_or_ulong(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern void atomic_or_64(volatile uint64_t *, uint64_t);
#endif

/*
 * logical AND bits with target
 */
extern void atomic_and_8(volatile uint8_t *, uint8_t);
extern void atomic_and_uchar(volatile uchar_t *, uchar_t);
extern void atomic_and_16(volatile uint16_t *, uint16_t);
extern void atomic_and_ushort(volatile ushort_t *, ushort_t);
extern void atomic_and_32(volatile uint32_t *, uint32_t);
extern void atomic_and_uint(volatile uint_t *, uint_t);
extern void atomic_and_ulong(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern void atomic_and_64(volatile uint64_t *, uint64_t);
#endif

/*
 * As above, but return the new value.  Note that these _nv() variants are
 * substantially more expensive on some platforms than the no-return-value
 * versions above, so don't use them unless you really need to know the
 * new value *atomically* (e.g. when decrementing a reference count and
 * checking whether it went to zero).
 */
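
/*
 * Illustrative sketch (not part of this interface): dropping a reference
 * with an _nv() form so the caller can act on the new value atomically,
 * as described above.  The object layout and obj_free() helper are
 * hypothetical.
 *
 *	if (atomic_dec_32_nv(&obj->ref_count) == 0)
 *		obj_free(obj);
 */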

/*
 * Increment target and return new value.
 */
extern uint8_t atomic_inc_8_nv(volatile uint8_t *);
extern uchar_t atomic_inc_uchar_nv(volatile uchar_t *);
extern uint16_t atomic_inc_16_nv(volatile uint16_t *);
extern ushort_t atomic_inc_ushort_nv(volatile ushort_t *);
extern uint32_t atomic_inc_32_nv(volatile uint32_t *);
extern uint_t atomic_inc_uint_nv(volatile uint_t *);
extern ulong_t atomic_inc_ulong_nv(volatile ulong_t *);
#if defined(_INT64_TYPE)
extern uint64_t atomic_inc_64_nv(volatile uint64_t *);
#endif

/*
 * Decrement target and return new value.
 */
extern uint8_t atomic_dec_8_nv(volatile uint8_t *);
extern uchar_t atomic_dec_uchar_nv(volatile uchar_t *);
extern uint16_t atomic_dec_16_nv(volatile uint16_t *);
extern ushort_t atomic_dec_ushort_nv(volatile ushort_t *);
extern uint32_t atomic_dec_32_nv(volatile uint32_t *);
extern uint_t atomic_dec_uint_nv(volatile uint_t *);
extern ulong_t atomic_dec_ulong_nv(volatile ulong_t *);
#if defined(_INT64_TYPE)
extern uint64_t atomic_dec_64_nv(volatile uint64_t *);
#endif

/*
 * Add delta to target
 */
extern uint8_t atomic_add_8_nv(volatile uint8_t *, int8_t);
extern uchar_t atomic_add_char_nv(volatile uchar_t *, signed char);
extern uint16_t atomic_add_16_nv(volatile uint16_t *, int16_t);
extern ushort_t atomic_add_short_nv(volatile ushort_t *, short);
extern uint32_t atomic_add_32_nv(volatile uint32_t *, int32_t);
extern uint_t atomic_add_int_nv(volatile uint_t *, int);
extern void *atomic_add_ptr_nv(volatile void *, ssize_t);
extern ulong_t atomic_add_long_nv(volatile ulong_t *, long);
#if defined(_INT64_TYPE)
extern uint64_t atomic_add_64_nv(volatile uint64_t *, int64_t);
#endif

/*
 * Subtract delta from target
 */
extern uint8_t atomic_sub_8_nv(volatile uint8_t *, int8_t);
extern uchar_t atomic_sub_char_nv(volatile uchar_t *, signed char);
extern uint16_t atomic_sub_16_nv(volatile uint16_t *, int16_t);
extern ushort_t atomic_sub_short_nv(volatile ushort_t *, short);
extern uint32_t atomic_sub_32_nv(volatile uint32_t *, int32_t);
extern uint_t atomic_sub_int_nv(volatile uint_t *, int);
extern void *atomic_sub_ptr_nv(volatile void *, ssize_t);
extern ulong_t atomic_sub_long_nv(volatile ulong_t *, long);
#if defined(_INT64_TYPE)
extern uint64_t atomic_sub_64_nv(volatile uint64_t *, int64_t);
#endif

/*
 * logical OR bits with target and return new value.
 */
extern uint8_t atomic_or_8_nv(volatile uint8_t *, uint8_t);
extern uchar_t atomic_or_uchar_nv(volatile uchar_t *, uchar_t);
extern uint16_t atomic_or_16_nv(volatile uint16_t *, uint16_t);
extern ushort_t atomic_or_ushort_nv(volatile ushort_t *, ushort_t);
extern uint32_t atomic_or_32_nv(volatile uint32_t *, uint32_t);
extern uint_t atomic_or_uint_nv(volatile uint_t *, uint_t);
extern ulong_t atomic_or_ulong_nv(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern uint64_t atomic_or_64_nv(volatile uint64_t *, uint64_t);
#endif

/*
 * logical AND bits with target and return new value.
 */
extern uint8_t atomic_and_8_nv(volatile uint8_t *, uint8_t);
extern uchar_t atomic_and_uchar_nv(volatile uchar_t *, uchar_t);
extern uint16_t atomic_and_16_nv(volatile uint16_t *, uint16_t);
extern ushort_t atomic_and_ushort_nv(volatile ushort_t *, ushort_t);
extern uint32_t atomic_and_32_nv(volatile uint32_t *, uint32_t);
extern uint_t atomic_and_uint_nv(volatile uint_t *, uint_t);
extern ulong_t atomic_and_ulong_nv(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern uint64_t atomic_and_64_nv(volatile uint64_t *, uint64_t);
#endif

/*
 * If *arg1 == arg2, set *arg1 = arg3; return old value
 */
extern uint8_t atomic_cas_8(volatile uint8_t *, uint8_t, uint8_t);
extern uchar_t atomic_cas_uchar(volatile uchar_t *, uchar_t, uchar_t);
extern uint16_t atomic_cas_16(volatile uint16_t *, uint16_t, uint16_t);
extern ushort_t atomic_cas_ushort(volatile ushort_t *, ushort_t, ushort_t);
extern uint32_t atomic_cas_32(volatile uint32_t *, uint32_t, uint32_t);
extern uint_t atomic_cas_uint(volatile uint_t *, uint_t, uint_t);
extern void *atomic_cas_ptr(volatile void *, void *, void *);
extern ulong_t atomic_cas_ulong(volatile ulong_t *, ulong_t, ulong_t);
#if defined(_INT64_TYPE)
extern uint64_t atomic_cas_64(volatile uint64_t *, uint64_t, uint64_t);
#endif
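
/*
 * Illustrative sketch (not part of this interface): a typical
 * compare-and-swap retry loop, here raising a shared 64-bit maximum.
 * The compare-and-swap only succeeds (returns the expected old value)
 * if no other thread updated the target in the meantime; otherwise the
 * loop re-reads and retries.  The shared_max variable is hypothetical.
 *
 *	uint64_t old, new;
 *	do {
 *		old = atomic_load_64(&shared_max);
 *		new = (value > old) ? value : old;
 *	} while (atomic_cas_64(&shared_max, old, new) != old);
 */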

/*
 * Swap target and return old value
 */
extern uint8_t atomic_swap_8(volatile uint8_t *, uint8_t);
extern uchar_t atomic_swap_uchar(volatile uchar_t *, uchar_t);
extern uint16_t atomic_swap_16(volatile uint16_t *, uint16_t);
extern ushort_t atomic_swap_ushort(volatile ushort_t *, ushort_t);
extern uint32_t atomic_swap_32(volatile uint32_t *, uint32_t);
extern uint_t atomic_swap_uint(volatile uint_t *, uint_t);
extern void *atomic_swap_ptr(volatile void *, void *);
extern ulong_t atomic_swap_ulong(volatile ulong_t *, ulong_t);
#if defined(_INT64_TYPE)
extern uint64_t atomic_swap_64(volatile uint64_t *, uint64_t);
#endif
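
/*
 * Illustrative sketch (not part of this interface): taking exclusive
 * ownership of a published pointer by swapping NULL into its slot.
 * Exactly one thread observes the non-NULL value.  The pending_work
 * variable and process() helper are hypothetical.
 *
 *	void *work = atomic_swap_ptr(&pending_work, NULL);
 *	if (work != NULL)
 *		process(work);
 */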

/*
 * Atomically read variable.
 */
#define	atomic_load_char(p)	(*(volatile uchar_t *)(p))
#define	atomic_load_short(p)	(*(volatile ushort_t *)(p))
#define	atomic_load_int(p)	(*(volatile uint_t *)(p))
#define	atomic_load_long(p)	(*(volatile ulong_t *)(p))
#define	atomic_load_ptr(p)	(*(volatile __typeof(*p) *)(p))
#define	atomic_load_8(p)	(*(volatile uint8_t *)(p))
#define	atomic_load_16(p)	(*(volatile uint16_t *)(p))
#define	atomic_load_32(p)	(*(volatile uint32_t *)(p))
#ifdef _LP64
#define	atomic_load_64(p)	(*(volatile uint64_t *)(p))
#elif defined(_INT64_TYPE)
extern uint64_t atomic_load_64(volatile uint64_t *);
#endif

/*
 * Atomically write variable.
 */
#define	atomic_store_char(p, v)		\
	(*(volatile uchar_t *)(p) = (uchar_t)(v))
#define	atomic_store_short(p, v)	\
	(*(volatile ushort_t *)(p) = (ushort_t)(v))
#define	atomic_store_int(p, v)		\
	(*(volatile uint_t *)(p) = (uint_t)(v))
#define	atomic_store_long(p, v)		\
	(*(volatile ulong_t *)(p) = (ulong_t)(v))
#define	atomic_store_ptr(p, v)		\
	(*(volatile __typeof(*p) *)(p) = (v))
#define	atomic_store_8(p, v)		\
	(*(volatile uint8_t *)(p) = (uint8_t)(v))
#define	atomic_store_16(p, v)		\
	(*(volatile uint16_t *)(p) = (uint16_t)(v))
#define	atomic_store_32(p, v)		\
	(*(volatile uint32_t *)(p) = (uint32_t)(v))
#ifdef _LP64
#define	atomic_store_64(p, v)		\
	(*(volatile uint64_t *)(p) = (uint64_t)(v))
#elif defined(_INT64_TYPE)
extern void atomic_store_64(volatile uint64_t *, uint64_t);
#endif
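
/*
 * Illustrative sketch (not part of this interface): publishing and
 * reading a 64-bit statistic without tearing.  On _LP64 targets the
 * calls expand to plain volatile accesses; on 32-bit targets they fall
 * back to the out-of-line functions declared above.  The stat_bytes
 * variable is hypothetical.
 *
 *	atomic_store_64(&stat_bytes, total);
 *	...
 *	uint64_t snapshot = atomic_load_64(&stat_bytes);
 */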

/*
 * Perform an exclusive atomic bit set/clear on a target.
 * Returns 0 if bit was successfully set/cleared, or -1
 * if the bit was already set/cleared.
 */
extern int atomic_set_long_excl(volatile ulong_t *, uint_t);
extern int atomic_clear_long_excl(volatile ulong_t *, uint_t);
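
/*
 * Illustrative sketch (not part of this interface): claiming and later
 * releasing slot "id" of a bitmap, relying on the 0/-1 return value to
 * detect whether another thread already owns the bit.  The slot_map
 * variable is hypothetical.
 *
 *	if (atomic_set_long_excl(&slot_map, id) == 0) {
 *		... slot "id" is now exclusively ours ...
 *		(void) atomic_clear_long_excl(&slot_map, id);
 *	}
 */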

/*
 * Generic memory barrier used during lock entry, placed after the
 * memory operation that acquires the lock to guarantee that the lock
 * protects its data.  No stores from after the memory barrier will
 * reach visibility, and no loads from after the barrier will be
 * resolved, before the lock acquisition reaches global visibility.
 */
extern void membar_enter(void);

/*
 * Generic memory barrier used during lock exit, placed before the
 * memory operation that releases the lock to guarantee that the lock
 * protects its data.  All loads and stores issued before the barrier
 * will be resolved before the subsequent lock update reaches visibility.
 */
extern void membar_exit(void);
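
/*
 * Illustrative sketch (not part of this interface): a minimal spinlock
 * built from atomic_swap_8(), with membar_enter() placed after the
 * acquiring swap and membar_exit() placed before the releasing store,
 * as described above.  The lock variable is hypothetical.
 *
 *	while (atomic_swap_8(&lock, 1) != 0)
 *		;				(spin until the lock is free)
 *	membar_enter();				(after the acquiring operation)
 *	... critical section ...
 *	membar_exit();				(before the releasing store)
 *	atomic_store_8(&lock, 0);
 */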

/*
 * Make all stores and loads emitted prior to the barrier complete before
 * crossing it, while also making sure stores and loads emitted after the
 * barrier only start being executed after crossing it.
 */
extern void membar_sync(void);

/*
 * Arrange that all stores issued before this point in the code reach
 * global visibility before any stores that follow; useful in producer
 * modules that update a data item, then set a flag that it is available.
 * The memory barrier guarantees that the available flag is not visible
 * earlier than the updated data, i.e. it imposes store ordering.
 */
extern void membar_producer(void);
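
/*
 * Illustrative sketch (not part of this interface): a producer fills in
 * a record and only then publishes the "ready" flag, with
 * membar_producer() keeping the two stores in that order.  The record
 * and ready variables and compute() helper are hypothetical.
 *
 *	record.value = compute();
 *	membar_producer();
 *	atomic_store_8(&ready, 1);
 */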

/*
 * Arrange that all loads issued before this point in the code are
 * completed before any subsequent loads; useful in consumer modules
 * that check to see if data is available and read the data.
 * The memory barrier guarantees that the data is not sampled until
 * after the available flag has been seen, i.e. it imposes load ordering.
 */
extern void membar_consumer(void);
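
/*
 * Illustrative sketch (not part of this interface): the matching
 * consumer observes the "ready" flag first and only then reads the
 * record, with membar_consumer() keeping the two loads in that order.
 * The variables and use() helper are hypothetical.
 *
 *	if (atomic_load_8(&ready)) {
 *		membar_consumer();
 *		use(record.value);
 *	}
 */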
#endif  /* __STDC__ */

#ifdef	__cplusplus
}
#endif

#endif	/* _SYS_ATOMIC_H */