/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License, Version 1.0 only
 * (the "License").  You may not use this file except in compliance
 * with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or https://opensource.org/licenses/CDDL-1.0.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (c) 2009 by Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#include <atomic.h>
/*
 * These are the void-returning variants
 */
#define	ATOMIC_INC(name, type) \
	void atomic_inc_##name(volatile type *target)			\
	{								\
		(void) __atomic_add_fetch(target, 1, __ATOMIC_SEQ_CST);	\
	}

/* BEGIN CSTYLED */
ATOMIC_INC(8, uint8_t)
ATOMIC_INC(16, uint16_t)
ATOMIC_INC(32, uint32_t)
ATOMIC_INC(64, uint64_t)
ATOMIC_INC(uchar, uchar_t)
ATOMIC_INC(ushort, ushort_t)
ATOMIC_INC(uint, uint_t)
ATOMIC_INC(ulong, ulong_t)
/* END CSTYLED */
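
/*
 * Usage sketch (illustrative only, not part of this file's API): the
 * macro above stamps out one function per type, e.g. atomic_inc_64().
 * A caller bumping a shared statistics counter might write:
 *
 *	static volatile uint64_t hits;
 *
 *	void
 *	record_hit(void)
 *	{
 *		atomic_inc_64(&hits);
 *	}
 *
 * "hits" and record_hit() are hypothetical names.
 */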


#define	ATOMIC_DEC(name, type) \
	void atomic_dec_##name(volatile type *target)			\
	{								\
		(void) __atomic_sub_fetch(target, 1, __ATOMIC_SEQ_CST);	\
	}

/* BEGIN CSTYLED */
ATOMIC_DEC(8, uint8_t)
ATOMIC_DEC(16, uint16_t)
ATOMIC_DEC(32, uint32_t)
ATOMIC_DEC(64, uint64_t)
ATOMIC_DEC(uchar, uchar_t)
ATOMIC_DEC(ushort, ushort_t)
ATOMIC_DEC(uint, uint_t)
ATOMIC_DEC(ulong, ulong_t)
/* END CSTYLED */


#define	ATOMIC_ADD(name, type1, type2) \
	void atomic_add_##name(volatile type1 *target, type2 bits)	\
	{								\
		(void) __atomic_add_fetch(target, bits, __ATOMIC_SEQ_CST); \
	}

void
atomic_add_ptr(volatile void *target, ssize_t bits)
{
	(void) __atomic_add_fetch((void **)target, bits, __ATOMIC_SEQ_CST);
}

/* BEGIN CSTYLED */
ATOMIC_ADD(8, uint8_t, int8_t)
ATOMIC_ADD(16, uint16_t, int16_t)
ATOMIC_ADD(32, uint32_t, int32_t)
ATOMIC_ADD(64, uint64_t, int64_t)
ATOMIC_ADD(char, uchar_t, signed char)
ATOMIC_ADD(short, ushort_t, short)
ATOMIC_ADD(int, uint_t, int)
ATOMIC_ADD(long, ulong_t, long)
/* END CSTYLED */
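
/*
 * Usage sketch (hypothetical names): the delta argument is signed, so
 * one entry point moves a counter in either direction, and
 * atomic_add_ptr() adds a byte offset to the stored pointer (this file
 * relies on GNU-style arithmetic on void * for that).
 *
 *	static volatile uint32_t in_flight;
 *
 *	void
 *	requests_retired(int32_t n)
 *	{
 *		atomic_add_32(&in_flight, -n);
 *	}
 */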


#define	ATOMIC_SUB(name, type1, type2) \
	void atomic_sub_##name(volatile type1 *target, type2 bits)	\
	{								\
		(void) __atomic_sub_fetch(target, bits, __ATOMIC_SEQ_CST); \
	}

void
atomic_sub_ptr(volatile void *target, ssize_t bits)
{
	(void) __atomic_sub_fetch((void **)target, bits, __ATOMIC_SEQ_CST);
}

/* BEGIN CSTYLED */
ATOMIC_SUB(8, uint8_t, int8_t)
ATOMIC_SUB(16, uint16_t, int16_t)
ATOMIC_SUB(32, uint32_t, int32_t)
ATOMIC_SUB(64, uint64_t, int64_t)
ATOMIC_SUB(char, uchar_t, signed char)
ATOMIC_SUB(short, ushort_t, short)
ATOMIC_SUB(int, uint_t, int)
ATOMIC_SUB(long, ulong_t, long)
/* END CSTYLED */


#define	ATOMIC_OR(name, type) \
	void atomic_or_##name(volatile type *target, type bits)		\
	{								\
		(void) __atomic_or_fetch(target, bits, __ATOMIC_SEQ_CST); \
	}

/* BEGIN CSTYLED */
ATOMIC_OR(8, uint8_t)
ATOMIC_OR(16, uint16_t)
ATOMIC_OR(32, uint32_t)
ATOMIC_OR(64, uint64_t)
ATOMIC_OR(uchar, uchar_t)
ATOMIC_OR(ushort, ushort_t)
ATOMIC_OR(uint, uint_t)
ATOMIC_OR(ulong, ulong_t)
/* END CSTYLED */


#define	ATOMIC_AND(name, type) \
	void atomic_and_##name(volatile type *target, type bits)	\
	{								\
		(void) __atomic_and_fetch(target, bits, __ATOMIC_SEQ_CST); \
	}

/* BEGIN CSTYLED */
ATOMIC_AND(8, uint8_t)
ATOMIC_AND(16, uint16_t)
ATOMIC_AND(32, uint32_t)
ATOMIC_AND(64, uint64_t)
ATOMIC_AND(uchar, uchar_t)
ATOMIC_AND(ushort, ushort_t)
ATOMIC_AND(uint, uint_t)
ATOMIC_AND(ulong, ulong_t)
/* END CSTYLED */
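
/*
 * Usage sketch (hypothetical names): the bitwise forms maintain a
 * shared flags word without a lock.  Callers pass a mask, not a bit
 * index:
 *
 *	#define	FLAG_DIRTY	0x01U
 *
 *	atomic_or_32(&flags, FLAG_DIRTY);	(set)
 *	atomic_and_32(&flags, ~FLAG_DIRTY);	(clear)
 */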


/*
 * These are the variants that return the new value
 */

#define	ATOMIC_INC_NV(name, type) \
	type atomic_inc_##name##_nv(volatile type *target)		\
	{								\
		return (__atomic_add_fetch(target, 1, __ATOMIC_SEQ_CST)); \
	}

/* BEGIN CSTYLED */
ATOMIC_INC_NV(8, uint8_t)
ATOMIC_INC_NV(16, uint16_t)
ATOMIC_INC_NV(32, uint32_t)
ATOMIC_INC_NV(64, uint64_t)
ATOMIC_INC_NV(uchar, uchar_t)
ATOMIC_INC_NV(ushort, ushort_t)
ATOMIC_INC_NV(uint, uint_t)
ATOMIC_INC_NV(ulong, ulong_t)
/* END CSTYLED */


#define	ATOMIC_DEC_NV(name, type) \
	type atomic_dec_##name##_nv(volatile type *target)		\
	{								\
		return (__atomic_sub_fetch(target, 1, __ATOMIC_SEQ_CST)); \
	}

/* BEGIN CSTYLED */
ATOMIC_DEC_NV(8, uint8_t)
ATOMIC_DEC_NV(16, uint16_t)
ATOMIC_DEC_NV(32, uint32_t)
ATOMIC_DEC_NV(64, uint64_t)
ATOMIC_DEC_NV(uchar, uchar_t)
ATOMIC_DEC_NV(ushort, ushort_t)
ATOMIC_DEC_NV(uint, uint_t)
ATOMIC_DEC_NV(ulong, ulong_t)
/* END CSTYLED */
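
/*
 * Usage sketch (hypothetical names): the _nv variants make reference
 * counting race-free, because the post-decrement value comes back from
 * the same atomic operation that performed the decrement.
 *
 *	void
 *	obj_rele(obj_t *op)
 *	{
 *		if (atomic_dec_32_nv(&op->ref) == 0)
 *			obj_free(op);
 *	}
 *
 * obj_t, obj_rele(), and obj_free() are assumed for illustration.
 */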


#define	ATOMIC_ADD_NV(name, type1, type2) \
	type1 atomic_add_##name##_nv(volatile type1 *target, type2 bits) \
	{								\
		return (__atomic_add_fetch(target, bits, __ATOMIC_SEQ_CST)); \
	}

void *
atomic_add_ptr_nv(volatile void *target, ssize_t bits)
{
	return (__atomic_add_fetch((void **)target, bits, __ATOMIC_SEQ_CST));
}

/* BEGIN CSTYLED */
ATOMIC_ADD_NV(8, uint8_t, int8_t)
ATOMIC_ADD_NV(16, uint16_t, int16_t)
ATOMIC_ADD_NV(32, uint32_t, int32_t)
ATOMIC_ADD_NV(64, uint64_t, int64_t)
ATOMIC_ADD_NV(char, uchar_t, signed char)
ATOMIC_ADD_NV(short, ushort_t, short)
ATOMIC_ADD_NV(int, uint_t, int)
ATOMIC_ADD_NV(long, ulong_t, long)
/* END CSTYLED */


#define	ATOMIC_SUB_NV(name, type1, type2) \
	type1 atomic_sub_##name##_nv(volatile type1 *target, type2 bits) \
	{								\
		return (__atomic_sub_fetch(target, bits, __ATOMIC_SEQ_CST)); \
	}

void *
atomic_sub_ptr_nv(volatile void *target, ssize_t bits)
{
	return (__atomic_sub_fetch((void **)target, bits, __ATOMIC_SEQ_CST));
}

/* BEGIN CSTYLED */
ATOMIC_SUB_NV(8, uint8_t, int8_t)
ATOMIC_SUB_NV(16, uint16_t, int16_t)
ATOMIC_SUB_NV(32, uint32_t, int32_t)
ATOMIC_SUB_NV(64, uint64_t, int64_t)
ATOMIC_SUB_NV(char, uchar_t, signed char)
ATOMIC_SUB_NV(short, ushort_t, short)
ATOMIC_SUB_NV(int, uint_t, int)
ATOMIC_SUB_NV(long, ulong_t, long)
/* END CSTYLED */


#define	ATOMIC_OR_NV(name, type) \
	type atomic_or_##name##_nv(volatile type *target, type bits)	\
	{								\
		return (__atomic_or_fetch(target, bits, __ATOMIC_SEQ_CST)); \
	}

/* BEGIN CSTYLED */
ATOMIC_OR_NV(8, uint8_t)
ATOMIC_OR_NV(16, uint16_t)
ATOMIC_OR_NV(32, uint32_t)
ATOMIC_OR_NV(64, uint64_t)
ATOMIC_OR_NV(uchar, uchar_t)
ATOMIC_OR_NV(ushort, ushort_t)
ATOMIC_OR_NV(uint, uint_t)
ATOMIC_OR_NV(ulong, ulong_t)
/* END CSTYLED */


#define	ATOMIC_AND_NV(name, type) \
	type atomic_and_##name##_nv(volatile type *target, type bits)	\
	{								\
		return (__atomic_and_fetch(target, bits, __ATOMIC_SEQ_CST)); \
	}

/* BEGIN CSTYLED */
ATOMIC_AND_NV(8, uint8_t)
ATOMIC_AND_NV(16, uint16_t)
ATOMIC_AND_NV(32, uint32_t)
ATOMIC_AND_NV(64, uint64_t)
ATOMIC_AND_NV(uchar, uchar_t)
ATOMIC_AND_NV(ushort, ushort_t)
ATOMIC_AND_NV(uint, uint_t)
ATOMIC_AND_NV(ulong, ulong_t)
/* END CSTYLED */


/*
 * If *tgt == exp, set *tgt = des; return old value
 *
 * This may not look right on the first pass (or the sixteenth), but,
 * from https://gcc.gnu.org/onlinedocs/gcc/_005f_005fatomic-Builtins.html:
 * > If they are not equal, the operation is a read
 * > and the current contents of *ptr are written into *expected.
 * And, in the converse case, exp is already *target by definition.
 */

#define	ATOMIC_CAS(name, type) \
	type atomic_cas_##name(volatile type *target, type exp, type des) \
	{								\
		__atomic_compare_exchange_n(target, &exp, des, B_FALSE,	\
		    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);		\
		return (exp);						\
	}

void *
atomic_cas_ptr(volatile void *target, void *exp, void *des)
{
	__atomic_compare_exchange_n((void **)target, &exp, des, B_FALSE,
	    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
	return (exp);
}

/* BEGIN CSTYLED */
ATOMIC_CAS(8, uint8_t)
ATOMIC_CAS(16, uint16_t)
ATOMIC_CAS(32, uint32_t)
ATOMIC_CAS(64, uint64_t)
ATOMIC_CAS(uchar, uchar_t)
ATOMIC_CAS(ushort, ushort_t)
ATOMIC_CAS(uint, uint_t)
ATOMIC_CAS(ulong, ulong_t)
/* END CSTYLED */
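
/*
 * Usage sketch (hypothetical names): the classic compare-and-swap
 * retry loop.  Because atomic_cas_*() returns the value that was
 * actually in *target, success is detected by comparing against the
 * expected old value.
 *
 *	uint64_t
 *	fetch_and_double(volatile uint64_t *p)
 *	{
 *		uint64_t old, new;
 *
 *		do {
 *			old = *p;
 *			new = old * 2;
 *		} while (atomic_cas_64(p, old, new) != old);
 *		return (old);
 *	}
 */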


/*
 * Swap target and return old value
 */

#define	ATOMIC_SWAP(name, type) \
	type atomic_swap_##name(volatile type *target, type bits)	\
	{								\
		return (__atomic_exchange_n(target, bits, __ATOMIC_SEQ_CST)); \
	}

/* BEGIN CSTYLED */
ATOMIC_SWAP(8, uint8_t)
ATOMIC_SWAP(16, uint16_t)
ATOMIC_SWAP(32, uint32_t)
ATOMIC_SWAP(64, uint64_t)
ATOMIC_SWAP(uchar, uchar_t)
ATOMIC_SWAP(ushort, ushort_t)
ATOMIC_SWAP(uint, uint_t)
ATOMIC_SWAP(ulong, ulong_t)
/* END CSTYLED */

void *
atomic_swap_ptr(volatile void *target, void *bits)
{
	return (__atomic_exchange_n((void **)target, bits, __ATOMIC_SEQ_CST));
}
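
/*
 * Usage sketch (hypothetical names): atomic_swap_ptr() hands off
 * ownership of a single slot, so a pending item is drained exactly
 * once even with concurrent consumers.
 *
 *	struct work *
 *	take_pending(volatile void *slot)
 *	{
 *		return (atomic_swap_ptr(slot, NULL));
 *	}
 */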

#ifndef _LP64
uint64_t
atomic_load_64(volatile uint64_t *target)
{
	return (__atomic_load_n(target, __ATOMIC_RELAXED));
}

void
atomic_store_64(volatile uint64_t *target, uint64_t bits)
{
	__atomic_store_n(target, bits, __ATOMIC_RELAXED);
}
#endif
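
/*
 * Why the wrappers above exist: on a 32-bit target a plain 64-bit load
 * or store may be split into two 32-bit accesses and tear.  Going
 * through __atomic_load_n()/__atomic_store_n() guarantees the access
 * is performed as a single unit; __ATOMIC_RELAXED suffices because
 * only atomicity, not ordering, is wanted here.  A reader stays safe
 * with (hypothetical name):
 *
 *	uint64_t
 *	stats_snapshot(volatile uint64_t *ctr)
 *	{
 *		return (atomic_load_64(ctr));
 *	}
 *
 * On _LP64 targets an aligned 64-bit access is already atomic, so no
 * out-of-line wrapper is needed.
 */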

int
atomic_set_long_excl(volatile ulong_t *target, uint_t value)
{
	ulong_t bit = 1UL << value;
	ulong_t old = __atomic_fetch_or(target, bit, __ATOMIC_SEQ_CST);
	return ((old & bit) ? -1 : 0);
}

int
atomic_clear_long_excl(volatile ulong_t *target, uint_t value)
{
	ulong_t bit = 1UL << value;
	ulong_t old = __atomic_fetch_and(target, ~bit, __ATOMIC_SEQ_CST);
	return ((old & bit) ? 0 : -1);
}
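
/*
 * Usage sketch: atomic_set_long_excl() behaves like a one-bit trylock;
 * the return value says whether this caller was the one to flip the
 * bit.  Note that "value" is a bit index, not a mask.
 *
 *	#define	LOCK_BIT	0
 *
 *	if (atomic_set_long_excl(&flags, LOCK_BIT) == 0) {
 *		... exclusive section ...
 *		(void) atomic_clear_long_excl(&flags, LOCK_BIT);
 *	}
 *
 * LOCK_BIT and flags are hypothetical.
 */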

void
membar_enter(void)
{
	__atomic_thread_fence(__ATOMIC_SEQ_CST);
}

void
membar_exit(void)
{
	__atomic_thread_fence(__ATOMIC_SEQ_CST);
}

void
membar_sync(void)
{
	__atomic_thread_fence(__ATOMIC_SEQ_CST);
}

void
membar_producer(void)
{
	__atomic_thread_fence(__ATOMIC_RELEASE);
}

void
membar_consumer(void)
{
	__atomic_thread_fence(__ATOMIC_ACQUIRE);
}
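
/*
 * Usage sketch (hypothetical names): membar_producer() and
 * membar_consumer() are the lightest pair that publishes data through
 * a flag.  The producer writes the payload, fences, then sets the
 * flag; the consumer polls the flag, fences, then reads the payload.
 * The sketch assumes "data" and "ready" are themselves read and
 * written atomically (e.g. aligned word-sized accesses).
 *
 *	Producer:
 *		data = compute();
 *		membar_producer();
 *		ready = 1;
 *
 *	Consumer:
 *		while (ready == 0)
 *			continue;
 *		membar_consumer();
 *		consume(data);
 */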