/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License, Version 1.0 only
 * (the "License").  You may not use this file except in compliance
 * with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (c) 2009 by Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#include <atomic.h>

/*
 * These are the void returning variants
 */
/* BEGIN CSTYLED */
#define	ATOMIC_INC(name, type) \
	void atomic_inc_##name(volatile type *target)			\
	{								\
		(void) __atomic_add_fetch(target, 1, __ATOMIC_SEQ_CST); \
	}

ATOMIC_INC(8, uint8_t)
ATOMIC_INC(uchar, uchar_t)
ATOMIC_INC(16, uint16_t)
ATOMIC_INC(ushort, ushort_t)
ATOMIC_INC(32, uint32_t)
ATOMIC_INC(uint, uint_t)
ATOMIC_INC(ulong, ulong_t)
ATOMIC_INC(64, uint64_t)


#define	ATOMIC_DEC(name, type) \
	void atomic_dec_##name(volatile type *target)			\
	{								\
		(void) __atomic_sub_fetch(target, 1, __ATOMIC_SEQ_CST); \
	}

ATOMIC_DEC(8, uint8_t)
ATOMIC_DEC(uchar, uchar_t)
ATOMIC_DEC(16, uint16_t)
ATOMIC_DEC(ushort, ushort_t)
ATOMIC_DEC(32, uint32_t)
ATOMIC_DEC(uint, uint_t)
ATOMIC_DEC(ulong, ulong_t)
ATOMIC_DEC(64, uint64_t)


#define	ATOMIC_ADD(name, type1, type2) \
	void atomic_add_##name(volatile type1 *target, type2 bits)	\
	{								\
		(void) __atomic_add_fetch(target, bits, __ATOMIC_SEQ_CST); \
	}

ATOMIC_ADD(8, uint8_t, int8_t)
ATOMIC_ADD(char, uchar_t, signed char)
ATOMIC_ADD(16, uint16_t, int16_t)
ATOMIC_ADD(short, ushort_t, short)
ATOMIC_ADD(32, uint32_t, int32_t)
ATOMIC_ADD(int, uint_t, int)
ATOMIC_ADD(long, ulong_t, long)
ATOMIC_ADD(64, uint64_t, int64_t)

void
atomic_add_ptr(volatile void *target, ssize_t bits)
{
	(void) __atomic_add_fetch((void **)target, bits, __ATOMIC_SEQ_CST);
}


#define	ATOMIC_SUB(name, type1, type2) \
	void atomic_sub_##name(volatile type1 *target, type2 bits)	\
	{								\
		(void) __atomic_sub_fetch(target, bits, __ATOMIC_SEQ_CST); \
	}

ATOMIC_SUB(8, uint8_t, int8_t)
ATOMIC_SUB(char, uchar_t, signed char)
ATOMIC_SUB(16, uint16_t, int16_t)
ATOMIC_SUB(short, ushort_t, short)
ATOMIC_SUB(32, uint32_t, int32_t)
ATOMIC_SUB(int, uint_t, int)
ATOMIC_SUB(long, ulong_t, long)
ATOMIC_SUB(64, uint64_t, int64_t)

void
atomic_sub_ptr(volatile void *target, ssize_t bits)
{
	(void) __atomic_sub_fetch((void **)target, bits, __ATOMIC_SEQ_CST);
}


#define	ATOMIC_OR(name, type) \
	void atomic_or_##name(volatile type *target, type bits)	\
	{								\
		(void) __atomic_or_fetch(target, bits, __ATOMIC_SEQ_CST); \
	}

ATOMIC_OR(8, uint8_t)
ATOMIC_OR(uchar, uchar_t)
ATOMIC_OR(16, uint16_t)
ATOMIC_OR(ushort, ushort_t)
ATOMIC_OR(32, uint32_t)
ATOMIC_OR(uint, uint_t)
ATOMIC_OR(ulong, ulong_t)
ATOMIC_OR(64, uint64_t)
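
/*
 * Usage sketch (illustrative only, not part of this file's API): the
 * void returning variants are the natural fit when the caller does not
 * need the resulting value, e.g. when bumping a statistics counter.
 * The counter and function names below are hypothetical.
 *
 *	static volatile uint64_t rx_packets;
 *
 *	void
 *	record_rx(void)
 *	{
 *		atomic_inc_64(&rx_packets);
 *	}
 */
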
#define	ATOMIC_AND(name, type) \
	void atomic_and_##name(volatile type *target, type bits)	\
	{								\
		(void) __atomic_and_fetch(target, bits, __ATOMIC_SEQ_CST); \
	}

ATOMIC_AND(8, uint8_t)
ATOMIC_AND(uchar, uchar_t)
ATOMIC_AND(16, uint16_t)
ATOMIC_AND(ushort, ushort_t)
ATOMIC_AND(32, uint32_t)
ATOMIC_AND(uint, uint_t)
ATOMIC_AND(ulong, ulong_t)
ATOMIC_AND(64, uint64_t)


/*
 * New value returning variants
 */

#define	ATOMIC_INC_NV(name, type) \
	type atomic_inc_##name##_nv(volatile type *target)		\
	{								\
		return (__atomic_add_fetch(target, 1, __ATOMIC_SEQ_CST)); \
	}

ATOMIC_INC_NV(8, uint8_t)
ATOMIC_INC_NV(uchar, uchar_t)
ATOMIC_INC_NV(16, uint16_t)
ATOMIC_INC_NV(ushort, ushort_t)
ATOMIC_INC_NV(32, uint32_t)
ATOMIC_INC_NV(uint, uint_t)
ATOMIC_INC_NV(ulong, ulong_t)
ATOMIC_INC_NV(64, uint64_t)


#define	ATOMIC_DEC_NV(name, type) \
	type atomic_dec_##name##_nv(volatile type *target)		\
	{								\
		return (__atomic_sub_fetch(target, 1, __ATOMIC_SEQ_CST)); \
	}

ATOMIC_DEC_NV(8, uint8_t)
ATOMIC_DEC_NV(uchar, uchar_t)
ATOMIC_DEC_NV(16, uint16_t)
ATOMIC_DEC_NV(ushort, ushort_t)
ATOMIC_DEC_NV(32, uint32_t)
ATOMIC_DEC_NV(uint, uint_t)
ATOMIC_DEC_NV(ulong, ulong_t)
ATOMIC_DEC_NV(64, uint64_t)


#define	ATOMIC_ADD_NV(name, type1, type2) \
	type1 atomic_add_##name##_nv(volatile type1 *target, type2 bits) \
	{								\
		return (__atomic_add_fetch(target, bits, __ATOMIC_SEQ_CST)); \
	}

ATOMIC_ADD_NV(8, uint8_t, int8_t)
ATOMIC_ADD_NV(char, uchar_t, signed char)
ATOMIC_ADD_NV(16, uint16_t, int16_t)
ATOMIC_ADD_NV(short, ushort_t, short)
ATOMIC_ADD_NV(32, uint32_t, int32_t)
ATOMIC_ADD_NV(int, uint_t, int)
ATOMIC_ADD_NV(long, ulong_t, long)
ATOMIC_ADD_NV(64, uint64_t, int64_t)

void *
atomic_add_ptr_nv(volatile void *target, ssize_t bits)
{
	return (__atomic_add_fetch((void **)target, bits, __ATOMIC_SEQ_CST));
}


#define	ATOMIC_SUB_NV(name, type1, type2) \
	type1 atomic_sub_##name##_nv(volatile type1 *target, type2 bits) \
	{								\
		return (__atomic_sub_fetch(target, bits, __ATOMIC_SEQ_CST)); \
	}

ATOMIC_SUB_NV(8, uint8_t, int8_t)
ATOMIC_SUB_NV(char, uchar_t, signed char)
ATOMIC_SUB_NV(16, uint16_t, int16_t)
ATOMIC_SUB_NV(short, ushort_t, short)
ATOMIC_SUB_NV(32, uint32_t, int32_t)
ATOMIC_SUB_NV(int, uint_t, int)
ATOMIC_SUB_NV(long, ulong_t, long)
ATOMIC_SUB_NV(64, uint64_t, int64_t)

void *
atomic_sub_ptr_nv(volatile void *target, ssize_t bits)
{
	return (__atomic_sub_fetch((void **)target, bits, __ATOMIC_SEQ_CST));
}


#define	ATOMIC_OR_NV(name, type) \
	type atomic_or_##name##_nv(volatile type *target, type bits)	\
	{								\
		return (__atomic_or_fetch(target, bits, __ATOMIC_SEQ_CST)); \
	}

ATOMIC_OR_NV(8, uint8_t)
ATOMIC_OR_NV(uchar, uchar_t)
ATOMIC_OR_NV(16, uint16_t)
ATOMIC_OR_NV(ushort, ushort_t)
ATOMIC_OR_NV(32, uint32_t)
ATOMIC_OR_NV(uint, uint_t)
ATOMIC_OR_NV(ulong, ulong_t)
ATOMIC_OR_NV(64, uint64_t)


#define	ATOMIC_AND_NV(name, type) \
	type atomic_and_##name##_nv(volatile type *target, type bits)	\
	{								\
		return (__atomic_and_fetch(target, bits, __ATOMIC_SEQ_CST)); \
	}

ATOMIC_AND_NV(8, uint8_t)
ATOMIC_AND_NV(uchar, uchar_t)
ATOMIC_AND_NV(16, uint16_t)
ATOMIC_AND_NV(ushort, ushort_t)
ATOMIC_AND_NV(32, uint32_t)
ATOMIC_AND_NV(uint, uint_t)
ATOMIC_AND_NV(ulong, ulong_t)
ATOMIC_AND_NV(64, uint64_t)
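
/*
 * Usage sketch (illustrative only, not part of this file's API): the
 * _nv variants return the updated value atomically with the update,
 * which a separate load after one of the void variants could not
 * guarantee.  A hypothetical ID allocator:
 *
 *	static volatile uint32_t next_id;
 *
 *	uint32_t
 *	alloc_id(void)
 *	{
 *		return (atomic_inc_32_nv(&next_id));
 *	}
 */
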
/*
 * If *tgt == exp, set *tgt = des; return old value
 *
 * This may not look right on the first pass (or the sixteenth), but,
 * from https://gcc.gnu.org/onlinedocs/gcc/_005f_005fatomic-Builtins.html:
 * > If they are not equal, the operation is a read
 * > and the current contents of *ptr are written into *expected.
 * And, in the converse case, exp is already *target by definition.
 */

#define	ATOMIC_CAS(name, type) \
	type atomic_cas_##name(volatile type *target, type exp, type des) \
	{								\
		__atomic_compare_exchange_n(target, &exp, des, B_FALSE,	\
		    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);		\
		return (exp);						\
	}

ATOMIC_CAS(8, uint8_t)
ATOMIC_CAS(uchar, uchar_t)
ATOMIC_CAS(16, uint16_t)
ATOMIC_CAS(ushort, ushort_t)
ATOMIC_CAS(32, uint32_t)
ATOMIC_CAS(uint, uint_t)
ATOMIC_CAS(ulong, ulong_t)
ATOMIC_CAS(64, uint64_t)

void *
atomic_cas_ptr(volatile void *target, void *exp, void *des)
{
	__atomic_compare_exchange_n((void **)target, &exp, des, B_FALSE,
	    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
	return (exp);
}


/*
 * Swap target and return old value
 */

#define	ATOMIC_SWAP(name, type) \
	type atomic_swap_##name(volatile type *target, type bits)	\
	{								\
		return (__atomic_exchange_n(target, bits, __ATOMIC_SEQ_CST)); \
	}

ATOMIC_SWAP(8, uint8_t)
ATOMIC_SWAP(uchar, uchar_t)
ATOMIC_SWAP(16, uint16_t)
ATOMIC_SWAP(ushort, ushort_t)
ATOMIC_SWAP(32, uint32_t)
ATOMIC_SWAP(uint, uint_t)
ATOMIC_SWAP(ulong, ulong_t)
ATOMIC_SWAP(64, uint64_t)
/* END CSTYLED */

void *
atomic_swap_ptr(volatile void *target, void *bits)
{
	return (__atomic_exchange_n((void **)target, bits, __ATOMIC_SEQ_CST));
}

#ifndef _LP64
/*
 * On 32-bit targets a plain 64-bit load or store may be split into two
 * instructions and can tear, so these accesses go through the compiler
 * builtins.  Relaxed ordering suffices: the interface promises only
 * atomicity of the access, not ordering against other memory operations.
 */
uint64_t
atomic_load_64(volatile uint64_t *target)
{
	return (__atomic_load_n(target, __ATOMIC_RELAXED));
}

void
atomic_store_64(volatile uint64_t *target, uint64_t bits)
{
	__atomic_store_n(target, bits, __ATOMIC_RELAXED);
}
#endif

/*
 * Atomically set bit `value' of *target; return -1 if the bit was
 * already set, 0 otherwise.
 */
int
atomic_set_long_excl(volatile ulong_t *target, uint_t value)
{
	ulong_t bit = 1UL << value;
	ulong_t old = __atomic_fetch_or(target, bit, __ATOMIC_SEQ_CST);
	return ((old & bit) ? -1 : 0);
}

/*
 * Atomically clear bit `value' of *target; return -1 if the bit was
 * already clear, 0 otherwise.
 */
int
atomic_clear_long_excl(volatile ulong_t *target, uint_t value)
{
	ulong_t bit = 1UL << value;
	ulong_t old = __atomic_fetch_and(target, ~bit, __ATOMIC_SEQ_CST);
	return ((old & bit) ? 0 : -1);
}

/*
 * membar_enter() is acquire-like and membar_exit() is release-like; a
 * full sequentially consistent fence is a conservative implementation
 * of both.
 */
void
membar_enter(void)
{
	__atomic_thread_fence(__ATOMIC_SEQ_CST);
}

void
membar_exit(void)
{
	__atomic_thread_fence(__ATOMIC_SEQ_CST);
}

/* Store-store barrier: earlier stores are ordered before later stores. */
void
membar_producer(void)
{
	__atomic_thread_fence(__ATOMIC_RELEASE);
}

/* Load-load barrier: earlier loads are ordered before later loads. */
void
membar_consumer(void)
{
	__atomic_thread_fence(__ATOMIC_ACQUIRE);
}
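
/*
 * Usage sketch (illustrative only, not part of this file's API): the
 * usual compare-and-swap retry loop, here pushing onto a Treiber-style
 * stack.  The node type and push() are hypothetical.  The loop relies
 * on the return convention documented above atomic_cas_*: the swap
 * succeeded exactly when the returned (old) value equals the expected
 * one.  A real implementation would also have to consider ABA.
 *
 *	typedef struct node { struct node *next; } node_t;
 *
 *	void
 *	push(node_t *volatile *head, node_t *n)
 *	{
 *		node_t *old;
 *		do {
 *			old = *head;
 *			n->next = old;
 *		} while (atomic_cas_ptr(head, old, n) != old);
 *	}
 */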