Lines Matching full:target

33	void atomic_inc_##name(volatile type *target) \
35		(void) __atomic_add_fetch(target, 1, __ATOMIC_SEQ_CST); \
49	void atomic_dec_##name(volatile type *target) \
51		(void) __atomic_sub_fetch(target, 1, __ATOMIC_SEQ_CST); \
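The matches above, and the groups that follow, appear to come from an atomic.c that implements the Solaris-style atomic_*() API on top of GCC's __atomic builtins; each ATOMIC_* generator macro stamps out one typed wrapper per invocation. A minimal sketch of the pattern, with the macro shape and the 64-bit instantiation assumed for illustration:

	#include <stdint.h>

	/* Assumed reconstruction of the generator; the real macro may differ. */
	#define	ATOMIC_INC(name, type)					\
		void atomic_inc_##name(volatile type *target)		\
		{							\
			(void) __atomic_add_fetch(target, 1,		\
			    __ATOMIC_SEQ_CST);				\
		}

	ATOMIC_INC(64, uint64_t)	/* defines atomic_inc_64() */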
65	void atomic_add_##name(volatile type1 *target, type2 bits) \
67		(void) __atomic_add_fetch(target, bits, __ATOMIC_SEQ_CST); \
71	atomic_add_ptr(volatile void *target, ssize_t bits)
73		(void) __atomic_add_fetch((void **)target, bits, __ATOMIC_SEQ_CST);
87	void atomic_sub_##name(volatile type1 *target, type2 bits) \
89		(void) __atomic_sub_fetch(target, bits, __ATOMIC_SEQ_CST); \
93	atomic_sub_ptr(volatile void *target, ssize_t bits)
95		(void) __atomic_sub_fetch((void **)target, bits, __ATOMIC_SEQ_CST);
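The _ptr variants take a volatile void * and cast it to void ** before adding a signed byte count; GCC treats void * arithmetic like char * arithmetic, so the stored pointer moves by bits bytes. A small usage sketch, assuming the wrappers are declared by a Solaris-style <atomic.h>:

	#include <atomic.h>		/* assumed header for the atomic_*() declarations */
	#include <sys/types.h>

	static char ring[4096];
	static void *volatile wptr = ring;

	/* Advance the shared write pointer by nbytes. */
	void
	advance_wptr(ssize_t nbytes)
	{
		atomic_add_ptr(&wptr, nbytes);
	}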
109	void atomic_or_##name(volatile type *target, type bits) \
111		(void) __atomic_or_fetch(target, bits, __ATOMIC_SEQ_CST); \
125	void atomic_and_##name(volatile type *target, type bits) \
127		(void) __atomic_and_fetch(target, bits, __ATOMIC_SEQ_CST); \
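Instantiations of the OR/AND macros give atomic bit set and clear on a shared word. A sketch assuming 32-bit instantiations named atomic_or_32() and atomic_and_32() and a Solaris-style <atomic.h>:

	#include <atomic.h>		/* assumed header */
	#include <stdint.h>

	#define	FLAG_DIRTY	0x01u	/* illustrative flag bits */
	#define	FLAG_BUSY	0x02u

	static volatile uint32_t obj_flags;

	void
	mark_dirty(void)
	{
		atomic_or_32(&obj_flags, FLAG_DIRTY);	/* atomically set a bit */
	}

	void
	clear_busy(void)
	{
		atomic_and_32(&obj_flags, ~FLAG_BUSY);	/* atomically clear a bit */
	}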
145	type atomic_inc_##name##_nv(volatile type *target) \
147		return (__atomic_add_fetch(target, 1, __ATOMIC_SEQ_CST)); \
161	type atomic_dec_##name##_nv(volatile type *target) \
163		return (__atomic_sub_fetch(target, 1, __ATOMIC_SEQ_CST)); \
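The _nv ("new value") variants return the updated value instead of void, which is what makes a drop-last-reference check race-free. A sketch assuming a 32-bit instantiation named atomic_dec_32_nv():

	#include <atomic.h>		/* assumed header */
	#include <stdint.h>

	typedef struct obj {
		volatile uint32_t ref;
		/* ... payload ... */
	} obj_t;

	void obj_free(obj_t *);		/* hypothetical destructor */

	void
	obj_rele(obj_t *op)
	{
		/* Only the thread that drops the count to zero frees the object. */
		if (atomic_dec_32_nv(&op->ref) == 0)
			obj_free(op);
	}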
177	type1 atomic_add_##name##_nv(volatile type1 *target, type2 bits) \
179		return (__atomic_add_fetch(target, bits, __ATOMIC_SEQ_CST)); \
183	atomic_add_ptr_nv(volatile void *target, ssize_t bits)
185		return (__atomic_add_fetch((void **)target, bits, __ATOMIC_SEQ_CST));
199	type1 atomic_sub_##name##_nv(volatile type1 *target, type2 bits) \
201		return (__atomic_sub_fetch(target, bits, __ATOMIC_SEQ_CST)); \
205	atomic_sub_ptr_nv(volatile void *target, ssize_t bits)
207		return (__atomic_sub_fetch((void **)target, bits, __ATOMIC_SEQ_CST));
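Because the add/sub _nv forms return the post-update value, a caller can carve a private range out of a shared offset without a separate read. A sketch assuming an atomic_add_64_nv() instantiation:

	#include <atomic.h>		/* assumed header */
	#include <stdint.h>

	static volatile uint64_t next_off;	/* shared allocation offset */

	/* Reserve len bytes; the new value minus len is the start of this
	 * caller's private range. */
	uint64_t
	reserve_range(uint64_t len)
	{
		return (atomic_add_64_nv(&next_off, len) - len);
	}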
221	type atomic_or_##name##_nv(volatile type *target, type bits) \
223		return (__atomic_or_fetch(target, bits, __ATOMIC_SEQ_CST)); \
237	type atomic_and_##name##_nv(volatile type *target, type bits) \
239		return (__atomic_and_fetch(target, bits, __ATOMIC_SEQ_CST)); \
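The bitwise _nv forms return the resulting mask, so a caller can set a flag and observe the combined state in one step. A sketch assuming an atomic_or_32_nv() instantiation:

	#include <atomic.h>		/* assumed header */
	#include <stdint.h>

	#define	ST_READY	0x1u	/* illustrative state bits */
	#define	ST_SHUTDOWN	0x2u

	static volatile uint32_t state;

	/* Set READY; returns nonzero if shutdown was already requested. */
	int
	mark_ready(void)
	{
		return ((atomic_or_32_nv(&state, ST_READY) & ST_SHUTDOWN) != 0);
	}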
259	 * And, in the converse case, exp is already *target by definition.
263	type atomic_cas_##name(volatile type *target, type exp, type des) \
265		__atomic_compare_exchange_n(target, &exp, des, B_FALSE, \
271	atomic_cas_ptr(volatile void *target, void *exp, void *des)
274		__atomic_compare_exchange_n((void **)target, &exp, des, B_FALSE,
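Per the comment at source line 259, atomic_cas_*() hands back the value it found in *target (exp already equals it on success), so callers retry until the returned value matches what they expected. A sketch of the canonical retry loop, assuming a 64-bit instantiation named atomic_cas_64():

	#include <atomic.h>		/* assumed header */
	#include <stdint.h>

	/* Atomically raise *target to at least val. */
	void
	atomic_max_64(volatile uint64_t *target, uint64_t val)
	{
		uint64_t old = *target;

		while (old < val) {
			uint64_t seen = atomic_cas_64(target, old, val);
			if (seen == old)
				break;		/* we installed val */
			old = seen;		/* lost the race; re-evaluate */
		}
	}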
290	 * Swap target and return old value
294	type atomic_swap_##name(volatile type *target, type bits) \
296		return (__atomic_exchange_n(target, bits, __ATOMIC_SEQ_CST)); \
309	atomic_swap_ptr(volatile void *target, void *bits)
311		return (__atomic_exchange_n((void **)target, bits, __ATOMIC_SEQ_CST));
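atomic_swap_ptr() unconditionally installs a new pointer and returns the previous one, which is enough to detach an entire producer list in one step. A sketch assuming the usual void * return type:

	#include <atomic.h>		/* assumed header */
	#include <stddef.h>

	typedef struct node {
		struct node *next;
		/* ... */
	} node_t;

	static void *volatile pending;	/* head of a singly linked list */

	/* Consumer: take ownership of everything queued so far. */
	node_t *
	detach_all(void)
	{
		return (atomic_swap_ptr(&pending, NULL));
	}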
316	atomic_load_64(volatile uint64_t *target)
318		return (__atomic_load_n(target, __ATOMIC_RELAXED));
322	atomic_store_64(volatile uint64_t *target, uint64_t bits)
324		return (__atomic_store_n(target, bits, __ATOMIC_RELAXED));
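Unlike the read-modify-write routines above, atomic_load_64() and atomic_store_64() use __ATOMIC_RELAXED: the 64-bit access cannot tear (which matters on 32-bit platforms), but no ordering with surrounding loads and stores is implied. A sketch of the typical statistics-counter use:

	#include <atomic.h>		/* assumed header */
	#include <stdint.h>

	static volatile uint64_t bytes_out;

	/* Writer side: assumed atomic_add_64() instantiation. */
	void
	account(uint64_t n)
	{
		atomic_add_64(&bytes_out, n);
	}

	/* Reader side: a tear-free snapshot; atomicity only, no ordering. */
	uint64_t
	bytes_out_snapshot(void)
	{
		return (atomic_load_64(&bytes_out));
	}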
329	atomic_set_long_excl(volatile ulong_t *target, uint_t value)
332		ulong_t old = __atomic_fetch_or(target, bit, __ATOMIC_SEQ_CST);
337	atomic_clear_long_excl(volatile ulong_t *target, uint_t value)
340		ulong_t old = __atomic_fetch_and(target, ~bit, __ATOMIC_SEQ_CST);
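atomic_set_long_excl() and atomic_clear_long_excl() fetch-or/fetch-and a single bit selected by value, which turns one ulong_t into an array of tiny test-and-set locks. The matched lines do not show the return statements; the sketch below assumes the usual Solaris contract of returning 0 when the bit changed and -1 when it was already set (or already clear):

	#include <atomic.h>		/* assumed header */
	#include <sys/types.h>

	static volatile ulong_t slot_map;	/* one bit per slot */

	/* Returns 0 if the slot was free and is now ours, -1 if taken. */
	int
	try_claim_slot(uint_t slot)
	{
		return (atomic_set_long_excl(&slot_map, slot));
	}

	void
	release_slot(uint_t slot)
	{
		(void) atomic_clear_long_excl(&slot_map, slot);
	}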