
Searched refs:__order (Results 1 – 9 of 9) sorted by relevance

/freebsd/contrib/llvm-project/libcxx/include/__atomic/
cxx_atomic_impl.h
  68  _LIBCPP_HIDE_FROM_ABI inline void __cxx_atomic_thread_fence(memory_order __order) {
  69    __atomic_thread_fence(__to_gcc_order(__order));
  72  _LIBCPP_HIDE_FROM_ABI inline void __cxx_atomic_signal_fence(memory_order __order) {
  73    __atomic_signal_fence(__to_gcc_order(__order));
  78  __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val, memory_order __order) {
  79    __atomic_store(std::addressof(__a->__a_value), std::addressof(__val), __to_gcc_order(__order));
  83  …OM_ABI void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val, memory_order __order) {
  84    __atomic_store(std::addressof(__a->__a_value), std::addressof(__val), __to_gcc_order(__order));
  88  …_ABI _Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  90    __atomic_load(std::addressof(__a->__a_value), std::addressof(__ret), __to_gcc_order(__order));
  [all …]
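
The hits above show libc++'s __cxx_atomic_* layer forwarding each operation to the matching GCC/Clang __atomic_* builtin, with __to_gcc_order translating the std::memory_order argument into the builtin's integer constant. A minimal standalone sketch of that forwarding pattern (raw_atomic_store and raw_atomic_load are illustrative names, not libc++ internals):

    // Sketch only: forward typed store/load to the GCC/Clang __atomic builtins,
    // passing the ordering as the builtin's __ATOMIC_* integer constant.
    template <class T>
    void raw_atomic_store(T* where, T value, int gcc_order) {
      __atomic_store(where, &value, gcc_order);   // generic form; T must be trivially copyable
    }

    template <class T>
    T raw_atomic_load(const T* where, int gcc_order) {
      T result;
      __atomic_load(where, &result, gcc_order);
      return result;
    }

    int main() {
      int flag = 0;
      raw_atomic_store(&flag, 1, __ATOMIC_RELEASE);          // like __cxx_atomic_store
      int seen = raw_atomic_load(&flag, __ATOMIC_ACQUIRE);   // like __cxx_atomic_load
      __atomic_thread_fence(__ATOMIC_SEQ_CST);               // the builtin behind __cxx_atomic_thread_fence
      return seen == 1 ? 0 : 1;
    }
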
atomic_ref.h
  124  _LIBCPP_HIDE_FROM_ABI void store(_Tp __desired, memory_order __order = memory_order::seq_cst) cons…
  125    _LIBCPP_CHECK_STORE_MEMORY_ORDER(__order) {
  127      __order == memory_order::relaxed || __order == memory_order::release || __order == memory_order::s…
  129    __atomic_store(__ptr_, __clear_padding(__desired), std::__to_gcc_order(__order));
  137  _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __order = memory_order::seq_cst) const noexcept
  138    _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__order) {
  140      __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::a…
  141      __order == memory_order::seq_cst,
  145    __atomic_load(__ptr_, __ret, std::__to_gcc_order(__order));
  151  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __desired, memory_order __order = memory_order::seq_cst) co…
  [all …]
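
These atomic_ref.h hits are members of C++20 std::atomic_ref: each takes a memory_order defaulting to seq_cst, asserts it against the allowed store/load orders, and then calls the __atomic_* builtin via std::__to_gcc_order. A short usage sketch of that public interface:

    #include <atomic>
    #include <cassert>

    int main() {
      int plain = 0;                      // an ordinary, suitably aligned object
      std::atomic_ref<int> ref(plain);    // C++20: atomic view over an existing object

      ref.store(42, std::memory_order_release);       // release is on the allowed store-order list
      int v   = ref.load(std::memory_order_acquire);  // pairs with the release store
      int old = ref.exchange(7);                      // defaulted order: memory_order::seq_cst
      assert(v == 42 && old == 42);
      return 0;
    }
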
to_gcc_order.h
  24  _LIBCPP_HIDE_FROM_ABI inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  26    return __order == memory_order_relaxed
  28      : (__order == memory_order_acquire
  30      : (__order == memory_order_release
  32      : (__order == memory_order_seq_cst
  34      … : (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL : __ATOMIC_CONSUME))));
  37  _LIBCPP_HIDE_FROM_ABI inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  39    return __order == memory_order_relaxed
  41      : (__order == memory_order_acquire
  43      : (__order == memory_order_release
  [all …]
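
The listing truncates most of __to_gcc_order's branches, but the chain is a plain translation from std::memory_order to the builtins' __ATOMIC_* constants, ending in the __ATOMIC_ACQ_REL / __ATOMIC_CONSUME branch shown. A sketch of that mapping written as a switch, assuming each truncated branch maps the order to its same-named constant:

    #include <atomic>

    // Assumed mapping: each std::memory_order goes to its same-named __ATOMIC_* constant.
    constexpr int to_gcc_order(std::memory_order order) {
      switch (order) {
      case std::memory_order_relaxed: return __ATOMIC_RELAXED;
      case std::memory_order_consume: return __ATOMIC_CONSUME;
      case std::memory_order_acquire: return __ATOMIC_ACQUIRE;
      case std::memory_order_release: return __ATOMIC_RELEASE;
      case std::memory_order_acq_rel: return __ATOMIC_ACQ_REL;
      case std::memory_order_seq_cst: return __ATOMIC_SEQ_CST;
      }
      return __ATOMIC_SEQ_CST;  // unreachable for valid enumerators
    }

    static_assert(to_gcc_order(std::memory_order_release) == __ATOMIC_RELEASE);

The separate __to_gcc_failure_order handles the failure ordering of compare-exchange, where the builtins do not accept a release component.
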
atomic_sync.h
  147  __atomic_wait_unless(const _AtomicWaitable& __a, _Poll&& __poll, memory_order __order) {
  149    …atomic_wait_poll_impl<_AtomicWaitable, __decay_t<_Poll> > __poll_impl = {__a, __poll, __order};
  150    …atomic_wait_backoff_impl<_AtomicWaitable, __decay_t<_Poll> > __backoff_fn = {__a, __poll, __order};
  169  …M_ABI void __atomic_wait_unless(const _AtomicWaitable& __a, _Poll&& __poll, memory_order __order) {
  170    __atomic_wait_poll_impl<_AtomicWaitable, __decay_t<_Poll> > __poll_fn = {__a, __poll, __order};
  197  __atomic_wait(_AtomicWaitable& __a, _Up __val, memory_order __order) {
  200    std::__atomic_wait_unless(__a, __nonatomic_equal, __order);
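
The atomic_sync.h hits bundle the waitable object, a poll predicate, and the caller's memory_order into poll and backoff functors, so the wait can spin briefly before blocking. This is not libc++'s implementation, only a standalone sketch of the same poll-then-block idea, using C++20 std::atomic::wait as the blocking slow path:

    #include <atomic>

    // Spin on a predicate a bounded number of times, then block on atomic::wait.
    template <class Pred>
    void wait_until(std::atomic<int>& a, Pred poll, std::memory_order order) {
      for (int spins = 0; spins < 64; ++spins)   // fast path: cheap polling
        if (poll(a.load(order)))
          return;
      for (;;) {                                 // slow path: block until the value changes
        int observed = a.load(order);
        if (poll(observed))
          return;
        a.wait(observed, order);                 // wakes when the stored value differs from observed
      }
    }

A caller would pass something like [](int v) { return v != 0; } with memory_order_acquire, analogous to the __nonatomic_equal predicate built by __atomic_wait at lines 197–200.
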
atomic_flag.h
  87  …IDE_FROM_ABI _LIBCPP_ATOMIC_FLAG_TYPE __atomic_load(const atomic_flag& __a, memory_order __order) {
  88    return std::__cxx_atomic_load(&__a.__a_, __order);
  92  __atomic_load(const volatile atomic_flag& __a, memory_order __order) {
  93    return std::__cxx_atomic_load(&__a.__a_, __order);
atomic_base.h
  199  …HIDE_FROM_ABI _Tp __atomic_load(const __atomic_base<_Tp, _IsIntegral>& __a, memory_order __order) {
  200    return __a.load(__order);
  204  __atomic_load(const volatile __atomic_base<_Tp, _IsIntegral>& __this, memory_order __order) {
  205    return __this.load(__order);
atomic.h
  178  auto __builtin_op = [](auto __a, auto __builtin_operand, auto __order) {
  179    return std::__cxx_atomic_fetch_add(__a, __builtin_operand, __order);
  186  auto __builtin_op = [](auto __a, auto __builtin_operand, auto __order) {
  187    return std::__cxx_atomic_fetch_sub(__a, __builtin_operand, __order);
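
The atomic.h hits are dispatch lambdas that pass the atomic object, the operand, and the caller's memory_order through to __cxx_atomic_fetch_add / __cxx_atomic_fetch_sub. At the public level this is the familiar fetch_add/fetch_sub interface; a short usage sketch:

    #include <atomic>
    #include <cassert>

    int main() {
      std::atomic<int> counter{0};
      int before = counter.fetch_add(5, std::memory_order_relaxed);  // returns the prior value (0)
      int prior  = counter.fetch_sub(2, std::memory_order_acq_rel);  // also returns the prior value (5)
      assert(before == 0 && prior == 5 && counter.load() == 3);
      return 0;
    }
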
/freebsd/contrib/llvm-project/libcxx/src/include/
atomic_support.h
  48  inline _LIBCPP_HIDE_FROM_ABI void __libcpp_atomic_store(_ValueType* __dest, _FromType __val, int __order = _AO_Seq) {
  49    __atomic_store_n(__dest, __val, __order);
  58  inline _LIBCPP_HIDE_FROM_ABI _ValueType __libcpp_atomic_load(_ValueType const* __val, int __order = _AO_Seq) {
  59    return __atomic_load_n(__val, __order);
  63  inline _LIBCPP_HIDE_FROM_ABI _ValueType __libcpp_atomic_add(_ValueType* __val, _AddType __a, int __order = _AO_Seq) {
  64    return __atomic_add_fetch(__val, __a, __order);
  69  __libcpp_atomic_exchange(_ValueType* __target, _ValueType __value, int __order = _AO_Seq) {
  70    return __atomic_exchange_n(__target, __value, __order);
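
atomic_support.h is an internal header used by libc++'s own sources; its helpers take the ordering as a plain int (defaulting to _AO_Seq) and pass it unchanged to the "_n" builtins. A standalone sketch of that pattern with the __ATOMIC_* constants spelled out (store_seq, load_seq, and add_fetch_seq are illustrative names):

    // Thin wrappers over the "_n" builtins, taking the order as a plain int,
    // in the style of the __libcpp_atomic_* helpers above (fixed here to __ATOMIC_SEQ_CST).
    template <class T>
    void store_seq(T* dest, T value) {
      __atomic_store_n(dest, value, __ATOMIC_SEQ_CST);
    }

    template <class T>
    T load_seq(const T* src) {
      return __atomic_load_n(src, __ATOMIC_SEQ_CST);
    }

    template <class T>
    T add_fetch_seq(T* value, T delta) {
      return __atomic_add_fetch(value, delta, __ATOMIC_SEQ_CST);  // returns the updated value
    }

    int main() {
      long refcount = 0;
      store_seq(&refcount, 1L);
      long now = add_fetch_seq(&refcount, 1L);   // now == 2
      return load_seq(&refcount) == now ? 0 : 1;
    }
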
/freebsd/sys/sys/
stdatomic.h
  140  atomic_thread_fence(memory_order __order __unused)
  144    __c11_atomic_thread_fence(__order);
  146    __atomic_thread_fence(__order);
  153  atomic_signal_fence(memory_order __order __unused)
  157    __c11_atomic_signal_fence(__order);
  159    __atomic_signal_fence(__order);
  384  memory_order __order)
  386    return (atomic_exchange_explicit(&__object->__flag, 1, __order));
  390  atomic_flag_clear_explicit(volatile atomic_flag *__object, memory_order __order)
  393    atomic_store_explicit(&__object->__flag, 0, __order);
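
FreeBSD's sys/sys/stdatomic.h implements the C11 fences and the atomic_flag *_explicit functions, dispatching to the Clang __c11_atomic_* builtins or the GCC __atomic_* builtins depending on what the compiler provides, and building atomic_flag_test_and_set_explicit / atomic_flag_clear_explicit on exchange and store. A usage sketch of the equivalent standard interface (written in C++ for consistency with the examples above; the C calls are analogous):

    #include <atomic>

    int main() {
      std::atomic_flag flag = ATOMIC_FLAG_INIT;

      // test_and_set returns the previous value: false means the flag was clear (we "won").
      bool already_set = flag.test_and_set(std::memory_order_acquire);

      std::atomic_thread_fence(std::memory_order_seq_cst);  // full fence, as in atomic_thread_fence above
      std::atomic_signal_fence(std::memory_order_seq_cst);  // compiler-only barrier, no hardware fence

      flag.clear(std::memory_order_release);                // like atomic_flag_clear_explicit
      return already_set ? 1 : 0;
    }
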