//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___ATOMIC_ATOMIC_SYNC_H
#define _LIBCPP___ATOMIC_ATOMIC_SYNC_H

#include <__atomic/contention_t.h>
#include <__atomic/cxx_atomic_impl.h>
#include <__atomic/memory_order.h>
#include <__atomic/to_gcc_order.h>
#include <__chrono/duration.h>
#include <__config>
#include <__memory/addressof.h>
#include <__thread/poll_with_backoff.h>
#include <__thread/support.h>
#include <__type_traits/conjunction.h>
#include <__type_traits/decay.h>
#include <__type_traits/invoke.h>
#include <__type_traits/void_t.h>
#include <__utility/declval.h>
#include <cstring>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

// The customisation points that enable the following functions:
// - __atomic_wait
// - __atomic_wait_unless
// - __atomic_notify_one
// - __atomic_notify_all
// Note that std::atomic<T>::wait was back-ported to C++03.
// The implementations below look ugly in order to support C++03.
template <class _Tp, class = void>
struct __atomic_waitable_traits {
  template <class _AtomicWaitable>
  static void __atomic_load(_AtomicWaitable&&, memory_order) = delete;

  template <class _AtomicWaitable>
  static void __atomic_contention_address(_AtomicWaitable&&) = delete;
};

template <class _Tp, class = void>
struct __atomic_waitable : false_type {};

template <class _Tp>
struct __atomic_waitable< _Tp,
                          __void_t<decltype(__atomic_waitable_traits<__decay_t<_Tp> >::__atomic_load(
                                       std::declval<const _Tp&>(), std::declval<memory_order>())),
                                   decltype(__atomic_waitable_traits<__decay_t<_Tp> >::__atomic_contention_address(
                                       std::declval<const _Tp&>()))> > : true_type {};
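
// For illustration only (not part of this header): a hypothetical type
// _MyWaitable holding a __cxx_atomic_impl<__cxx_contention_t> member named
// __val_ could opt into __atomic_wait/__atomic_notify_* with a specialization
// along these lines:
//
//   template <>
//   struct __atomic_waitable_traits<_MyWaitable> {
//     static __cxx_contention_t __atomic_load(const _MyWaitable& __w, memory_order __order) {
//       return std::__cxx_atomic_load(&__w.__val_, __order);
//     }
//     static __cxx_atomic_impl<__cxx_contention_t> const volatile*
//     __atomic_contention_address(const _MyWaitable& __w) {
//       return std::addressof(__w.__val_);
//     }
//   };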

template <class _AtomicWaitable, class _Poll>
struct __atomic_wait_poll_impl {
  const _AtomicWaitable& __a_;
  _Poll __poll_;
  memory_order __order_;

  _LIBCPP_HIDE_FROM_ABI bool operator()() const {
    auto __current_val = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_load(__a_, __order_);
    return __poll_(__current_val);
  }
};

#ifndef _LIBCPP_HAS_NO_THREADS

_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*) _NOEXCEPT;
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*) _NOEXCEPT;
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t
__libcpp_atomic_monitor(void const volatile*) _NOEXCEPT;
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void
__libcpp_atomic_wait(void const volatile*, __cxx_contention_t) _NOEXCEPT;

_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void
__cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void
__cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t
__libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void
__libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t) _NOEXCEPT;

template <class _AtomicWaitable, class _Poll>
struct __atomic_wait_backoff_impl {
  const _AtomicWaitable& __a_;
  _Poll __poll_;
  memory_order __order_;

  using __waitable_traits = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >;

  _LIBCPP_AVAILABILITY_SYNC
  _LIBCPP_HIDE_FROM_ABI bool
  __update_monitor_val_and_poll(__cxx_atomic_contention_t const volatile*, __cxx_contention_t& __monitor_val) const {
    // In case the contention type happens to be __cxx_atomic_contention_t, i.e. __cxx_atomic_impl<int64_t>,
    // the platform wait directly monitors the atomic value itself.
    // `__poll_` takes the current value of the atomic as an in-out argument
    // and may modify it. After it returns, `__monitor_val` holds a value
    // which can be safely waited on by `std::__libcpp_atomic_wait` without any
    // ABA-style issues.
    __monitor_val = __waitable_traits::__atomic_load(__a_, __order_);
    return __poll_(__monitor_val);
  }

  _LIBCPP_AVAILABILITY_SYNC
  _LIBCPP_HIDE_FROM_ABI bool
  __update_monitor_val_and_poll(void const volatile* __contention_address, __cxx_contention_t& __monitor_val) const {
    // In case the contention type is anything else, the platform wait monitors a __cxx_atomic_contention_t
    // from the global contention pool, and the monitor value comes from __libcpp_atomic_monitor.
    __monitor_val      = std::__libcpp_atomic_monitor(__contention_address);
    auto __current_val = __waitable_traits::__atomic_load(__a_, __order_);
    return __poll_(__current_val);
  }

  _LIBCPP_AVAILABILITY_SYNC
  _LIBCPP_HIDE_FROM_ABI bool operator()(chrono::nanoseconds __elapsed) const {
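    // Backoff policy: after more than 64us of unsuccessful polling, take a monitor
    // value and block in the platform wait; between 4us and 64us, yield the thread;
    // under 4us, return immediately so the caller keeps spinning (polling).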
    if (__elapsed > chrono::microseconds(64)) {
      auto __contention_address = __waitable_traits::__atomic_contention_address(__a_);
      __cxx_contention_t __monitor_val;
      if (__update_monitor_val_and_poll(__contention_address, __monitor_val))
        return true;
      std::__libcpp_atomic_wait(__contention_address, __monitor_val);
    } else if (__elapsed > chrono::microseconds(4))
      __libcpp_thread_yield();
    else {
    } // poll
    return false;
  }
};

// The semantics of this function are similar to `atomic`'s
// `.wait(T old, std::memory_order order)`, but instead of having a hardcoded
// predicate (is the loaded value unequal to `old`?), the predicate function is
// specified as an argument. The loaded value is given as an in-out argument to
// the predicate. If the predicate function returns `true`,
// `__atomic_wait_unless` will return. If the predicate function returns
// `false`, it must set the argument to its current understanding of the atomic
// value. The predicate function must not return `false` spuriously.
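//
// For illustration only (not something this header defines): waiting for a
// hypothetical waitable __counter to reach a hypothetical threshold __target
// could look roughly like this (C++14 generic-lambda syntax used for brevity):
//
//   std::__atomic_wait_unless(
//       __counter, [__target](auto& __current) { return __current >= __target; }, memory_order_acquire);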
template <class _AtomicWaitable, class _Poll>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
__atomic_wait_unless(const _AtomicWaitable& __a, _Poll&& __poll, memory_order __order) {
  static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
  __atomic_wait_poll_impl<_AtomicWaitable, __decay_t<_Poll> > __poll_impl     = {__a, __poll, __order};
  __atomic_wait_backoff_impl<_AtomicWaitable, __decay_t<_Poll> > __backoff_fn = {__a, __poll, __order};
  std::__libcpp_thread_poll_with_backoff(__poll_impl, __backoff_fn);
}

template <class _AtomicWaitable>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void __atomic_notify_one(const _AtomicWaitable& __a) {
  static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
  std::__cxx_atomic_notify_one(__atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_contention_address(__a));
}

template <class _AtomicWaitable>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _AtomicWaitable& __a) {
  static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
  std::__cxx_atomic_notify_all(__atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_contention_address(__a));
}

#else // _LIBCPP_HAS_NO_THREADS

template <class _AtomicWaitable, class _Poll>
_LIBCPP_HIDE_FROM_ABI void __atomic_wait_unless(const _AtomicWaitable& __a, _Poll&& __poll, memory_order __order) {
  __atomic_wait_poll_impl<_AtomicWaitable, __decay_t<_Poll> > __poll_fn = {__a, __poll, __order};
  std::__libcpp_thread_poll_with_backoff(__poll_fn, __spinning_backoff_policy());
}

template <class _AtomicWaitable>
_LIBCPP_HIDE_FROM_ABI void __atomic_notify_one(const _AtomicWaitable&) {}

template <class _AtomicWaitable>
_LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _AtomicWaitable&) {}

#endif // _LIBCPP_HAS_NO_THREADS

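// Bytewise comparison of the object representations of __lhs and __rhs via memcmp;
// the waiting machinery below uses this instead of operator== so the comparison
// does not depend on a user-provided equality operator.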
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
  return std::memcmp(std::addressof(__lhs), std::addressof(__rhs), sizeof(_Tp)) == 0;
}

template <class _Tp>
struct __atomic_compare_unequal_to {
  _Tp __val_;
  _LIBCPP_HIDE_FROM_ABI bool operator()(const _Tp& __arg) const {
    return !std::__cxx_nonatomic_compare_equal(__arg, __val_);
  }
};

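// Waits until the value loaded from __a no longer compares equal (bytewise) to
// __val, matching the semantics of std::atomic<T>::wait(old, order).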
template <class _AtomicWaitable, class _Up>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
__atomic_wait(_AtomicWaitable& __a, _Up __val, memory_order __order) {
  static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
  __atomic_compare_unequal_to<_Up> __nonatomic_equal = {__val};
  std::__atomic_wait_unless(__a, __nonatomic_equal, __order);
}

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP___ATOMIC_ATOMIC_SYNC_H