xref: /freebsd/contrib/llvm-project/libcxx/include/__atomic/cxx_atomic_impl.h (revision a4e5e0106ac7145f56eb39a691e302cabb4635be)
1 //===----------------------------------------------------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #ifndef _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H
10 #define _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H
11 
12 #include <__atomic/is_always_lock_free.h>
13 #include <__atomic/memory_order.h>
14 #include <__config>
15 #include <__memory/addressof.h>
16 #include <__type_traits/conditional.h>
17 #include <__type_traits/is_assignable.h>
18 #include <__type_traits/is_trivially_copyable.h>
19 #include <__type_traits/remove_const.h>
20 #include <cstddef>
21 #include <cstring>
22 
23 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
24 #  pragma GCC system_header
25 #endif
26 
27 _LIBCPP_BEGIN_NAMESPACE_STD
28 
29 #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
30     defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
31 
32 // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
33 // the default operator= in an object is not volatile, a byte-by-byte copy
34 // is required.
// Assign __val to __a_value. The non-volatile overload can use the type's
// own copy assignment directly.
template <typename _Tp, typename _Tv> _LIBCPP_HIDE_FROM_ABI
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
  __a_value = __val;
}
// Volatile overload: an object's implicitly-declared copy assignment is not
// volatile-qualified, so it cannot be invoked on a volatile object. _Tp is
// guaranteed trivially copyable ([atomics.types.generic]p1), so a
// byte-by-byte copy through volatile char pointers is a valid substitute.
template <typename _Tp, typename _Tv> _LIBCPP_HIDE_FROM_ABI
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
  volatile char* __to         = reinterpret_cast<volatile char*>(std::addressof(__a_value));
  volatile char* __end = __to + sizeof(_Tp);
  volatile const char* __from = reinterpret_cast<volatile const char*>(std::addressof(__val));
  while (__to != __end)
    *__to++ = *__from++;
}
49 
50 #endif
51 
52 #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
53 
// Storage for one atomic object when the GCC __atomic_* builtins are used:
// the builtins operate directly on a plain _Tp member.
template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_HIDE_FROM_ABI
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    // C++03 has no defaulted functions; value-initialize explicitly instead.
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};
67 
68 _LIBCPP_HIDE_FROM_ABI inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
69   // Avoid switch statement to make this a constexpr.
70   return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
71          (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
72           (__order == memory_order_release ? __ATOMIC_RELEASE:
73            (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
74             (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
75               __ATOMIC_CONSUME))));
76 }
77 
// Map a compare-exchange *failure* memory order onto an __ATOMIC_* constant.
// A failure ordering may not carry release semantics, so release is lowered
// to relaxed and acq_rel to acquire; everything else maps as usual.
_LIBCPP_HIDE_FROM_ABI inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELAXED:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
              __ATOMIC_CONSUME))));
}
87 
// Non-atomic initialization of the stored value (initialization of an
// atomic is not itself required to be atomic). The volatile overload must
// go through the byte-wise volatile assignment helper.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
  __a->__a_value = __val;
}
99 
// Inter-thread fence, mapped directly onto the GCC builtin.
_LIBCPP_HIDE_FROM_ABI inline
void __cxx_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__to_gcc_order(__order));
}

// Compiler-only fence: orders accesses with respect to signal handlers
// running on the same thread; emits no hardware fence.
_LIBCPP_HIDE_FROM_ABI inline
void __cxx_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__to_gcc_order(__order));
}
109 
// Atomic store. The generic (pointer-based) __atomic_store builtin is used
// so that any trivially copyable _Tp works, not just integral types.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
                        memory_order __order) {
  __atomic_store(std::addressof(__a->__a_value), std::addressof(__val), __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
                        memory_order __order) {
  __atomic_store(std::addressof(__a->__a_value), std::addressof(__val), __to_gcc_order(__order));
}
123 
// Atomic load. The generic __atomic_load builtin writes the loaded value
// through an out-pointer, so it is read into a local and returned by value.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
                      memory_order __order) {
  _Tp __ret;
  __atomic_load(std::addressof(__a->__a_value), std::addressof(__ret), __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(std::addressof(__a->__a_value), std::addressof(__ret), __to_gcc_order(__order));
  return __ret;
}
140 
// Atomic exchange: stores __value and returns the previous value. Uses the
// generic __atomic_exchange builtin (in/out through pointers) so non-integral
// trivially copyable types are supported.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(
      std::addressof(__a->__a_value), std::addressof(__value), std::addressof(__ret), __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
                          memory_order __order) {
  _Tp __ret;
  __atomic_exchange(
      std::addressof(__a->__a_value), std::addressof(__value), std::addressof(__ret), __to_gcc_order(__order));
  return __ret;
}
160 
// Strong compare-exchange: the 4th builtin argument 'false' requests the
// form that may not fail spuriously. On failure, *__expected is updated
// with the value observed. The failure ordering is first lowered by
// __to_gcc_failure_order since it may not contain release semantics.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_strong(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(
      std::addressof(__a->__a_value),
      __expected,
      std::addressof(__value),
      false,
      __to_gcc_order(__success),
      __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(
      std::addressof(__a->__a_value),
      __expected,
      std::addressof(__value),
      false,
      __to_gcc_order(__success),
      __to_gcc_failure_order(__failure));
}
188 
// Weak compare-exchange: the 4th builtin argument 'true' permits spurious
// failure (cheaper on LL/SC architectures). Otherwise identical to the
// strong form above.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(
      std::addressof(__a->__a_value),
      __expected,
      std::addressof(__value),
      true,
      __to_gcc_order(__success),
      __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(
      std::addressof(__a->__a_value),
      __expected,
      std::addressof(__value),
      true,
      __to_gcc_order(__success),
      __to_gcc_failure_order(__failure));
}
216 
// Multiplier applied to fetch_add/fetch_sub deltas: atomic pointer
// arithmetic must advance by whole objects, while the underlying builtin
// performs unscaled arithmetic, so _Tp* scales by sizeof(_Tp) and
// arithmetic types scale by 1.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
229 
// fetch_add / fetch_sub: returns the value held before the operation.
// __skip_amt scales the delta (see above) so pointer atomics step by whole
// objects.
template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_add(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_add(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_sub(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}
257 
// Bitwise read-modify-write operations (and/or/xor); each returns the value
// held before the operation and forwards directly to the matching builtin.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                          memory_order __order) {
  return __atomic_fetch_or(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                           memory_order __order) {
  return __atomic_fetch_xor(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}
299 
300 #define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
301 
302 #elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
303 
// Storage when Clang's C11 atomic support is used: the value lives in an
// _Atomic(_Tp) and is manipulated via the __c11_atomic_* builtins.
// _LIBCPP_DISABLE_EXTENSION_WARNING: _Atomic is a C11 construct accepted in
// C++ only as an extension.
template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_HIDE_FROM_ABI
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    // C++03 has no defaulted functions; value-initialize explicitly instead.
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp __value) _NOEXCEPT
    : __a_value(__value) {}
  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
};
317 
318 #define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
319 
// Inter-thread fence via the C11 builtin; memory_order is passed through as
// its underlying integer type.
_LIBCPP_HIDE_FROM_ABI inline
void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
    __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
}

// Compiler-only fence: orders accesses relative to same-thread signal handlers.
_LIBCPP_HIDE_FROM_ABI inline
void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
    __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
}
329 
// Non-atomic initialization of the stored value via __c11_atomic_init.
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
    __c11_atomic_init(std::addressof(__a->__a_value), __val);
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
    __c11_atomic_init(std::addressof(__a->__a_value), __val);
}
340 
// Atomic store with the requested ordering.
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
    __c11_atomic_store(std::addressof(__a->__a_value), __val, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
    __c11_atomic_store(std::addressof(__a->__a_value), __val, static_cast<__memory_order_underlying_t>(__order));
}
351 
// Atomic load. The __c11_atomic_load builtin takes a non-const pointer, so
// the const qualifier introduced by this interface must be cast away.
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
    using __ptr_type = __remove_const_t<decltype(__a->__a_value)>*;
    return __c11_atomic_load(
        const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
    using __ptr_type = __remove_const_t<decltype(__a->__a_value)>*;
    return __c11_atomic_load(
        const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
}
366 
// Atomic exchange: stores __value and returns the previous value.
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
    return __c11_atomic_exchange(
        std::addressof(__a->__a_value), __value, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
    return __c11_atomic_exchange(
        std::addressof(__a->__a_value), __value, static_cast<__memory_order_underlying_t>(__order));
}
379 
380 _LIBCPP_HIDE_FROM_ABI inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
381   // Avoid switch statement to make this a constexpr.
382   return __order == memory_order_release ? memory_order_relaxed:
383          (__order == memory_order_acq_rel ? memory_order_acquire:
384              __order);
385 }
386 
// Strong compare-exchange via the C11 builtin; on failure *__expected is
// updated with the observed value. The failure ordering is sanitized by
// __to_failure_order first.
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_strong(
      std::addressof(__a->__a_value),
      __expected,
      __value,
      static_cast<__memory_order_underlying_t>(__success),
      static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_strong(
      std::addressof(__a->__a_value),
      __expected,
      __value,
      static_cast<__memory_order_underlying_t>(__success),
      static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
407 
// Weak compare-exchange: may fail spuriously (cheaper on LL/SC targets);
// otherwise identical to the strong form above.
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_weak(
      std::addressof(__a->__a_value),
      __expected,
      __value,
      static_cast<__memory_order_underlying_t>(__success),
      static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_weak(
      std::addressof(__a->__a_value),
      __expected,
      __value,
      static_cast<__memory_order_underlying_t>(__success),
      static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
428 
// fetch_add: returns the value held before the addition. Pointer atomics
// get dedicated overloads taking the delta as a ptrdiff_t (in objects);
// the C11 builtin performs the pointer scaling itself.
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
454 
// fetch_sub: returns the value held before the subtraction; pointer
// overloads mirror the fetch_add ones above.
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(
      std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
479 
// Bitwise read-modify-write operations (and/or/xor); each returns the value
// held before the operation and forwards to the matching C11 builtin.
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(
      std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(
      std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(
      std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(
      std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(
      std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(
      std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}
518 
519 #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
520 
521 #ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
522 
// Fallback representation for types that are not always lock-free when only
// the builtins are available: the value is guarded by a per-object spinlock
// built from an always-lock-free flag.
template<typename _Tp>
struct __cxx_atomic_lock_impl {

  _LIBCPP_HIDE_FROM_ABI
  __cxx_atomic_lock_impl() _NOEXCEPT
    : __a_value(), __a_lock(0) {}
  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit
  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
    : __a_value(value), __a_lock(0) {}

  _Tp __a_value;
  // mutable: even const reads (__read()) must take the lock.
  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;

  // Spin until the exchange observes the flag clear; the acquire here pairs
  // with the release in __unlock().
  _LIBCPP_HIDE_FROM_ABI void __lock() const volatile {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  _LIBCPP_HIDE_FROM_ABI void __lock() const {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  _LIBCPP_HIDE_FROM_ABI void __unlock() const volatile {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  _LIBCPP_HIDE_FROM_ABI void __unlock() const {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  // Copy the value out while holding the lock; the volatile overload needs
  // the byte-wise volatile assignment helper.
  _LIBCPP_HIDE_FROM_ABI _Tp __read() const volatile {
    __lock();
    _Tp __old;
    __cxx_atomic_assign_volatile(__old, __a_value);
    __unlock();
    return __old;
  }
  _LIBCPP_HIDE_FROM_ABI _Tp __read() const {
    __lock();
    _Tp __old = __a_value;
    __unlock();
    return __old;
  }
};
564 
// Non-atomic initialization; no locking is required at construction time.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
  __a->__a_value = __val;
}
575 
// Store under the spinlock. The memory_order argument is unnamed/ignored:
// the synchronization comes from the lock's acquire/release.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
  __a->__lock();
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
  __a->__unlock();
}
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
  __a->__lock();
  __a->__a_value = __val;
  __a->__unlock();
}
590 
// Load under the spinlock (delegated to the member __read(); ordering
// argument ignored, see store above).
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}
601 
// Exchange under the spinlock: read the old value, write the new one, and
// return the old value.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __value);
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value = __value;
  __a->__unlock();
  return __old;
}
621 
// Compare-exchange under the spinlock. memcmp compares the full object
// representation (like the hardware CAS would), so padding bytes take part
// in the comparison. On failure, *__expected is refreshed with the stored
// value.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  _Tp __temp;
  __a->__lock();
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  bool __ret = (std::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
  else
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  __a->__unlock();
  return __ret;
}
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = (std::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    std::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  else
    std::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  __a->__unlock();
  return __ret;
}
650 
// "Weak" compare-exchange for the locked fallback. Under the lock there is
// no spurious failure, so the body is identical to the strong form.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  _Tp __temp;
  __a->__lock();
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  bool __ret = (std::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
  else
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  __a->__unlock();
  return __ret;
}
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = (std::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    std::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  else
    std::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  __a->__unlock();
  return __ret;
}
679 
// fetch_add under the spinlock; returns the pre-operation value. Pointer
// arithmetic (+=) is done natively here, so no __skip_amt scaling is needed.
template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}

// NOTE(review): in the two pointer overloads below, the template parameter
// _Td does not appear in the signature and thus cannot be deduced; the
// generic overloads above already handle _Tp = U* via +=. Confirm whether
// these overloads are ever selected.
template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
                           ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
                           ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}
723 
// fetch_sub under the spinlock; returns the pre-operation value.
template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value -= __delta;
  __a->__unlock();
  return __old;
}
745 
// fetch_and under the spinlock; returns the pre-operation value.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value &= __pattern;
  __a->__unlock();
  return __old;
}
767 
// fetch_or under the spinlock; returns the pre-operation value.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value |= __pattern;
  __a->__unlock();
  return __old;
}
789 
// fetch_xor under the spinlock; returns the pre-operation value.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value ^= __pattern;
  __a->__unlock();
  return __old;
}
811 
// Select the storage representation for std::atomic:
//  - builtins-only build: always-lock-free types use __cxx_atomic_base_impl,
//    all other types fall back to the spinlock-guarded __cxx_atomic_lock_impl;
//  - otherwise the compiler's atomic support handles every type directly.
template <typename _Tp,
          typename _Base = typename conditional<__libcpp_is_always_lock_free<_Tp>::__value,
                                                __cxx_atomic_base_impl<_Tp>,
                                                __cxx_atomic_lock_impl<_Tp> >::type>
#else
template <typename _Tp,
          typename _Base = __cxx_atomic_base_impl<_Tp> >
#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
struct __cxx_atomic_impl : public _Base {
    static_assert(is_trivially_copyable<_Tp>::value,
      "std::atomic<T> requires that 'T' be a trivially copyable type");

  _LIBCPP_HIDE_FROM_ABI __cxx_atomic_impl() _NOEXCEPT = default;
  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp __value) _NOEXCEPT
    : _Base(__value) {}
};
828 
829 _LIBCPP_END_NAMESPACE_STD
830 
831 #endif // _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H
832