1// -*- C++ -*-
2//===----------------------------------------------------------------------===//
3//
4// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
5// See https://llvm.org/LICENSE.txt for license information.
6// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7//
8//===----------------------------------------------------------------------===//
9
10#ifndef _LIBCPP_ATOMIC
11#define _LIBCPP_ATOMIC
12
13/*
14    atomic synopsis
15
16namespace std
17{
18
19// feature test macro [version.syn]
20
21#define __cpp_lib_atomic_is_always_lock_free
22#define __cpp_lib_atomic_flag_test
23#define __cpp_lib_atomic_lock_free_type_aliases
24#define __cpp_lib_atomic_wait
25
26 // order and consistency
27
28 enum memory_order: unspecified // enum class in C++20
29 {
30    relaxed,
31    consume, // load-consume
32    acquire, // load-acquire
33    release, // store-release
34    acq_rel, // store-release load-acquire
35    seq_cst // sequentially consistent: store-release load-acquire plus a single total order
36 };
37
38 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
39 inline constexpr auto memory_order_consume = memory_order::consume;
40 inline constexpr auto memory_order_acquire = memory_order::acquire;
41 inline constexpr auto memory_order_release = memory_order::release;
42 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
43 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
44
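 // Example (illustrative, not part of the synopsis): a release store read by an
 // acquire load publishes ordinary data from one thread to another.
 //
 //     #include <atomic>
 //     #include <cassert>
 //     #include <thread>
 //
 //     int payload = 0;
 //     std::atomic<bool> ready{false};
 //
 //     void producer() {
 //         payload = 42;                                     // plain write
 //         ready.store(true, std::memory_order_release);     // publish it
 //     }
 //     void consumer() {
 //         while (!ready.load(std::memory_order_acquire)) {} // spin until published
 //         assert(payload == 42);                            // guaranteed to observe 42
 //     }
 //     int main() { std::thread t(producer); consumer(); t.join(); }
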
45template <class T> T kill_dependency(T y) noexcept;
46
47// lock-free property
48
49#define ATOMIC_BOOL_LOCK_FREE unspecified
50#define ATOMIC_CHAR_LOCK_FREE unspecified
51#define ATOMIC_CHAR8_T_LOCK_FREE unspecified // C++20
52#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
53#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
54#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
55#define ATOMIC_SHORT_LOCK_FREE unspecified
56#define ATOMIC_INT_LOCK_FREE unspecified
57#define ATOMIC_LONG_LOCK_FREE unspecified
58#define ATOMIC_LLONG_LOCK_FREE unspecified
59#define ATOMIC_POINTER_LOCK_FREE unspecified
60
61template <class T>
62struct atomic
63{
64    using value_type = T;
65
66    static constexpr bool is_always_lock_free;
67    bool is_lock_free() const volatile noexcept;
68    bool is_lock_free() const noexcept;
69
70    atomic() noexcept = default; // until C++20
71    constexpr atomic() noexcept(is_nothrow_default_constructible_v<T>); // since C++20
72    constexpr atomic(T desr) noexcept;
73    atomic(const atomic&) = delete;
74    atomic& operator=(const atomic&) = delete;
75    atomic& operator=(const atomic&) volatile = delete;
76
77    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
78    T load(memory_order m = memory_order_seq_cst) const noexcept;
79    operator T() const volatile noexcept;
80    operator T() const noexcept;
81    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
82    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
83    T operator=(T) volatile noexcept;
84    T operator=(T) noexcept;
85
86    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
87    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
88    bool compare_exchange_weak(T& expc, T desr,
89                               memory_order s, memory_order f) volatile noexcept;
90    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
91    bool compare_exchange_strong(T& expc, T desr,
92                                 memory_order s, memory_order f) volatile noexcept;
93    bool compare_exchange_strong(T& expc, T desr,
94                                 memory_order s, memory_order f) noexcept;
95    bool compare_exchange_weak(T& expc, T desr,
96                               memory_order m = memory_order_seq_cst) volatile noexcept;
97    bool compare_exchange_weak(T& expc, T desr,
98                               memory_order m = memory_order_seq_cst) noexcept;
99    bool compare_exchange_strong(T& expc, T desr,
100                                memory_order m = memory_order_seq_cst) volatile noexcept;
101    bool compare_exchange_strong(T& expc, T desr,
102                                 memory_order m = memory_order_seq_cst) noexcept;
103
104    void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
105    void wait(T, memory_order = memory_order::seq_cst) const noexcept;
106    void notify_one() volatile noexcept;
107    void notify_one() noexcept;
108    void notify_all() volatile noexcept;
109    void notify_all() noexcept;
110};
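
// Example (illustrative): the primary template works for any trivially copyable type;
// compare_exchange_weak is normally used in a retry loop that recomputes the desired
// value from the currently observed one.
//
//     #include <atomic>
//
//     struct Point { int x, y; };            // trivially copyable
//     std::atomic<Point> p{Point{0, 0}};
//
//     void move_right() {
//         Point expected = p.load();
//         Point desired;
//         do {
//             desired = expected;
//             desired.x += 1;                // derive the new value from the old one
//         } while (!p.compare_exchange_weak(expected, desired));
//     }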
111
112template <>
113struct atomic<integral>
114{
115    using value_type = integral;
116    using difference_type = value_type;
117
118    static constexpr bool is_always_lock_free;
119    bool is_lock_free() const volatile noexcept;
120    bool is_lock_free() const noexcept;
121
122    atomic() noexcept = default;
123    constexpr atomic(integral desr) noexcept;
124    atomic(const atomic&) = delete;
125    atomic& operator=(const atomic&) = delete;
126    atomic& operator=(const atomic&) volatile = delete;
127
128    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
129    integral load(memory_order m = memory_order_seq_cst) const noexcept;
130    operator integral() const volatile noexcept;
131    operator integral() const noexcept;
132    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134    integral operator=(integral desr) volatile noexcept;
135    integral operator=(integral desr) noexcept;
136
137    integral exchange(integral desr,
138                      memory_order m = memory_order_seq_cst) volatile noexcept;
139    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
140    bool compare_exchange_weak(integral& expc, integral desr,
141                               memory_order s, memory_order f) volatile noexcept;
142    bool compare_exchange_weak(integral& expc, integral desr,
143                               memory_order s, memory_order f) noexcept;
144    bool compare_exchange_strong(integral& expc, integral desr,
145                                 memory_order s, memory_order f) volatile noexcept;
146    bool compare_exchange_strong(integral& expc, integral desr,
147                                 memory_order s, memory_order f) noexcept;
148    bool compare_exchange_weak(integral& expc, integral desr,
149                               memory_order m = memory_order_seq_cst) volatile noexcept;
150    bool compare_exchange_weak(integral& expc, integral desr,
151                               memory_order m = memory_order_seq_cst) noexcept;
152    bool compare_exchange_strong(integral& expc, integral desr,
153                                memory_order m = memory_order_seq_cst) volatile noexcept;
154    bool compare_exchange_strong(integral& expc, integral desr,
155                                 memory_order m = memory_order_seq_cst) noexcept;
156
157    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
158    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
159    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
161    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
162    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
163    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
164    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
165    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
167
168    integral operator++(int) volatile noexcept;
169    integral operator++(int) noexcept;
170    integral operator--(int) volatile noexcept;
171    integral operator--(int) noexcept;
172    integral operator++() volatile noexcept;
173    integral operator++() noexcept;
174    integral operator--() volatile noexcept;
175    integral operator--() noexcept;
176    integral operator+=(integral op) volatile noexcept;
177    integral operator+=(integral op) noexcept;
178    integral operator-=(integral op) volatile noexcept;
179    integral operator-=(integral op) noexcept;
180    integral operator&=(integral op) volatile noexcept;
181    integral operator&=(integral op) noexcept;
182    integral operator|=(integral op) volatile noexcept;
183    integral operator|=(integral op) noexcept;
184    integral operator^=(integral op) volatile noexcept;
185    integral operator^=(integral op) noexcept;
186
187    void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
188    void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
189    void notify_one() volatile noexcept;
190    void notify_one() noexcept;
191    void notify_all() volatile noexcept;
192    void notify_all() noexcept;
193};
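
// Example (illustrative): the integral specializations add the fetch_* operations and
// the corresponding compound-assignment operators.
//
//     #include <atomic>
//
//     std::atomic<unsigned> hits{0};
//
//     void record_hit() {
//         hits.fetch_add(1, std::memory_order_relaxed);   // a plain counter needs no ordering
//     }
//     unsigned snapshot() {
//         return hits.load(std::memory_order_relaxed);
//     }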
194
195template <class T>
196struct atomic<T*>
197{
198    using value_type = T*;
199    using difference_type = ptrdiff_t;
200
201    static constexpr bool is_always_lock_free;
202    bool is_lock_free() const volatile noexcept;
203    bool is_lock_free() const noexcept;
204
205    atomic() noexcept = default; // until C++20
206    constexpr atomic() noexcept; // since C++20
207    constexpr atomic(T* desr) noexcept;
208    atomic(const atomic&) = delete;
209    atomic& operator=(const atomic&) = delete;
210    atomic& operator=(const atomic&) volatile = delete;
211
212    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
213    T* load(memory_order m = memory_order_seq_cst) const noexcept;
214    operator T*() const volatile noexcept;
215    operator T*() const noexcept;
216    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
217    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
218    T* operator=(T*) volatile noexcept;
219    T* operator=(T*) noexcept;
220
221    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
222    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
223    bool compare_exchange_weak(T*& expc, T* desr,
224                               memory_order s, memory_order f) volatile noexcept;
225    bool compare_exchange_weak(T*& expc, T* desr,
226                               memory_order s, memory_order f) noexcept;
227    bool compare_exchange_strong(T*& expc, T* desr,
228                                 memory_order s, memory_order f) volatile noexcept;
229    bool compare_exchange_strong(T*& expc, T* desr,
230                                 memory_order s, memory_order f) noexcept;
231    bool compare_exchange_weak(T*& expc, T* desr,
232                               memory_order m = memory_order_seq_cst) volatile noexcept;
233    bool compare_exchange_weak(T*& expc, T* desr,
234                               memory_order m = memory_order_seq_cst) noexcept;
235    bool compare_exchange_strong(T*& expc, T* desr,
236                                memory_order m = memory_order_seq_cst) volatile noexcept;
237    bool compare_exchange_strong(T*& expc, T* desr,
238                                 memory_order m = memory_order_seq_cst) noexcept;
239    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
240    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
241    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
242    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
243
244    T* operator++(int) volatile noexcept;
245    T* operator++(int) noexcept;
246    T* operator--(int) volatile noexcept;
247    T* operator--(int) noexcept;
248    T* operator++() volatile noexcept;
249    T* operator++() noexcept;
250    T* operator--() volatile noexcept;
251    T* operator--() noexcept;
252    T* operator+=(ptrdiff_t op) volatile noexcept;
253    T* operator+=(ptrdiff_t op) noexcept;
254    T* operator-=(ptrdiff_t op) volatile noexcept;
255    T* operator-=(ptrdiff_t op) noexcept;
256
257    void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
258    void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
259    void notify_one() volatile noexcept;
260    void notify_one() noexcept;
261    void notify_all() volatile noexcept;
262    void notify_all() noexcept;
263};
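
// Example (illustrative): for atomic<T*>, fetch_add/fetch_sub and the +=/-= operators
// advance the pointer by whole objects; difference_type is ptrdiff_t.
//
//     #include <atomic>
//
//     int buffer[16];
//     std::atomic<int*> cursor{buffer};
//
//     int* claim_slot() {
//         return cursor.fetch_add(1);        // returns the old pointer, advances by sizeof(int)
//     }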
264
265
266// [atomics.nonmembers], non-member functions
267template<class T>
268  bool atomic_is_lock_free(const volatile atomic<T>*) noexcept;
269template<class T>
270  bool atomic_is_lock_free(const atomic<T>*) noexcept;
271template<class T>
272  void atomic_store(volatile atomic<T>*, atomic<T>::value_type) noexcept;
273template<class T>
274  void atomic_store(atomic<T>*, atomic<T>::value_type) noexcept;
275template<class T>
276  void atomic_store_explicit(volatile atomic<T>*, atomic<T>::value_type,
277                             memory_order) noexcept;
278template<class T>
279  void atomic_store_explicit(atomic<T>*, atomic<T>::value_type,
280                             memory_order) noexcept;
281template<class T>
282  T atomic_load(const volatile atomic<T>*) noexcept;
283template<class T>
284  T atomic_load(const atomic<T>*) noexcept;
285template<class T>
286  T atomic_load_explicit(const volatile atomic<T>*, memory_order) noexcept;
287template<class T>
288  T atomic_load_explicit(const atomic<T>*, memory_order) noexcept;
289template<class T>
290  T atomic_exchange(volatile atomic<T>*, atomic<T>::value_type) noexcept;
291template<class T>
292  T atomic_exchange(atomic<T>*, atomic<T>::value_type) noexcept;
293template<class T>
294  T atomic_exchange_explicit(volatile atomic<T>*, atomic<T>::value_type,
295                             memory_order) noexcept;
296template<class T>
297  T atomic_exchange_explicit(atomic<T>*, atomic<T>::value_type,
298                             memory_order) noexcept;
299template<class T>
300  bool atomic_compare_exchange_weak(volatile atomic<T>*, atomic<T>::value_type*,
301                                    atomic<T>::value_type) noexcept;
302template<class T>
303  bool atomic_compare_exchange_weak(atomic<T>*, atomic<T>::value_type*,
304                                    atomic<T>::value_type) noexcept;
305template<class T>
306  bool atomic_compare_exchange_strong(volatile atomic<T>*, atomic<T>::value_type*,
307                                      atomic<T>::value_type) noexcept;
308template<class T>
309  bool atomic_compare_exchange_strong(atomic<T>*, atomic<T>::value_type*,
310                                      atomic<T>::value_type) noexcept;
311template<class T>
312  bool atomic_compare_exchange_weak_explicit(volatile atomic<T>*, atomic<T>::value_type*,
313                                             atomic<T>::value_type,
314                                             memory_order, memory_order) noexcept;
315template<class T>
316  bool atomic_compare_exchange_weak_explicit(atomic<T>*, atomic<T>::value_type*,
317                                             atomic<T>::value_type,
318                                             memory_order, memory_order) noexcept;
319template<class T>
320  bool atomic_compare_exchange_strong_explicit(volatile atomic<T>*, atomic<T>::value_type*,
321                                               atomic<T>::value_type,
322                                               memory_order, memory_order) noexcept;
323template<class T>
324  bool atomic_compare_exchange_strong_explicit(atomic<T>*, atomic<T>::value_type*,
325                                               atomic<T>::value_type,
326                                               memory_order, memory_order) noexcept;
327
328template<class T>
329  T atomic_fetch_add(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
330template<class T>
331  T atomic_fetch_add(atomic<T>*, atomic<T>::difference_type) noexcept;
332template<class T>
333  T atomic_fetch_add_explicit(volatile atomic<T>*, atomic<T>::difference_type,
334                              memory_order) noexcept;
335template<class T>
336  T atomic_fetch_add_explicit(atomic<T>*, atomic<T>::difference_type,
337                              memory_order) noexcept;
338template<class T>
339  T atomic_fetch_sub(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
340template<class T>
341  T atomic_fetch_sub(atomic<T>*, atomic<T>::difference_type) noexcept;
342template<class T>
343  T atomic_fetch_sub_explicit(volatile atomic<T>*, atomic<T>::difference_type,
344                              memory_order) noexcept;
345template<class T>
346  T atomic_fetch_sub_explicit(atomic<T>*, atomic<T>::difference_type,
347                              memory_order) noexcept;
348template<class T>
349  T atomic_fetch_and(volatile atomic<T>*, atomic<T>::value_type) noexcept;
350template<class T>
351  T atomic_fetch_and(atomic<T>*, atomic<T>::value_type) noexcept;
352template<class T>
353  T atomic_fetch_and_explicit(volatile atomic<T>*, atomic<T>::value_type,
354                              memory_order) noexcept;
355template<class T>
356  T atomic_fetch_and_explicit(atomic<T>*, atomic<T>::value_type,
357                              memory_order) noexcept;
358template<class T>
359  T atomic_fetch_or(volatile atomic<T>*, atomic<T>::value_type) noexcept;
360template<class T>
361  T atomic_fetch_or(atomic<T>*, atomic<T>::value_type) noexcept;
362template<class T>
363  T atomic_fetch_or_explicit(volatile atomic<T>*, atomic<T>::value_type,
364                             memory_order) noexcept;
365template<class T>
366  T atomic_fetch_or_explicit(atomic<T>*, atomic<T>::value_type,
367                             memory_order) noexcept;
368template<class T>
369  T atomic_fetch_xor(volatile atomic<T>*, atomic<T>::value_type) noexcept;
370template<class T>
371  T atomic_fetch_xor(atomic<T>*, atomic<T>::value_type) noexcept;
372template<class T>
373  T atomic_fetch_xor_explicit(volatile atomic<T>*, atomic<T>::value_type,
374                              memory_order) noexcept;
375template<class T>
376  T atomic_fetch_xor_explicit(atomic<T>*, atomic<T>::value_type,
377                              memory_order) noexcept;
378
379template<class T>
380  void atomic_wait(const volatile atomic<T>*, atomic<T>::value_type);
381template<class T>
382  void atomic_wait(const atomic<T>*, atomic<T>::value_type);
383template<class T>
384  void atomic_wait_explicit(const volatile atomic<T>*, atomic<T>::value_type,
385                            memory_order);
386template<class T>
387  void atomic_wait_explicit(const atomic<T>*, atomic<T>::value_type,
388                            memory_order);
389template<class T>
390  void atomic_notify_one(volatile atomic<T>*);
391template<class T>
392  void atomic_notify_one(atomic<T>*);
393template<class T>
394  void atomic_notify_all(volatile atomic<T>*);
395template<class T>
396  void atomic_notify_all(atomic<T>*);
397
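// Example (illustrative, C++20): atomic_wait blocks until the stored value is observed
// to differ from the argument; atomic_notify_one/atomic_notify_all wake blocked waiters.
//
//     #include <atomic>
//     #include <thread>
//
//     std::atomic<int> stage{0};
//
//     void waiter() {
//         std::atomic_wait(&stage, 0);       // returns once stage != 0
//     }
//     void signaller() {
//         stage.store(1);
//         std::atomic_notify_one(&stage);
//     }
//     int main() { std::thread t(waiter); signaller(); t.join(); }
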
398// Atomics for standard typedef types
399
400typedef atomic<bool>               atomic_bool;
401typedef atomic<char>               atomic_char;
402typedef atomic<signed char>        atomic_schar;
403typedef atomic<unsigned char>      atomic_uchar;
404typedef atomic<short>              atomic_short;
405typedef atomic<unsigned short>     atomic_ushort;
406typedef atomic<int>                atomic_int;
407typedef atomic<unsigned int>       atomic_uint;
408typedef atomic<long>               atomic_long;
409typedef atomic<unsigned long>      atomic_ulong;
410typedef atomic<long long>          atomic_llong;
411typedef atomic<unsigned long long> atomic_ullong;
412typedef atomic<char8_t>            atomic_char8_t; // C++20
413typedef atomic<char16_t>           atomic_char16_t;
414typedef atomic<char32_t>           atomic_char32_t;
415typedef atomic<wchar_t>            atomic_wchar_t;
416
417typedef atomic<int_least8_t>   atomic_int_least8_t;
418typedef atomic<uint_least8_t>  atomic_uint_least8_t;
419typedef atomic<int_least16_t>  atomic_int_least16_t;
420typedef atomic<uint_least16_t> atomic_uint_least16_t;
421typedef atomic<int_least32_t>  atomic_int_least32_t;
422typedef atomic<uint_least32_t> atomic_uint_least32_t;
423typedef atomic<int_least64_t>  atomic_int_least64_t;
424typedef atomic<uint_least64_t> atomic_uint_least64_t;
425
426typedef atomic<int_fast8_t>   atomic_int_fast8_t;
427typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
428typedef atomic<int_fast16_t>  atomic_int_fast16_t;
429typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
430typedef atomic<int_fast32_t>  atomic_int_fast32_t;
431typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
432typedef atomic<int_fast64_t>  atomic_int_fast64_t;
433typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
434
435typedef atomic<int8_t>   atomic_int8_t;
436typedef atomic<uint8_t>  atomic_uint8_t;
437typedef atomic<int16_t>  atomic_int16_t;
438typedef atomic<uint16_t> atomic_uint16_t;
439typedef atomic<int32_t>  atomic_int32_t;
440typedef atomic<uint32_t> atomic_uint32_t;
441typedef atomic<int64_t>  atomic_int64_t;
442typedef atomic<uint64_t> atomic_uint64_t;
443
444typedef atomic<intptr_t>  atomic_intptr_t;
445typedef atomic<uintptr_t> atomic_uintptr_t;
446typedef atomic<size_t>    atomic_size_t;
447typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
448typedef atomic<intmax_t>  atomic_intmax_t;
449typedef atomic<uintmax_t> atomic_uintmax_t;
450
451// flag type and operations
452
453typedef struct atomic_flag
454{
455    atomic_flag() noexcept = default; // until C++20
456    constexpr atomic_flag() noexcept; // since C++20
457    atomic_flag(const atomic_flag&) = delete;
458    atomic_flag& operator=(const atomic_flag&) = delete;
459    atomic_flag& operator=(const atomic_flag&) volatile = delete;
460
461    bool test(memory_order m = memory_order_seq_cst) const volatile noexcept;
462    bool test(memory_order m = memory_order_seq_cst) const noexcept;
463    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
464    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
465    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
466    void clear(memory_order m = memory_order_seq_cst) noexcept;
467
468    void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
469    void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
470    void notify_one() volatile noexcept;
471    void notify_one() noexcept;
472    void notify_all() volatile noexcept;
473    void notify_all() noexcept;
474} atomic_flag;
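
// Example (illustrative): atomic_flag is always lock-free, which makes it the natural
// building block for a minimal spinlock.
//
//     #include <atomic>
//
//     class spinlock {
//         std::atomic_flag flag_ = ATOMIC_FLAG_INIT;       // starts clear
//     public:
//         void lock() {
//             while (flag_.test_and_set(std::memory_order_acquire)) {
//                 // spin; with C++20, flag_.wait(true) would avoid busy-waiting
//             }
//         }
//         void unlock() { flag_.clear(std::memory_order_release); }
//     };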
475
476bool atomic_flag_test(volatile atomic_flag* obj) noexcept;
477bool atomic_flag_test(atomic_flag* obj) noexcept;
478bool atomic_flag_test_explicit(volatile atomic_flag* obj,
479                               memory_order m) noexcept;
480bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept;
481bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
482bool atomic_flag_test_and_set(atomic_flag* obj) noexcept;
483bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
484                                       memory_order m) noexcept;
485bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
486void atomic_flag_clear(volatile atomic_flag* obj) noexcept;
487void atomic_flag_clear(atomic_flag* obj) noexcept;
488void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
489void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
490
491void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
492void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
493void atomic_flag_wait_explicit(const volatile atomic_flag* obj, bool old, memory_order m) noexcept;
494void atomic_flag_wait_explicit(const atomic_flag* obj, bool old, memory_order m) noexcept;
495void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
496void atomic_flag_notify_one(atomic_flag* obj) noexcept;
497void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
498void atomic_flag_notify_all(atomic_flag* obj) noexcept;
499
500// fences
501
502void atomic_thread_fence(memory_order m) noexcept;
503void atomic_signal_fence(memory_order m) noexcept;
504
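// Example (illustrative): a release fence before a relaxed store pairs with an acquire
// fence after a relaxed load of the same object, ordering the surrounding accesses.
//
//     #include <atomic>
//
//     int data = 0;
//     std::atomic<bool> flag{false};
//
//     void writer() {
//         data = 1;
//         std::atomic_thread_fence(std::memory_order_release);
//         flag.store(true, std::memory_order_relaxed);
//     }
//     void reader() {
//         if (flag.load(std::memory_order_relaxed)) {
//             std::atomic_thread_fence(std::memory_order_acquire);
//             // here data == 1 is guaranteed to be visible
//         }
//     }
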
505// deprecated
506
507template <class T>
508  void atomic_init(volatile atomic<T>* obj, atomic<T>::value_type desr) noexcept;
509
510template <class T>
511  void atomic_init(atomic<T>* obj, atomic<T>::value_type desr) noexcept;
512
513#define ATOMIC_VAR_INIT(value) see below
514
515#define ATOMIC_FLAG_INIT see below
516
517}  // std
518
519*/
520
521#include <__availability>
522#include <__config>
523#include <__thread/poll_with_backoff.h>
524#include <cstddef>
525#include <cstdint>
526#include <cstring>
527#include <type_traits>
528#include <version>
529
530#ifndef _LIBCPP_HAS_NO_THREADS
531# include <__threading_support>
532#endif
533
534#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
535#pragma GCC system_header
536#endif
537
538#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
539# error <atomic> is not implemented
540#endif
541#ifdef kill_dependency
542# error C++ standard library is incompatible with <stdatomic.h>
543#endif
544
545#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
546  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
547                           __m == memory_order_acquire || \
548                           __m == memory_order_acq_rel,   \
549                        "memory order argument to atomic operation is invalid")
550
551#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
552  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
553                           __m == memory_order_acq_rel,   \
554                        "memory order argument to atomic operation is invalid")
555
556#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
557  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
558                           __f == memory_order_acq_rel,   \
559                        "memory order argument to atomic operation is invalid")
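
// For example, _LIBCPP_CHECK_STORE_MEMORY_ORDER is what allows the library to warn on
// calls such as the following (illustrative; the warning requires compiler support for
// the diagnose_if-based _LIBCPP_DIAGNOSE_WARNING):
//
//   std::atomic<int> x{0};
//   x.store(1, std::memory_order_acquire);  // "memory order argument to atomic
//                                           //  operation is invalid"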
560
561_LIBCPP_BEGIN_NAMESPACE_STD
562
563// Figure out what the underlying type for `memory_order` would be if it were
564// declared as an unscoped enum (accounting for -fshort-enums). Use this result
565// to pin the underlying type in C++20.
566enum __legacy_memory_order {
567    __mo_relaxed,
568    __mo_consume,
569    __mo_acquire,
570    __mo_release,
571    __mo_acq_rel,
572    __mo_seq_cst
573};
574
575typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;
576
577#if _LIBCPP_STD_VER > 17
578
579enum class memory_order : __memory_order_underlying_t {
580  relaxed = __mo_relaxed,
581  consume = __mo_consume,
582  acquire = __mo_acquire,
583  release = __mo_release,
584  acq_rel = __mo_acq_rel,
585  seq_cst = __mo_seq_cst
586};
587
588inline constexpr auto memory_order_relaxed = memory_order::relaxed;
589inline constexpr auto memory_order_consume = memory_order::consume;
590inline constexpr auto memory_order_acquire = memory_order::acquire;
591inline constexpr auto memory_order_release = memory_order::release;
592inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
593inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
594
595#else
596
597typedef enum memory_order {
598  memory_order_relaxed = __mo_relaxed,
599  memory_order_consume = __mo_consume,
600  memory_order_acquire = __mo_acquire,
601  memory_order_release = __mo_release,
602  memory_order_acq_rel = __mo_acq_rel,
603  memory_order_seq_cst = __mo_seq_cst,
604} memory_order;
605
606#endif // _LIBCPP_STD_VER > 17
607
608template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
609bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
610    return _VSTD::memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0;
611}
612
613static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
614  "unexpected underlying type for std::memory_order");
615
616#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
617    defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
618
619// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because the
620// implicitly-declared copy assignment operator of a class type is not
621// volatile-qualified, assigning to a volatile object requires a byte-by-byte copy.
622template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
623typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
624__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
625  __a_value = __val;
626}
627template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
628typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
629__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
630  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
631  volatile char* __end = __to + sizeof(_Tp);
632  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
633  while (__to != __end)
634    *__to++ = *__from++;
635}
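
// Illustrative note: plain assignment to a volatile object of class type is ill-formed
// because the implicit operator= is not volatile-qualified, so overload resolution picks
// the byte-wise overload above. Sketch (the struct name is made up for this comment only):
//
//   struct _Widget { int __i; };
//   volatile _Widget __dst;
//   _Widget __src = {1};
//   // __dst = __src;                            // error: no volatile operator=
//   __cxx_atomic_assign_volatile(__dst, __src);  // OK: copies the bytes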
636
637#endif
638
639#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
640
641template <typename _Tp>
642struct __cxx_atomic_base_impl {
643
644  _LIBCPP_INLINE_VISIBILITY
645#ifndef _LIBCPP_CXX03_LANG
646    __cxx_atomic_base_impl() _NOEXCEPT = default;
647#else
648    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
649#endif // _LIBCPP_CXX03_LANG
650  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
651    : __a_value(value) {}
652  _Tp __a_value;
653};
654
655_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
656  // Avoid a switch statement so this can be a constexpr function (a C++11 constexpr body cannot contain switch).
657  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
658         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
659          (__order == memory_order_release ? __ATOMIC_RELEASE:
660           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
661            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
662              __ATOMIC_CONSUME))));
663}
664
665_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
666  // Avoid a switch statement so this can be a constexpr function (a C++11 constexpr body cannot contain switch).
667  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
668         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
669          (__order == memory_order_release ? __ATOMIC_RELAXED:
670           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
671            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
672              __ATOMIC_CONSUME))));
673}
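
// Illustrative consequence of the mapping above: the failure ordering of a
// compare-exchange never performs a release, so release decays to relaxed and
// acq_rel decays to acquire. Because both helpers are constexpr, this could be
// spelled as:
//
//   static_assert(__to_gcc_failure_order(memory_order_release) == __ATOMIC_RELAXED, "");
//   static_assert(__to_gcc_failure_order(memory_order_acq_rel) == __ATOMIC_ACQUIRE, "");
//   static_assert(__to_gcc_order(memory_order_seq_cst) == __ATOMIC_SEQ_CST, "");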
674
675template <typename _Tp>
676_LIBCPP_INLINE_VISIBILITY
677void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
678  __cxx_atomic_assign_volatile(__a->__a_value, __val);
679}
680
681template <typename _Tp>
682_LIBCPP_INLINE_VISIBILITY
683void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
684  __a->__a_value = __val;
685}
686
687_LIBCPP_INLINE_VISIBILITY inline
688void __cxx_atomic_thread_fence(memory_order __order) {
689  __atomic_thread_fence(__to_gcc_order(__order));
690}
691
692_LIBCPP_INLINE_VISIBILITY inline
693void __cxx_atomic_signal_fence(memory_order __order) {
694  __atomic_signal_fence(__to_gcc_order(__order));
695}
696
697template <typename _Tp>
698_LIBCPP_INLINE_VISIBILITY
699void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
700                        memory_order __order) {
701  __atomic_store(&__a->__a_value, &__val,
702                 __to_gcc_order(__order));
703}
704
705template <typename _Tp>
706_LIBCPP_INLINE_VISIBILITY
707void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
708                        memory_order __order) {
709  __atomic_store(&__a->__a_value, &__val,
710                 __to_gcc_order(__order));
711}
712
713template <typename _Tp>
714_LIBCPP_INLINE_VISIBILITY
715_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
716                      memory_order __order) {
717  _Tp __ret;
718  __atomic_load(&__a->__a_value, &__ret,
719                __to_gcc_order(__order));
720  return __ret;
721}
722
723template <typename _Tp>
724_LIBCPP_INLINE_VISIBILITY
725_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
726  _Tp __ret;
727  __atomic_load(&__a->__a_value, &__ret,
728                __to_gcc_order(__order));
729  return __ret;
730}
731
732template <typename _Tp>
733_LIBCPP_INLINE_VISIBILITY
734_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
735                          _Tp __value, memory_order __order) {
736  _Tp __ret;
737  __atomic_exchange(&__a->__a_value, &__value, &__ret,
738                    __to_gcc_order(__order));
739  return __ret;
740}
741
742template <typename _Tp>
743_LIBCPP_INLINE_VISIBILITY
744_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
745                          memory_order __order) {
746  _Tp __ret;
747  __atomic_exchange(&__a->__a_value, &__value, &__ret,
748                    __to_gcc_order(__order));
749  return __ret;
750}
751
752template <typename _Tp>
753_LIBCPP_INLINE_VISIBILITY
754bool __cxx_atomic_compare_exchange_strong(
755    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
756    memory_order __success, memory_order __failure) {
757  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
758                                   false,
759                                   __to_gcc_order(__success),
760                                   __to_gcc_failure_order(__failure));
761}
762
763template <typename _Tp>
764_LIBCPP_INLINE_VISIBILITY
765bool __cxx_atomic_compare_exchange_strong(
766    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
767    memory_order __failure) {
768  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
769                                   false,
770                                   __to_gcc_order(__success),
771                                   __to_gcc_failure_order(__failure));
772}
773
774template <typename _Tp>
775_LIBCPP_INLINE_VISIBILITY
776bool __cxx_atomic_compare_exchange_weak(
777    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
778    memory_order __success, memory_order __failure) {
779  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
780                                   true,
781                                   __to_gcc_order(__success),
782                                   __to_gcc_failure_order(__failure));
783}
784
785template <typename _Tp>
786_LIBCPP_INLINE_VISIBILITY
787bool __cxx_atomic_compare_exchange_weak(
788    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
789    memory_order __failure) {
790  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
791                                   true,
792                                   __to_gcc_order(__success),
793                                   __to_gcc_failure_order(__failure));
794}
795
796template <typename _Tp>
797struct __skip_amt { enum {value = 1}; };
798
799template <typename _Tp>
800struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
801
802// FIXME: It is unclear what the spec says about using arrays with
803// atomic_fetch_add. Force a compile-time failure rather than allowing bad behavior.
804template <typename _Tp>
805struct __skip_amt<_Tp[]> { };
806template <typename _Tp, int n>
807struct __skip_amt<_Tp[n]> { };
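
// Illustrative effect of __skip_amt (GCC builtin path only): fetch_add on an atomic
// pointer advances by whole objects, because __atomic_fetch_add on a pointer takes
// its delta in bytes.
//
//   __cxx_atomic_base_impl<long*> __p;                      // holds some long*
//   __cxx_atomic_fetch_add(&__p, 1, memory_order_relaxed);  // passes 1 * sizeof(long)
//                                                           // bytes to __atomic_fetch_add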
808
809template <typename _Tp, typename _Td>
810_LIBCPP_INLINE_VISIBILITY
811_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
812                           _Td __delta, memory_order __order) {
813  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
814                            __to_gcc_order(__order));
815}
816
817template <typename _Tp, typename _Td>
818_LIBCPP_INLINE_VISIBILITY
819_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
820                           memory_order __order) {
821  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
822                            __to_gcc_order(__order));
823}
824
825template <typename _Tp, typename _Td>
826_LIBCPP_INLINE_VISIBILITY
827_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
828                           _Td __delta, memory_order __order) {
829  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
830                            __to_gcc_order(__order));
831}
832
833template <typename _Tp, typename _Td>
834_LIBCPP_INLINE_VISIBILITY
835_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
836                           memory_order __order) {
837  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
838                            __to_gcc_order(__order));
839}
840
841template <typename _Tp>
842_LIBCPP_INLINE_VISIBILITY
843_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
844                           _Tp __pattern, memory_order __order) {
845  return __atomic_fetch_and(&__a->__a_value, __pattern,
846                            __to_gcc_order(__order));
847}
848
849template <typename _Tp>
850_LIBCPP_INLINE_VISIBILITY
851_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
852                           _Tp __pattern, memory_order __order) {
853  return __atomic_fetch_and(&__a->__a_value, __pattern,
854                            __to_gcc_order(__order));
855}
856
857template <typename _Tp>
858_LIBCPP_INLINE_VISIBILITY
859_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
860                          _Tp __pattern, memory_order __order) {
861  return __atomic_fetch_or(&__a->__a_value, __pattern,
862                           __to_gcc_order(__order));
863}
864
865template <typename _Tp>
866_LIBCPP_INLINE_VISIBILITY
867_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
868                          memory_order __order) {
869  return __atomic_fetch_or(&__a->__a_value, __pattern,
870                           __to_gcc_order(__order));
871}
872
873template <typename _Tp>
874_LIBCPP_INLINE_VISIBILITY
875_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
876                           _Tp __pattern, memory_order __order) {
877  return __atomic_fetch_xor(&__a->__a_value, __pattern,
878                            __to_gcc_order(__order));
879}
880
881template <typename _Tp>
882_LIBCPP_INLINE_VISIBILITY
883_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
884                           memory_order __order) {
885  return __atomic_fetch_xor(&__a->__a_value, __pattern,
886                            __to_gcc_order(__order));
887}
888
889#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
890
891#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
892
893template <typename _Tp>
894struct __cxx_atomic_base_impl {
895
896  _LIBCPP_INLINE_VISIBILITY
897#ifndef _LIBCPP_CXX03_LANG
898    __cxx_atomic_base_impl() _NOEXCEPT = default;
899#else
900    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
901#endif // _LIBCPP_CXX03_LANG
902  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
903    : __a_value(value) {}
904  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
905};
906
907#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
908
909_LIBCPP_INLINE_VISIBILITY inline
910void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
911    __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
912}
913
914_LIBCPP_INLINE_VISIBILITY inline
915void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
916    __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
917}
918
919template<class _Tp>
920_LIBCPP_INLINE_VISIBILITY
921void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
922    __c11_atomic_init(&__a->__a_value, __val);
923}
924template<class _Tp>
925_LIBCPP_INLINE_VISIBILITY
926void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
927    __c11_atomic_init(&__a->__a_value, __val);
928}
929
930template<class _Tp>
931_LIBCPP_INLINE_VISIBILITY
932void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
933    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
934}
935template<class _Tp>
936_LIBCPP_INLINE_VISIBILITY
937void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
938    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
939}
940
941template<class _Tp>
942_LIBCPP_INLINE_VISIBILITY
943_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
944    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
945    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
946}
947template<class _Tp>
948_LIBCPP_INLINE_VISIBILITY
949_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
950    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
951    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
952}
953
954template<class _Tp>
955_LIBCPP_INLINE_VISIBILITY
956_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
957    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
958}
959template<class _Tp>
960_LIBCPP_INLINE_VISIBILITY
961_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
962    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
963}
964
965_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
966  // Avoid a switch statement so this can be a constexpr function (a C++11 constexpr body cannot contain switch).
967  return __order == memory_order_release ? memory_order_relaxed:
968         (__order == memory_order_acq_rel ? memory_order_acquire:
969             __order);
970}
971
972template<class _Tp>
973_LIBCPP_INLINE_VISIBILITY
974bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
975    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
976}
977template<class _Tp>
978_LIBCPP_INLINE_VISIBILITY
979bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
980    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
981}
982
983template<class _Tp>
984_LIBCPP_INLINE_VISIBILITY
985bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
986    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
987}
988template<class _Tp>
989_LIBCPP_INLINE_VISIBILITY
990bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
991    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value,  static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
992}
993
994template<class _Tp>
995_LIBCPP_INLINE_VISIBILITY
996_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
997    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
998}
999template<class _Tp>
1000_LIBCPP_INLINE_VISIBILITY
1001_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1002    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1003}
1004
1005template<class _Tp>
1006_LIBCPP_INLINE_VISIBILITY
1007_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1008    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1009}
1010template<class _Tp>
1011_LIBCPP_INLINE_VISIBILITY
1012_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1013    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1014}
1015
1016template<class _Tp>
1017_LIBCPP_INLINE_VISIBILITY
1018_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1019    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1020}
1021template<class _Tp>
1022_LIBCPP_INLINE_VISIBILITY
1023_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1024    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1025}
1026template<class _Tp>
1027_LIBCPP_INLINE_VISIBILITY
1028_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1029    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1030}
1031template<class _Tp>
1032_LIBCPP_INLINE_VISIBILITY
1033_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1034    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1035}
1036
1037template<class _Tp>
1038_LIBCPP_INLINE_VISIBILITY
1039_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1040    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1041}
1042template<class _Tp>
1043_LIBCPP_INLINE_VISIBILITY
1044_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1045    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1046}
1047
1048template<class _Tp>
1049_LIBCPP_INLINE_VISIBILITY
1050_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1051    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1052}
1053template<class _Tp>
1054_LIBCPP_INLINE_VISIBILITY
1055_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1056    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1057}
1058
1059template<class _Tp>
1060_LIBCPP_INLINE_VISIBILITY
1061_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1062    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1063}
1064template<class _Tp>
1065_LIBCPP_INLINE_VISIBILITY
1066_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1067    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1068}
1069
1070#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
1071
1072template <class _Tp>
1073_LIBCPP_INLINE_VISIBILITY
1074_Tp kill_dependency(_Tp __y) _NOEXCEPT
1075{
1076    return __y;
1077}
1078
1079#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
1080# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
1081# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
1082#ifndef _LIBCPP_HAS_NO_CHAR8_T
1083# define ATOMIC_CHAR8_T_LOCK_FREE   __CLANG_ATOMIC_CHAR8_T_LOCK_FREE
1084#endif
1085# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
1086# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
1087# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
1088# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
1089# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
1090# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
1091# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
1092# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
1093#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
1094# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
1095# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
1096#ifndef _LIBCPP_HAS_NO_CHAR8_T
1097# define ATOMIC_CHAR8_T_LOCK_FREE   __GCC_ATOMIC_CHAR8_T_LOCK_FREE
1098#endif
1099# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1100# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1101# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1102# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
1103# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
1104# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
1105# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
1106# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
1107#endif
1108
1109#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1110
1111template<typename _Tp>
1112struct __cxx_atomic_lock_impl {
1113
1114  _LIBCPP_INLINE_VISIBILITY
1115  __cxx_atomic_lock_impl() _NOEXCEPT
1116    : __a_value(), __a_lock(0) {}
1117  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
1118  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
1119    : __a_value(value), __a_lock(0) {}
1120
1121  _Tp __a_value;
1122  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;
1123
1124  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
1125    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1126        /*spin*/;
1127  }
1128  _LIBCPP_INLINE_VISIBILITY void __lock() const {
1129    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1130        /*spin*/;
1131  }
1132  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
1133    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1134  }
1135  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
1136    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1137  }
1138  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
1139    __lock();
1140    _Tp __old;
1141    __cxx_atomic_assign_volatile(__old, __a_value);
1142    __unlock();
1143    return __old;
1144  }
1145  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
1146    __lock();
1147    _Tp __old = __a_value;
1148    __unlock();
1149    return __old;
1150  }
1151};
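
// Illustrative outline of the fallback protocol implemented below when no lock-free
// builtins are available for _Tp: every operation takes the per-object spinlock, acts
// on the plain __a_value, and releases the lock. For example (BigPod is a placeholder
// name used only in this comment):
//
//   __cxx_atomic_lock_impl<BigPod> __a;
//   __cxx_atomic_store(&__a, BigPod{}, memory_order_seq_cst);
//   // i.e. __a.__lock(); __a.__a_value = BigPod{}; __a.__unlock();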
1152
1153template <typename _Tp>
1154_LIBCPP_INLINE_VISIBILITY
1155void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
1156  __cxx_atomic_assign_volatile(__a->__a_value, __val);
1157}
1158template <typename _Tp>
1159_LIBCPP_INLINE_VISIBILITY
1160void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
1161  __a->__a_value = __val;
1162}
1163
1164template <typename _Tp>
1165_LIBCPP_INLINE_VISIBILITY
1166void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
1167  __a->__lock();
1168  __cxx_atomic_assign_volatile(__a->__a_value, __val);
1169  __a->__unlock();
1170}
1171template <typename _Tp>
1172_LIBCPP_INLINE_VISIBILITY
1173void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
1174  __a->__lock();
1175  __a->__a_value = __val;
1176  __a->__unlock();
1177}
1178
1179template <typename _Tp>
1180_LIBCPP_INLINE_VISIBILITY
1181_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1182  return __a->__read();
1183}
1184template <typename _Tp>
1185_LIBCPP_INLINE_VISIBILITY
1186_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1187  return __a->__read();
1188}
1189
1190template <typename _Tp>
1191_LIBCPP_INLINE_VISIBILITY
1192_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1193  __a->__lock();
1194  _Tp __old;
1195  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1196  __cxx_atomic_assign_volatile(__a->__a_value, __value);
1197  __a->__unlock();
1198  return __old;
1199}
1200template <typename _Tp>
1201_LIBCPP_INLINE_VISIBILITY
1202_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1203  __a->__lock();
1204  _Tp __old = __a->__a_value;
1205  __a->__a_value = __value;
1206  __a->__unlock();
1207  return __old;
1208}
1209
1210template <typename _Tp>
1211_LIBCPP_INLINE_VISIBILITY
1212bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1213                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1214  _Tp __temp;
1215  __a->__lock();
1216  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1217  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
1218  if(__ret)
1219    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1220  else
1221    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1222  __a->__unlock();
1223  return __ret;
1224}
1225template <typename _Tp>
1226_LIBCPP_INLINE_VISIBILITY
1227bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
1228                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1229  __a->__lock();
1230  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
1231  if(__ret)
1232    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
1233  else
1234    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
1235  __a->__unlock();
1236  return __ret;
1237}
1238
1239template <typename _Tp>
1240_LIBCPP_INLINE_VISIBILITY
1241bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1242                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1243  _Tp __temp;
1244  __a->__lock();
1245  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1246  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
1247  if(__ret)
1248    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1249  else
1250    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1251  __a->__unlock();
1252  return __ret;
1253}
1254template <typename _Tp>
1255_LIBCPP_INLINE_VISIBILITY
1256bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
1257                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1258  __a->__lock();
1259  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
1260  if(__ret)
1261    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
1262  else
1263    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
1264  __a->__unlock();
1265  return __ret;
1266}
1267
1268template <typename _Tp, typename _Td>
1269_LIBCPP_INLINE_VISIBILITY
1270_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1271                           _Td __delta, memory_order) {
1272  __a->__lock();
1273  _Tp __old;
1274  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1275  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
1276  __a->__unlock();
1277  return __old;
1278}
1279template <typename _Tp, typename _Td>
1280_LIBCPP_INLINE_VISIBILITY
1281_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
1282                           _Td __delta, memory_order) {
1283  __a->__lock();
1284  _Tp __old = __a->__a_value;
1285  __a->__a_value += __delta;
1286  __a->__unlock();
1287  return __old;
1288}
1289
1290template <typename _Tp, typename _Td>
1291_LIBCPP_INLINE_VISIBILITY
1292_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
1293                           ptrdiff_t __delta, memory_order) {
1294  __a->__lock();
1295  _Tp* __old;
1296  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1297  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
1298  __a->__unlock();
1299  return __old;
1300}
1301template <typename _Tp, typename _Td>
1302_LIBCPP_INLINE_VISIBILITY
1303_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
1304                           ptrdiff_t __delta, memory_order) {
1305  __a->__lock();
1306  _Tp* __old = __a->__a_value;
1307  __a->__a_value += __delta;
1308  __a->__unlock();
1309  return __old;
1310}
1311
1312template <typename _Tp, typename _Td>
1313_LIBCPP_INLINE_VISIBILITY
1314_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1315                           _Td __delta, memory_order) {
1316  __a->__lock();
1317  _Tp __old;
1318  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1319  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
1320  __a->__unlock();
1321  return __old;
1322}
1323template <typename _Tp, typename _Td>
1324_LIBCPP_INLINE_VISIBILITY
1325_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
1326                           _Td __delta, memory_order) {
1327  __a->__lock();
1328  _Tp __old = __a->__a_value;
1329  __a->__a_value -= __delta;
1330  __a->__unlock();
1331  return __old;
1332}
1333
1334template <typename _Tp>
1335_LIBCPP_INLINE_VISIBILITY
1336_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1337                           _Tp __pattern, memory_order) {
1338  __a->__lock();
1339  _Tp __old;
1340  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1341  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
1342  __a->__unlock();
1343  return __old;
1344}
1345template <typename _Tp>
1346_LIBCPP_INLINE_VISIBILITY
1347_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
1348                           _Tp __pattern, memory_order) {
1349  __a->__lock();
1350  _Tp __old = __a->__a_value;
1351  __a->__a_value &= __pattern;
1352  __a->__unlock();
1353  return __old;
1354}
1355
1356template <typename _Tp>
1357_LIBCPP_INLINE_VISIBILITY
1358_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1359                          _Tp __pattern, memory_order) {
1360  __a->__lock();
1361  _Tp __old;
1362  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1363  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
1364  __a->__unlock();
1365  return __old;
1366}
1367template <typename _Tp>
1368_LIBCPP_INLINE_VISIBILITY
1369_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
1370                          _Tp __pattern, memory_order) {
1371  __a->__lock();
1372  _Tp __old = __a->__a_value;
1373  __a->__a_value |= __pattern;
1374  __a->__unlock();
1375  return __old;
1376}
1377
1378template <typename _Tp>
1379_LIBCPP_INLINE_VISIBILITY
1380_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1381                           _Tp __pattern, memory_order) {
1382  __a->__lock();
1383  _Tp __old;
1384  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1385  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
1386  __a->__unlock();
1387  return __old;
1388}
1389template <typename _Tp>
1390_LIBCPP_INLINE_VISIBILITY
1391_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
1392                           _Tp __pattern, memory_order) {
1393  __a->__lock();
1394  _Tp __old = __a->__a_value;
1395  __a->__a_value ^= __pattern;
1396  __a->__unlock();
1397  return __old;
1398}
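
// All of the lock-based read-modify-write helpers above follow the same
// shape; a minimal sketch of that shape, using a hypothetical helper name
// (illustrative only):
//
//   template <class _Tp, class _Fn>
//   _Tp __locked_rmw(__cxx_atomic_lock_impl<_Tp>* __a, _Fn __update) {
//     __a->__lock();
//     _Tp __old = __a->__a_value;        // read the current value
//     __a->__a_value = __update(__old);  // write the updated value
//     __a->__unlock();
//     return __old;                      // every fetch_* returns the old value
//   }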
1399
1400#ifdef __cpp_lib_atomic_is_always_lock_free
1401
1402template<typename _Tp> struct __cxx_is_always_lock_free {
1403    enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };
1404
1405#else
1406
1407template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
1408// Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
1409template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
1410template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1411template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1412template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1413#ifndef _LIBCPP_HAS_NO_CHAR8_T
1414template<> struct __cxx_is_always_lock_free<char8_t> { enum { __value = 2 == ATOMIC_CHAR8_T_LOCK_FREE }; };
1415#endif
1416template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
1417template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
1418#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
1419template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
1420#endif
1421template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1422template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1423template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1424template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1425template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1426template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1427template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1428template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1429template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1430template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1431
1432#endif //__cpp_lib_atomic_is_always_lock_free
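
// Whichever branch is taken, __cxx_is_always_lock_free<_Tp>::__value is true
// only when every object of that size is known to be lock-free.  Illustrative
// consequences (typical targets; not guaranteed everywhere):
//
//   static_assert(__cxx_is_always_lock_free<int>::__value, "");  // usually holds
//   struct _Big { char __bytes[64]; };  // hypothetical large type
//   // __cxx_is_always_lock_free<_Big>::__value is usually false, so the
//   // conditional below selects the lock-based __cxx_atomic_lock_impl<_Big>.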
1433
1434template <typename _Tp,
1435          typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
1436                                                __cxx_atomic_base_impl<_Tp>,
1437                                                __cxx_atomic_lock_impl<_Tp> >::type>
1438#else
1439template <typename _Tp,
1440          typename _Base = __cxx_atomic_base_impl<_Tp> >
1441#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1442struct __cxx_atomic_impl : public _Base {
1443  static_assert(is_trivially_copyable<_Tp>::value,
1444    "std::atomic<T> requires that 'T' be a trivially copyable type");
1445
1446  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT _LIBCPP_DEFAULT
1447  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp value) _NOEXCEPT
1448    : _Base(value) {}
1449};
1450
1451#if defined(__linux__) || (defined(__FreeBSD__) && defined(__mips__))
1452    using __cxx_contention_t = int32_t;
1453#else
1454    using __cxx_contention_t = int64_t;
1455#endif
1456
1457using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;
1458
1459#if defined(_LIBCPP_HAS_NO_THREADS)
1460#   define _LIBCPP_HAS_NO_PLATFORM_WAIT
1461#endif
1462
1463// TODO:
1464// _LIBCPP_HAS_NO_PLATFORM_WAIT is currently a "dead" macro: it is not tied
1465// into the build system anywhere, nor is it documented, and in practice it
1466// is never defined except when threads are disabled. It should be cleaned up,
1467// but in its own changeset, in case removing it happens to break "bad"
1468// users.
1469#ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT
1470
1471_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
1472_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
1473_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
1474_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);
1475
1476_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
1477_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
1478_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
1479_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);
1480
1481template <class _Atp, class _Fn>
1482struct __libcpp_atomic_wait_backoff_impl {
1483    _Atp* __a;
1484    _Fn __test_fn;
1485    _LIBCPP_AVAILABILITY_SYNC
1486    _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
1487    {
1488        if(__elapsed > chrono::microseconds(64))
1489        {
1490            auto const __monitor = __libcpp_atomic_monitor(__a);
1491            if(__test_fn())
1492                return true;
1493            __libcpp_atomic_wait(__a, __monitor);
1494        }
1495        else if(__elapsed > chrono::microseconds(4))
1496            __libcpp_thread_yield();
1497        else
1498            {} // poll
1499        return false;
1500    }
1501};
1502
1503template <class _Atp, class _Fn>
1504_LIBCPP_AVAILABILITY_SYNC
1505_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
1506{
1507    __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
1508    return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
1509}
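
// The backoff policy above escalates with the time already spent waiting:
//
//   elapsed <= 4us        : keep polling __test_fn
//   4us < elapsed <= 64us : __libcpp_thread_yield() between polls
//   elapsed > 64us        : take a monitor token, re-test, then block in
//                           __libcpp_atomic_wait() until notified
//
// Hypothetical usage sketch (illustrative only):
//
//   __cxx_atomic_contention_t __flag(0);
//   // blocks until another thread stores a non-zero value and notifies:
//   __cxx_atomic_wait(&__flag, [&] {
//     return __cxx_atomic_load(&__flag, memory_order_acquire) != 0;
//   });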
1510
1511#else // _LIBCPP_HAS_NO_PLATFORM_WAIT
1512
1513template <class _Tp>
1514_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
1515template <class _Tp>
1516_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
1517template <class _Atp, class _Fn>
1518_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
1519{
1520#if defined(_LIBCPP_HAS_NO_THREADS)
1521    using _Policy = __spinning_backoff_policy;
1522#else
1523    using _Policy = __libcpp_timed_backoff_policy;
1524#endif
1525    return __libcpp_thread_poll_with_backoff(__test_fn, _Policy());
1526}
1527
1528#endif // _LIBCPP_HAS_NO_PLATFORM_WAIT
1529
1530template <class _Atp, class _Tp>
1531struct __cxx_atomic_wait_test_fn_impl {
1532    _Atp* __a;
1533    _Tp __val;
1534    memory_order __order;
1535    _LIBCPP_INLINE_VISIBILITY bool operator()() const
1536    {
1537        return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
1538    }
1539};
1540
1541template <class _Atp, class _Tp>
1542_LIBCPP_AVAILABILITY_SYNC
1543_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
1544{
1545    __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
1546    return __cxx_atomic_wait(__a, __test_fn);
1547}
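
// In other words, the value form of __cxx_atomic_wait returns only once a
// fresh load no longer compares equal to __val.  Illustrative sketch:
//
//   __cxx_atomic_impl<int> __v(0);
//   // returns immediately if __v != 0, otherwise blocks until it changes:
//   __cxx_atomic_wait(&__v, 0, memory_order_seq_cst);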
1548
1549// general atomic<T>
1550
1551template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
1552struct __atomic_base  // false
1553{
1554    mutable __cxx_atomic_impl<_Tp> __a_;
1555
1556#if defined(__cpp_lib_atomic_is_always_lock_free)
1557  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
1558#endif
1559
1560    _LIBCPP_INLINE_VISIBILITY
1561    bool is_lock_free() const volatile _NOEXCEPT
1562        {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
1563    _LIBCPP_INLINE_VISIBILITY
1564    bool is_lock_free() const _NOEXCEPT
1565        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
1566    _LIBCPP_INLINE_VISIBILITY
1567    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1568      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1569        {__cxx_atomic_store(&__a_, __d, __m);}
1570    _LIBCPP_INLINE_VISIBILITY
1571    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1572      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1573        {__cxx_atomic_store(&__a_, __d, __m);}
1574    _LIBCPP_INLINE_VISIBILITY
1575    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1576      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1577        {return __cxx_atomic_load(&__a_, __m);}
1578    _LIBCPP_INLINE_VISIBILITY
1579    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1580      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1581        {return __cxx_atomic_load(&__a_, __m);}
1582    _LIBCPP_INLINE_VISIBILITY
1583    operator _Tp() const volatile _NOEXCEPT {return load();}
1584    _LIBCPP_INLINE_VISIBILITY
1585    operator _Tp() const _NOEXCEPT          {return load();}
1586    _LIBCPP_INLINE_VISIBILITY
1587    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1588        {return __cxx_atomic_exchange(&__a_, __d, __m);}
1589    _LIBCPP_INLINE_VISIBILITY
1590    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1591        {return __cxx_atomic_exchange(&__a_, __d, __m);}
1592    _LIBCPP_INLINE_VISIBILITY
1593    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1594                               memory_order __s, memory_order __f) volatile _NOEXCEPT
1595      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1596        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1597    _LIBCPP_INLINE_VISIBILITY
1598    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1599                               memory_order __s, memory_order __f) _NOEXCEPT
1600      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1601        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1602    _LIBCPP_INLINE_VISIBILITY
1603    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1604                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
1605      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1606        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1607    _LIBCPP_INLINE_VISIBILITY
1608    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1609                                 memory_order __s, memory_order __f) _NOEXCEPT
1610      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1611        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1612    _LIBCPP_INLINE_VISIBILITY
1613    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1614                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1615        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1616    _LIBCPP_INLINE_VISIBILITY
1617    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1618                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
1619        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1620    _LIBCPP_INLINE_VISIBILITY
1621    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1622                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1623        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1624    _LIBCPP_INLINE_VISIBILITY
1625    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1626                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
1627        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1628
1629    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1630        {__cxx_atomic_wait(&__a_, __v, __m);}
1631    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1632        {__cxx_atomic_wait(&__a_, __v, __m);}
1633    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
1634        {__cxx_atomic_notify_one(&__a_);}
1635    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
1636        {__cxx_atomic_notify_one(&__a_);}
1637    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
1638        {__cxx_atomic_notify_all(&__a_);}
1639    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
1640        {__cxx_atomic_notify_all(&__a_);}
1641
1642#if _LIBCPP_STD_VER > 17
1643    _LIBCPP_INLINE_VISIBILITY constexpr
1644    __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
1645#else
1646    _LIBCPP_INLINE_VISIBILITY
1647    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1648#endif
1649
1650    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
1651    __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
1652
1653#ifndef _LIBCPP_CXX03_LANG
1654    __atomic_base(const __atomic_base&) = delete;
1655#else
1656private:
1657    _LIBCPP_INLINE_VISIBILITY
1658    __atomic_base(const __atomic_base&);
1659#endif
1660};
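
// Illustrative use of the operations __atomic_base provides (through the
// public std::atomic<T> interface defined below; sketch only):
//
//   std::atomic<int> __x(0);
//   __x.store(1, std::memory_order_release);
//   int __seen = __x.load(std::memory_order_acquire);        // 1
//   int __prev = __x.exchange(2);                            // returns 1
//   int __expected = 2;
//   bool __ok = __x.compare_exchange_strong(__expected, 3);  // true, __x == 3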
1661
1662#if defined(__cpp_lib_atomic_is_always_lock_free)
1663template <class _Tp, bool __b>
1664_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
1665#endif
1666
1667// atomic<Integral>
1668
1669template <class _Tp>
1670struct __atomic_base<_Tp, true>
1671    : public __atomic_base<_Tp, false>
1672{
1673    typedef __atomic_base<_Tp, false> __base;
1674
1675    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17
1676    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1677
1678    _LIBCPP_INLINE_VISIBILITY
1679    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
1680
1681    _LIBCPP_INLINE_VISIBILITY
1682    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1683        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1684    _LIBCPP_INLINE_VISIBILITY
1685    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1686        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1687    _LIBCPP_INLINE_VISIBILITY
1688    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1689        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1690    _LIBCPP_INLINE_VISIBILITY
1691    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1692        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1693    _LIBCPP_INLINE_VISIBILITY
1694    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1695        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1696    _LIBCPP_INLINE_VISIBILITY
1697    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1698        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1699    _LIBCPP_INLINE_VISIBILITY
1700    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1701        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1702    _LIBCPP_INLINE_VISIBILITY
1703    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1704        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1705    _LIBCPP_INLINE_VISIBILITY
1706    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1707        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1708    _LIBCPP_INLINE_VISIBILITY
1709    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1710        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1711
1712    _LIBCPP_INLINE_VISIBILITY
1713    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
1714    _LIBCPP_INLINE_VISIBILITY
1715    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
1716    _LIBCPP_INLINE_VISIBILITY
1717    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
1718    _LIBCPP_INLINE_VISIBILITY
1719    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
1720    _LIBCPP_INLINE_VISIBILITY
1721    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
1722    _LIBCPP_INLINE_VISIBILITY
1723    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
1724    _LIBCPP_INLINE_VISIBILITY
1725    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
1726    _LIBCPP_INLINE_VISIBILITY
1727    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
1728    _LIBCPP_INLINE_VISIBILITY
1729    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1730    _LIBCPP_INLINE_VISIBILITY
1731    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1732    _LIBCPP_INLINE_VISIBILITY
1733    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1734    _LIBCPP_INLINE_VISIBILITY
1735    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1736    _LIBCPP_INLINE_VISIBILITY
1737    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
1738    _LIBCPP_INLINE_VISIBILITY
1739    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
1740    _LIBCPP_INLINE_VISIBILITY
1741    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
1742    _LIBCPP_INLINE_VISIBILITY
1743    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
1744    _LIBCPP_INLINE_VISIBILITY
1745    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1746    _LIBCPP_INLINE_VISIBILITY
1747    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
1748};
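
// Illustrative use of the integral operations (through std::atomic<T>; the
// fetch_* forms return the previous value, the operators return the result):
//
//   std::atomic<unsigned> __n(0);
//   unsigned __old = __n.fetch_add(1);  // returns 0, __n is now 1
//   unsigned __now = ++__n;             // returns 2, __n is now 2
//   __n.fetch_or(0x4u, std::memory_order_relaxed);
//   __n |= 0x8u;                        // returns fetch_or(0x8u) | 0x8u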
1749
1750// atomic<T>
1751
1752template <class _Tp>
1753struct atomic
1754    : public __atomic_base<_Tp>
1755{
1756    typedef __atomic_base<_Tp> __base;
1757    typedef _Tp value_type;
1758    typedef value_type difference_type;
1759
1760#if _LIBCPP_STD_VER > 17
1761    _LIBCPP_INLINE_VISIBILITY
1762    atomic() = default;
1763#else
1764    _LIBCPP_INLINE_VISIBILITY
1765    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1766#endif
1767
1768    _LIBCPP_INLINE_VISIBILITY
1769    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1770
1771    _LIBCPP_INLINE_VISIBILITY
1772    _Tp operator=(_Tp __d) volatile _NOEXCEPT
1773        {__base::store(__d); return __d;}
1774    _LIBCPP_INLINE_VISIBILITY
1775    _Tp operator=(_Tp __d) _NOEXCEPT
1776        {__base::store(__d); return __d;}
1777
1778    atomic& operator=(const atomic&) = delete;
1779    atomic& operator=(const atomic&) volatile = delete;
1780};
1781
1782// atomic<T*>
1783
1784template <class _Tp>
1785struct atomic<_Tp*>
1786    : public __atomic_base<_Tp*>
1787{
1788    typedef __atomic_base<_Tp*> __base;
1789    typedef _Tp* value_type;
1790    typedef ptrdiff_t difference_type;
1791
1792    _LIBCPP_INLINE_VISIBILITY
1793    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1794
1795    _LIBCPP_INLINE_VISIBILITY
1796    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1797
1798    _LIBCPP_INLINE_VISIBILITY
1799    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1800        {__base::store(__d); return __d;}
1801    _LIBCPP_INLINE_VISIBILITY
1802    _Tp* operator=(_Tp* __d) _NOEXCEPT
1803        {__base::store(__d); return __d;}
1804
1805    _LIBCPP_INLINE_VISIBILITY
1806    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
1807        // __atomic_fetch_add accepts function pointers, guard against them.
1808        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
1809        return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
1810    }
1811
1812    _LIBCPP_INLINE_VISIBILITY
1813    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
1814        // __atomic_fetch_add accepts function pointers, guard against them.
1815        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
1816        return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
1817    }
1818
1819    _LIBCPP_INLINE_VISIBILITY
1820    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
1821        // __atomic_fetch_sub accepts function pointers, guard against them.
1822        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
1823        return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
1824    }
1825
1826    _LIBCPP_INLINE_VISIBILITY
1827    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
1828        // __atomic_fetch_sub accepts function pointers, guard against them.
1829        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
1830        return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
1831    }
1832
1833    _LIBCPP_INLINE_VISIBILITY
1834    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
1835    _LIBCPP_INLINE_VISIBILITY
1836    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
1837    _LIBCPP_INLINE_VISIBILITY
1838    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
1839    _LIBCPP_INLINE_VISIBILITY
1840    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
1841    _LIBCPP_INLINE_VISIBILITY
1842    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
1843    _LIBCPP_INLINE_VISIBILITY
1844    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
1845    _LIBCPP_INLINE_VISIBILITY
1846    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
1847    _LIBCPP_INLINE_VISIBILITY
1848    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
1849    _LIBCPP_INLINE_VISIBILITY
1850    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1851    _LIBCPP_INLINE_VISIBILITY
1852    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1853    _LIBCPP_INLINE_VISIBILITY
1854    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1855    _LIBCPP_INLINE_VISIBILITY
1856    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1857
1858    atomic& operator=(const atomic&) = delete;
1859    atomic& operator=(const atomic&) volatile = delete;
1860};
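
// Illustrative use of the pointer specialization: arithmetic is in units of
// whole elements (ptrdiff_t), exactly like raw pointer arithmetic:
//
//   int __buf[4] = {0, 1, 2, 3};
//   std::atomic<int*> __p(__buf);
//   int* __old = __p.fetch_add(2);  // returns __buf, __p now points at __buf + 2
//   --__p;                          // __p now points at __buf + 1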
1861
1862// atomic_is_lock_free
1863
1864template <class _Tp>
1865_LIBCPP_INLINE_VISIBILITY
1866bool
1867atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1868{
1869    return __o->is_lock_free();
1870}
1871
1872template <class _Tp>
1873_LIBCPP_INLINE_VISIBILITY
1874bool
1875atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1876{
1877    return __o->is_lock_free();
1878}
1879
1880// atomic_init
1881
1882template <class _Tp>
1883_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
1884void
1885atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1886{
1887    __cxx_atomic_init(&__o->__a_, __d);
1888}
1889
1890template <class _Tp>
1891_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
1892void
1893atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1894{
1895    __cxx_atomic_init(&__o->__a_, __d);
1896}
1897
1898// atomic_store
1899
1900template <class _Tp>
1901_LIBCPP_INLINE_VISIBILITY
1902void
1903atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1904{
1905    __o->store(__d);
1906}
1907
1908template <class _Tp>
1909_LIBCPP_INLINE_VISIBILITY
1910void
1911atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1912{
1913    __o->store(__d);
1914}
1915
1916// atomic_store_explicit
1917
1918template <class _Tp>
1919_LIBCPP_INLINE_VISIBILITY
1920void
1921atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
1922  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1923{
1924    __o->store(__d, __m);
1925}
1926
1927template <class _Tp>
1928_LIBCPP_INLINE_VISIBILITY
1929void
1930atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
1931  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1932{
1933    __o->store(__d, __m);
1934}
1935
1936// atomic_load
1937
1938template <class _Tp>
1939_LIBCPP_INLINE_VISIBILITY
1940_Tp
1941atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1942{
1943    return __o->load();
1944}
1945
1946template <class _Tp>
1947_LIBCPP_INLINE_VISIBILITY
1948_Tp
1949atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1950{
1951    return __o->load();
1952}
1953
1954// atomic_load_explicit
1955
1956template <class _Tp>
1957_LIBCPP_INLINE_VISIBILITY
1958_Tp
1959atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1960  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1961{
1962    return __o->load(__m);
1963}
1964
1965template <class _Tp>
1966_LIBCPP_INLINE_VISIBILITY
1967_Tp
1968atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1969  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1970{
1971    return __o->load(__m);
1972}
1973
1974// atomic_exchange
1975
1976template <class _Tp>
1977_LIBCPP_INLINE_VISIBILITY
1978_Tp
1979atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1980{
1981    return __o->exchange(__d);
1982}
1983
1984template <class _Tp>
1985_LIBCPP_INLINE_VISIBILITY
1986_Tp
1987atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1988{
1989    return __o->exchange(__d);
1990}
1991
1992// atomic_exchange_explicit
1993
1994template <class _Tp>
1995_LIBCPP_INLINE_VISIBILITY
1996_Tp
1997atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
1998{
1999    return __o->exchange(__d, __m);
2000}
2001
2002template <class _Tp>
2003_LIBCPP_INLINE_VISIBILITY
2004_Tp
2005atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
2006{
2007    return __o->exchange(__d, __m);
2008}
2009
2010// atomic_compare_exchange_weak
2011
2012template <class _Tp>
2013_LIBCPP_INLINE_VISIBILITY
2014bool
2015atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2016{
2017    return __o->compare_exchange_weak(*__e, __d);
2018}
2019
2020template <class _Tp>
2021_LIBCPP_INLINE_VISIBILITY
2022bool
2023atomic_compare_exchange_weak(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2024{
2025    return __o->compare_exchange_weak(*__e, __d);
2026}
2027
2028// atomic_compare_exchange_strong
2029
2030template <class _Tp>
2031_LIBCPP_INLINE_VISIBILITY
2032bool
2033atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2034{
2035    return __o->compare_exchange_strong(*__e, __d);
2036}
2037
2038template <class _Tp>
2039_LIBCPP_INLINE_VISIBILITY
2040bool
2041atomic_compare_exchange_strong(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2042{
2043    return __o->compare_exchange_strong(*__e, __d);
2044}
2045
2046// atomic_compare_exchange_weak_explicit
2047
2048template <class _Tp>
2049_LIBCPP_INLINE_VISIBILITY
2050bool
2051atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
2052                                      typename atomic<_Tp>::value_type __d,
2053                                      memory_order __s, memory_order __f) _NOEXCEPT
2054  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2055{
2056    return __o->compare_exchange_weak(*__e, __d, __s, __f);
2057}
2058
2059template <class _Tp>
2060_LIBCPP_INLINE_VISIBILITY
2061bool
2062atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
2063                                      memory_order __s, memory_order __f) _NOEXCEPT
2064  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2065{
2066    return __o->compare_exchange_weak(*__e, __d, __s, __f);
2067}
2068
2069// atomic_compare_exchange_strong_explicit
2070
2071template <class _Tp>
2072_LIBCPP_INLINE_VISIBILITY
2073bool
2074atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
2075                                        typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
2076                                        memory_order __s, memory_order __f) _NOEXCEPT
2077  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2078{
2079    return __o->compare_exchange_strong(*__e, __d, __s, __f);
2080}
2081
2082template <class _Tp>
2083_LIBCPP_INLINE_VISIBILITY
2084bool
2085atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
2086                                        typename atomic<_Tp>::value_type __d,
2087                                        memory_order __s, memory_order __f) _NOEXCEPT
2088  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2089{
2090    return __o->compare_exchange_strong(*__e, __d, __s, __f);
2091}
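
// Illustrative compare-exchange retry loop using the free functions above
// (the weak form may fail spuriously, so it is retried):
//
//   std::atomic<int> __counter(0);
//   int __expected = atomic_load(&__counter);
//   while (!atomic_compare_exchange_weak_explicit(&__counter, &__expected,
//                                                 __expected + 1,
//                                                 std::memory_order_acq_rel,
//                                                 std::memory_order_relaxed)) {
//     // on failure, __expected has been refreshed with the current value
//   }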
2092
2093// atomic_wait
2094
2095template <class _Tp>
2096_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2097void atomic_wait(const volatile atomic<_Tp>* __o,
2098                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2099{
2100    return __o->wait(__v);
2101}
2102
2103template <class _Tp>
2104_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2105void atomic_wait(const atomic<_Tp>* __o,
2106                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2107{
2108    return __o->wait(__v);
2109}
2110
2111// atomic_wait_explicit
2112
2113template <class _Tp>
2114_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2115void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
2116                          typename atomic<_Tp>::value_type __v,
2117                          memory_order __m) _NOEXCEPT
2118  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2119{
2120    return __o->wait(__v, __m);
2121}
2122
2123template <class _Tp>
2124_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2125void atomic_wait_explicit(const atomic<_Tp>* __o,
2126                          typename atomic<_Tp>::value_type __v,
2127                          memory_order __m) _NOEXCEPT
2128  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2129{
2130    return __o->wait(__v, __m);
2131}
2132
2133// atomic_notify_one
2134
2135template <class _Tp>
2136_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2137void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
2138{
2139    __o->notify_one();
2140}
2141template <class _Tp>
2142_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2143void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
2144{
2145    __o->notify_one();
2146}
2147
2148// atomic_notify_one
2149
2150template <class _Tp>
2151_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2152void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
2153{
2154    __o->notify_all();
2155}
2156template <class _Tp>
2157_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2158void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
2159{
2160    __o->notify_all();
2161}
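
// Illustrative pairing of the wait/notify free functions (sketch only):
//
//   std::atomic<bool> __ready(false);
//   // waiting thread: blocks while the stored value still equals `false`
//   atomic_wait(&__ready, false);
//   // releasing thread:
//   atomic_store(&__ready, true);
//   atomic_notify_one(&__ready);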
2162
2163// atomic_fetch_add
2164
2165template <class _Tp>
2166_LIBCPP_INLINE_VISIBILITY
2167_Tp
2168atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2169{
2170    return __o->fetch_add(__op);
2171}
2172
2173template <class _Tp>
2174_LIBCPP_INLINE_VISIBILITY
2175_Tp
2176atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2177{
2178    return __o->fetch_add(__op);
2179}
2180
2181// atomic_fetch_add_explicit
2182
2183template <class _Tp>
2184_LIBCPP_INLINE_VISIBILITY
2185_Tp atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2186{
2187    return __o->fetch_add(__op, __m);
2188}
2189
2190template <class _Tp>
2191_LIBCPP_INLINE_VISIBILITY
2192_Tp atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2193{
2194    return __o->fetch_add(__op, __m);
2195}
2196
2197// atomic_fetch_sub
2198
2199template <class _Tp>
2200_LIBCPP_INLINE_VISIBILITY
2201_Tp atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2202{
2203    return __o->fetch_sub(__op);
2204}
2205
2206template <class _Tp>
2207_LIBCPP_INLINE_VISIBILITY
2208_Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2209{
2210    return __o->fetch_sub(__op);
2211}
2212
2213// atomic_fetch_sub_explicit
2214
2215template <class _Tp>
2216_LIBCPP_INLINE_VISIBILITY
2217_Tp atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2218{
2219    return __o->fetch_sub(__op, __m);
2220}
2221
2222template <class _Tp>
2223_LIBCPP_INLINE_VISIBILITY
2224_Tp atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2225{
2226    return __o->fetch_sub(__op, __m);
2227}
2228
2229// atomic_fetch_and
2230
2231template <class _Tp>
2232_LIBCPP_INLINE_VISIBILITY
2233typename enable_if
2234<
2235    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2236    _Tp
2237>::type
2238atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2239{
2240    return __o->fetch_and(__op);
2241}
2242
2243template <class _Tp>
2244_LIBCPP_INLINE_VISIBILITY
2245typename enable_if
2246<
2247    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2248    _Tp
2249>::type
2250atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2251{
2252    return __o->fetch_and(__op);
2253}
2254
2255// atomic_fetch_and_explicit
2256
2257template <class _Tp>
2258_LIBCPP_INLINE_VISIBILITY
2259typename enable_if
2260<
2261    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2262    _Tp
2263>::type
2264atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2265{
2266    return __o->fetch_and(__op, __m);
2267}
2268
2269template <class _Tp>
2270_LIBCPP_INLINE_VISIBILITY
2271typename enable_if
2272<
2273    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2274    _Tp
2275>::type
2276atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2277{
2278    return __o->fetch_and(__op, __m);
2279}
2280
2281// atomic_fetch_or
2282
2283template <class _Tp>
2284_LIBCPP_INLINE_VISIBILITY
2285typename enable_if
2286<
2287    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2288    _Tp
2289>::type
2290atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2291{
2292    return __o->fetch_or(__op);
2293}
2294
2295template <class _Tp>
2296_LIBCPP_INLINE_VISIBILITY
2297typename enable_if
2298<
2299    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2300    _Tp
2301>::type
2302atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2303{
2304    return __o->fetch_or(__op);
2305}
2306
2307// atomic_fetch_or_explicit
2308
2309template <class _Tp>
2310_LIBCPP_INLINE_VISIBILITY
2311typename enable_if
2312<
2313    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2314    _Tp
2315>::type
2316atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2317{
2318    return __o->fetch_or(__op, __m);
2319}
2320
2321template <class _Tp>
2322_LIBCPP_INLINE_VISIBILITY
2323typename enable_if
2324<
2325    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2326    _Tp
2327>::type
2328atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2329{
2330    return __o->fetch_or(__op, __m);
2331}
2332
2333// atomic_fetch_xor
2334
2335template <class _Tp>
2336_LIBCPP_INLINE_VISIBILITY
2337typename enable_if
2338<
2339    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2340    _Tp
2341>::type
2342atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2343{
2344    return __o->fetch_xor(__op);
2345}
2346
2347template <class _Tp>
2348_LIBCPP_INLINE_VISIBILITY
2349typename enable_if
2350<
2351    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2352    _Tp
2353>::type
2354atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2355{
2356    return __o->fetch_xor(__op);
2357}
2358
2359// atomic_fetch_xor_explicit
2360
2361template <class _Tp>
2362_LIBCPP_INLINE_VISIBILITY
2363typename enable_if
2364<
2365    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2366    _Tp
2367>::type
2368atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2369{
2370    return __o->fetch_xor(__op, __m);
2371}
2372
2373template <class _Tp>
2374_LIBCPP_INLINE_VISIBILITY
2375typename enable_if
2376<
2377    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2378    _Tp
2379>::type
2380atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2381{
2382    return __o->fetch_xor(__op, __m);
2383}
2384
2385// flag type and operations
2386
2387typedef struct atomic_flag
2388{
2389    __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;
2390
2391    _LIBCPP_INLINE_VISIBILITY
2392    bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2393        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
2394    _LIBCPP_INLINE_VISIBILITY
2395    bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2396        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
2397
2398    _LIBCPP_INLINE_VISIBILITY
2399    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2400        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2401    _LIBCPP_INLINE_VISIBILITY
2402    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2403        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2404    _LIBCPP_INLINE_VISIBILITY
2405    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2406        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2407    _LIBCPP_INLINE_VISIBILITY
2408    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2409        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2410
2411    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2412    void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2413        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
2414    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2415    void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2416        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
2417    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2418    void notify_one() volatile _NOEXCEPT
2419        {__cxx_atomic_notify_one(&__a_);}
2420    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2421    void notify_one() _NOEXCEPT
2422        {__cxx_atomic_notify_one(&__a_);}
2423    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2424    void notify_all() volatile _NOEXCEPT
2425        {__cxx_atomic_notify_all(&__a_);}
2426    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2427    void notify_all() _NOEXCEPT
2428        {__cxx_atomic_notify_all(&__a_);}
2429
2430#if _LIBCPP_STD_VER > 17
2431    _LIBCPP_INLINE_VISIBILITY constexpr
2432    atomic_flag() _NOEXCEPT : __a_(false) {}
2433#else
2434    _LIBCPP_INLINE_VISIBILITY
2435    atomic_flag() _NOEXCEPT _LIBCPP_DEFAULT
2436#endif
2437
2438    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
2439    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION
2440
2441#ifndef _LIBCPP_CXX03_LANG
2442    atomic_flag(const atomic_flag&) = delete;
2443    atomic_flag& operator=(const atomic_flag&) = delete;
2444    atomic_flag& operator=(const atomic_flag&) volatile = delete;
2445#else
2446private:
2447    _LIBCPP_INLINE_VISIBILITY
2448    atomic_flag(const atomic_flag&);
2449    _LIBCPP_INLINE_VISIBILITY
2450    atomic_flag& operator=(const atomic_flag&);
2451    _LIBCPP_INLINE_VISIBILITY
2452    atomic_flag& operator=(const atomic_flag&) volatile;
2453#endif
2454} atomic_flag;
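
// Illustrative spinlock built on atomic_flag (sketch only):
//
//   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
//   while (__lock.test_and_set(std::memory_order_acquire)) {
//     // spin; with C++20 one can instead __lock.wait(true) to block
//   }
//   // ... critical section ...
//   __lock.clear(std::memory_order_release);
//   __lock.notify_one();  // wakes a waiter blocked in wait(true), if any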
2455
2456
2457inline _LIBCPP_INLINE_VISIBILITY
2458bool
2459atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
2460{
2461    return __o->test();
2462}
2463
2464inline _LIBCPP_INLINE_VISIBILITY
2465bool
2466atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
2467{
2468    return __o->test();
2469}
2470
2471inline _LIBCPP_INLINE_VISIBILITY
2472bool
2473atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2474{
2475    return __o->test(__m);
2476}
2477
2478inline _LIBCPP_INLINE_VISIBILITY
2479bool
2480atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
2481{
2482    return __o->test(__m);
2483}
2484
2485inline _LIBCPP_INLINE_VISIBILITY
2486bool
2487atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
2488{
2489    return __o->test_and_set();
2490}
2491
2492inline _LIBCPP_INLINE_VISIBILITY
2493bool
2494atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
2495{
2496    return __o->test_and_set();
2497}
2498
2499inline _LIBCPP_INLINE_VISIBILITY
2500bool
2501atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2502{
2503    return __o->test_and_set(__m);
2504}
2505
2506inline _LIBCPP_INLINE_VISIBILITY
2507bool
2508atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2509{
2510    return __o->test_and_set(__m);
2511}
2512
2513inline _LIBCPP_INLINE_VISIBILITY
2514void
2515atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
2516{
2517    __o->clear();
2518}
2519
2520inline _LIBCPP_INLINE_VISIBILITY
2521void
2522atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
2523{
2524    __o->clear();
2525}
2526
2527inline _LIBCPP_INLINE_VISIBILITY
2528void
2529atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2530{
2531    __o->clear(__m);
2532}
2533
2534inline _LIBCPP_INLINE_VISIBILITY
2535void
2536atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2537{
2538    __o->clear(__m);
2539}
2540
2541inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2542void
2543atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
2544{
2545    __o->wait(__v);
2546}
2547
2548inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2549void
2550atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
2551{
2552    __o->wait(__v);
2553}
2554
2555inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2556void
2557atomic_flag_wait_explicit(const volatile atomic_flag* __o,
2558                          bool __v, memory_order __m) _NOEXCEPT
2559{
2560    __o->wait(__v, __m);
2561}
2562
2563inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2564void
2565atomic_flag_wait_explicit(const atomic_flag* __o,
2566                          bool __v, memory_order __m) _NOEXCEPT
2567{
2568    __o->wait(__v, __m);
2569}
2570
2571inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2572void
2573atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
2574{
2575    __o->notify_one();
2576}
2577
2578inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2579void
2580atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
2581{
2582    __o->notify_one();
2583}
2584
2585inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2586void
2587atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
2588{
2589    __o->notify_all();
2590}
2591
2592inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2593void
2594atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
2595{
2596    __o->notify_all();
2597}
2598
2599// fences
2600
2601inline _LIBCPP_INLINE_VISIBILITY
2602void
2603atomic_thread_fence(memory_order __m) _NOEXCEPT
2604{
2605    __cxx_atomic_thread_fence(__m);
2606}
2607
2608inline _LIBCPP_INLINE_VISIBILITY
2609void
2610atomic_signal_fence(memory_order __m) _NOEXCEPT
2611{
2612    __cxx_atomic_signal_fence(__m);
2613}
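
// Illustrative use of the fences: a release fence before a relaxed store
// synchronizes with an acquire fence after a relaxed load that reads it
// (sketch only; __flag is a std::atomic<bool>, __data a plain int):
//
//   // writer thread:
//   __data = 42;                                           // non-atomic write
//   atomic_thread_fence(memory_order_release);
//   __flag.store(true, memory_order_relaxed);
//   // reader thread:
//   while (!__flag.load(memory_order_relaxed)) { /* spin */ }
//   atomic_thread_fence(memory_order_acquire);
//   assert(__data == 42);                                  // guaranteed visible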
2614
2615// Atomics for standard typedef types
2616
2617typedef atomic<bool>               atomic_bool;
2618typedef atomic<char>               atomic_char;
2619typedef atomic<signed char>        atomic_schar;
2620typedef atomic<unsigned char>      atomic_uchar;
2621typedef atomic<short>              atomic_short;
2622typedef atomic<unsigned short>     atomic_ushort;
2623typedef atomic<int>                atomic_int;
2624typedef atomic<unsigned int>       atomic_uint;
2625typedef atomic<long>               atomic_long;
2626typedef atomic<unsigned long>      atomic_ulong;
2627typedef atomic<long long>          atomic_llong;
2628typedef atomic<unsigned long long> atomic_ullong;
2629#ifndef _LIBCPP_HAS_NO_CHAR8_T
2630typedef atomic<char8_t>            atomic_char8_t;
2631#endif
2632typedef atomic<char16_t>           atomic_char16_t;
2633typedef atomic<char32_t>           atomic_char32_t;
2634#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
2635typedef atomic<wchar_t>            atomic_wchar_t;
2636#endif
2637
2638typedef atomic<int_least8_t>   atomic_int_least8_t;
2639typedef atomic<uint_least8_t>  atomic_uint_least8_t;
2640typedef atomic<int_least16_t>  atomic_int_least16_t;
2641typedef atomic<uint_least16_t> atomic_uint_least16_t;
2642typedef atomic<int_least32_t>  atomic_int_least32_t;
2643typedef atomic<uint_least32_t> atomic_uint_least32_t;
2644typedef atomic<int_least64_t>  atomic_int_least64_t;
2645typedef atomic<uint_least64_t> atomic_uint_least64_t;
2646
2647typedef atomic<int_fast8_t>   atomic_int_fast8_t;
2648typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
2649typedef atomic<int_fast16_t>  atomic_int_fast16_t;
2650typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
2651typedef atomic<int_fast32_t>  atomic_int_fast32_t;
2652typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
2653typedef atomic<int_fast64_t>  atomic_int_fast64_t;
2654typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
2655
2656typedef atomic< int8_t>  atomic_int8_t;
2657typedef atomic<uint8_t>  atomic_uint8_t;
2658typedef atomic< int16_t> atomic_int16_t;
2659typedef atomic<uint16_t> atomic_uint16_t;
2660typedef atomic< int32_t> atomic_int32_t;
2661typedef atomic<uint32_t> atomic_uint32_t;
2662typedef atomic< int64_t> atomic_int64_t;
2663typedef atomic<uint64_t> atomic_uint64_t;
2664
2665typedef atomic<intptr_t>  atomic_intptr_t;
2666typedef atomic<uintptr_t> atomic_uintptr_t;
2667typedef atomic<size_t>    atomic_size_t;
2668typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
2669typedef atomic<intmax_t>  atomic_intmax_t;
2670typedef atomic<uintmax_t> atomic_uintmax_t;
2671
2672// atomic_*_lock_free : prefer the contention type when it is lock-free, otherwise the largest lock-free integer type
2673
2674#ifdef __cpp_lib_atomic_is_always_lock_free
2675# define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0)
2676#else
2677# define _LIBCPP_CONTENTION_LOCK_FREE false
2678#endif
2679
2680#if ATOMIC_LLONG_LOCK_FREE == 2
2681typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type          __libcpp_signed_lock_free;
2682typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
2683#elif ATOMIC_INT_LOCK_FREE == 2
2684typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type                __libcpp_signed_lock_free;
2685typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type       __libcpp_unsigned_lock_free;
2686#elif ATOMIC_SHORT_LOCK_FREE == 2
2687typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type              __libcpp_signed_lock_free;
2688typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type     __libcpp_unsigned_lock_free;
2689#elif ATOMIC_CHAR_LOCK_FREE == 2
2690typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type               __libcpp_signed_lock_free;
2691typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type      __libcpp_unsigned_lock_free;
2692#else
2693    // No signed/unsigned lock-free types
2694#endif
2695
2696typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free;
2697typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
2698
2699#define ATOMIC_FLAG_INIT {false}
2700#define ATOMIC_VAR_INIT(__v) {__v}
2701
2702_LIBCPP_END_NAMESPACE_STD
2703
2704#endif // _LIBCPP_ATOMIC
2705