xref: /freebsd/contrib/llvm-project/libcxx/include/atomic (revision 972a253a57b6f144b0e4a3e2080a2a0076ec55a0)
1// -*- C++ -*-
2//===----------------------------------------------------------------------===//
3//
4// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
5// See https://llvm.org/LICENSE.txt for license information.
6// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7//
8//===----------------------------------------------------------------------===//
9
10#ifndef _LIBCPP_ATOMIC
11#define _LIBCPP_ATOMIC
12
13/*
14    atomic synopsis
15
16namespace std
17{
18
19// feature test macro [version.syn]
20
21#define __cpp_lib_atomic_is_always_lock_free
22#define __cpp_lib_atomic_flag_test
23#define __cpp_lib_atomic_lock_free_type_aliases
24#define __cpp_lib_atomic_wait
25
26 // order and consistency
27
28 enum memory_order: unspecified // enum class in C++20
29 {
30    relaxed,
31    consume, // load-consume
32    acquire, // load-acquire
33    release, // store-release
34    acq_rel, // store-release load-acquire
35    seq_cst // store-release load-acquire
36 };
37
38 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
39 inline constexpr auto memory_order_consume = memory_order::consume;
40 inline constexpr auto memory_order_acquire = memory_order::acquire;
41 inline constexpr auto memory_order_release = memory_order::release;
42 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
43 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
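
// Illustrative example (editorial addition, not part of the synopsis):
// release/acquire message passing. Data written before a store-release is
// visible to a thread whose load-acquire observes that store.
//
//   #include <atomic>
//   #include <cassert>
//
//   std::atomic<bool> ready{false};
//   int payload = 0;
//
//   void producer() {
//       payload = 42;                                  // plain write
//       ready.store(true, std::memory_order_release);  // publish it
//   }
//
//   void consumer() {
//       while (!ready.load(std::memory_order_acquire)) // spin until published
//           ;
//       assert(payload == 42);                         // guaranteed to hold
//   }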
44
45template <class T> T kill_dependency(T y) noexcept;
46
47// lock-free property
48
49#define ATOMIC_BOOL_LOCK_FREE unspecified
50#define ATOMIC_CHAR_LOCK_FREE unspecified
51#define ATOMIC_CHAR8_T_LOCK_FREE unspecified // C++20
52#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
53#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
54#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
55#define ATOMIC_SHORT_LOCK_FREE unspecified
56#define ATOMIC_INT_LOCK_FREE unspecified
57#define ATOMIC_LONG_LOCK_FREE unspecified
58#define ATOMIC_LLONG_LOCK_FREE unspecified
59#define ATOMIC_POINTER_LOCK_FREE unspecified
60
61template <class T>
62struct atomic
63{
64    using value_type = T;
65
66    static constexpr bool is_always_lock_free;
67    bool is_lock_free() const volatile noexcept;
68    bool is_lock_free() const noexcept;
69
70    atomic() noexcept = default; // until C++20
71    constexpr atomic() noexcept(is_nothrow_default_constructible_v<T>); // since C++20
72    constexpr atomic(T desr) noexcept;
73    atomic(const atomic&) = delete;
74    atomic& operator=(const atomic&) = delete;
75    atomic& operator=(const atomic&) volatile = delete;
76
77    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
78    T load(memory_order m = memory_order_seq_cst) const noexcept;
79    operator T() const volatile noexcept;
80    operator T() const noexcept;
81    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
82    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
83    T operator=(T) volatile noexcept;
84    T operator=(T) noexcept;
85
86    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
87    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
88    bool compare_exchange_weak(T& expc, T desr,
89                               memory_order s, memory_order f) volatile noexcept;
90    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
91    bool compare_exchange_strong(T& expc, T desr,
92                                 memory_order s, memory_order f) volatile noexcept;
93    bool compare_exchange_strong(T& expc, T desr,
94                                 memory_order s, memory_order f) noexcept;
95    bool compare_exchange_weak(T& expc, T desr,
96                               memory_order m = memory_order_seq_cst) volatile noexcept;
97    bool compare_exchange_weak(T& expc, T desr,
98                               memory_order m = memory_order_seq_cst) noexcept;
99    bool compare_exchange_strong(T& expc, T desr,
100                                memory_order m = memory_order_seq_cst) volatile noexcept;
101    bool compare_exchange_strong(T& expc, T desr,
102                                 memory_order m = memory_order_seq_cst) noexcept;
103
104    void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
105    void wait(T, memory_order = memory_order::seq_cst) const noexcept;
106    void notify_one() volatile noexcept;
107    void notify_one() noexcept;
108    void notify_all() volatile noexcept;
109    void notify_all() noexcept;
110};
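
// Illustrative example (editorial addition, not part of the synopsis): the
// usual compare_exchange_weak retry loop, here applying a saturating
// increment to an atomic<int>. On failure the expected value is refreshed
// with the current contents and the loop retries.
//
//   #include <atomic>
//
//   int saturating_increment(std::atomic<int>& a, int max) {
//       int expected = a.load(std::memory_order_relaxed);
//       int desired;
//       do {
//           desired = expected < max ? expected + 1 : expected;
//       } while (!a.compare_exchange_weak(expected, desired,
//                                         std::memory_order_acq_rel,
//                                         std::memory_order_relaxed));
//       return desired;
//   }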
111
112template <>
113struct atomic<integral>
114{
115    using value_type = integral;
116    using difference_type = value_type;
117
118    static constexpr bool is_always_lock_free;
119    bool is_lock_free() const volatile noexcept;
120    bool is_lock_free() const noexcept;
121
122    atomic() noexcept = default;
123    constexpr atomic(integral desr) noexcept;
124    atomic(const atomic&) = delete;
125    atomic& operator=(const atomic&) = delete;
126    atomic& operator=(const atomic&) volatile = delete;
127
128    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
129    integral load(memory_order m = memory_order_seq_cst) const noexcept;
130    operator integral() const volatile noexcept;
131    operator integral() const noexcept;
132    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134    integral operator=(integral desr) volatile noexcept;
135    integral operator=(integral desr) noexcept;
136
137    integral exchange(integral desr,
138                      memory_order m = memory_order_seq_cst) volatile noexcept;
139    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
140    bool compare_exchange_weak(integral& expc, integral desr,
141                               memory_order s, memory_order f) volatile noexcept;
142    bool compare_exchange_weak(integral& expc, integral desr,
143                               memory_order s, memory_order f) noexcept;
144    bool compare_exchange_strong(integral& expc, integral desr,
145                                 memory_order s, memory_order f) volatile noexcept;
146    bool compare_exchange_strong(integral& expc, integral desr,
147                                 memory_order s, memory_order f) noexcept;
148    bool compare_exchange_weak(integral& expc, integral desr,
149                               memory_order m = memory_order_seq_cst) volatile noexcept;
150    bool compare_exchange_weak(integral& expc, integral desr,
151                               memory_order m = memory_order_seq_cst) noexcept;
152    bool compare_exchange_strong(integral& expc, integral desr,
153                                memory_order m = memory_order_seq_cst) volatile noexcept;
154    bool compare_exchange_strong(integral& expc, integral desr,
155                                 memory_order m = memory_order_seq_cst) noexcept;
156
157    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
158    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
159    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
161    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
162    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
163    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
164    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
165    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
167
168    integral operator++(int) volatile noexcept;
169    integral operator++(int) noexcept;
170    integral operator--(int) volatile noexcept;
171    integral operator--(int) noexcept;
172    integral operator++() volatile noexcept;
173    integral operator++() noexcept;
174    integral operator--() volatile noexcept;
175    integral operator--() noexcept;
176    integral operator+=(integral op) volatile noexcept;
177    integral operator+=(integral op) noexcept;
178    integral operator-=(integral op) volatile noexcept;
179    integral operator-=(integral op) noexcept;
180    integral operator&=(integral op) volatile noexcept;
181    integral operator&=(integral op) noexcept;
182    integral operator|=(integral op) volatile noexcept;
183    integral operator|=(integral op) noexcept;
184    integral operator^=(integral op) volatile noexcept;
185    integral operator^=(integral op) noexcept;
186
187    void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
188    void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
189    void notify_one() volatile noexcept;
190    void notify_one() noexcept;
191    void notify_all() volatile noexcept;
192    void notify_all() noexcept;
193};
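
// Illustrative example (editorial addition, not part of the synopsis): the
// integral specializations add fetch-and-modify operations, so a shared
// counter needs no explicit CAS loop.
//
//   #include <atomic>
//
//   std::atomic<unsigned long> hits{0};
//
//   void record_hit() {
//       hits.fetch_add(1, std::memory_order_relaxed); // returns the previous value
//       // ++hits; increments the same way, but always with seq_cst ordering
//   }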
194
195template <class T>
196struct atomic<T*>
197{
198    using value_type = T*;
199    using difference_type = ptrdiff_t;
200
201    static constexpr bool is_always_lock_free;
202    bool is_lock_free() const volatile noexcept;
203    bool is_lock_free() const noexcept;
204
205    atomic() noexcept = default; // until C++20
206    constexpr atomic() noexcept; // since C++20
207    constexpr atomic(T* desr) noexcept;
208    atomic(const atomic&) = delete;
209    atomic& operator=(const atomic&) = delete;
210    atomic& operator=(const atomic&) volatile = delete;
211
212    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
213    T* load(memory_order m = memory_order_seq_cst) const noexcept;
214    operator T*() const volatile noexcept;
215    operator T*() const noexcept;
216    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
217    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
218    T* operator=(T*) volatile noexcept;
219    T* operator=(T*) noexcept;
220
221    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
222    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
223    bool compare_exchange_weak(T*& expc, T* desr,
224                               memory_order s, memory_order f) volatile noexcept;
225    bool compare_exchange_weak(T*& expc, T* desr,
226                               memory_order s, memory_order f) noexcept;
227    bool compare_exchange_strong(T*& expc, T* desr,
228                                 memory_order s, memory_order f) volatile noexcept;
229    bool compare_exchange_strong(T*& expc, T* desr,
230                                 memory_order s, memory_order f) noexcept;
231    bool compare_exchange_weak(T*& expc, T* desr,
232                               memory_order m = memory_order_seq_cst) volatile noexcept;
233    bool compare_exchange_weak(T*& expc, T* desr,
234                               memory_order m = memory_order_seq_cst) noexcept;
235    bool compare_exchange_strong(T*& expc, T* desr,
236                                memory_order m = memory_order_seq_cst) volatile noexcept;
237    bool compare_exchange_strong(T*& expc, T* desr,
238                                 memory_order m = memory_order_seq_cst) noexcept;
239    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
240    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
241    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
242    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
243
244    T* operator++(int) volatile noexcept;
245    T* operator++(int) noexcept;
246    T* operator--(int) volatile noexcept;
247    T* operator--(int) noexcept;
248    T* operator++() volatile noexcept;
249    T* operator++() noexcept;
250    T* operator--() volatile noexcept;
251    T* operator--() noexcept;
252    T* operator+=(ptrdiff_t op) volatile noexcept;
253    T* operator+=(ptrdiff_t op) noexcept;
254    T* operator-=(ptrdiff_t op) volatile noexcept;
255    T* operator-=(ptrdiff_t op) noexcept;
256
257    void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
258    void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
259    void notify_one() volatile noexcept;
260    void notify_one() noexcept;
261    void notify_all() volatile noexcept;
262    void notify_all() noexcept;
263};
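
// Illustrative example (editorial addition, not part of the synopsis):
// atomic<T*> does pointer arithmetic in units of T, so fetch_add(1) advances
// the stored pointer by sizeof(T) bytes. Threads can use this to claim
// disjoint slots from a shared buffer (no bounds check; illustration only).
//
//   #include <atomic>
//
//   int buffer[1024];
//   std::atomic<int*> cursor{buffer};
//
//   int* claim_slot() {
//       return cursor.fetch_add(1, std::memory_order_relaxed); // previous position
//   }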
264
265
266// [atomics.nonmembers], non-member functions
267template<class T>
268  bool atomic_is_lock_free(const volatile atomic<T>*) noexcept;
269template<class T>
270  bool atomic_is_lock_free(const atomic<T>*) noexcept;
271template<class T>
272  void atomic_store(volatile atomic<T>*, atomic<T>::value_type) noexcept;
273template<class T>
274  void atomic_store(atomic<T>*, atomic<T>::value_type) noexcept;
275template<class T>
276  void atomic_store_explicit(volatile atomic<T>*, atomic<T>::value_type,
277                             memory_order) noexcept;
278template<class T>
279  void atomic_store_explicit(atomic<T>*, atomic<T>::value_type,
280                             memory_order) noexcept;
281template<class T>
282  T atomic_load(const volatile atomic<T>*) noexcept;
283template<class T>
284  T atomic_load(const atomic<T>*) noexcept;
285template<class T>
286  T atomic_load_explicit(const volatile atomic<T>*, memory_order) noexcept;
287template<class T>
288  T atomic_load_explicit(const atomic<T>*, memory_order) noexcept;
289template<class T>
290  T atomic_exchange(volatile atomic<T>*, atomic<T>::value_type) noexcept;
291template<class T>
292  T atomic_exchange(atomic<T>*, atomic<T>::value_type) noexcept;
293template<class T>
294  T atomic_exchange_explicit(volatile atomic<T>*, atomic<T>::value_type,
295                             memory_order) noexcept;
296template<class T>
297  T atomic_exchange_explicit(atomic<T>*, atomic<T>::value_type,
298                             memory_order) noexcept;
299template<class T>
300  bool atomic_compare_exchange_weak(volatile atomic<T>*, atomic<T>::value_type*,
301                                    atomic<T>::value_type) noexcept;
302template<class T>
303  bool atomic_compare_exchange_weak(atomic<T>*, atomic<T>::value_type*,
304                                    atomic<T>::value_type) noexcept;
305template<class T>
306  bool atomic_compare_exchange_strong(volatile atomic<T>*, atomic<T>::value_type*,
307                                      atomic<T>::value_type) noexcept;
308template<class T>
309  bool atomic_compare_exchange_strong(atomic<T>*, atomic<T>::value_type*,
310                                      atomic<T>::value_type) noexcept;
311template<class T>
312  bool atomic_compare_exchange_weak_explicit(volatile atomic<T>*, atomic<T>::value_type*,
313                                             atomic<T>::value_type,
314                                             memory_order, memory_order) noexcept;
315template<class T>
316  bool atomic_compare_exchange_weak_explicit(atomic<T>*, atomic<T>::value_type*,
317                                             atomic<T>::value_type,
318                                             memory_order, memory_order) noexcept;
319template<class T>
320  bool atomic_compare_exchange_strong_explicit(volatile atomic<T>*, atomic<T>::value_type*,
321                                               atomic<T>::value_type,
322                                               memory_order, memory_order) noexcept;
323template<class T>
324  bool atomic_compare_exchange_strong_explicit(atomic<T>*, atomic<T>::value_type*,
325                                               atomic<T>::value_type,
326                                               memory_order, memory_order) noexcept;
327
328template<class T>
329  T atomic_fetch_add(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
330template<class T>
331  T atomic_fetch_add(atomic<T>*, atomic<T>::difference_type) noexcept;
332template<class T>
333  T atomic_fetch_add_explicit(volatile atomic<T>*, atomic<T>::difference_type,
334                              memory_order) noexcept;
335template<class T>
336  T atomic_fetch_add_explicit(atomic<T>*, atomic<T>::difference_type,
337                              memory_order) noexcept;
338template<class T>
339  T atomic_fetch_sub(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
340template<class T>
341  T atomic_fetch_sub(atomic<T>*, atomic<T>::difference_type) noexcept;
342template<class T>
343  T atomic_fetch_sub_explicit(volatile atomic<T>*, atomic<T>::difference_type,
344                              memory_order) noexcept;
345template<class T>
346  T atomic_fetch_sub_explicit(atomic<T>*, atomic<T>::difference_type,
347                              memory_order) noexcept;
348template<class T>
349  T atomic_fetch_and(volatile atomic<T>*, atomic<T>::value_type) noexcept;
350template<class T>
351  T atomic_fetch_and(atomic<T>*, atomic<T>::value_type) noexcept;
352template<class T>
353  T atomic_fetch_and_explicit(volatile atomic<T>*, atomic<T>::value_type,
354                              memory_order) noexcept;
355template<class T>
356  T atomic_fetch_and_explicit(atomic<T>*, atomic<T>::value_type,
357                              memory_order) noexcept;
358template<class T>
359  T atomic_fetch_or(volatile atomic<T>*, atomic<T>::value_type) noexcept;
360template<class T>
361  T atomic_fetch_or(atomic<T>*, atomic<T>::value_type) noexcept;
362template<class T>
363  T atomic_fetch_or_explicit(volatile atomic<T>*, atomic<T>::value_type,
364                             memory_order) noexcept;
365template<class T>
366  T atomic_fetch_or_explicit(atomic<T>*, atomic<T>::value_type,
367                             memory_order) noexcept;
368template<class T>
369  T atomic_fetch_xor(volatile atomic<T>*, atomic<T>::value_type) noexcept;
370template<class T>
371  T atomic_fetch_xor(atomic<T>*, atomic<T>::value_type) noexcept;
372template<class T>
373  T atomic_fetch_xor_explicit(volatile atomic<T>*, atomic<T>::value_type,
374                              memory_order) noexcept;
375template<class T>
376  T atomic_fetch_xor_explicit(atomic<T>*, atomic<T>::value_type,
377                              memory_order) noexcept;
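
// Illustrative example (editorial addition, not part of the synopsis): the
// free functions mirror the member API and exist largely for compatibility
// with C-style code.
//
//   #include <atomic>
//
//   std::atomic<int> value{0};
//
//   void roundtrip() {
//       std::atomic_store_explicit(&value, 7, std::memory_order_release);
//       int v = std::atomic_load_explicit(&value, std::memory_order_acquire);
//       (void)v; // 7, or a later value stored by another thread
//   }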
378
379template<class T>
380  void atomic_wait(const volatile atomic<T>*, atomic<T>::value_type);
381template<class T>
382  void atomic_wait(const atomic<T>*, atomic<T>::value_type);
383template<class T>
384  void atomic_wait_explicit(const volatile atomic<T>*, atomic<T>::value_type,
385                            memory_order);
386template<class T>
387  void atomic_wait_explicit(const atomic<T>*, atomic<T>::value_type,
388                            memory_order);
389template<class T>
390  void atomic_notify_one(volatile atomic<T>*);
391template<class T>
392  void atomic_notify_one(atomic<T>*);
393template<class T>
394  void atomic_notify_all(volatile atomic<T>*);
395template<class T>
396  void atomic_notify_all(atomic<T>*);
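
// Illustrative example (editorial addition, not part of the synopsis):
// atomic_wait blocks while the atomic still holds the given old value;
// atomic_notify_one/atomic_notify_all wake blocked waiters. A one-shot
// signal could look like:
//
//   #include <atomic>
//
//   std::atomic<int> state{0};
//
//   void waiter() {
//       std::atomic_wait(&state, 0);  // returns once state != 0 is observed
//   }
//
//   void signaler() {
//       state.store(1, std::memory_order_release);
//       std::atomic_notify_all(&state);
//   }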
397
398// Atomics for standard typedef types
399
400typedef atomic<bool>               atomic_bool;
401typedef atomic<char>               atomic_char;
402typedef atomic<signed char>        atomic_schar;
403typedef atomic<unsigned char>      atomic_uchar;
404typedef atomic<short>              atomic_short;
405typedef atomic<unsigned short>     atomic_ushort;
406typedef atomic<int>                atomic_int;
407typedef atomic<unsigned int>       atomic_uint;
408typedef atomic<long>               atomic_long;
409typedef atomic<unsigned long>      atomic_ulong;
410typedef atomic<long long>          atomic_llong;
411typedef atomic<unsigned long long> atomic_ullong;
412typedef atomic<char8_t>            atomic_char8_t; // C++20
413typedef atomic<char16_t>           atomic_char16_t;
414typedef atomic<char32_t>           atomic_char32_t;
415typedef atomic<wchar_t>            atomic_wchar_t;
416
417typedef atomic<int_least8_t>   atomic_int_least8_t;
418typedef atomic<uint_least8_t>  atomic_uint_least8_t;
419typedef atomic<int_least16_t>  atomic_int_least16_t;
420typedef atomic<uint_least16_t> atomic_uint_least16_t;
421typedef atomic<int_least32_t>  atomic_int_least32_t;
422typedef atomic<uint_least32_t> atomic_uint_least32_t;
423typedef atomic<int_least64_t>  atomic_int_least64_t;
424typedef atomic<uint_least64_t> atomic_uint_least64_t;
425
426typedef atomic<int_fast8_t>   atomic_int_fast8_t;
427typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
428typedef atomic<int_fast16_t>  atomic_int_fast16_t;
429typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
430typedef atomic<int_fast32_t>  atomic_int_fast32_t;
431typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
432typedef atomic<int_fast64_t>  atomic_int_fast64_t;
433typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
434
435typedef atomic<int8_t>   atomic_int8_t;
436typedef atomic<uint8_t>  atomic_uint8_t;
437typedef atomic<int16_t>  atomic_int16_t;
438typedef atomic<uint16_t> atomic_uint16_t;
439typedef atomic<int32_t>  atomic_int32_t;
440typedef atomic<uint32_t> atomic_uint32_t;
441typedef atomic<int64_t>  atomic_int64_t;
442typedef atomic<uint64_t> atomic_uint64_t;
443
444typedef atomic<intptr_t>  atomic_intptr_t;
445typedef atomic<uintptr_t> atomic_uintptr_t;
446typedef atomic<size_t>    atomic_size_t;
447typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
448typedef atomic<intmax_t>  atomic_intmax_t;
449typedef atomic<uintmax_t> atomic_uintmax_t;
450
451// flag type and operations
452
453typedef struct atomic_flag
454{
455    atomic_flag() noexcept = default; // until C++20
456    constexpr atomic_flag() noexcept; // since C++20
457    atomic_flag(const atomic_flag&) = delete;
458    atomic_flag& operator=(const atomic_flag&) = delete;
459    atomic_flag& operator=(const atomic_flag&) volatile = delete;
460
461    bool test(memory_order m = memory_order_seq_cst) const volatile noexcept;
462    bool test(memory_order m = memory_order_seq_cst) const noexcept;
463    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
464    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
465    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
466    void clear(memory_order m = memory_order_seq_cst) noexcept;
467
468    void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
469    void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
470    void notify_one() volatile noexcept;
471    void notify_one() noexcept;
472    void notify_all() volatile noexcept;
473    void notify_all() noexcept;
474} atomic_flag;
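
// Illustrative example (editorial addition, not part of the synopsis):
// atomic_flag is the only type guaranteed to be lock-free, which makes it a
// natural building block for a minimal test-and-set spinlock.
//
//   #include <atomic>
//
//   std::atomic_flag busy = ATOMIC_FLAG_INIT; // since C++20, default-init also clears it
//
//   void lock() {
//       while (busy.test_and_set(std::memory_order_acquire))
//           ; // spin; C++20 could block instead: busy.wait(true, std::memory_order_relaxed)
//   }
//
//   void unlock() {
//       busy.clear(std::memory_order_release);
//       // C++20: busy.notify_one();
//   }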
475
476bool atomic_flag_test(const volatile atomic_flag* obj) noexcept;
477bool atomic_flag_test(const atomic_flag* obj) noexcept;
478bool atomic_flag_test_explicit(const volatile atomic_flag* obj,
479                               memory_order m) noexcept;
480bool atomic_flag_test_explicit(const atomic_flag* obj, memory_order m) noexcept;
481bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
482bool atomic_flag_test_and_set(atomic_flag* obj) noexcept;
483bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
484                                       memory_order m) noexcept;
485bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
486void atomic_flag_clear(volatile atomic_flag* obj) noexcept;
487void atomic_flag_clear(atomic_flag* obj) noexcept;
488void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
489void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
490
491void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
492void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
493void atomic_flag_wait_explicit(const volatile atomic_flag* obj, bool old, memory_order m) noexcept;
494void atomic_flag_wait_explicit(const atomic_flag* obj, bool old, memory_order m) noexcept;
495void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
496void atomic_flag_notify_one(atomic_flag* obj) noexcept;
497void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
498void atomic_flag_notify_all(atomic_flag* obj) noexcept;
499
500// fences
501
502void atomic_thread_fence(memory_order m) noexcept;
503void atomic_signal_fence(memory_order m) noexcept;
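
// Illustrative example (editorial addition, not part of the synopsis): a
// fence supplies ordering separately from the atomic operation itself. Here a
// release fence before a relaxed store pairs with an acquire fence after a
// relaxed load.
//
//   #include <atomic>
//   #include <cassert>
//
//   std::atomic<bool> flag{false};
//   int data = 0;
//
//   void publish() {
//       data = 1;
//       std::atomic_thread_fence(std::memory_order_release);
//       flag.store(true, std::memory_order_relaxed);
//   }
//
//   void observe() {
//       if (flag.load(std::memory_order_relaxed)) {
//           std::atomic_thread_fence(std::memory_order_acquire);
//           assert(data == 1);
//       }
//   }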
504
505// deprecated
506
507template <class T>
508  void atomic_init(volatile atomic<T>* obj, atomic<T>::value_type desr) noexcept;
509
510template <class T>
511  void atomic_init(atomic<T>* obj, atomic<T>::value_type desr) noexcept;
512
513#define ATOMIC_VAR_INIT(value) see below
514
515#define ATOMIC_FLAG_INIT see below
516
517}  // std
518
519*/
520
521#include <__assert> // all public C++ headers provide the assertion handler
522#include <__availability>
523#include <__chrono/duration.h>
524#include <__config>
525#include <__thread/poll_with_backoff.h>
526#include <__thread/timed_backoff_policy.h>
527#include <cstddef>
528#include <cstdint>
529#include <cstring>
530#include <type_traits>
531#include <version>
532
533#ifndef _LIBCPP_HAS_NO_THREADS
534# include <__threading_support>
535#endif
536
537#ifndef _LIBCPP_REMOVE_TRANSITIVE_INCLUDES
538#  include <chrono>
539#endif
540
541#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
542#  pragma GCC system_header
543#endif
544
545#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
546# error <atomic> is not implemented
547#endif
548#ifdef kill_dependency
549# error <atomic> is incompatible with <stdatomic.h> before C++23. Please compile with -std=c++23.
550#endif
551
552#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
553  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
554                           __m == memory_order_acquire || \
555                           __m == memory_order_acq_rel,   \
556                        "memory order argument to atomic operation is invalid")
557
558#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
559  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
560                           __m == memory_order_acq_rel,   \
561                        "memory order argument to atomic operation is invalid")
562
563#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
564  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
565                           __f == memory_order_acq_rel,   \
566                        "memory order argument to atomic operation is invalid")
567
568_LIBCPP_BEGIN_NAMESPACE_STD
569
570// Figure out what the underlying type for `memory_order` would be if it were
571// declared as an unscoped enum (accounting for -fshort-enums). Use this result
572// to pin the underlying type in C++20.
573enum __legacy_memory_order {
574    __mo_relaxed,
575    __mo_consume,
576    __mo_acquire,
577    __mo_release,
578    __mo_acq_rel,
579    __mo_seq_cst
580};
581
582typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;
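
// Illustrative note (editorial addition; assumes a -fshort-enums build):
// __legacy_memory_order only needs the values 0..5, so such a build could give
// it a one-byte underlying type, e.g.
//
//   static_assert(sizeof(__memory_order_underlying_t) == 1, ""); // only under -fshort-enums
//
// Pinning the C++20 scoped memory_order to the same type keeps its
// representation identical across language modes.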
583
584#if _LIBCPP_STD_VER > 17
585
586enum class memory_order : __memory_order_underlying_t {
587  relaxed = __mo_relaxed,
588  consume = __mo_consume,
589  acquire = __mo_acquire,
590  release = __mo_release,
591  acq_rel = __mo_acq_rel,
592  seq_cst = __mo_seq_cst
593};
594
595inline constexpr auto memory_order_relaxed = memory_order::relaxed;
596inline constexpr auto memory_order_consume = memory_order::consume;
597inline constexpr auto memory_order_acquire = memory_order::acquire;
598inline constexpr auto memory_order_release = memory_order::release;
599inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
600inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
601
602#else
603
604typedef enum memory_order {
605  memory_order_relaxed = __mo_relaxed,
606  memory_order_consume = __mo_consume,
607  memory_order_acquire = __mo_acquire,
608  memory_order_release = __mo_release,
609  memory_order_acq_rel = __mo_acq_rel,
610  memory_order_seq_cst = __mo_seq_cst,
611} memory_order;
612
613#endif // _LIBCPP_STD_VER > 17
614
615template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
616bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
617    return _VSTD::memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0;
618}
619
620static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
621  "unexpected underlying type for std::memory_order");
622
623#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
624    defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
625
626// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because the
627// implicitly-defined operator= is not volatile-qualified, assigning to a
628// volatile object has to fall back to a byte-by-byte copy.
629template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
630typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
631__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
632  __a_value = __val;
633}
634template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
635typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
636__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
637  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
638  volatile char* __end = __to + sizeof(_Tp);
639  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
640  while (__to != __end)
641    *__to++ = *__from++;
642}
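
// Illustrative sketch (editorial addition, hypothetical type): for a trivially
// copyable struct, plain assignment to a volatile object does not compile
// because the implicit operator= is not volatile-qualified, while the
// byte-wise helper above always works.
//
//   struct _Pair { int __a; int __b; };            // trivially copyable
//   volatile _Pair __dst;
//   _Pair __src = {1, 2};
//   // __dst = __src;                              // ill-formed: no volatile operator=
//   // __cxx_atomic_assign_volatile(__dst, __src); // OK: copies byte by byte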
643
644#endif
645
646#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
647
648template <typename _Tp>
649struct __cxx_atomic_base_impl {
650
651  _LIBCPP_INLINE_VISIBILITY
652#ifndef _LIBCPP_CXX03_LANG
653    __cxx_atomic_base_impl() _NOEXCEPT = default;
654#else
655    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
656#endif // _LIBCPP_CXX03_LANG
657  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
658    : __a_value(value) {}
659  _Tp __a_value;
660};
661
662_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
663  // Avoid switch statement to make this a constexpr.
664  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
665         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
666          (__order == memory_order_release ? __ATOMIC_RELEASE:
667           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
668            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
669              __ATOMIC_CONSUME))));
670}
671
672_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
673  // Avoid switch statement to make this a constexpr.
674  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
675         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
676          (__order == memory_order_release ? __ATOMIC_RELAXED:
677           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
678            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
679              __ATOMIC_CONSUME))));
680}
681
682template <typename _Tp>
683_LIBCPP_INLINE_VISIBILITY
684void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
685  __cxx_atomic_assign_volatile(__a->__a_value, __val);
686}
687
688template <typename _Tp>
689_LIBCPP_INLINE_VISIBILITY
690void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
691  __a->__a_value = __val;
692}
693
694_LIBCPP_INLINE_VISIBILITY inline
695void __cxx_atomic_thread_fence(memory_order __order) {
696  __atomic_thread_fence(__to_gcc_order(__order));
697}
698
699_LIBCPP_INLINE_VISIBILITY inline
700void __cxx_atomic_signal_fence(memory_order __order) {
701  __atomic_signal_fence(__to_gcc_order(__order));
702}
703
704template <typename _Tp>
705_LIBCPP_INLINE_VISIBILITY
706void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
707                        memory_order __order) {
708  __atomic_store(&__a->__a_value, &__val,
709                 __to_gcc_order(__order));
710}
711
712template <typename _Tp>
713_LIBCPP_INLINE_VISIBILITY
714void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
715                        memory_order __order) {
716  __atomic_store(&__a->__a_value, &__val,
717                 __to_gcc_order(__order));
718}
719
720template <typename _Tp>
721_LIBCPP_INLINE_VISIBILITY
722_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
723                      memory_order __order) {
724  _Tp __ret;
725  __atomic_load(&__a->__a_value, &__ret,
726                __to_gcc_order(__order));
727  return __ret;
728}
729
730template <typename _Tp>
731_LIBCPP_INLINE_VISIBILITY
732_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
733  _Tp __ret;
734  __atomic_load(&__a->__a_value, &__ret,
735                __to_gcc_order(__order));
736  return __ret;
737}
738
739template <typename _Tp>
740_LIBCPP_INLINE_VISIBILITY
741_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
742                          _Tp __value, memory_order __order) {
743  _Tp __ret;
744  __atomic_exchange(&__a->__a_value, &__value, &__ret,
745                    __to_gcc_order(__order));
746  return __ret;
747}
748
749template <typename _Tp>
750_LIBCPP_INLINE_VISIBILITY
751_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
752                          memory_order __order) {
753  _Tp __ret;
754  __atomic_exchange(&__a->__a_value, &__value, &__ret,
755                    __to_gcc_order(__order));
756  return __ret;
757}
758
759template <typename _Tp>
760_LIBCPP_INLINE_VISIBILITY
761bool __cxx_atomic_compare_exchange_strong(
762    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
763    memory_order __success, memory_order __failure) {
764  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
765                                   false,
766                                   __to_gcc_order(__success),
767                                   __to_gcc_failure_order(__failure));
768}
769
770template <typename _Tp>
771_LIBCPP_INLINE_VISIBILITY
772bool __cxx_atomic_compare_exchange_strong(
773    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
774    memory_order __failure) {
775  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
776                                   false,
777                                   __to_gcc_order(__success),
778                                   __to_gcc_failure_order(__failure));
779}
780
781template <typename _Tp>
782_LIBCPP_INLINE_VISIBILITY
783bool __cxx_atomic_compare_exchange_weak(
784    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
785    memory_order __success, memory_order __failure) {
786  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
787                                   true,
788                                   __to_gcc_order(__success),
789                                   __to_gcc_failure_order(__failure));
790}
791
792template <typename _Tp>
793_LIBCPP_INLINE_VISIBILITY
794bool __cxx_atomic_compare_exchange_weak(
795    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
796    memory_order __failure) {
797  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
798                                   true,
799                                   __to_gcc_order(__success),
800                                   __to_gcc_failure_order(__failure));
801}
802
803template <typename _Tp>
804struct __skip_amt { enum {value = 1}; };
805
806template <typename _Tp>
807struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
808
809// FIXME: It is unclear what the spec says about using arrays with
810// atomic_fetch_add; force a compile-time failure rather than allowing bad behavior.
811template <typename _Tp>
812struct __skip_amt<_Tp[]> { };
813template <typename _Tp, int n>
814struct __skip_amt<_Tp[n]> { };
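
// Illustrative note (editorial addition): __skip_amt scales the delta because
// the __atomic_fetch_add/__atomic_fetch_sub builtins below apply it to the raw
// pointer value, i.e. in bytes, while atomic<T*> counts in whole objects.
//
//   std::atomic<int*> p;  // p.fetch_add(1) reaches __atomic_fetch_add with
//                         // 1 * sizeof(int): the pointer advances one element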
815
816template <typename _Tp, typename _Td>
817_LIBCPP_INLINE_VISIBILITY
818_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
819                           _Td __delta, memory_order __order) {
820  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
821                            __to_gcc_order(__order));
822}
823
824template <typename _Tp, typename _Td>
825_LIBCPP_INLINE_VISIBILITY
826_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
827                           memory_order __order) {
828  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
829                            __to_gcc_order(__order));
830}
831
832template <typename _Tp, typename _Td>
833_LIBCPP_INLINE_VISIBILITY
834_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
835                           _Td __delta, memory_order __order) {
836  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
837                            __to_gcc_order(__order));
838}
839
840template <typename _Tp, typename _Td>
841_LIBCPP_INLINE_VISIBILITY
842_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
843                           memory_order __order) {
844  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
845                            __to_gcc_order(__order));
846}
847
848template <typename _Tp>
849_LIBCPP_INLINE_VISIBILITY
850_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
851                           _Tp __pattern, memory_order __order) {
852  return __atomic_fetch_and(&__a->__a_value, __pattern,
853                            __to_gcc_order(__order));
854}
855
856template <typename _Tp>
857_LIBCPP_INLINE_VISIBILITY
858_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
859                           _Tp __pattern, memory_order __order) {
860  return __atomic_fetch_and(&__a->__a_value, __pattern,
861                            __to_gcc_order(__order));
862}
863
864template <typename _Tp>
865_LIBCPP_INLINE_VISIBILITY
866_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
867                          _Tp __pattern, memory_order __order) {
868  return __atomic_fetch_or(&__a->__a_value, __pattern,
869                           __to_gcc_order(__order));
870}
871
872template <typename _Tp>
873_LIBCPP_INLINE_VISIBILITY
874_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
875                          memory_order __order) {
876  return __atomic_fetch_or(&__a->__a_value, __pattern,
877                           __to_gcc_order(__order));
878}
879
880template <typename _Tp>
881_LIBCPP_INLINE_VISIBILITY
882_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
883                           _Tp __pattern, memory_order __order) {
884  return __atomic_fetch_xor(&__a->__a_value, __pattern,
885                            __to_gcc_order(__order));
886}
887
888template <typename _Tp>
889_LIBCPP_INLINE_VISIBILITY
890_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
891                           memory_order __order) {
892  return __atomic_fetch_xor(&__a->__a_value, __pattern,
893                            __to_gcc_order(__order));
894}
895
896#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
897
898#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
899
900template <typename _Tp>
901struct __cxx_atomic_base_impl {
902
903  _LIBCPP_INLINE_VISIBILITY
904#ifndef _LIBCPP_CXX03_LANG
905    __cxx_atomic_base_impl() _NOEXCEPT = default;
906#else
907    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
908#endif // _LIBCPP_CXX03_LANG
909  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp __value) _NOEXCEPT
910    : __a_value(__value) {}
911  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
912};
913
914#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
915
916_LIBCPP_INLINE_VISIBILITY inline
917void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
918    __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
919}
920
921_LIBCPP_INLINE_VISIBILITY inline
922void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
923    __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
924}
925
926template<class _Tp>
927_LIBCPP_INLINE_VISIBILITY
928void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
929    __c11_atomic_init(&__a->__a_value, __val);
930}
931template<class _Tp>
932_LIBCPP_INLINE_VISIBILITY
933void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
934    __c11_atomic_init(&__a->__a_value, __val);
935}
936
937template<class _Tp>
938_LIBCPP_INLINE_VISIBILITY
939void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
940    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
941}
942template<class _Tp>
943_LIBCPP_INLINE_VISIBILITY
944void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
945    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
946}
947
948template<class _Tp>
949_LIBCPP_INLINE_VISIBILITY
950_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
951    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
952    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
953}
954template<class _Tp>
955_LIBCPP_INLINE_VISIBILITY
956_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
957    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
958    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
959}
960
961template<class _Tp>
962_LIBCPP_INLINE_VISIBILITY
963_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
964    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
965}
966template<class _Tp>
967_LIBCPP_INLINE_VISIBILITY
968_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
969    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
970}
971
972_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
973  // Avoid switch statement to make this a constexpr.
974  return __order == memory_order_release ? memory_order_relaxed:
975         (__order == memory_order_acq_rel ? memory_order_acquire:
976             __order);
977}
978
979template<class _Tp>
980_LIBCPP_INLINE_VISIBILITY
981bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
982    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
983}
984template<class _Tp>
985_LIBCPP_INLINE_VISIBILITY
986bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
987    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
988}
989
990template<class _Tp>
991_LIBCPP_INLINE_VISIBILITY
992bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
993    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
994}
995template<class _Tp>
996_LIBCPP_INLINE_VISIBILITY
997bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
998    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value,  static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
999}
1000
1001template<class _Tp>
1002_LIBCPP_INLINE_VISIBILITY
1003_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1004    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1005}
1006template<class _Tp>
1007_LIBCPP_INLINE_VISIBILITY
1008_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1009    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1010}
1011
1012template<class _Tp>
1013_LIBCPP_INLINE_VISIBILITY
1014_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1015    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1016}
1017template<class _Tp>
1018_LIBCPP_INLINE_VISIBILITY
1019_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1020    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1021}
1022
1023template<class _Tp>
1024_LIBCPP_INLINE_VISIBILITY
1025_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1026    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1027}
1028template<class _Tp>
1029_LIBCPP_INLINE_VISIBILITY
1030_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1031    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1032}
1033template<class _Tp>
1034_LIBCPP_INLINE_VISIBILITY
1035_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1036    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1037}
1038template<class _Tp>
1039_LIBCPP_INLINE_VISIBILITY
1040_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1041    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1042}
1043
1044template<class _Tp>
1045_LIBCPP_INLINE_VISIBILITY
1046_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1047    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1048}
1049template<class _Tp>
1050_LIBCPP_INLINE_VISIBILITY
1051_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1052    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1053}
1054
1055template<class _Tp>
1056_LIBCPP_INLINE_VISIBILITY
1057_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1058    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1059}
1060template<class _Tp>
1061_LIBCPP_INLINE_VISIBILITY
1062_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1063    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1064}
1065
1066template<class _Tp>
1067_LIBCPP_INLINE_VISIBILITY
1068_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1069    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1070}
1071template<class _Tp>
1072_LIBCPP_INLINE_VISIBILITY
1073_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1074    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1075}
1076
1077#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
1078
1079template <class _Tp>
1080_LIBCPP_INLINE_VISIBILITY
1081_Tp kill_dependency(_Tp __y) _NOEXCEPT
1082{
1083    return __y;
1084}
1085
1086#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
1087# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
1088# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
1089#ifndef _LIBCPP_HAS_NO_CHAR8_T
1090# define ATOMIC_CHAR8_T_LOCK_FREE   __CLANG_ATOMIC_CHAR8_T_LOCK_FREE
1091#endif
1092# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
1093# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
1094# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
1095# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
1096# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
1097# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
1098# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
1099# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
1100#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
1101# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
1102# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
1103#ifndef _LIBCPP_HAS_NO_CHAR8_T
1104# define ATOMIC_CHAR8_T_LOCK_FREE   __GCC_ATOMIC_CHAR8_T_LOCK_FREE
1105#endif
1106# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1107# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1108# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1109# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
1110# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
1111# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
1112# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
1113# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
1114#endif
1115
1116#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
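
// Descriptive note (editorial addition): __cxx_atomic_lock_impl below is the
// lock-based fallback used, further down in this header, for types that cannot
// be handled as native lock-free atomics. The plain value is guarded by a small
// spinlock flag: __lock() spins on exchange(..., memory_order_acquire) until
// the previous value was clear, and __unlock() stores a clear value with
// memory_order_release. Every operation is a short critical section, so
// atomics on this path are not lock-free.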
1117
1118template<typename _Tp>
1119struct __cxx_atomic_lock_impl {
1120
1121  _LIBCPP_INLINE_VISIBILITY
1122  __cxx_atomic_lock_impl() _NOEXCEPT
1123    : __a_value(), __a_lock(0) {}
1124  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
1125  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
1126    : __a_value(value), __a_lock(0) {}
1127
1128  _Tp __a_value;
1129  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;
1130
1131  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
1132    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1133        /*spin*/;
1134  }
1135  _LIBCPP_INLINE_VISIBILITY void __lock() const {
1136    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1137        /*spin*/;
1138  }
1139  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
1140    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1141  }
1142  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
1143    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1144  }
1145  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
1146    __lock();
1147    _Tp __old;
1148    __cxx_atomic_assign_volatile(__old, __a_value);
1149    __unlock();
1150    return __old;
1151  }
1152  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
1153    __lock();
1154    _Tp __old = __a_value;
1155    __unlock();
1156    return __old;
1157  }
1158};
1159
1160template <typename _Tp>
1161_LIBCPP_INLINE_VISIBILITY
1162void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
1163  __cxx_atomic_assign_volatile(__a->__a_value, __val);
1164}
1165template <typename _Tp>
1166_LIBCPP_INLINE_VISIBILITY
1167void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
1168  __a->__a_value = __val;
1169}
1170
1171template <typename _Tp>
1172_LIBCPP_INLINE_VISIBILITY
1173void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
1174  __a->__lock();
1175  __cxx_atomic_assign_volatile(__a->__a_value, __val);
1176  __a->__unlock();
1177}
1178template <typename _Tp>
1179_LIBCPP_INLINE_VISIBILITY
1180void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
1181  __a->__lock();
1182  __a->__a_value = __val;
1183  __a->__unlock();
1184}
1185
1186template <typename _Tp>
1187_LIBCPP_INLINE_VISIBILITY
1188_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1189  return __a->__read();
1190}
1191template <typename _Tp>
1192_LIBCPP_INLINE_VISIBILITY
1193_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1194  return __a->__read();
1195}
1196
1197template <typename _Tp>
1198_LIBCPP_INLINE_VISIBILITY
1199_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1200  __a->__lock();
1201  _Tp __old;
1202  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1203  __cxx_atomic_assign_volatile(__a->__a_value, __value);
1204  __a->__unlock();
1205  return __old;
1206}
1207template <typename _Tp>
1208_LIBCPP_INLINE_VISIBILITY
1209_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1210  __a->__lock();
1211  _Tp __old = __a->__a_value;
1212  __a->__a_value = __value;
1213  __a->__unlock();
1214  return __old;
1215}
1216
1217template <typename _Tp>
1218_LIBCPP_INLINE_VISIBILITY
1219bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1220                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1221  _Tp __temp;
1222  __a->__lock();
1223  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1224  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
1225  if(__ret)
1226    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1227  else
1228    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1229  __a->__unlock();
1230  return __ret;
1231}
1232template <typename _Tp>
1233_LIBCPP_INLINE_VISIBILITY
1234bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
1235                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1236  __a->__lock();
1237  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
1238  if(__ret)
1239    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
1240  else
1241    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
1242  __a->__unlock();
1243  return __ret;
1244}
1245
1246template <typename _Tp>
1247_LIBCPP_INLINE_VISIBILITY
1248bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1249                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1250  _Tp __temp;
1251  __a->__lock();
1252  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1253  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
1254  if(__ret)
1255    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1256  else
1257    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1258  __a->__unlock();
1259  return __ret;
1260}
1261template <typename _Tp>
1262_LIBCPP_INLINE_VISIBILITY
1263bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
1264                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1265  __a->__lock();
1266  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
1267  if(__ret)
1268    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
1269  else
1270    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
1271  __a->__unlock();
1272  return __ret;
1273}
1274
1275template <typename _Tp, typename _Td>
1276_LIBCPP_INLINE_VISIBILITY
1277_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1278                           _Td __delta, memory_order) {
1279  __a->__lock();
1280  _Tp __old;
1281  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1282  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
1283  __a->__unlock();
1284  return __old;
1285}
1286template <typename _Tp, typename _Td>
1287_LIBCPP_INLINE_VISIBILITY
1288_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
1289                           _Td __delta, memory_order) {
1290  __a->__lock();
1291  _Tp __old = __a->__a_value;
1292  __a->__a_value += __delta;
1293  __a->__unlock();
1294  return __old;
1295}
1296
1297template <typename _Tp, typename _Td>
1298_LIBCPP_INLINE_VISIBILITY
1299_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
1300                           ptrdiff_t __delta, memory_order) {
1301  __a->__lock();
1302  _Tp* __old;
1303  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1304  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
1305  __a->__unlock();
1306  return __old;
1307}
1308template <typename _Tp, typename _Td>
1309_LIBCPP_INLINE_VISIBILITY
1310_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
1311                           ptrdiff_t __delta, memory_order) {
1312  __a->__lock();
1313  _Tp* __old = __a->__a_value;
1314  __a->__a_value += __delta;
1315  __a->__unlock();
1316  return __old;
1317}
1318
1319template <typename _Tp, typename _Td>
1320_LIBCPP_INLINE_VISIBILITY
1321_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1322                           _Td __delta, memory_order) {
1323  __a->__lock();
1324  _Tp __old;
1325  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1326  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
1327  __a->__unlock();
1328  return __old;
1329}
1330template <typename _Tp, typename _Td>
1331_LIBCPP_INLINE_VISIBILITY
1332_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
1333                           _Td __delta, memory_order) {
1334  __a->__lock();
1335  _Tp __old = __a->__a_value;
1336  __a->__a_value -= __delta;
1337  __a->__unlock();
1338  return __old;
1339}
1340
1341template <typename _Tp>
1342_LIBCPP_INLINE_VISIBILITY
1343_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1344                           _Tp __pattern, memory_order) {
1345  __a->__lock();
1346  _Tp __old;
1347  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1348  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
1349  __a->__unlock();
1350  return __old;
1351}
1352template <typename _Tp>
1353_LIBCPP_INLINE_VISIBILITY
1354_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
1355                           _Tp __pattern, memory_order) {
1356  __a->__lock();
1357  _Tp __old = __a->__a_value;
1358  __a->__a_value &= __pattern;
1359  __a->__unlock();
1360  return __old;
1361}
1362
1363template <typename _Tp>
1364_LIBCPP_INLINE_VISIBILITY
1365_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1366                          _Tp __pattern, memory_order) {
1367  __a->__lock();
1368  _Tp __old;
1369  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1370  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
1371  __a->__unlock();
1372  return __old;
1373}
1374template <typename _Tp>
1375_LIBCPP_INLINE_VISIBILITY
1376_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
1377                          _Tp __pattern, memory_order) {
1378  __a->__lock();
1379  _Tp __old = __a->__a_value;
1380  __a->__a_value |= __pattern;
1381  __a->__unlock();
1382  return __old;
1383}
1384
1385template <typename _Tp>
1386_LIBCPP_INLINE_VISIBILITY
1387_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1388                           _Tp __pattern, memory_order) {
1389  __a->__lock();
1390  _Tp __old;
1391  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1392  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
1393  __a->__unlock();
1394  return __old;
1395}
1396template <typename _Tp>
1397_LIBCPP_INLINE_VISIBILITY
1398_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
1399                           _Tp __pattern, memory_order) {
1400  __a->__lock();
1401  _Tp __old = __a->__a_value;
1402  __a->__a_value ^= __pattern;
1403  __a->__unlock();
1404  return __old;
1405}
1406
1407#ifdef __cpp_lib_atomic_is_always_lock_free
1408
1409template<typename _Tp> struct __cxx_is_always_lock_free {
1410    enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };
1411
1412#else
1413
1414template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
1415// Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
1416template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
1417template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1418template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1419template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1420#ifndef _LIBCPP_HAS_NO_CHAR8_T
1421template<> struct __cxx_is_always_lock_free<char8_t> { enum { __value = 2 == ATOMIC_CHAR8_T_LOCK_FREE }; };
1422#endif
1423template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
1424template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
1425#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
1426template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
1427#endif
1428template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1429template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1430template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1431template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1432template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1433template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1434template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1435template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1436template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1437template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1438
1439#endif //__cpp_lib_atomic_is_always_lock_free
1440
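// For illustration: with __cpp_lib_atomic_is_always_lock_free the trait above simply asks
// __atomic_always_lock_free; otherwise it mirrors the C macros, where 2 means "always lock-free"
// and 1 means "sometimes". A minimal sketch of how this surfaces through the public interface
// (std::printf and main are only for the example):
/*
    #include <atomic>
    #include <cstdio>

    int main() {
        std::printf("ATOMIC_INT_LOCK_FREE = %d\n", ATOMIC_INT_LOCK_FREE);   // 2 => always
    #if defined(__cpp_lib_atomic_is_always_lock_free)
        std::printf("always lock-free: %d\n",
                    std::atomic<int>::is_always_lock_free ? 1 : 0);
    #endif
        std::atomic<long long> x{0};
        std::printf("this object lock-free: %d\n", x.is_lock_free() ? 1 : 0);
    }
*/
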
1441template <typename _Tp,
1442          typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
1443                                                __cxx_atomic_base_impl<_Tp>,
1444                                                __cxx_atomic_lock_impl<_Tp> >::type>
1445#else
1446template <typename _Tp,
1447          typename _Base = __cxx_atomic_base_impl<_Tp> >
1448#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1449struct __cxx_atomic_impl : public _Base {
1450    static_assert(is_trivially_copyable<_Tp>::value,
1451      "std::atomic<T> requires that 'T' be a trivially copyable type");
1452
1453  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT = default;
1454  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp __value) _NOEXCEPT
1455    : _Base(__value) {}
1456};
1457
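// For illustration: the static_assert above is where the "T must be trivially copyable"
// requirement of std::atomic<T> is enforced. A minimal sketch of what is and is not accepted
// (the Pod/NotPod names are invented for the example):
/*
    #include <atomic>
    #include <string>

    struct Pod    { int a; float b; };    // trivially copyable: fine
    struct NotPod { std::string s; };     // not trivially copyable

    std::atomic<Pod> ok{{1, 2.0f}};
    // std::atomic<NotPod> bad;           // would trip the static_assert above
*/
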
1458#if defined(__linux__) || (defined(_AIX) && !defined(__64BIT__)) || \
1459    (defined(__FreeBSD__) && defined(__mips__))
1460    using __cxx_contention_t = int32_t;
1461#else
1462    using __cxx_contention_t = int64_t;
1463#endif // __linux__ || (_AIX && !__64BIT__) || (__FreeBSD__ && __mips__)
1464
1465using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;
1466
1467#if defined(_LIBCPP_HAS_NO_THREADS)
1468#   define _LIBCPP_HAS_NO_PLATFORM_WAIT
1469#endif
1470
1471// TODO:
1472// _LIBCPP_HAS_NO_PLATFORM_WAIT is currently a "dead" macro, in the sense that
1473// it is not tied into the build system anywhere, nor documented. It is also
1474// technically never defined except when threads are disabled, so it should be
1475// cleaned up; we should do that in its own changeset in case we break "bad"
1476// users.
1477#ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT
1478
1479_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
1480_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
1481_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
1482_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);
1483
1484_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
1485_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
1486_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
1487_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);
1488
1489template <class _Atp, class _Fn>
1490struct __libcpp_atomic_wait_backoff_impl {
1491    _Atp* __a;
1492    _Fn __test_fn;
1493    _LIBCPP_AVAILABILITY_SYNC
1494    _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
1495    {
1496        if(__elapsed > chrono::microseconds(64))
1497        {
1498            auto const __monitor = __libcpp_atomic_monitor(__a);
1499            if(__test_fn())
1500                return true;
1501            __libcpp_atomic_wait(__a, __monitor);
1502        }
1503        else if(__elapsed > chrono::microseconds(4))
1504            __libcpp_thread_yield();
1505        else
1506            {} // poll
1507        return false;
1508    }
1509};
1510
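// For illustration: the backoff functor above escalates with elapsed time -- pure polling below
// roughly 4 microseconds, a thread yield below roughly 64 microseconds, and only then the
// monitor/wait protocol (it re-checks the predicate after fetching the monitor value so that a
// notification arriving in between is not missed). A user-level loop with the same
// poll/yield/block shape, built only from public facilities (the `wait_for` helper is invented
// for the sketch and assumes C++20 atomic wait):
/*
    #include <atomic>
    #include <chrono>
    #include <thread>

    void wait_for(std::atomic<bool>& flag) {
        using namespace std::chrono;
        auto const start = steady_clock::now();
        while (!flag.load(std::memory_order_acquire)) {
            auto const elapsed = steady_clock::now() - start;
            if (elapsed > microseconds(64))
                flag.wait(false, std::memory_order_acquire);  // block until the value changes
            else if (elapsed > microseconds(4))
                std::this_thread::yield();
            // else: keep polling
        }
    }
*/
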
1511template <class _Atp, class _Fn>
1512_LIBCPP_AVAILABILITY_SYNC
1513_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
1514{
1515    __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
1516    return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
1517}
1518
1519#else // _LIBCPP_HAS_NO_PLATFORM_WAIT
1520
1521template <class _Tp>
1522_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
1523template <class _Tp>
1524_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
1525template <class _Atp, class _Fn>
1526_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
1527{
1528#if defined(_LIBCPP_HAS_NO_THREADS)
1529    using _Policy = __spinning_backoff_policy;
1530#else
1531    using _Policy = __libcpp_timed_backoff_policy;
1532#endif
1533    return __libcpp_thread_poll_with_backoff(__test_fn, _Policy());
1534}
1535
1536#endif // _LIBCPP_HAS_NO_PLATFORM_WAIT
1537
1538template <class _Atp, class _Tp>
1539struct __cxx_atomic_wait_test_fn_impl {
1540    _Atp* __a;
1541    _Tp __val;
1542    memory_order __order;
1543    _LIBCPP_INLINE_VISIBILITY bool operator()() const
1544    {
1545        return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
1546    }
1547};
1548
1549template <class _Atp, class _Tp>
1550_LIBCPP_AVAILABILITY_SYNC
1551_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
1552{
1553    __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
1554    return __cxx_atomic_wait(__a, __test_fn);
1555}
1556
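// For illustration: the test functor above turns "wait until the stored value no longer compares
// equal to __val" into the polling predicate, which is exactly the semantics of
// std::atomic<T>::wait. A minimal handshake sketch (assumes C++20; the producer/consumer names
// are invented):
/*
    #include <atomic>

    std::atomic<int> state{0};

    void consumer() {
        state.wait(0);                              // returns only once state != 0
    }

    void producer() {
        state.store(1, std::memory_order_release);
        state.notify_one();                         // wakes a thread blocked in wait(0)
    }
*/
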
1557// general atomic<T>
1558
1559template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
1560struct __atomic_base  // false
1561{
1562    mutable __cxx_atomic_impl<_Tp> __a_;
1563
1564#if defined(__cpp_lib_atomic_is_always_lock_free)
1565  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
1566#endif
1567
1568    _LIBCPP_INLINE_VISIBILITY
1569    bool is_lock_free() const volatile _NOEXCEPT
1570        {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
1571    _LIBCPP_INLINE_VISIBILITY
1572    bool is_lock_free() const _NOEXCEPT
1573        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
1574    _LIBCPP_INLINE_VISIBILITY
1575    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1576      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1577        {__cxx_atomic_store(&__a_, __d, __m);}
1578    _LIBCPP_INLINE_VISIBILITY
1579    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1580      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1581        {__cxx_atomic_store(&__a_, __d, __m);}
1582    _LIBCPP_INLINE_VISIBILITY
1583    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1584      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1585        {return __cxx_atomic_load(&__a_, __m);}
1586    _LIBCPP_INLINE_VISIBILITY
1587    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1588      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1589        {return __cxx_atomic_load(&__a_, __m);}
1590    _LIBCPP_INLINE_VISIBILITY
1591    operator _Tp() const volatile _NOEXCEPT {return load();}
1592    _LIBCPP_INLINE_VISIBILITY
1593    operator _Tp() const _NOEXCEPT          {return load();}
1594    _LIBCPP_INLINE_VISIBILITY
1595    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1596        {return __cxx_atomic_exchange(&__a_, __d, __m);}
1597    _LIBCPP_INLINE_VISIBILITY
1598    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1599        {return __cxx_atomic_exchange(&__a_, __d, __m);}
1600    _LIBCPP_INLINE_VISIBILITY
1601    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1602                               memory_order __s, memory_order __f) volatile _NOEXCEPT
1603      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1604        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1605    _LIBCPP_INLINE_VISIBILITY
1606    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1607                               memory_order __s, memory_order __f) _NOEXCEPT
1608      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1609        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1610    _LIBCPP_INLINE_VISIBILITY
1611    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1612                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
1613      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1614        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1615    _LIBCPP_INLINE_VISIBILITY
1616    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1617                                 memory_order __s, memory_order __f) _NOEXCEPT
1618      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1619        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1620    _LIBCPP_INLINE_VISIBILITY
1621    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1622                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1623        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1624    _LIBCPP_INLINE_VISIBILITY
1625    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1626                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
1627        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1628    _LIBCPP_INLINE_VISIBILITY
1629    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1630                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1631        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1632    _LIBCPP_INLINE_VISIBILITY
1633    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1634                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
1635        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1636
1637    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1638        {__cxx_atomic_wait(&__a_, __v, __m);}
1639    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1640        {__cxx_atomic_wait(&__a_, __v, __m);}
1641    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
1642        {__cxx_atomic_notify_one(&__a_);}
1643    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
1644        {__cxx_atomic_notify_one(&__a_);}
1645    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
1646        {__cxx_atomic_notify_all(&__a_);}
1647    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
1648        {__cxx_atomic_notify_all(&__a_);}
1649
1650#if _LIBCPP_STD_VER > 17
1651    _LIBCPP_INLINE_VISIBILITY constexpr
1652    __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
1653#else
1654    _LIBCPP_INLINE_VISIBILITY
1655    __atomic_base() _NOEXCEPT = default;
1656#endif
1657
1658    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
1659    __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
1660
1661    __atomic_base(const __atomic_base&) = delete;
1662};
1663
1664#if defined(__cpp_lib_atomic_is_always_lock_free)
1665template <class _Tp, bool __b>
1666_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
1667#endif
1668
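// For illustration: the primary __atomic_base (the non-integral case) provides
// load/store/exchange/compare_exchange/wait/notify but no arithmetic, so arithmetic on such
// types is usually written as a compare-exchange loop. A minimal sketch for atomic<double>
// (later standards add fetch_add for floating-point atomics; the `add` function is invented):
/*
    #include <atomic>

    std::atomic<double> total{0.0};

    void add(double x) {
        double old = total.load(std::memory_order_relaxed);
        while (!total.compare_exchange_weak(old, old + x,
                                            std::memory_order_release,
                                            std::memory_order_relaxed)) {
            // `old` now holds the freshly observed value; retry with it.
        }
    }
*/
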
1669// atomic<Integral>
1670
1671template <class _Tp>
1672struct __atomic_base<_Tp, true>
1673    : public __atomic_base<_Tp, false>
1674{
1675    typedef __atomic_base<_Tp, false> __base;
1676
1677    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17
1678    __atomic_base() _NOEXCEPT = default;
1679
1680    _LIBCPP_INLINE_VISIBILITY
1681    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
1682
1683    _LIBCPP_INLINE_VISIBILITY
1684    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1685        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1686    _LIBCPP_INLINE_VISIBILITY
1687    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1688        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1689    _LIBCPP_INLINE_VISIBILITY
1690    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1691        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1692    _LIBCPP_INLINE_VISIBILITY
1693    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1694        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1695    _LIBCPP_INLINE_VISIBILITY
1696    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1697        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1698    _LIBCPP_INLINE_VISIBILITY
1699    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1700        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1701    _LIBCPP_INLINE_VISIBILITY
1702    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1703        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1704    _LIBCPP_INLINE_VISIBILITY
1705    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1706        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1707    _LIBCPP_INLINE_VISIBILITY
1708    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1709        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1710    _LIBCPP_INLINE_VISIBILITY
1711    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1712        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1713
1714    _LIBCPP_INLINE_VISIBILITY
1715    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
1716    _LIBCPP_INLINE_VISIBILITY
1717    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
1718    _LIBCPP_INLINE_VISIBILITY
1719    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
1720    _LIBCPP_INLINE_VISIBILITY
1721    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
1722    _LIBCPP_INLINE_VISIBILITY
1723    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
1724    _LIBCPP_INLINE_VISIBILITY
1725    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
1726    _LIBCPP_INLINE_VISIBILITY
1727    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
1728    _LIBCPP_INLINE_VISIBILITY
1729    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
1730    _LIBCPP_INLINE_VISIBILITY
1731    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1732    _LIBCPP_INLINE_VISIBILITY
1733    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1734    _LIBCPP_INLINE_VISIBILITY
1735    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1736    _LIBCPP_INLINE_VISIBILITY
1737    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1738    _LIBCPP_INLINE_VISIBILITY
1739    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
1740    _LIBCPP_INLINE_VISIBILITY
1741    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
1742    _LIBCPP_INLINE_VISIBILITY
1743    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
1744    _LIBCPP_INLINE_VISIBILITY
1745    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
1746    _LIBCPP_INLINE_VISIBILITY
1747    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1748    _LIBCPP_INLINE_VISIBILITY
1749    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
1750};
1751
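// For illustration: the integral specialization adds the fetch_* operations (which return the
// previous value) and the compound operators (which return the updated value, computed as
// fetch_op(arg) op arg). A minimal sketch of that distinction:
/*
    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<unsigned> bits{0x0Fu};
        unsigned prev = bits.fetch_or(0xF0u);   // previous value: 0x0F (bits is now 0xFF)
        assert(prev == 0x0Fu);
        unsigned now = (bits &= 0x3Cu);         // updated value: 0xFF & 0x3C
        assert(now == 0x3Cu && bits.load() == 0x3Cu);
    }
*/
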
1752// atomic<T>
1753
1754template <class _Tp>
1755struct atomic
1756    : public __atomic_base<_Tp>
1757{
1758    typedef __atomic_base<_Tp> __base;
1759    typedef _Tp value_type;
1760    typedef value_type difference_type;
1761
1762#if _LIBCPP_STD_VER > 17
1763    _LIBCPP_INLINE_VISIBILITY
1764    atomic() = default;
1765#else
1766    _LIBCPP_INLINE_VISIBILITY
1767    atomic() _NOEXCEPT = default;
1768#endif
1769
1770    _LIBCPP_INLINE_VISIBILITY
1771    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1772
1773    _LIBCPP_INLINE_VISIBILITY
1774    _Tp operator=(_Tp __d) volatile _NOEXCEPT
1775        {__base::store(__d); return __d;}
1776    _LIBCPP_INLINE_VISIBILITY
1777    _Tp operator=(_Tp __d) _NOEXCEPT
1778        {__base::store(__d); return __d;}
1779
1780    atomic& operator=(const atomic&) = delete;
1781    atomic& operator=(const atomic&) volatile = delete;
1782};
1783
1784// atomic<T*>
1785
1786template <class _Tp>
1787struct atomic<_Tp*>
1788    : public __atomic_base<_Tp*>
1789{
1790    typedef __atomic_base<_Tp*> __base;
1791    typedef _Tp* value_type;
1792    typedef ptrdiff_t difference_type;
1793
1794    _LIBCPP_INLINE_VISIBILITY
1795    atomic() _NOEXCEPT = default;
1796
1797    _LIBCPP_INLINE_VISIBILITY
1798    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1799
1800    _LIBCPP_INLINE_VISIBILITY
1801    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1802        {__base::store(__d); return __d;}
1803    _LIBCPP_INLINE_VISIBILITY
1804    _Tp* operator=(_Tp* __d) _NOEXCEPT
1805        {__base::store(__d); return __d;}
1806
1807    _LIBCPP_INLINE_VISIBILITY
1808    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
1809        // __atomic_fetch_add accepts function pointers, guard against them.
1810        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
1811        return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
1812    }
1813
1814    _LIBCPP_INLINE_VISIBILITY
1815    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
1816        // __atomic_fetch_add accepts function pointers, guard against them.
1817        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
1818        return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
1819    }
1820
1821    _LIBCPP_INLINE_VISIBILITY
1822    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
1823        // __atomic_fetch_sub accepts function pointers, guard against them.
1824        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
1825        return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
1826    }
1827
1828    _LIBCPP_INLINE_VISIBILITY
1829    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
1830        // __atomic_fetch_sub accepts function pointers, guard against them.
1831        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
1832        return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
1833    }
1834
1835    _LIBCPP_INLINE_VISIBILITY
1836    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
1837    _LIBCPP_INLINE_VISIBILITY
1838    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
1839    _LIBCPP_INLINE_VISIBILITY
1840    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
1841    _LIBCPP_INLINE_VISIBILITY
1842    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
1843    _LIBCPP_INLINE_VISIBILITY
1844    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
1845    _LIBCPP_INLINE_VISIBILITY
1846    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
1847    _LIBCPP_INLINE_VISIBILITY
1848    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
1849    _LIBCPP_INLINE_VISIBILITY
1850    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
1851    _LIBCPP_INLINE_VISIBILITY
1852    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1853    _LIBCPP_INLINE_VISIBILITY
1854    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1855    _LIBCPP_INLINE_VISIBILITY
1856    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1857    _LIBCPP_INLINE_VISIBILITY
1858    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1859
1860    atomic& operator=(const atomic&) = delete;
1861    atomic& operator=(const atomic&) volatile = delete;
1862};
1863
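// For illustration: for atomic<T*> the difference_type is ptrdiff_t and fetch_add/fetch_sub step
// in whole objects, just like built-in pointer arithmetic; the static_asserts above reject
// pointers to functions. A minimal sketch handing out slots of a shared array (names invented;
// no bounds checking):
/*
    #include <atomic>

    int buffer[128];
    std::atomic<int*> cursor{buffer};

    int* claim_slot() {
        return cursor.fetch_add(1);    // advances by sizeof(int); returns the claimed slot
    }
*/
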
1864// atomic_is_lock_free
1865
1866template <class _Tp>
1867_LIBCPP_INLINE_VISIBILITY
1868bool
1869atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1870{
1871    return __o->is_lock_free();
1872}
1873
1874template <class _Tp>
1875_LIBCPP_INLINE_VISIBILITY
1876bool
1877atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1878{
1879    return __o->is_lock_free();
1880}
1881
1882// atomic_init
1883
1884template <class _Tp>
1885_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
1886void
1887atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1888{
1889    __cxx_atomic_init(&__o->__a_, __d);
1890}
1891
1892template <class _Tp>
1893_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
1894void
1895atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1896{
1897    __cxx_atomic_init(&__o->__a_, __d);
1898}
1899
1900// atomic_store
1901
1902template <class _Tp>
1903_LIBCPP_INLINE_VISIBILITY
1904void
1905atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1906{
1907    __o->store(__d);
1908}
1909
1910template <class _Tp>
1911_LIBCPP_INLINE_VISIBILITY
1912void
1913atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1914{
1915    __o->store(__d);
1916}
1917
1918// atomic_store_explicit
1919
1920template <class _Tp>
1921_LIBCPP_INLINE_VISIBILITY
1922void
1923atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
1924  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1925{
1926    __o->store(__d, __m);
1927}
1928
1929template <class _Tp>
1930_LIBCPP_INLINE_VISIBILITY
1931void
1932atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
1933  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1934{
1935    __o->store(__d, __m);
1936}
1937
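// For illustration: the _explicit free functions forward the caller's memory_order, and the
// _LIBCPP_CHECK_*_MEMORY_ORDER annotations flag orders that are invalid for the operation when
// the debug checks are enabled (a store may not use consume/acquire/acq_rel; a load may not use
// release/acq_rel). A minimal sketch of the valid combinations (function names invented):
/*
    #include <atomic>

    std::atomic<int> value{0};

    void publish(int v) {
        // Valid store orders: relaxed, release, seq_cst.
        std::atomic_store_explicit(&value, v, std::memory_order_release);
    }

    int observe() {
        // Valid load orders: relaxed, consume, acquire, seq_cst.
        return std::atomic_load_explicit(&value, std::memory_order_acquire);
    }
*/
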
1938// atomic_load
1939
1940template <class _Tp>
1941_LIBCPP_INLINE_VISIBILITY
1942_Tp
1943atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1944{
1945    return __o->load();
1946}
1947
1948template <class _Tp>
1949_LIBCPP_INLINE_VISIBILITY
1950_Tp
1951atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1952{
1953    return __o->load();
1954}
1955
1956// atomic_load_explicit
1957
1958template <class _Tp>
1959_LIBCPP_INLINE_VISIBILITY
1960_Tp
1961atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1962  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1963{
1964    return __o->load(__m);
1965}
1966
1967template <class _Tp>
1968_LIBCPP_INLINE_VISIBILITY
1969_Tp
1970atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1971  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1972{
1973    return __o->load(__m);
1974}
1975
1976// atomic_exchange
1977
1978template <class _Tp>
1979_LIBCPP_INLINE_VISIBILITY
1980_Tp
1981atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1982{
1983    return __o->exchange(__d);
1984}
1985
1986template <class _Tp>
1987_LIBCPP_INLINE_VISIBILITY
1988_Tp
1989atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1990{
1991    return __o->exchange(__d);
1992}
1993
1994// atomic_exchange_explicit
1995
1996template <class _Tp>
1997_LIBCPP_INLINE_VISIBILITY
1998_Tp
1999atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
2000{
2001    return __o->exchange(__d, __m);
2002}
2003
2004template <class _Tp>
2005_LIBCPP_INLINE_VISIBILITY
2006_Tp
2007atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
2008{
2009    return __o->exchange(__d, __m);
2010}
2011
2012// atomic_compare_exchange_weak
2013
2014template <class _Tp>
2015_LIBCPP_INLINE_VISIBILITY
2016bool
2017atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2018{
2019    return __o->compare_exchange_weak(*__e, __d);
2020}
2021
2022template <class _Tp>
2023_LIBCPP_INLINE_VISIBILITY
2024bool
2025atomic_compare_exchange_weak(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2026{
2027    return __o->compare_exchange_weak(*__e, __d);
2028}
2029
2030// atomic_compare_exchange_strong
2031
2032template <class _Tp>
2033_LIBCPP_INLINE_VISIBILITY
2034bool
2035atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2036{
2037    return __o->compare_exchange_strong(*__e, __d);
2038}
2039
2040template <class _Tp>
2041_LIBCPP_INLINE_VISIBILITY
2042bool
2043atomic_compare_exchange_strong(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2044{
2045    return __o->compare_exchange_strong(*__e, __d);
2046}
2047
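// For illustration: the compare-exchange free functions take `expected` by pointer and, like the
// members, write the observed value back into it on failure, so the usual retry loop carries
// over unchanged to C-style code. A minimal sketch (the refcount/try_retain names are invented):
/*
    #include <atomic>

    std::atomic<int> refcount{1};

    // Increment only while the count is still non-zero.
    bool try_retain() {
        int expected = std::atomic_load(&refcount);
        while (expected != 0) {
            if (std::atomic_compare_exchange_weak(&refcount, &expected, expected + 1))
                return true;   // on failure, `expected` was refreshed and we loop again
        }
        return false;
    }
*/
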
2048// atomic_compare_exchange_weak_explicit
2049
2050template <class _Tp>
2051_LIBCPP_INLINE_VISIBILITY
2052bool
2053atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
2054                                      typename atomic<_Tp>::value_type __d,
2055                                      memory_order __s, memory_order __f) _NOEXCEPT
2056  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2057{
2058    return __o->compare_exchange_weak(*__e, __d, __s, __f);
2059}
2060
2061template <class _Tp>
2062_LIBCPP_INLINE_VISIBILITY
2063bool
2064atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
2065                                      memory_order __s, memory_order __f) _NOEXCEPT
2066  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2067{
2068    return __o->compare_exchange_weak(*__e, __d, __s, __f);
2069}
2070
2071// atomic_compare_exchange_strong_explicit
2072
2073template <class _Tp>
2074_LIBCPP_INLINE_VISIBILITY
2075bool
2076atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
2077                                        typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
2078                                        memory_order __s, memory_order __f) _NOEXCEPT
2079  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2080{
2081    return __o->compare_exchange_strong(*__e, __d, __s, __f);
2082}
2083
2084template <class _Tp>
2085_LIBCPP_INLINE_VISIBILITY
2086bool
2087atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
2088                                        typename atomic<_Tp>::value_type __d,
2089                                        memory_order __s, memory_order __f) _NOEXCEPT
2090  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2091{
2092    return __o->compare_exchange_strong(*__e, __d, __s, __f);
2093}
2094
2095// atomic_wait
2096
2097template <class _Tp>
2098_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2099void atomic_wait(const volatile atomic<_Tp>* __o,
2100                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2101{
2102    return __o->wait(__v);
2103}
2104
2105template <class _Tp>
2106_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2107void atomic_wait(const atomic<_Tp>* __o,
2108                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2109{
2110    return __o->wait(__v);
2111}
2112
2113// atomic_wait_explicit
2114
2115template <class _Tp>
2116_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2117void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
2118                          typename atomic<_Tp>::value_type __v,
2119                          memory_order __m) _NOEXCEPT
2120  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2121{
2122    return __o->wait(__v, __m);
2123}
2124
2125template <class _Tp>
2126_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2127void atomic_wait_explicit(const atomic<_Tp>* __o,
2128                          typename atomic<_Tp>::value_type __v,
2129                          memory_order __m) _NOEXCEPT
2130  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2131{
2132    return __o->wait(__v, __m);
2133}
2134
2135// atomic_notify_one
2136
2137template <class _Tp>
2138_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2139void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
2140{
2141    __o->notify_one();
2142}
2143template <class _Tp>
2144_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2145void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
2146{
2147    __o->notify_one();
2148}
2149
2150// atomic_notify_one
2151
2152template <class _Tp>
2153_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2154void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
2155{
2156    __o->notify_all();
2157}
2158template <class _Tp>
2159_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2160void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
2161{
2162    __o->notify_all();
2163}
2164
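// For illustration: atomic_wait blocks until the stored value no longer compares equal to the
// argument, and atomic_wait_explicit takes a load-compatible memory order (the check above flags
// release/acq_rel). A minimal flag handshake in the free-function spelling (assumes C++20
// wait/notify; names invented):
/*
    #include <atomic>

    std::atomic<int> ready{0};

    void waiter() {
        // Blocks while ready == 0; acquire pairs with the release store below.
        std::atomic_wait_explicit(&ready, 0, std::memory_order_acquire);
    }

    void signaler() {
        std::atomic_store_explicit(&ready, 1, std::memory_order_release);
        std::atomic_notify_one(&ready);
    }
*/
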
2165// atomic_fetch_add
2166
2167template <class _Tp>
2168_LIBCPP_INLINE_VISIBILITY
2169_Tp
2170atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2171{
2172    return __o->fetch_add(__op);
2173}
2174
2175template <class _Tp>
2176_LIBCPP_INLINE_VISIBILITY
2177_Tp
2178atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2179{
2180    return __o->fetch_add(__op);
2181}
2182
2183// atomic_fetch_add_explicit
2184
2185template <class _Tp>
2186_LIBCPP_INLINE_VISIBILITY
2187_Tp atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2188{
2189    return __o->fetch_add(__op, __m);
2190}
2191
2192template <class _Tp>
2193_LIBCPP_INLINE_VISIBILITY
2194_Tp atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2195{
2196    return __o->fetch_add(__op, __m);
2197}
2198
2199// atomic_fetch_sub
2200
2201template <class _Tp>
2202_LIBCPP_INLINE_VISIBILITY
2203_Tp atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2204{
2205    return __o->fetch_sub(__op);
2206}
2207
2208template <class _Tp>
2209_LIBCPP_INLINE_VISIBILITY
2210_Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2211{
2212    return __o->fetch_sub(__op);
2213}
2214
2215// atomic_fetch_sub_explicit
2216
2217template <class _Tp>
2218_LIBCPP_INLINE_VISIBILITY
2219_Tp atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2220{
2221    return __o->fetch_sub(__op, __m);
2222}
2223
2224template <class _Tp>
2225_LIBCPP_INLINE_VISIBILITY
2226_Tp atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2227{
2228    return __o->fetch_sub(__op, __m);
2229}
2230
2231// atomic_fetch_and
2232
2233template <class _Tp>
2234_LIBCPP_INLINE_VISIBILITY
2235typename enable_if
2236<
2237    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2238    _Tp
2239>::type
2240atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2241{
2242    return __o->fetch_and(__op);
2243}
2244
2245template <class _Tp>
2246_LIBCPP_INLINE_VISIBILITY
2247typename enable_if
2248<
2249    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2250    _Tp
2251>::type
2252atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2253{
2254    return __o->fetch_and(__op);
2255}
2256
2257// atomic_fetch_and_explicit
2258
2259template <class _Tp>
2260_LIBCPP_INLINE_VISIBILITY
2261typename enable_if
2262<
2263    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2264    _Tp
2265>::type
2266atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2267{
2268    return __o->fetch_and(__op, __m);
2269}
2270
2271template <class _Tp>
2272_LIBCPP_INLINE_VISIBILITY
2273typename enable_if
2274<
2275    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2276    _Tp
2277>::type
2278atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2279{
2280    return __o->fetch_and(__op, __m);
2281}
2282
2283// atomic_fetch_or
2284
2285template <class _Tp>
2286_LIBCPP_INLINE_VISIBILITY
2287typename enable_if
2288<
2289    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2290    _Tp
2291>::type
2292atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2293{
2294    return __o->fetch_or(__op);
2295}
2296
2297template <class _Tp>
2298_LIBCPP_INLINE_VISIBILITY
2299typename enable_if
2300<
2301    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2302    _Tp
2303>::type
2304atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2305{
2306    return __o->fetch_or(__op);
2307}
2308
2309// atomic_fetch_or_explicit
2310
2311template <class _Tp>
2312_LIBCPP_INLINE_VISIBILITY
2313typename enable_if
2314<
2315    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2316    _Tp
2317>::type
2318atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2319{
2320    return __o->fetch_or(__op, __m);
2321}
2322
2323template <class _Tp>
2324_LIBCPP_INLINE_VISIBILITY
2325typename enable_if
2326<
2327    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2328    _Tp
2329>::type
2330atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2331{
2332    return __o->fetch_or(__op, __m);
2333}
2334
2335// atomic_fetch_xor
2336
2337template <class _Tp>
2338_LIBCPP_INLINE_VISIBILITY
2339typename enable_if
2340<
2341    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2342    _Tp
2343>::type
2344atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2345{
2346    return __o->fetch_xor(__op);
2347}
2348
2349template <class _Tp>
2350_LIBCPP_INLINE_VISIBILITY
2351typename enable_if
2352<
2353    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2354    _Tp
2355>::type
2356atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2357{
2358    return __o->fetch_xor(__op);
2359}
2360
2361// atomic_fetch_xor_explicit
2362
2363template <class _Tp>
2364_LIBCPP_INLINE_VISIBILITY
2365typename enable_if
2366<
2367    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2368    _Tp
2369>::type
2370atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2371{
2372    return __o->fetch_xor(__op, __m);
2373}
2374
2375template <class _Tp>
2376_LIBCPP_INLINE_VISIBILITY
2377typename enable_if
2378<
2379    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2380    _Tp
2381>::type
2382atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2383{
2384    return __o->fetch_xor(__op, __m);
2385}
2386
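// For illustration: the bitwise fetch_and/fetch_or/fetch_xor free functions are constrained with
// enable_if to integral, non-bool value types, so they drop out of overload resolution for
// atomic<bool>, pointer, and other non-integral specializations. A minimal sketch (names
// invented):
/*
    #include <atomic>

    std::atomic<unsigned> mask{0xFFu};

    unsigned clear_low_nibble() {
        return std::atomic_fetch_and(&mask, 0xF0u);   // returns the previous value
    }

    // std::atomic<bool> b{true};
    // std::atomic_fetch_and(&b, true);  // ill-formed: no viable overload (SFINAE'd away)
*/
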
2387// flag type and operations
2388
2389typedef struct atomic_flag
2390{
2391    __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;
2392
2393    _LIBCPP_INLINE_VISIBILITY
2394    bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2395        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
2396    _LIBCPP_INLINE_VISIBILITY
2397    bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2398        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
2399
2400    _LIBCPP_INLINE_VISIBILITY
2401    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2402        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2403    _LIBCPP_INLINE_VISIBILITY
2404    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2405        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2406    _LIBCPP_INLINE_VISIBILITY
2407    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2408        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2409    _LIBCPP_INLINE_VISIBILITY
2410    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2411        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2412
2413    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2414    void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2415        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
2416    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2417    void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2418        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
2419    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2420    void notify_one() volatile _NOEXCEPT
2421        {__cxx_atomic_notify_one(&__a_);}
2422    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2423    void notify_one() _NOEXCEPT
2424        {__cxx_atomic_notify_one(&__a_);}
2425    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2426    void notify_all() volatile _NOEXCEPT
2427        {__cxx_atomic_notify_all(&__a_);}
2428    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2429    void notify_all() _NOEXCEPT
2430        {__cxx_atomic_notify_all(&__a_);}
2431
2432#if _LIBCPP_STD_VER > 17
2433    _LIBCPP_INLINE_VISIBILITY constexpr
2434    atomic_flag() _NOEXCEPT : __a_(false) {}
2435#else
2436    _LIBCPP_INLINE_VISIBILITY
2437    atomic_flag() _NOEXCEPT = default;
2438#endif
2439
2440    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
2441    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION
2442
2443    atomic_flag(const atomic_flag&) = delete;
2444    atomic_flag& operator=(const atomic_flag&) = delete;
2445    atomic_flag& operator=(const atomic_flag&) volatile = delete;
2446
2447} atomic_flag;
2448
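// For illustration: atomic_flag is the one type guaranteed to be lock-free and is the
// traditional building block for a spinlock: test_and_set(acquire) to take the lock,
// clear(release) to drop it, and (since C++20) wait/notify_one to sleep instead of spinning.
// A minimal sketch (the `spinlock` class is invented for the example and assumes the C++20
// default constructor that clears the flag):
/*
    #include <atomic>

    class spinlock {
        std::atomic_flag flag_;   // default-constructed clear in C++20
    public:
        void lock() {
            while (flag_.test_and_set(std::memory_order_acquire))
                flag_.wait(true, std::memory_order_relaxed);  // sleep until cleared
        }
        void unlock() {
            flag_.clear(std::memory_order_release);
            flag_.notify_one();
        }
    };
*/
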
2449
2450inline _LIBCPP_INLINE_VISIBILITY
2451bool
2452atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
2453{
2454    return __o->test();
2455}
2456
2457inline _LIBCPP_INLINE_VISIBILITY
2458bool
2459atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
2460{
2461    return __o->test();
2462}
2463
2464inline _LIBCPP_INLINE_VISIBILITY
2465bool
2466atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2467{
2468    return __o->test(__m);
2469}
2470
2471inline _LIBCPP_INLINE_VISIBILITY
2472bool
2473atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
2474{
2475    return __o->test(__m);
2476}
2477
2478inline _LIBCPP_INLINE_VISIBILITY
2479bool
2480atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
2481{
2482    return __o->test_and_set();
2483}
2484
2485inline _LIBCPP_INLINE_VISIBILITY
2486bool
2487atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
2488{
2489    return __o->test_and_set();
2490}
2491
2492inline _LIBCPP_INLINE_VISIBILITY
2493bool
2494atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2495{
2496    return __o->test_and_set(__m);
2497}
2498
2499inline _LIBCPP_INLINE_VISIBILITY
2500bool
2501atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2502{
2503    return __o->test_and_set(__m);
2504}
2505
2506inline _LIBCPP_INLINE_VISIBILITY
2507void
2508atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
2509{
2510    __o->clear();
2511}
2512
2513inline _LIBCPP_INLINE_VISIBILITY
2514void
2515atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
2516{
2517    __o->clear();
2518}
2519
2520inline _LIBCPP_INLINE_VISIBILITY
2521void
2522atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2523{
2524    __o->clear(__m);
2525}
2526
2527inline _LIBCPP_INLINE_VISIBILITY
2528void
2529atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2530{
2531    __o->clear(__m);
2532}
2533
2534inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2535void
2536atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
2537{
2538    __o->wait(__v);
2539}
2540
2541inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2542void
2543atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
2544{
2545    __o->wait(__v);
2546}
2547
2548inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2549void
2550atomic_flag_wait_explicit(const volatile atomic_flag* __o,
2551                          bool __v, memory_order __m) _NOEXCEPT
2552{
2553    __o->wait(__v, __m);
2554}
2555
2556inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2557void
2558atomic_flag_wait_explicit(const atomic_flag* __o,
2559                          bool __v, memory_order __m) _NOEXCEPT
2560{
2561    __o->wait(__v, __m);
2562}
2563
2564inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2565void
2566atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
2567{
2568    __o->notify_one();
2569}
2570
2571inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2572void
2573atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
2574{
2575    __o->notify_one();
2576}
2577
2578inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2579void
2580atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
2581{
2582    __o->notify_all();
2583}
2584
2585inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2586void
2587atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
2588{
2589    __o->notify_all();
2590}
2591
2592// fences
2593
2594inline _LIBCPP_INLINE_VISIBILITY
2595void
2596atomic_thread_fence(memory_order __m) _NOEXCEPT
2597{
2598    __cxx_atomic_thread_fence(__m);
2599}
2600
2601inline _LIBCPP_INLINE_VISIBILITY
2602void
2603atomic_signal_fence(memory_order __m) _NOEXCEPT
2604{
2605    __cxx_atomic_signal_fence(__m);
2606}
2607
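// For illustration: atomic_thread_fence orders surrounding memory accesses without naming any
// particular atomic object; the classic pairing is a release fence before a relaxed store
// synchronizing with an acquire fence after a relaxed load of the same object.
// atomic_signal_fence constrains only the compiler (for code shared with a signal handler), not
// the hardware. A minimal sketch of the fence/fence pairing (names invented):
/*
    #include <atomic>

    int payload = 0;                        // ordinary, non-atomic data
    std::atomic<bool> flag{false};

    void producer() {
        payload = 42;
        std::atomic_thread_fence(std::memory_order_release);
        flag.store(true, std::memory_order_relaxed);
    }

    void consumer() {
        while (!flag.load(std::memory_order_relaxed)) {}    // spin
        std::atomic_thread_fence(std::memory_order_acquire);
        int v = payload;                    // guaranteed to observe 42
        (void)v;
    }
*/
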
2608// Atomics for standard typedef types
2609
2610typedef atomic<bool>               atomic_bool;
2611typedef atomic<char>               atomic_char;
2612typedef atomic<signed char>        atomic_schar;
2613typedef atomic<unsigned char>      atomic_uchar;
2614typedef atomic<short>              atomic_short;
2615typedef atomic<unsigned short>     atomic_ushort;
2616typedef atomic<int>                atomic_int;
2617typedef atomic<unsigned int>       atomic_uint;
2618typedef atomic<long>               atomic_long;
2619typedef atomic<unsigned long>      atomic_ulong;
2620typedef atomic<long long>          atomic_llong;
2621typedef atomic<unsigned long long> atomic_ullong;
2622#ifndef _LIBCPP_HAS_NO_CHAR8_T
2623typedef atomic<char8_t>            atomic_char8_t;
2624#endif
2625typedef atomic<char16_t>           atomic_char16_t;
2626typedef atomic<char32_t>           atomic_char32_t;
2627#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
2628typedef atomic<wchar_t>            atomic_wchar_t;
2629#endif
2630
2631typedef atomic<int_least8_t>   atomic_int_least8_t;
2632typedef atomic<uint_least8_t>  atomic_uint_least8_t;
2633typedef atomic<int_least16_t>  atomic_int_least16_t;
2634typedef atomic<uint_least16_t> atomic_uint_least16_t;
2635typedef atomic<int_least32_t>  atomic_int_least32_t;
2636typedef atomic<uint_least32_t> atomic_uint_least32_t;
2637typedef atomic<int_least64_t>  atomic_int_least64_t;
2638typedef atomic<uint_least64_t> atomic_uint_least64_t;
2639
2640typedef atomic<int_fast8_t>   atomic_int_fast8_t;
2641typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
2642typedef atomic<int_fast16_t>  atomic_int_fast16_t;
2643typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
2644typedef atomic<int_fast32_t>  atomic_int_fast32_t;
2645typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
2646typedef atomic<int_fast64_t>  atomic_int_fast64_t;
2647typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
2648
2649typedef atomic< int8_t>  atomic_int8_t;
2650typedef atomic<uint8_t>  atomic_uint8_t;
2651typedef atomic< int16_t> atomic_int16_t;
2652typedef atomic<uint16_t> atomic_uint16_t;
2653typedef atomic< int32_t> atomic_int32_t;
2654typedef atomic<uint32_t> atomic_uint32_t;
2655typedef atomic< int64_t> atomic_int64_t;
2656typedef atomic<uint64_t> atomic_uint64_t;
2657
2658typedef atomic<intptr_t>  atomic_intptr_t;
2659typedef atomic<uintptr_t> atomic_uintptr_t;
2660typedef atomic<size_t>    atomic_size_t;
2661typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
2662typedef atomic<intmax_t>  atomic_intmax_t;
2663typedef atomic<uintmax_t> atomic_uintmax_t;
2664
2665// atomic_*_lock_free : prefer the contention type, then fall back to the widest lock-free integer type
2666
2667#ifdef __cpp_lib_atomic_is_always_lock_free
2668# define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0)
2669#else
2670# define _LIBCPP_CONTENTION_LOCK_FREE false
2671#endif
2672
2673#if ATOMIC_LLONG_LOCK_FREE == 2
2674typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type          __libcpp_signed_lock_free;
2675typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
2676#elif ATOMIC_INT_LOCK_FREE == 2
2677typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type                __libcpp_signed_lock_free;
2678typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type       __libcpp_unsigned_lock_free;
2679#elif ATOMIC_SHORT_LOCK_FREE == 2
2680typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type              __libcpp_signed_lock_free;
2681typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type     __libcpp_unsigned_lock_free;
2682#elif ATOMIC_CHAR_LOCK_FREE == 2
2683typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type               __libcpp_signed_lock_free;
2684typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type      __libcpp_unsigned_lock_free;
2685#else
2686    // No signed/unsigned lock-free types
2687#define _LIBCPP_NO_LOCK_FREE_TYPES
2688#endif
2689
2690#if !defined(_LIBCPP_NO_LOCK_FREE_TYPES)
2691typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free;
2692typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
2693#endif
2694
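// For illustration: atomic_signed_lock_free / atomic_unsigned_lock_free pick the contention type
// when it is lock-free, so their wait/notify can take the most efficient platform path, and
// otherwise fall back to the widest lock-free integer; they only exist when the platform has a
// lock-free candidate (i.e. _LIBCPP_NO_LOCK_FREE_TYPES is not defined). A minimal countdown-latch
// style sketch (assumes C++20; names invented):
/*
    #include <atomic>

    std::atomic_signed_lock_free remaining{3};

    void task_done() {
        if (remaining.fetch_sub(1, std::memory_order_release) == 1)
            remaining.notify_all();                 // last task wakes the waiter
    }

    void wait_for_all() {
        for (auto n = remaining.load(std::memory_order_acquire); n != 0;
             n = remaining.load(std::memory_order_acquire))
            remaining.wait(n, std::memory_order_acquire);
    }
*/
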
2695#define ATOMIC_FLAG_INIT {false}
2696#define ATOMIC_VAR_INIT(__v) {__v}
2697
2698#if _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
2699# if defined(_LIBCPP_CLANG_VER) && _LIBCPP_CLANG_VER >= 1400
2700#  pragma clang deprecated(ATOMIC_VAR_INIT)
2701# endif
2702#endif // _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
2703
2704_LIBCPP_END_NAMESPACE_STD
2705
2706#endif // _LIBCPP_ATOMIC
2707