1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
5// See https://llvm.org/LICENSE.txt for license information.
6// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7//
8//===----------------------------------------------------------------------===//
9
10#ifndef _LIBCPP_ATOMIC
11#define _LIBCPP_ATOMIC
12
13/*
14    atomic synopsis
15
16namespace std
17{
18
19// feature test macro [version.syn]
20
21#define __cpp_lib_atomic_is_always_lock_free
22#define __cpp_lib_atomic_flag_test
23#define __cpp_lib_atomic_lock_free_type_aliases
24#define __cpp_lib_atomic_wait
25
26 // order and consistency
27
28 enum memory_order: unspecified // enum class in C++20
29 {
30    relaxed,
31    consume, // load-consume
32    acquire, // load-acquire
33    release, // store-release
34    acq_rel, // store-release load-acquire
35    seq_cst // sequentially consistent: acquire/release plus a single total order
36 };
37
38 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
39 inline constexpr auto memory_order_consume = memory_order::consume;
40 inline constexpr auto memory_order_acquire = memory_order::acquire;
41 inline constexpr auto memory_order_release = memory_order::release;
42 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
43 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
44
45template <class T> T kill_dependency(T y) noexcept;
46
47// lock-free property
48
49#define ATOMIC_BOOL_LOCK_FREE unspecified
50#define ATOMIC_CHAR_LOCK_FREE unspecified
51#define ATOMIC_CHAR8_T_LOCK_FREE unspecified // C++20
52#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
53#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
54#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
55#define ATOMIC_SHORT_LOCK_FREE unspecified
56#define ATOMIC_INT_LOCK_FREE unspecified
57#define ATOMIC_LONG_LOCK_FREE unspecified
58#define ATOMIC_LLONG_LOCK_FREE unspecified
59#define ATOMIC_POINTER_LOCK_FREE unspecified
60
61template <class T>
62struct atomic
63{
64    using value_type = T;
65
66    static constexpr bool is_always_lock_free;
67    bool is_lock_free() const volatile noexcept;
68    bool is_lock_free() const noexcept;
69
70    atomic() noexcept = default; // until C++20
71    constexpr atomic() noexcept(is_nothrow_default_constructible_v<T>); // since C++20
72    constexpr atomic(T desr) noexcept;
73    atomic(const atomic&) = delete;
74    atomic& operator=(const atomic&) = delete;
75    atomic& operator=(const atomic&) volatile = delete;
76
77    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
78    T load(memory_order m = memory_order_seq_cst) const noexcept;
79    operator T() const volatile noexcept;
80    operator T() const noexcept;
81    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
82    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
83    T operator=(T) volatile noexcept;
84    T operator=(T) noexcept;
85
86    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
87    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
88    bool compare_exchange_weak(T& expc, T desr,
89                               memory_order s, memory_order f) volatile noexcept;
90    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
91    bool compare_exchange_strong(T& expc, T desr,
92                                 memory_order s, memory_order f) volatile noexcept;
93    bool compare_exchange_strong(T& expc, T desr,
94                                 memory_order s, memory_order f) noexcept;
95    bool compare_exchange_weak(T& expc, T desr,
96                               memory_order m = memory_order_seq_cst) volatile noexcept;
97    bool compare_exchange_weak(T& expc, T desr,
98                               memory_order m = memory_order_seq_cst) noexcept;
99    bool compare_exchange_strong(T& expc, T desr,
100                                memory_order m = memory_order_seq_cst) volatile noexcept;
101    bool compare_exchange_strong(T& expc, T desr,
102                                 memory_order m = memory_order_seq_cst) noexcept;
103
104    void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
105    void wait(T, memory_order = memory_order::seq_cst) const noexcept;
106    void notify_one() volatile noexcept;
107    void notify_one() noexcept;
108    void notify_all() volatile noexcept;
109    void notify_all() noexcept;
110};
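
// Illustrative usage (not part of the synopsis; names are placeholders):
// compare_exchange_weak may fail spuriously, so it is normally driven in a
// retry loop, e.g.
//
//   atomic<int> counter{0};
//   int expected = counter.load(memory_order_relaxed);
//   while (!counter.compare_exchange_weak(expected, expected + 1,
//                                         memory_order_acq_rel,
//                                         memory_order_relaxed))
//     ; // on failure, expected has been reloaded with the current value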
111
112template <>
113struct atomic<integral>
114{
115    using value_type = integral;
116    using difference_type = value_type;
117
118    static constexpr bool is_always_lock_free;
119    bool is_lock_free() const volatile noexcept;
120    bool is_lock_free() const noexcept;
121
122    atomic() noexcept = default;
123    constexpr atomic(integral desr) noexcept;
124    atomic(const atomic&) = delete;
125    atomic& operator=(const atomic&) = delete;
126    atomic& operator=(const atomic&) volatile = delete;
127
128    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
129    integral load(memory_order m = memory_order_seq_cst) const noexcept;
130    operator integral() const volatile noexcept;
131    operator integral() const noexcept;
132    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134    integral operator=(integral desr) volatile noexcept;
135    integral operator=(integral desr) noexcept;
136
137    integral exchange(integral desr,
138                      memory_order m = memory_order_seq_cst) volatile noexcept;
139    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
140    bool compare_exchange_weak(integral& expc, integral desr,
141                               memory_order s, memory_order f) volatile noexcept;
142    bool compare_exchange_weak(integral& expc, integral desr,
143                               memory_order s, memory_order f) noexcept;
144    bool compare_exchange_strong(integral& expc, integral desr,
145                                 memory_order s, memory_order f) volatile noexcept;
146    bool compare_exchange_strong(integral& expc, integral desr,
147                                 memory_order s, memory_order f) noexcept;
148    bool compare_exchange_weak(integral& expc, integral desr,
149                               memory_order m = memory_order_seq_cst) volatile noexcept;
150    bool compare_exchange_weak(integral& expc, integral desr,
151                               memory_order m = memory_order_seq_cst) noexcept;
152    bool compare_exchange_strong(integral& expc, integral desr,
153                                memory_order m = memory_order_seq_cst) volatile noexcept;
154    bool compare_exchange_strong(integral& expc, integral desr,
155                                 memory_order m = memory_order_seq_cst) noexcept;
156
157    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
158    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
159    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
161    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
162    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
163    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
164    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
165    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
167
168    integral operator++(int) volatile noexcept;
169    integral operator++(int) noexcept;
170    integral operator--(int) volatile noexcept;
171    integral operator--(int) noexcept;
172    integral operator++() volatile noexcept;
173    integral operator++() noexcept;
174    integral operator--() volatile noexcept;
175    integral operator--() noexcept;
176    integral operator+=(integral op) volatile noexcept;
177    integral operator+=(integral op) noexcept;
178    integral operator-=(integral op) volatile noexcept;
179    integral operator-=(integral op) noexcept;
180    integral operator&=(integral op) volatile noexcept;
181    integral operator&=(integral op) noexcept;
182    integral operator|=(integral op) volatile noexcept;
183    integral operator|=(integral op) noexcept;
184    integral operator^=(integral op) volatile noexcept;
185    integral operator^=(integral op) noexcept;
186
187    void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
188    void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
189    void notify_one() volatile noexcept;
190    void notify_one() noexcept;
191    void notify_all() volatile noexcept;
192    void notify_all() noexcept;
193};
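
// Illustrative usage (not part of the synopsis; names are placeholders):
// the integral specializations add read-modify-write arithmetic, e.g.
//
//   atomic<unsigned> hits{0};
//   hits.fetch_add(1, memory_order_relaxed); // returns the value before the add
//   ++hits;                                  // seq_cst; yields the incremented value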
194
195template <class T>
196struct atomic<T*>
197{
198    using value_type = T*;
199    using difference_type = ptrdiff_t;
200
201    static constexpr bool is_always_lock_free;
202    bool is_lock_free() const volatile noexcept;
203    bool is_lock_free() const noexcept;
204
205    atomic() noexcept = default; // until C++20
206    constexpr atomic() noexcept; // since C++20
207    constexpr atomic(T* desr) noexcept;
208    atomic(const atomic&) = delete;
209    atomic& operator=(const atomic&) = delete;
210    atomic& operator=(const atomic&) volatile = delete;
211
212    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
213    T* load(memory_order m = memory_order_seq_cst) const noexcept;
214    operator T*() const volatile noexcept;
215    operator T*() const noexcept;
216    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
217    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
218    T* operator=(T*) volatile noexcept;
219    T* operator=(T*) noexcept;
220
221    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
222    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
223    bool compare_exchange_weak(T*& expc, T* desr,
224                               memory_order s, memory_order f) volatile noexcept;
225    bool compare_exchange_weak(T*& expc, T* desr,
226                               memory_order s, memory_order f) noexcept;
227    bool compare_exchange_strong(T*& expc, T* desr,
228                                 memory_order s, memory_order f) volatile noexcept;
229    bool compare_exchange_strong(T*& expc, T* desr,
230                                 memory_order s, memory_order f) noexcept;
231    bool compare_exchange_weak(T*& expc, T* desr,
232                               memory_order m = memory_order_seq_cst) volatile noexcept;
233    bool compare_exchange_weak(T*& expc, T* desr,
234                               memory_order m = memory_order_seq_cst) noexcept;
235    bool compare_exchange_strong(T*& expc, T* desr,
236                                memory_order m = memory_order_seq_cst) volatile noexcept;
237    bool compare_exchange_strong(T*& expc, T* desr,
238                                 memory_order m = memory_order_seq_cst) noexcept;
239    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
240    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
241    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
242    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
243
244    T* operator++(int) volatile noexcept;
245    T* operator++(int) noexcept;
246    T* operator--(int) volatile noexcept;
247    T* operator--(int) noexcept;
248    T* operator++() volatile noexcept;
249    T* operator++() noexcept;
250    T* operator--() volatile noexcept;
251    T* operator--() noexcept;
252    T* operator+=(ptrdiff_t op) volatile noexcept;
253    T* operator+=(ptrdiff_t op) noexcept;
254    T* operator-=(ptrdiff_t op) volatile noexcept;
255    T* operator-=(ptrdiff_t op) noexcept;
256
257    void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
258    void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
259    void notify_one() volatile noexcept;
260    void notify_one() noexcept;
261    void notify_all() volatile noexcept;
262    void notify_all() noexcept;
263};
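
// Illustrative usage (not part of the synopsis; names are placeholders):
// for atomic<T*>, fetch_add/fetch_sub and the +=/-= operators advance by
// whole objects of type T (difference_type is ptrdiff_t), e.g.
//
//   int buf[4] = {};
//   atomic<int*> p{buf};
//   p.fetch_add(2); // the stored pointer is now buf + 2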
264
265
266template <class T>
267  bool atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
268
269template <class T>
270  bool atomic_is_lock_free(const atomic<T>* obj) noexcept;
271
272template <class T>
273  void atomic_store(volatile atomic<T>* obj, T desr) noexcept;
274
275template <class T>
276  void atomic_store(atomic<T>* obj, T desr) noexcept;
277
278template <class T>
279  void atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
280
281template <class T>
282  void atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
283
284template <class T>
285  T atomic_load(const volatile atomic<T>* obj) noexcept;
286
287template <class T>
288  T atomic_load(const atomic<T>* obj) noexcept;
289
290template <class T>
291  T atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
292
293template <class T>
294  T atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
295
296template <class T>
297  T atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
298
299template <class T>
300  T atomic_exchange(atomic<T>* obj, T desr) noexcept;
301
302template <class T>
303  T atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
304
305template <class T>
306  T atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
307
308template <class T>
309  bool atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
310
311template <class T>
312  bool atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
313
314template <class T>
315  bool atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
316
317template <class T>
318  bool atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
319
320template <class T>
321  bool atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
322                                             T desr,
323                                             memory_order s, memory_order f) noexcept;
324
325template <class T>
326  bool atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
327                                             memory_order s, memory_order f) noexcept;
328
329template <class T>
330  bool atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
331                                               T* expc, T desr,
332                                               memory_order s, memory_order f) noexcept;
333
334template <class T>
335  bool atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
336                                               T desr,
337                                               memory_order s, memory_order f) noexcept;
338
339template <class T>
340  void atomic_wait(const volatile atomic<T>* obj, T old) noexcept;
341
342template <class T>
343  void atomic_wait(const atomic<T>* obj, T old) noexcept;
344
345template <class T>
346  void atomic_wait_explicit(const volatile atomic<T>* obj, T old, memory_order m) noexcept;
347
348template <class T>
349  void atomic_wait_explicit(const atomic<T>* obj, T old, memory_order m) noexcept;
350
351template <class T>
352  void atomic_notify_one(volatile atomic<T>* obj) noexcept;
353
354template <class T>
355  void atomic_notify_one(atomic<T>* obj) noexcept;
356
357template <class T>
358  void atomic_notify_all(volatile atomic<T>* obj) noexcept;
359
360template <class T>
361  void atomic_notify_all(atomic<T>* obj) noexcept;
362
363template <class Integral>
364  Integral atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
365
366template <class Integral>
367  Integral atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
368
369template <class Integral>
370  Integral atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
371                              memory_order m) noexcept;
372template <class Integral>
373  Integral atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
374                              memory_order m) noexcept;
375template <class Integral>
376  Integral atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
377
378template <class Integral>
379  Integral atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
380
381template <class Integral>
382  Integral atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
383                                     memory_order m) noexcept;
384
385template <class Integral>
386  Integral atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
387                                     memory_order m) noexcept;
388
389template <class Integral>
390  Integral atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
391
392template <class Integral>
393  Integral atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
394
395template <class Integral>
396  Integral atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
397                                     memory_order m) noexcept;
398
399template <class Integral>
400  Integral atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
401                                     memory_order m) noexcept;
402
403template <class Integral>
404  Integral atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
405
406template <class Integral>
407  Integral atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
408
409template <class Integral>
410  Integral atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
411                             memory_order m) noexcept;
412
413template <class Integral>
414  Integral atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
415                             memory_order m) noexcept;
416
417template <class Integral>
418  Integral atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
419
420template <class Integral>
421  Integral atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
422
423template <class Integral>
424  Integral atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
425                                     memory_order m) noexcept;
426
427template <class Integral>
428  Integral atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
429                                     memory_order m) noexcept;
430
431template <class T>
432  T* atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
433
434template <class T>
435  T* atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
436
437template <class T>
438  T* atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
439                               memory_order m) noexcept;
440
441template <class T>
442  T* atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
443
444template <class T>
445  T* atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
446
447template <class T>
448  T* atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
449
450template <class T>
451  T* atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
452                               memory_order m) noexcept;
453
454template <class T>
455  T* atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
456
457// Atomics for standard typedef types
458
459typedef atomic<bool>               atomic_bool;
460typedef atomic<char>               atomic_char;
461typedef atomic<signed char>        atomic_schar;
462typedef atomic<unsigned char>      atomic_uchar;
463typedef atomic<short>              atomic_short;
464typedef atomic<unsigned short>     atomic_ushort;
465typedef atomic<int>                atomic_int;
466typedef atomic<unsigned int>       atomic_uint;
467typedef atomic<long>               atomic_long;
468typedef atomic<unsigned long>      atomic_ulong;
469typedef atomic<long long>          atomic_llong;
470typedef atomic<unsigned long long> atomic_ullong;
471typedef atomic<char8_t>            atomic_char8_t; // C++20
472typedef atomic<char16_t>           atomic_char16_t;
473typedef atomic<char32_t>           atomic_char32_t;
474typedef atomic<wchar_t>            atomic_wchar_t;
475
476typedef atomic<int_least8_t>   atomic_int_least8_t;
477typedef atomic<uint_least8_t>  atomic_uint_least8_t;
478typedef atomic<int_least16_t>  atomic_int_least16_t;
479typedef atomic<uint_least16_t> atomic_uint_least16_t;
480typedef atomic<int_least32_t>  atomic_int_least32_t;
481typedef atomic<uint_least32_t> atomic_uint_least32_t;
482typedef atomic<int_least64_t>  atomic_int_least64_t;
483typedef atomic<uint_least64_t> atomic_uint_least64_t;
484
485typedef atomic<int_fast8_t>   atomic_int_fast8_t;
486typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
487typedef atomic<int_fast16_t>  atomic_int_fast16_t;
488typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
489typedef atomic<int_fast32_t>  atomic_int_fast32_t;
490typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
491typedef atomic<int_fast64_t>  atomic_int_fast64_t;
492typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
493
494typedef atomic<int8_t>   atomic_int8_t;
495typedef atomic<uint8_t>  atomic_uint8_t;
496typedef atomic<int16_t>  atomic_int16_t;
497typedef atomic<uint16_t> atomic_uint16_t;
498typedef atomic<int32_t>  atomic_int32_t;
499typedef atomic<uint32_t> atomic_uint32_t;
500typedef atomic<int64_t>  atomic_int64_t;
501typedef atomic<uint64_t> atomic_uint64_t;
502
503typedef atomic<intptr_t>  atomic_intptr_t;
504typedef atomic<uintptr_t> atomic_uintptr_t;
505typedef atomic<size_t>    atomic_size_t;
506typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
507typedef atomic<intmax_t>  atomic_intmax_t;
508typedef atomic<uintmax_t> atomic_uintmax_t;
509
510// flag type and operations
511
512typedef struct atomic_flag
513{
514    atomic_flag() noexcept = default; // until C++20
515    constexpr atomic_flag() noexcept; // since C++20
516    atomic_flag(const atomic_flag&) = delete;
517    atomic_flag& operator=(const atomic_flag&) = delete;
518    atomic_flag& operator=(const atomic_flag&) volatile = delete;
519
520    bool test(memory_order m = memory_order_seq_cst) volatile noexcept;
521    bool test(memory_order m = memory_order_seq_cst) noexcept;
522    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
523    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
524    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
525    void clear(memory_order m = memory_order_seq_cst) noexcept;
526
527    void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
528    void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
529    void notify_one() volatile noexcept;
530    void notify_one() noexcept;
531    void notify_all() volatile noexcept;
532    void notify_all() noexcept;
533} atomic_flag;
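
// Illustrative usage (not part of the synopsis; names are placeholders):
// atomic_flag is the classic building block for a spinlock, e.g.
//
//   atomic_flag lock = ATOMIC_FLAG_INIT;
//   void acquire() { while (lock.test_and_set(memory_order_acquire)) /* spin */; }
//   void release() { lock.clear(memory_order_release); }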
534
535bool atomic_flag_test(volatile atomic_flag* obj) noexcept;
536bool atomic_flag_test(atomic_flag* obj) noexcept;
537bool atomic_flag_test_explicit(volatile atomic_flag* obj,
538                               memory_order m) noexcept;
539bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept;
540bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
541bool atomic_flag_test_and_set(atomic_flag* obj) noexcept;
542bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
543                                       memory_order m) noexcept;
544bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
545void atomic_flag_clear(volatile atomic_flag* obj) noexcept;
546void atomic_flag_clear(atomic_flag* obj) noexcept;
547void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
548void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
549
550void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
551void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
552void atomic_flag_wait_explicit(const volatile atomic_flag* obj, bool old, memory_order m) noexcept;
553void atomic_flag_wait_explicit(const atomic_flag* obj, bool old, memory_order m) noexcept;
554void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
555void atomic_flag_notify_one(atomic_flag* obj) noexcept;
556void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
557void atomic_flag_notify_all(atomic_flag* obj) noexcept;
558
559// fences
560
561void atomic_thread_fence(memory_order m) noexcept;
562void atomic_signal_fence(memory_order m) noexcept;
563
564// deprecated
565
566template <class T>
567  void atomic_init(volatile atomic<T>* obj, typename atomic<T>::value_type desr) noexcept;
568
569template <class T>
570  void atomic_init(atomic<T>* obj, typename atomic<T>::value_type desr) noexcept;
571
572#define ATOMIC_VAR_INIT(value) see below
573
574#define ATOMIC_FLAG_INIT see below
575
576}  // std
577
578*/
579
580#include <__availability>
581#include <__config>
582#include <__threading_support>
583#include <cstddef>
584#include <cstdint>
585#include <cstring>
586#include <type_traits>
587#include <version>
588
589#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
590#pragma GCC system_header
591#endif
592
593#ifdef _LIBCPP_HAS_NO_THREADS
594# error <atomic> is not supported on this single-threaded system
595#endif
596#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
597# error <atomic> is not implemented
598#endif
599#ifdef kill_dependency
600# error C++ standard library is incompatible with <stdatomic.h>
601#endif
602
603#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
604  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
605                           __m == memory_order_acquire || \
606                           __m == memory_order_acq_rel,   \
607                        "memory order argument to atomic operation is invalid")
608
609#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
610  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
611                           __m == memory_order_acq_rel,   \
612                        "memory order argument to atomic operation is invalid")
613
614#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
615  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
616                           __f == memory_order_acq_rel,   \
617                        "memory order argument to atomic operation is invalid")
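
// These diagnostics flag memory_order arguments that the standard does not
// allow for the given operation: a store may not use consume, acquire, or
// acq_rel; a load may not use release or acq_rel; and the failure order of a
// compare-exchange may not use release or acq_rel. For example,
// x.store(1, memory_order_acquire) is diagnosed when the order is a
// compile-time constant.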
618
619_LIBCPP_BEGIN_NAMESPACE_STD
620
621// Figure out what the underlying type for `memory_order` would be if it were
622// declared as an unscoped enum (accounting for -fshort-enums). Use this result
623// to pin the underlying type in C++20.
624enum __legacy_memory_order {
625    __mo_relaxed,
626    __mo_consume,
627    __mo_acquire,
628    __mo_release,
629    __mo_acq_rel,
630    __mo_seq_cst
631};
632
633typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;
634
635#if _LIBCPP_STD_VER > 17
636
637enum class memory_order : __memory_order_underlying_t {
638  relaxed = __mo_relaxed,
639  consume = __mo_consume,
640  acquire = __mo_acquire,
641  release = __mo_release,
642  acq_rel = __mo_acq_rel,
643  seq_cst = __mo_seq_cst
644};
645
646inline constexpr auto memory_order_relaxed = memory_order::relaxed;
647inline constexpr auto memory_order_consume = memory_order::consume;
648inline constexpr auto memory_order_acquire = memory_order::acquire;
649inline constexpr auto memory_order_release = memory_order::release;
650inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
651inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
652
653#else
654
655typedef enum memory_order {
656  memory_order_relaxed = __mo_relaxed,
657  memory_order_consume = __mo_consume,
658  memory_order_acquire = __mo_acquire,
659  memory_order_release = __mo_release,
660  memory_order_acq_rel = __mo_acq_rel,
661  memory_order_seq_cst = __mo_seq_cst,
662} memory_order;
663
664#endif // _LIBCPP_STD_VER > 17
665
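// Compares the object representations of two values with memcmp. The atomic
// wait machinery uses this to decide whether the observed value still equals
// the caller's "old" value; unlike operator==, it also compares any padding
// bits.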
666template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
667bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
668    return _VSTD::memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0;
669}
670
671static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
672  "unexpected underlying type for std::memory_order");
673
674#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
675    defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
676
677// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
678// the implicitly-defined operator= is not volatile-qualified, assigning to a
679// volatile object has to be done as a byte-by-byte copy.
680template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
681typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
682__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
683  __a_value = __val;
684}
685template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
686typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
687__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
688  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
689  volatile char* __end = __to + sizeof(_Tp);
690  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
691  while (__to != __end)
692    *__to++ = *__from++;
693}
694
695#endif
696
697#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
698
699template <typename _Tp>
700struct __cxx_atomic_base_impl {
701
702  _LIBCPP_INLINE_VISIBILITY
703#ifndef _LIBCPP_CXX03_LANG
704    __cxx_atomic_base_impl() _NOEXCEPT = default;
705#else
706    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
707#endif // _LIBCPP_CXX03_LANG
708  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
709    : __a_value(value) {}
710  _Tp __a_value;
711};
712
713_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
714  // Avoid switch statement to make this a constexpr.
715  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
716         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
717          (__order == memory_order_release ? __ATOMIC_RELEASE:
718           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
719            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
720              __ATOMIC_CONSUME))));
721}
722
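// The failure ordering of a compare-exchange may not contain a release
// component, so release is downgraded to relaxed and acq_rel to acquire when
// mapping the failure argument to a GCC builtin ordering.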
723_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
724  // Avoid switch statement to make this a constexpr.
725  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
726         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
727          (__order == memory_order_release ? __ATOMIC_RELAXED:
728           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
729            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
730              __ATOMIC_CONSUME))));
731}
732
733template <typename _Tp>
734_LIBCPP_INLINE_VISIBILITY
735void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
736  __cxx_atomic_assign_volatile(__a->__a_value, __val);
737}
738
739template <typename _Tp>
740_LIBCPP_INLINE_VISIBILITY
741void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
742  __a->__a_value = __val;
743}
744
745_LIBCPP_INLINE_VISIBILITY inline
746void __cxx_atomic_thread_fence(memory_order __order) {
747  __atomic_thread_fence(__to_gcc_order(__order));
748}
749
750_LIBCPP_INLINE_VISIBILITY inline
751void __cxx_atomic_signal_fence(memory_order __order) {
752  __atomic_signal_fence(__to_gcc_order(__order));
753}
754
755template <typename _Tp>
756_LIBCPP_INLINE_VISIBILITY
757void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
758                        memory_order __order) {
759  __atomic_store(&__a->__a_value, &__val,
760                 __to_gcc_order(__order));
761}
762
763template <typename _Tp>
764_LIBCPP_INLINE_VISIBILITY
765void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
766                        memory_order __order) {
767  __atomic_store(&__a->__a_value, &__val,
768                 __to_gcc_order(__order));
769}
770
771template <typename _Tp>
772_LIBCPP_INLINE_VISIBILITY
773_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
774                      memory_order __order) {
775  _Tp __ret;
776  __atomic_load(&__a->__a_value, &__ret,
777                __to_gcc_order(__order));
778  return __ret;
779}
780
781template <typename _Tp>
782_LIBCPP_INLINE_VISIBILITY
783_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
784  _Tp __ret;
785  __atomic_load(&__a->__a_value, &__ret,
786                __to_gcc_order(__order));
787  return __ret;
788}
789
790template <typename _Tp>
791_LIBCPP_INLINE_VISIBILITY
792_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
793                          _Tp __value, memory_order __order) {
794  _Tp __ret;
795  __atomic_exchange(&__a->__a_value, &__value, &__ret,
796                    __to_gcc_order(__order));
797  return __ret;
798}
799
800template <typename _Tp>
801_LIBCPP_INLINE_VISIBILITY
802_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
803                          memory_order __order) {
804  _Tp __ret;
805  __atomic_exchange(&__a->__a_value, &__value, &__ret,
806                    __to_gcc_order(__order));
807  return __ret;
808}
809
810template <typename _Tp>
811_LIBCPP_INLINE_VISIBILITY
812bool __cxx_atomic_compare_exchange_strong(
813    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
814    memory_order __success, memory_order __failure) {
815  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
816                                   false,
817                                   __to_gcc_order(__success),
818                                   __to_gcc_failure_order(__failure));
819}
820
821template <typename _Tp>
822_LIBCPP_INLINE_VISIBILITY
823bool __cxx_atomic_compare_exchange_strong(
824    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
825    memory_order __failure) {
826  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
827                                   false,
828                                   __to_gcc_order(__success),
829                                   __to_gcc_failure_order(__failure));
830}
831
832template <typename _Tp>
833_LIBCPP_INLINE_VISIBILITY
834bool __cxx_atomic_compare_exchange_weak(
835    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
836    memory_order __success, memory_order __failure) {
837  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
838                                   true,
839                                   __to_gcc_order(__success),
840                                   __to_gcc_failure_order(__failure));
841}
842
843template <typename _Tp>
844_LIBCPP_INLINE_VISIBILITY
845bool __cxx_atomic_compare_exchange_weak(
846    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
847    memory_order __failure) {
848  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
849                                   true,
850                                   __to_gcc_order(__success),
851                                   __to_gcc_failure_order(__failure));
852}
853
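// __skip_amt computes the scale factor applied to the delta passed to the
// __atomic_fetch_add/__atomic_fetch_sub builtins: the builtins do not scale by
// the pointee size themselves, so for atomic<_Tp*> the delta is multiplied by
// sizeof(_Tp), while integral types use a factor of 1.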
854template <typename _Tp>
855struct __skip_amt { enum {value = 1}; };
856
857template <typename _Tp>
858struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
859
860// FIXME: Haven't figured out what the spec says about using arrays with
861// atomic_fetch_add. Force a failure rather than creating bad behavior.
862template <typename _Tp>
863struct __skip_amt<_Tp[]> { };
864template <typename _Tp, int n>
865struct __skip_amt<_Tp[n]> { };
866
867template <typename _Tp, typename _Td>
868_LIBCPP_INLINE_VISIBILITY
869_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
870                           _Td __delta, memory_order __order) {
871  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
872                            __to_gcc_order(__order));
873}
874
875template <typename _Tp, typename _Td>
876_LIBCPP_INLINE_VISIBILITY
877_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
878                           memory_order __order) {
879  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
880                            __to_gcc_order(__order));
881}
882
883template <typename _Tp, typename _Td>
884_LIBCPP_INLINE_VISIBILITY
885_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
886                           _Td __delta, memory_order __order) {
887  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
888                            __to_gcc_order(__order));
889}
890
891template <typename _Tp, typename _Td>
892_LIBCPP_INLINE_VISIBILITY
893_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
894                           memory_order __order) {
895  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
896                            __to_gcc_order(__order));
897}
898
899template <typename _Tp>
900_LIBCPP_INLINE_VISIBILITY
901_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
902                           _Tp __pattern, memory_order __order) {
903  return __atomic_fetch_and(&__a->__a_value, __pattern,
904                            __to_gcc_order(__order));
905}
906
907template <typename _Tp>
908_LIBCPP_INLINE_VISIBILITY
909_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
910                           _Tp __pattern, memory_order __order) {
911  return __atomic_fetch_and(&__a->__a_value, __pattern,
912                            __to_gcc_order(__order));
913}
914
915template <typename _Tp>
916_LIBCPP_INLINE_VISIBILITY
917_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
918                          _Tp __pattern, memory_order __order) {
919  return __atomic_fetch_or(&__a->__a_value, __pattern,
920                           __to_gcc_order(__order));
921}
922
923template <typename _Tp>
924_LIBCPP_INLINE_VISIBILITY
925_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
926                          memory_order __order) {
927  return __atomic_fetch_or(&__a->__a_value, __pattern,
928                           __to_gcc_order(__order));
929}
930
931template <typename _Tp>
932_LIBCPP_INLINE_VISIBILITY
933_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
934                           _Tp __pattern, memory_order __order) {
935  return __atomic_fetch_xor(&__a->__a_value, __pattern,
936                            __to_gcc_order(__order));
937}
938
939template <typename _Tp>
940_LIBCPP_INLINE_VISIBILITY
941_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
942                           memory_order __order) {
943  return __atomic_fetch_xor(&__a->__a_value, __pattern,
944                            __to_gcc_order(__order));
945}
946
947#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
948
949#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
950
951template <typename _Tp>
952struct __cxx_atomic_base_impl {
953
954  _LIBCPP_INLINE_VISIBILITY
955#ifndef _LIBCPP_CXX03_LANG
956    __cxx_atomic_base_impl() _NOEXCEPT = default;
957#else
958    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
959#endif // _LIBCPP_CXX03_LANG
960  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
961    : __a_value(value) {}
962  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
963};
964
965#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
966
967_LIBCPP_INLINE_VISIBILITY inline
968void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
969    __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
970}
971
972_LIBCPP_INLINE_VISIBILITY inline
973void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
974    __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
975}
976
977template<class _Tp>
978_LIBCPP_INLINE_VISIBILITY
979void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
980    __c11_atomic_init(&__a->__a_value, __val);
981}
982template<class _Tp>
983_LIBCPP_INLINE_VISIBILITY
984void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
985    __c11_atomic_init(&__a->__a_value, __val);
986}
987
988template<class _Tp>
989_LIBCPP_INLINE_VISIBILITY
990void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
991    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
992}
993template<class _Tp>
994_LIBCPP_INLINE_VISIBILITY
995void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
996    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
997}
998
999template<class _Tp>
1000_LIBCPP_INLINE_VISIBILITY
1001_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
1002    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
1003    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
1004}
1005template<class _Tp>
1006_LIBCPP_INLINE_VISIBILITY
1007_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
1008    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
1009    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
1010}
1011
1012template<class _Tp>
1013_LIBCPP_INLINE_VISIBILITY
1014_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
1015    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
1016}
1017template<class _Tp>
1018_LIBCPP_INLINE_VISIBILITY
1019_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
1020    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
1021}
1022
1023_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
1024  // Avoid switch statement to make this a constexpr.
1025  return __order == memory_order_release ? memory_order_relaxed:
1026         (__order == memory_order_acq_rel ? memory_order_acquire:
1027             __order);
1028}
1029
1030template<class _Tp>
1031_LIBCPP_INLINE_VISIBILITY
1032bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1033    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
1034}
1035template<class _Tp>
1036_LIBCPP_INLINE_VISIBILITY
1037bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1038    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
1039}
1040
1041template<class _Tp>
1042_LIBCPP_INLINE_VISIBILITY
1043bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1044    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
1045}
1046template<class _Tp>
1047_LIBCPP_INLINE_VISIBILITY
1048bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1049    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value,  static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
1050}
1051
1052template<class _Tp>
1053_LIBCPP_INLINE_VISIBILITY
1054_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1055    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1056}
1057template<class _Tp>
1058_LIBCPP_INLINE_VISIBILITY
1059_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1060    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1061}
1062
1063template<class _Tp>
1064_LIBCPP_INLINE_VISIBILITY
1065_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1066    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1067}
1068template<class _Tp>
1069_LIBCPP_INLINE_VISIBILITY
1070_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1071    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1072}
1073
1074template<class _Tp>
1075_LIBCPP_INLINE_VISIBILITY
1076_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1077    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1078}
1079template<class _Tp>
1080_LIBCPP_INLINE_VISIBILITY
1081_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1082    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1083}
1084template<class _Tp>
1085_LIBCPP_INLINE_VISIBILITY
1086_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1087    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1088}
1089template<class _Tp>
1090_LIBCPP_INLINE_VISIBILITY
1091_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1092    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1093}
1094
1095template<class _Tp>
1096_LIBCPP_INLINE_VISIBILITY
1097_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1098    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1099}
1100template<class _Tp>
1101_LIBCPP_INLINE_VISIBILITY
1102_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1103    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1104}
1105
1106template<class _Tp>
1107_LIBCPP_INLINE_VISIBILITY
1108_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1109    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1110}
1111template<class _Tp>
1112_LIBCPP_INLINE_VISIBILITY
1113_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1114    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1115}
1116
1117template<class _Tp>
1118_LIBCPP_INLINE_VISIBILITY
1119_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1120    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1121}
1122template<class _Tp>
1123_LIBCPP_INLINE_VISIBILITY
1124_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1125    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1126}
1127
1128#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
1129
1130template <class _Tp>
1131_LIBCPP_INLINE_VISIBILITY
1132_Tp kill_dependency(_Tp __y) _NOEXCEPT
1133{
1134    return __y;
1135}
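
// Illustrative usage (names are placeholders): kill_dependency ends a
// memory_order_consume dependency chain, e.g.
//
//   extern atomic<int*> q;
//   int* p = q.load(memory_order_consume);
//   int  i = kill_dependency(*p); // i no longer carries a dependency on the load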
1136
1137#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
1138# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
1139# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
1140#ifndef _LIBCPP_HAS_NO_CHAR8_T
1141# define ATOMIC_CHAR8_T_LOCK_FREE   __CLANG_ATOMIC_CHAR8_T_LOCK_FREE
1142#endif
1143# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
1144# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
1145# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
1146# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
1147# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
1148# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
1149# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
1150# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
1151#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
1152# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
1153# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
1154#ifndef _LIBCPP_HAS_NO_CHAR8_T
1155# define ATOMIC_CHAR8_T_LOCK_FREE   __GCC_ATOMIC_CHAR8_T_LOCK_FREE
1156#endif
1157# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1158# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1159# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1160# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
1161# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
1162# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
1163# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
1164# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
1165#endif
1166
1167#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1168
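// Fallback representation used when only the compiler builtins are available
// and the type may not have a lock-free atomic implementation: each object
// carries its own spinlock (__a_lock), and every operation is performed under
// that lock using ordinary loads and stores.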
1169template<typename _Tp>
1170struct __cxx_atomic_lock_impl {
1171
1172  _LIBCPP_INLINE_VISIBILITY
1173  __cxx_atomic_lock_impl() _NOEXCEPT
1174    : __a_value(), __a_lock(0) {}
1175  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
1176  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
1177    : __a_value(value), __a_lock(0) {}
1178
1179  _Tp __a_value;
1180  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;
1181
1182  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
1183    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1184        /*spin*/;
1185  }
1186  _LIBCPP_INLINE_VISIBILITY void __lock() const {
1187    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1188        /*spin*/;
1189  }
1190  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
1191    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1192  }
1193  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
1194    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1195  }
1196  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
1197    __lock();
1198    _Tp __old;
1199    __cxx_atomic_assign_volatile(__old, __a_value);
1200    __unlock();
1201    return __old;
1202  }
1203  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
1204    __lock();
1205    _Tp __old = __a_value;
1206    __unlock();
1207    return __old;
1208  }
1209};
1210
1211template <typename _Tp>
1212_LIBCPP_INLINE_VISIBILITY
1213void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
1214  __cxx_atomic_assign_volatile(__a->__a_value, __val);
1215}
1216template <typename _Tp>
1217_LIBCPP_INLINE_VISIBILITY
1218void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
1219  __a->__a_value = __val;
1220}
1221
1222template <typename _Tp>
1223_LIBCPP_INLINE_VISIBILITY
1224void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
1225  __a->__lock();
1226  __cxx_atomic_assign_volatile(__a->__a_value, __val);
1227  __a->__unlock();
1228}
1229template <typename _Tp>
1230_LIBCPP_INLINE_VISIBILITY
1231void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
1232  __a->__lock();
1233  __a->__a_value = __val;
1234  __a->__unlock();
1235}
1236
1237template <typename _Tp>
1238_LIBCPP_INLINE_VISIBILITY
1239_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1240  return __a->__read();
1241}
1242template <typename _Tp>
1243_LIBCPP_INLINE_VISIBILITY
1244_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1245  return __a->__read();
1246}
1247
1248template <typename _Tp>
1249_LIBCPP_INLINE_VISIBILITY
1250_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1251  __a->__lock();
1252  _Tp __old;
1253  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1254  __cxx_atomic_assign_volatile(__a->__a_value, __value);
1255  __a->__unlock();
1256  return __old;
1257}
1258template <typename _Tp>
1259_LIBCPP_INLINE_VISIBILITY
1260_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1261  __a->__lock();
1262  _Tp __old = __a->__a_value;
1263  __a->__a_value = __value;
1264  __a->__unlock();
1265  return __old;
1266}
1267
1268template <typename _Tp>
1269_LIBCPP_INLINE_VISIBILITY
1270bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1271                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1272  _Tp __temp;
1273  __a->__lock();
1274  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1275  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
1276  if(__ret)
1277    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1278  else
1279    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1280  __a->__unlock();
1281  return __ret;
1282}
1283template <typename _Tp>
1284_LIBCPP_INLINE_VISIBILITY
1285bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
1286                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1287  __a->__lock();
1288  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
1289  if(__ret)
1290    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
1291  else
1292    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
1293  __a->__unlock();
1294  return __ret;
1295}
1296
1297template <typename _Tp>
1298_LIBCPP_INLINE_VISIBILITY
1299bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1300                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1301  _Tp __temp;
1302  __a->__lock();
1303  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1304  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
1305  if(__ret)
1306    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1307  else
1308    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1309  __a->__unlock();
1310  return __ret;
1311}
1312template <typename _Tp>
1313_LIBCPP_INLINE_VISIBILITY
1314bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
1315                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1316  __a->__lock();
1317  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
1318  if(__ret)
1319    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
1320  else
1321    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
1322  __a->__unlock();
1323  return __ret;
1324}
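
// The lock-based compare-exchange above compares and copies the raw object
// representation with memcmp/memcpy, so padding bytes participate in the
// comparison. A minimal sketch of what a caller of these internal helpers
// observes (illustrative comment only; these helpers are not a public API):
//
//   __cxx_atomic_lock_impl<int> __v(5);
//   int __expected = 5;
//   bool __ok = __cxx_atomic_compare_exchange_strong(
//       &__v, &__expected, 7, memory_order_seq_cst, memory_order_seq_cst);
//   // __ok == true and __v now holds 7; on failure __expected would instead
//   // have been overwritten with the currently stored bytes.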
1325
1326template <typename _Tp, typename _Td>
1327_LIBCPP_INLINE_VISIBILITY
1328_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1329                           _Td __delta, memory_order) {
1330  __a->__lock();
1331  _Tp __old;
1332  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1333  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
1334  __a->__unlock();
1335  return __old;
1336}
1337template <typename _Tp, typename _Td>
1338_LIBCPP_INLINE_VISIBILITY
1339_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
1340                           _Td __delta, memory_order) {
1341  __a->__lock();
1342  _Tp __old = __a->__a_value;
1343  __a->__a_value += __delta;
1344  __a->__unlock();
1345  return __old;
1346}
1347
1348template <typename _Tp, typename _Td>
1349_LIBCPP_INLINE_VISIBILITY
1350_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
1351                           ptrdiff_t __delta, memory_order) {
1352  __a->__lock();
1353  _Tp* __old;
1354  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1355  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
1356  __a->__unlock();
1357  return __old;
1358}
1359template <typename _Tp, typename _Td>
1360_LIBCPP_INLINE_VISIBILITY
1361_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
1362                           ptrdiff_t __delta, memory_order) {
1363  __a->__lock();
1364  _Tp* __old = __a->__a_value;
1365  __a->__a_value += __delta;
1366  __a->__unlock();
1367  return __old;
1368}
1369
1370template <typename _Tp, typename _Td>
1371_LIBCPP_INLINE_VISIBILITY
1372_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1373                           _Td __delta, memory_order) {
1374  __a->__lock();
1375  _Tp __old;
1376  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1377  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
1378  __a->__unlock();
1379  return __old;
1380}
1381template <typename _Tp, typename _Td>
1382_LIBCPP_INLINE_VISIBILITY
1383_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
1384                           _Td __delta, memory_order) {
1385  __a->__lock();
1386  _Tp __old = __a->__a_value;
1387  __a->__a_value -= __delta;
1388  __a->__unlock();
1389  return __old;
1390}
1391
1392template <typename _Tp>
1393_LIBCPP_INLINE_VISIBILITY
1394_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1395                           _Tp __pattern, memory_order) {
1396  __a->__lock();
1397  _Tp __old;
1398  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1399  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
1400  __a->__unlock();
1401  return __old;
1402}
1403template <typename _Tp>
1404_LIBCPP_INLINE_VISIBILITY
1405_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
1406                           _Tp __pattern, memory_order) {
1407  __a->__lock();
1408  _Tp __old = __a->__a_value;
1409  __a->__a_value &= __pattern;
1410  __a->__unlock();
1411  return __old;
1412}
1413
1414template <typename _Tp>
1415_LIBCPP_INLINE_VISIBILITY
1416_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1417                          _Tp __pattern, memory_order) {
1418  __a->__lock();
1419  _Tp __old;
1420  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1421  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
1422  __a->__unlock();
1423  return __old;
1424}
1425template <typename _Tp>
1426_LIBCPP_INLINE_VISIBILITY
1427_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
1428                          _Tp __pattern, memory_order) {
1429  __a->__lock();
1430  _Tp __old = __a->__a_value;
1431  __a->__a_value |= __pattern;
1432  __a->__unlock();
1433  return __old;
1434}
1435
1436template <typename _Tp>
1437_LIBCPP_INLINE_VISIBILITY
1438_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1439                           _Tp __pattern, memory_order) {
1440  __a->__lock();
1441  _Tp __old;
1442  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1443  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
1444  __a->__unlock();
1445  return __old;
1446}
1447template <typename _Tp>
1448_LIBCPP_INLINE_VISIBILITY
1449_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
1450                           _Tp __pattern, memory_order) {
1451  __a->__lock();
1452  _Tp __old = __a->__a_value;
1453  __a->__a_value ^= __pattern;
1454  __a->__unlock();
1455  return __old;
1456}
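
// Each of the lock-based fetch_* helpers above follows the same shape: take
// the spinlock, copy out the old value, write back the updated value, release
// the lock, and return the old value. Only the arithmetic/bitwise step
// differs, so callers observe the same semantics as the lock-free builtins,
// minus the lock-freedom guarantee.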
1457
1458#ifdef __cpp_lib_atomic_is_always_lock_free
1459
1460template<typename _Tp> struct __cxx_is_always_lock_free {
1461    enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };
1462
1463#else
1464
1465template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
1466// Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
1467template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
1468template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1469template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1470template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1471#ifndef _LIBCPP_HAS_NO_CHAR8_T
1472template<> struct __cxx_is_always_lock_free<char8_t> { enum { __value = 2 == ATOMIC_CHAR8_T_LOCK_FREE }; };
1473#endif
1474template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
1475template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
1476template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
1477template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1478template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1479template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1480template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1481template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1482template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1483template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1484template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1485template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1486template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1487
1488#endif //__cpp_lib_atomic_is_always_lock_free
1489
1490template <typename _Tp,
1491          typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
1492                                                __cxx_atomic_base_impl<_Tp>,
1493                                                __cxx_atomic_lock_impl<_Tp> >::type>
1494#else
1495template <typename _Tp,
1496          typename _Base = __cxx_atomic_base_impl<_Tp> >
1497#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1498struct __cxx_atomic_impl : public _Base {
1499
1500#if _GNUC_VER >= 501
1501    static_assert(is_trivially_copyable<_Tp>::value,
1502      "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
1503#endif
1504
1505  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT _LIBCPP_DEFAULT
1506  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp value) _NOEXCEPT
1507    : _Base(value) {}
1508};
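
// When _LIBCPP_ATOMIC_ONLY_USE_BUILTINS is defined (the preprocessor branch
// that ends just above), the base class is chosen per type: always-lock-free
// types get __cxx_atomic_base_impl, anything else falls back to the
// spinlock-protected __cxx_atomic_lock_impl. Otherwise __cxx_atomic_base_impl
// is used unconditionally and non-lock-free types are typically handled by the
// compiler runtime. A hedged sketch of the selection (illustrative comment
// only; __big is a hypothetical user type, not part of this header):
//
//   __cxx_atomic_impl<int>    // _Base is __cxx_atomic_base_impl<int> where
//                             // int is always lock free
//   struct __big { char __bytes[64]; };
//   __cxx_atomic_impl<__big>  // _Base is __cxx_atomic_lock_impl<__big> under
//                             // _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
//
// Where the compiler supports it (see the _GNUC_VER guard above), the
// static_assert rejects types that are not trivially copyable, mirroring the
// std::atomic<T> requirement.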
1509
1510#if defined(__linux__) || (defined(__FreeBSD__) && defined(__mips__))
1511    using __cxx_contention_t = int32_t;
1512#else
1513    using __cxx_contention_t = int64_t;
1514#endif
1515
1516using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;
1517
1518#ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT
1519
1520_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
1521_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
1522_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
1523_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);
1524
1525_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
1526_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
1527_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
1528_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);
1529
1530template <class _Atp, class _Fn>
1531struct __libcpp_atomic_wait_backoff_impl {
1532    _Atp* __a;
1533    _Fn __test_fn;
1534    _LIBCPP_AVAILABILITY_SYNC
1535    _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
1536    {
1537        if(__elapsed > chrono::microseconds(64))
1538        {
1539            auto const __monitor = __libcpp_atomic_monitor(__a);
1540            if(__test_fn())
1541                return true;
1542            __libcpp_atomic_wait(__a, __monitor);
1543        }
1544        else if(__elapsed > chrono::microseconds(4))
1545            __libcpp_thread_yield();
1546        else
1547            {} // poll
1548        return false;
1549    }
1550};
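
// __libcpp_atomic_wait_backoff_impl provides the tiered backoff used by the
// polling loop in __cxx_atomic_wait below: for roughly the first 4us it just
// keeps polling, up to about 64us it yields the thread, and beyond that it
// takes a monitor snapshot, re-checks the predicate, and only then blocks in
// __libcpp_atomic_wait. Re-testing after __libcpp_atomic_monitor is what keeps
// a notification arriving between the test and the blocking call from being
// lost.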
1551
1552template <class _Atp, class _Fn>
1553_LIBCPP_AVAILABILITY_SYNC
1554_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
1555{
1556    __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
1557    return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
1558}
1559
1560#else // _LIBCPP_HAS_NO_PLATFORM_WAIT
1561
1562template <class _Tp>
1563_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
1564template <class _Tp>
1565_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
1566template <class _Atp, class _Fn>
1567_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
1568{
1569    return __libcpp_thread_poll_with_backoff(__test_fn, __libcpp_timed_backoff_policy());
1570}
1571
1572#endif // _LIBCPP_HAS_NO_PLATFORM_WAIT
1573
1574template <class _Atp, class _Tp>
1575struct __cxx_atomic_wait_test_fn_impl {
1576    _Atp* __a;
1577    _Tp __val;
1578    memory_order __order;
1579    _LIBCPP_INLINE_VISIBILITY bool operator()() const
1580    {
1581        return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
1582    }
1583};
1584
1585template <class _Atp, class _Tp>
1586_LIBCPP_AVAILABILITY_SYNC
1587_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
1588{
1589    __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
1590    return __cxx_atomic_wait(__a, __test_fn);
1591}
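
// The value-based overload of __cxx_atomic_wait blocks while the loaded value
// still compares equal to __val: the predicate in __cxx_atomic_wait_test_fn_impl
// reports "done" as soon as a load with the requested memory order observes a
// different value. This is the primitive behind both atomic<T>::wait and
// atomic_flag::wait below.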
1592
1593// general atomic<T>
1594
1595template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
1596struct __atomic_base  // false
1597{
1598    mutable __cxx_atomic_impl<_Tp> __a_;
1599
1600#if defined(__cpp_lib_atomic_is_always_lock_free)
1601  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
1602#endif
1603
1604    _LIBCPP_INLINE_VISIBILITY
1605    bool is_lock_free() const volatile _NOEXCEPT
1606        {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
1607    _LIBCPP_INLINE_VISIBILITY
1608    bool is_lock_free() const _NOEXCEPT
1609        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
1610    _LIBCPP_INLINE_VISIBILITY
1611    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1612      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1613        {__cxx_atomic_store(&__a_, __d, __m);}
1614    _LIBCPP_INLINE_VISIBILITY
1615    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1616      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1617        {__cxx_atomic_store(&__a_, __d, __m);}
1618    _LIBCPP_INLINE_VISIBILITY
1619    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1620      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1621        {return __cxx_atomic_load(&__a_, __m);}
1622    _LIBCPP_INLINE_VISIBILITY
1623    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1624      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1625        {return __cxx_atomic_load(&__a_, __m);}
1626    _LIBCPP_INLINE_VISIBILITY
1627    operator _Tp() const volatile _NOEXCEPT {return load();}
1628    _LIBCPP_INLINE_VISIBILITY
1629    operator _Tp() const _NOEXCEPT          {return load();}
1630    _LIBCPP_INLINE_VISIBILITY
1631    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1632        {return __cxx_atomic_exchange(&__a_, __d, __m);}
1633    _LIBCPP_INLINE_VISIBILITY
1634    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1635        {return __cxx_atomic_exchange(&__a_, __d, __m);}
1636    _LIBCPP_INLINE_VISIBILITY
1637    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1638                               memory_order __s, memory_order __f) volatile _NOEXCEPT
1639      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1640        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1641    _LIBCPP_INLINE_VISIBILITY
1642    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1643                               memory_order __s, memory_order __f) _NOEXCEPT
1644      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1645        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1646    _LIBCPP_INLINE_VISIBILITY
1647    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1648                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
1649      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1650        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1651    _LIBCPP_INLINE_VISIBILITY
1652    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1653                                 memory_order __s, memory_order __f) _NOEXCEPT
1654      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1655        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1656    _LIBCPP_INLINE_VISIBILITY
1657    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1658                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1659        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1660    _LIBCPP_INLINE_VISIBILITY
1661    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1662                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
1663        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1664    _LIBCPP_INLINE_VISIBILITY
1665    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1666                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1667        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1668    _LIBCPP_INLINE_VISIBILITY
1669    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1670                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
1671        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1672
1673    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1674        {__cxx_atomic_wait(&__a_, __v, __m);}
1675    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1676        {__cxx_atomic_wait(&__a_, __v, __m);}
1677    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
1678        {__cxx_atomic_notify_one(&__a_);}
1679    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
1680        {__cxx_atomic_notify_one(&__a_);}
1681    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
1682        {__cxx_atomic_notify_all(&__a_);}
1683    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
1684        {__cxx_atomic_notify_all(&__a_);}
1685
1686#if _LIBCPP_STD_VER > 17
1687    _LIBCPP_INLINE_VISIBILITY constexpr
1688    __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
1689#else
1690    _LIBCPP_INLINE_VISIBILITY
1691    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1692#endif
1693
1694    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
1695    __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
1696
1697#ifndef _LIBCPP_CXX03_LANG
1698    __atomic_base(const __atomic_base&) = delete;
1699#else
1700private:
1701    _LIBCPP_INLINE_VISIBILITY
1702    __atomic_base(const __atomic_base&);
1703#endif
1704};
1705
1706#if defined(__cpp_lib_atomic_is_always_lock_free)
1707template <class _Tp, bool __b>
1708_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
1709#endif
1710
1711// atomic<Integral>
1712
1713template <class _Tp>
1714struct __atomic_base<_Tp, true>
1715    : public __atomic_base<_Tp, false>
1716{
1717    typedef __atomic_base<_Tp, false> __base;
1718
1719    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17
1720    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1721
1722    _LIBCPP_INLINE_VISIBILITY
1723    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
1724
1725    _LIBCPP_INLINE_VISIBILITY
1726    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1727        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1728    _LIBCPP_INLINE_VISIBILITY
1729    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1730        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1731    _LIBCPP_INLINE_VISIBILITY
1732    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1733        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1734    _LIBCPP_INLINE_VISIBILITY
1735    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1736        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1737    _LIBCPP_INLINE_VISIBILITY
1738    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1739        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1740    _LIBCPP_INLINE_VISIBILITY
1741    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1742        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1743    _LIBCPP_INLINE_VISIBILITY
1744    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1745        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1746    _LIBCPP_INLINE_VISIBILITY
1747    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1748        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1749    _LIBCPP_INLINE_VISIBILITY
1750    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1751        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1752    _LIBCPP_INLINE_VISIBILITY
1753    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1754        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1755
1756    _LIBCPP_INLINE_VISIBILITY
1757    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
1758    _LIBCPP_INLINE_VISIBILITY
1759    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
1760    _LIBCPP_INLINE_VISIBILITY
1761    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
1762    _LIBCPP_INLINE_VISIBILITY
1763    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
1764    _LIBCPP_INLINE_VISIBILITY
1765    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
1766    _LIBCPP_INLINE_VISIBILITY
1767    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
1768    _LIBCPP_INLINE_VISIBILITY
1769    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
1770    _LIBCPP_INLINE_VISIBILITY
1771    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
1772    _LIBCPP_INLINE_VISIBILITY
1773    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1774    _LIBCPP_INLINE_VISIBILITY
1775    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1776    _LIBCPP_INLINE_VISIBILITY
1777    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1778    _LIBCPP_INLINE_VISIBILITY
1779    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1780    _LIBCPP_INLINE_VISIBILITY
1781    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
1782    _LIBCPP_INLINE_VISIBILITY
1783    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
1784    _LIBCPP_INLINE_VISIBILITY
1785    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
1786    _LIBCPP_INLINE_VISIBILITY
1787    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
1788    _LIBCPP_INLINE_VISIBILITY
1789    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1790    _LIBCPP_INLINE_VISIBILITY
1791    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
1792};
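
// Note on the integral specialization above: the fetch_* members return the
// value held *before* the operation, while the operator forms return the
// updated value by reapplying the operation to that result. An illustrative
// usage sketch (user code, not part of this header):
//
//   std::atomic<int> __i(1);
//   int __a = __i.fetch_add(2);   // __a == 1, __i == 3
//   int __b = (__i += 2);         // __b == 5, __i == 5
//   int __c = __i++;              // __c == 5, __i == 6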
1793
1794// atomic<T>
1795
1796template <class _Tp>
1797struct atomic
1798    : public __atomic_base<_Tp>
1799{
1800    typedef __atomic_base<_Tp> __base;
1801    typedef _Tp value_type;
1802    typedef value_type difference_type;
1803
1804#if _LIBCPP_STD_VER > 17
1805    _LIBCPP_INLINE_VISIBILITY
1806    atomic() = default;
1807#else
1808    _LIBCPP_INLINE_VISIBILITY
1809    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1810#endif
1811
1812    _LIBCPP_INLINE_VISIBILITY
1813    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1814
1815    _LIBCPP_INLINE_VISIBILITY
1816    _Tp operator=(_Tp __d) volatile _NOEXCEPT
1817        {__base::store(__d); return __d;}
1818    _LIBCPP_INLINE_VISIBILITY
1819    _Tp operator=(_Tp __d) _NOEXCEPT
1820        {__base::store(__d); return __d;}
1821
1822    atomic& operator=(const atomic&) = delete;
1823    atomic& operator=(const atomic&) volatile = delete;
1824};
1825
1826// atomic<T*>
1827
1828template <class _Tp>
1829struct atomic<_Tp*>
1830    : public __atomic_base<_Tp*>
1831{
1832    typedef __atomic_base<_Tp*> __base;
1833    typedef _Tp* value_type;
1834    typedef ptrdiff_t difference_type;
1835
1836    _LIBCPP_INLINE_VISIBILITY
1837    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1838
1839    _LIBCPP_INLINE_VISIBILITY
1840    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1841
1842    _LIBCPP_INLINE_VISIBILITY
1843    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1844        {__base::store(__d); return __d;}
1845    _LIBCPP_INLINE_VISIBILITY
1846    _Tp* operator=(_Tp* __d) _NOEXCEPT
1847        {__base::store(__d); return __d;}
1848
1849    _LIBCPP_INLINE_VISIBILITY
1850    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1851                                                                        volatile _NOEXCEPT
1852        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1853    _LIBCPP_INLINE_VISIBILITY
1854    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1855        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1856    _LIBCPP_INLINE_VISIBILITY
1857    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1858                                                                        volatile _NOEXCEPT
1859        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1860    _LIBCPP_INLINE_VISIBILITY
1861    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1862        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1863
1864    _LIBCPP_INLINE_VISIBILITY
1865    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
1866    _LIBCPP_INLINE_VISIBILITY
1867    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
1868    _LIBCPP_INLINE_VISIBILITY
1869    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
1870    _LIBCPP_INLINE_VISIBILITY
1871    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
1872    _LIBCPP_INLINE_VISIBILITY
1873    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
1874    _LIBCPP_INLINE_VISIBILITY
1875    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
1876    _LIBCPP_INLINE_VISIBILITY
1877    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
1878    _LIBCPP_INLINE_VISIBILITY
1879    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
1880    _LIBCPP_INLINE_VISIBILITY
1881    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1882    _LIBCPP_INLINE_VISIBILITY
1883    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1884    _LIBCPP_INLINE_VISIBILITY
1885    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1886    _LIBCPP_INLINE_VISIBILITY
1887    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1888
1889    atomic& operator=(const atomic&) = delete;
1890    atomic& operator=(const atomic&) volatile = delete;
1891};
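
// For atomic<T*>, difference_type is ptrdiff_t and fetch_add/fetch_sub move
// the pointer by whole elements, i.e. ordinary pointer arithmetic. An
// illustrative usage sketch (user code, not part of this header):
//
//   int __buf[4] = {};
//   std::atomic<int*> __p(__buf);
//   int* __old = __p.fetch_add(2);   // __old == __buf, __p == __buf + 2
//   __p -= 1;                        // __p == __buf + 1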
1892
1893// atomic_is_lock_free
1894
1895template <class _Tp>
1896_LIBCPP_INLINE_VISIBILITY
1897bool
1898atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1899{
1900    return __o->is_lock_free();
1901}
1902
1903template <class _Tp>
1904_LIBCPP_INLINE_VISIBILITY
1905bool
1906atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1907{
1908    return __o->is_lock_free();
1909}
1910
1911// atomic_init
1912
1913template <class _Tp>
1914_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
1915void
1916atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1917{
1918    __cxx_atomic_init(&__o->__a_, __d);
1919}
1920
1921template <class _Tp>
1922_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
1923void
1924atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1925{
1926    __cxx_atomic_init(&__o->__a_, __d);
1927}
1928
1929// atomic_store
1930
1931template <class _Tp>
1932_LIBCPP_INLINE_VISIBILITY
1933void
1934atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1935{
1936    __o->store(__d);
1937}
1938
1939template <class _Tp>
1940_LIBCPP_INLINE_VISIBILITY
1941void
1942atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1943{
1944    __o->store(__d);
1945}
1946
1947// atomic_store_explicit
1948
1949template <class _Tp>
1950_LIBCPP_INLINE_VISIBILITY
1951void
1952atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
1953  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1954{
1955    __o->store(__d, __m);
1956}
1957
1958template <class _Tp>
1959_LIBCPP_INLINE_VISIBILITY
1960void
1961atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
1962  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1963{
1964    __o->store(__d, __m);
1965}
1966
1967// atomic_load
1968
1969template <class _Tp>
1970_LIBCPP_INLINE_VISIBILITY
1971_Tp
1972atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1973{
1974    return __o->load();
1975}
1976
1977template <class _Tp>
1978_LIBCPP_INLINE_VISIBILITY
1979_Tp
1980atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1981{
1982    return __o->load();
1983}
1984
1985// atomic_load_explicit
1986
1987template <class _Tp>
1988_LIBCPP_INLINE_VISIBILITY
1989_Tp
1990atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1991  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1992{
1993    return __o->load(__m);
1994}
1995
1996template <class _Tp>
1997_LIBCPP_INLINE_VISIBILITY
1998_Tp
1999atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
2000  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2001{
2002    return __o->load(__m);
2003}
2004
2005// atomic_exchange
2006
2007template <class _Tp>
2008_LIBCPP_INLINE_VISIBILITY
2009_Tp
2010atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2011{
2012    return __o->exchange(__d);
2013}
2014
2015template <class _Tp>
2016_LIBCPP_INLINE_VISIBILITY
2017_Tp
2018atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2019{
2020    return __o->exchange(__d);
2021}
2022
2023// atomic_exchange_explicit
2024
2025template <class _Tp>
2026_LIBCPP_INLINE_VISIBILITY
2027_Tp
2028atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
2029{
2030    return __o->exchange(__d, __m);
2031}
2032
2033template <class _Tp>
2034_LIBCPP_INLINE_VISIBILITY
2035_Tp
2036atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
2037{
2038    return __o->exchange(__d, __m);
2039}
2040
2041// atomic_compare_exchange_weak
2042
2043template <class _Tp>
2044_LIBCPP_INLINE_VISIBILITY
2045bool
2046atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2047{
2048    return __o->compare_exchange_weak(*__e, __d);
2049}
2050
2051template <class _Tp>
2052_LIBCPP_INLINE_VISIBILITY
2053bool
2054atomic_compare_exchange_weak(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2055{
2056    return __o->compare_exchange_weak(*__e, __d);
2057}
2058
2059// atomic_compare_exchange_strong
2060
2061template <class _Tp>
2062_LIBCPP_INLINE_VISIBILITY
2063bool
2064atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2065{
2066    return __o->compare_exchange_strong(*__e, __d);
2067}
2068
2069template <class _Tp>
2070_LIBCPP_INLINE_VISIBILITY
2071bool
2072atomic_compare_exchange_strong(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2073{
2074    return __o->compare_exchange_strong(*__e, __d);
2075}
2076
2077// atomic_compare_exchange_weak_explicit
2078
2079template <class _Tp>
2080_LIBCPP_INLINE_VISIBILITY
2081bool
2082atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
2083                                      typename atomic<_Tp>::value_type __d,
2084                                      memory_order __s, memory_order __f) _NOEXCEPT
2085  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2086{
2087    return __o->compare_exchange_weak(*__e, __d, __s, __f);
2088}
2089
2090template <class _Tp>
2091_LIBCPP_INLINE_VISIBILITY
2092bool
2093atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
2094                                      memory_order __s, memory_order __f) _NOEXCEPT
2095  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2096{
2097    return __o->compare_exchange_weak(*__e, __d, __s, __f);
2098}
2099
2100// atomic_compare_exchange_strong_explicit
2101
2102template <class _Tp>
2103_LIBCPP_INLINE_VISIBILITY
2104bool
2105atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
2106                                        typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
2107                                        memory_order __s, memory_order __f) _NOEXCEPT
2108  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2109{
2110    return __o->compare_exchange_strong(*__e, __d, __s, __f);
2111}
2112
2113template <class _Tp>
2114_LIBCPP_INLINE_VISIBILITY
2115bool
2116atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
2117                                        typename atomic<_Tp>::value_type __d,
2118                                        memory_order __s, memory_order __f) _NOEXCEPT
2119  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2120{
2121    return __o->compare_exchange_strong(*__e, __d, __s, __f);
2122}
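
// The compare-exchange free functions are typically used in a retry loop that
// relies on the failure path refreshing *__e with the current value. A minimal
// sketch (user code, not part of this header):
//
//   std::atomic<int> __counter(1);
//   int __expected = std::atomic_load(&__counter);
//   while (!std::atomic_compare_exchange_weak(&__counter, &__expected,
//                                             __expected * 2))
//       ; // __expected now holds the freshly observed value, so just retry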
2123
2124// atomic_wait
2125
2126template <class _Tp>
2127_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2128void atomic_wait(const volatile atomic<_Tp>* __o,
2129                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2130{
2131    return __o->wait(__v);
2132}
2133
2134template <class _Tp>
2135_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2136void atomic_wait(const atomic<_Tp>* __o,
2137                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2138{
2139    return __o->wait(__v);
2140}
2141
2142// atomic_wait_explicit
2143
2144template <class _Tp>
2145_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2146void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
2147                          typename atomic<_Tp>::value_type __v,
2148                          memory_order __m) _NOEXCEPT
2149  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2150{
2151    return __o->wait(__v, __m);
2152}
2153
2154template <class _Tp>
2155_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2156void atomic_wait_explicit(const atomic<_Tp>* __o,
2157                          typename atomic<_Tp>::value_type __v,
2158                          memory_order __m) _NOEXCEPT
2159  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2160{
2161    return __o->wait(__v, __m);
2162}
2163
2164// atomic_notify_one
2165
2166template <class _Tp>
2167_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2168void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
2169{
2170    __o->notify_one();
2171}
2172template <class _Tp>
2173_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2174void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
2175{
2176    __o->notify_one();
2177}
2178
2179// atomic_notify_all
2180
2181template <class _Tp>
2182_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2183void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
2184{
2185    __o->notify_all();
2186}
2187template <class _Tp>
2188_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2189void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
2190{
2191    __o->notify_all();
2192}
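
// atomic_wait blocks while the atomic still holds the given value and returns
// once a different value has been observed; atomic_notify_one/atomic_notify_all
// wake waiters after such a store. A hedged sketch of the intended pairing
// (user code, not part of this header):
//
//   std::atomic<int> __ready(0);
//   // waiter:   std::atomic_wait(&__ready, 0);   // returns once __ready != 0
//   // producer: __ready.store(1);
//   //           std::atomic_notify_one(&__ready);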
2193
2194// atomic_fetch_add
2195
2196template <class _Tp>
2197_LIBCPP_INLINE_VISIBILITY
2198typename enable_if
2199<
2200    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2201    _Tp
2202>::type
2203atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2204{
2205    return __o->fetch_add(__op);
2206}
2207
2208template <class _Tp>
2209_LIBCPP_INLINE_VISIBILITY
2210typename enable_if
2211<
2212    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2213    _Tp
2214>::type
2215atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2216{
2217    return __o->fetch_add(__op);
2218}
2219
2220template <class _Tp>
2221_LIBCPP_INLINE_VISIBILITY
2222_Tp*
2223atomic_fetch_add(volatile atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op) _NOEXCEPT
2224{
2225    return __o->fetch_add(__op);
2226}
2227
2228template <class _Tp>
2229_LIBCPP_INLINE_VISIBILITY
2230_Tp*
2231atomic_fetch_add(atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op) _NOEXCEPT
2232{
2233    return __o->fetch_add(__op);
2234}
2235
2236// atomic_fetch_add_explicit
2237
2238template <class _Tp>
2239_LIBCPP_INLINE_VISIBILITY
2240typename enable_if
2241<
2242    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2243    _Tp
2244>::type
2245atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2246{
2247    return __o->fetch_add(__op, __m);
2248}
2249
2250template <class _Tp>
2251_LIBCPP_INLINE_VISIBILITY
2252typename enable_if
2253<
2254    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2255    _Tp
2256>::type
2257atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2258{
2259    return __o->fetch_add(__op, __m);
2260}
2261
2262template <class _Tp>
2263_LIBCPP_INLINE_VISIBILITY
2264_Tp*
2265atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op, memory_order __m) _NOEXCEPT
2266{
2267    return __o->fetch_add(__op, __m);
2268}
2269
2270template <class _Tp>
2271_LIBCPP_INLINE_VISIBILITY
2272_Tp*
2273atomic_fetch_add_explicit(atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op, memory_order __m) _NOEXCEPT
2274{
2275    return __o->fetch_add(__op, __m);
2276}
2277
2278// atomic_fetch_sub
2279
2280template <class _Tp>
2281_LIBCPP_INLINE_VISIBILITY
2282typename enable_if
2283<
2284    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2285    _Tp
2286>::type
2287atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2288{
2289    return __o->fetch_sub(__op);
2290}
2291
2292template <class _Tp>
2293_LIBCPP_INLINE_VISIBILITY
2294typename enable_if
2295<
2296    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2297    _Tp
2298>::type
2299atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2300{
2301    return __o->fetch_sub(__op);
2302}
2303
2304template <class _Tp>
2305_LIBCPP_INLINE_VISIBILITY
2306_Tp*
2307atomic_fetch_sub(volatile atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op) _NOEXCEPT
2308{
2309    return __o->fetch_sub(__op);
2310}
2311
2312template <class _Tp>
2313_LIBCPP_INLINE_VISIBILITY
2314_Tp*
2315atomic_fetch_sub(atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op) _NOEXCEPT
2316{
2317    return __o->fetch_sub(__op);
2318}
2319
2320// atomic_fetch_sub_explicit
2321
2322template <class _Tp>
2323_LIBCPP_INLINE_VISIBILITY
2324typename enable_if
2325<
2326    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2327    _Tp
2328>::type
2329atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2330{
2331    return __o->fetch_sub(__op, __m);
2332}
2333
2334template <class _Tp>
2335_LIBCPP_INLINE_VISIBILITY
2336typename enable_if
2337<
2338    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2339    _Tp
2340>::type
2341atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2342{
2343    return __o->fetch_sub(__op, __m);
2344}
2345
2346template <class _Tp>
2347_LIBCPP_INLINE_VISIBILITY
2348_Tp*
2349atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op, memory_order __m) _NOEXCEPT
2350{
2351    return __o->fetch_sub(__op, __m);
2352}
2353
2354template <class _Tp>
2355_LIBCPP_INLINE_VISIBILITY
2356_Tp*
2357atomic_fetch_sub_explicit(atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op, memory_order __m) _NOEXCEPT
2358{
2359    return __o->fetch_sub(__op, __m);
2360}
2361
2362// atomic_fetch_and
2363
2364template <class _Tp>
2365_LIBCPP_INLINE_VISIBILITY
2366typename enable_if
2367<
2368    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2369    _Tp
2370>::type
2371atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2372{
2373    return __o->fetch_and(__op);
2374}
2375
2376template <class _Tp>
2377_LIBCPP_INLINE_VISIBILITY
2378typename enable_if
2379<
2380    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2381    _Tp
2382>::type
2383atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2384{
2385    return __o->fetch_and(__op);
2386}
2387
2388// atomic_fetch_and_explicit
2389
2390template <class _Tp>
2391_LIBCPP_INLINE_VISIBILITY
2392typename enable_if
2393<
2394    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2395    _Tp
2396>::type
2397atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2398{
2399    return __o->fetch_and(__op, __m);
2400}
2401
2402template <class _Tp>
2403_LIBCPP_INLINE_VISIBILITY
2404typename enable_if
2405<
2406    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2407    _Tp
2408>::type
2409atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2410{
2411    return __o->fetch_and(__op, __m);
2412}
2413
2414// atomic_fetch_or
2415
2416template <class _Tp>
2417_LIBCPP_INLINE_VISIBILITY
2418typename enable_if
2419<
2420    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2421    _Tp
2422>::type
2423atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2424{
2425    return __o->fetch_or(__op);
2426}
2427
2428template <class _Tp>
2429_LIBCPP_INLINE_VISIBILITY
2430typename enable_if
2431<
2432    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2433    _Tp
2434>::type
2435atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2436{
2437    return __o->fetch_or(__op);
2438}
2439
2440// atomic_fetch_or_explicit
2441
2442template <class _Tp>
2443_LIBCPP_INLINE_VISIBILITY
2444typename enable_if
2445<
2446    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2447    _Tp
2448>::type
2449atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2450{
2451    return __o->fetch_or(__op, __m);
2452}
2453
2454template <class _Tp>
2455_LIBCPP_INLINE_VISIBILITY
2456typename enable_if
2457<
2458    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2459    _Tp
2460>::type
2461atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2462{
2463    return __o->fetch_or(__op, __m);
2464}
2465
2466// atomic_fetch_xor
2467
2468template <class _Tp>
2469_LIBCPP_INLINE_VISIBILITY
2470typename enable_if
2471<
2472    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2473    _Tp
2474>::type
2475atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2476{
2477    return __o->fetch_xor(__op);
2478}
2479
2480template <class _Tp>
2481_LIBCPP_INLINE_VISIBILITY
2482typename enable_if
2483<
2484    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2485    _Tp
2486>::type
2487atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2488{
2489    return __o->fetch_xor(__op);
2490}
2491
2492// atomic_fetch_xor_explicit
2493
2494template <class _Tp>
2495_LIBCPP_INLINE_VISIBILITY
2496typename enable_if
2497<
2498    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2499    _Tp
2500>::type
2501atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2502{
2503    return __o->fetch_xor(__op, __m);
2504}
2505
2506template <class _Tp>
2507_LIBCPP_INLINE_VISIBILITY
2508typename enable_if
2509<
2510    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2511    _Tp
2512>::type
2513atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2514{
2515    return __o->fetch_xor(__op, __m);
2516}
2517
2518// flag type and operations
2519
2520typedef struct atomic_flag
2521{
2522    __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;
2523
2524    _LIBCPP_INLINE_VISIBILITY
2525    bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2526        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
2527    _LIBCPP_INLINE_VISIBILITY
2528    bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2529        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
2530
2531    _LIBCPP_INLINE_VISIBILITY
2532    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2533        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2534    _LIBCPP_INLINE_VISIBILITY
2535    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2536        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2537    _LIBCPP_INLINE_VISIBILITY
2538    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2539        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2540    _LIBCPP_INLINE_VISIBILITY
2541    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2542        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2543
2544    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2545    void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2546        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
2547    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2548    void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2549        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
2550    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2551    void notify_one() volatile _NOEXCEPT
2552        {__cxx_atomic_notify_one(&__a_);}
2553    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2554    void notify_one() _NOEXCEPT
2555        {__cxx_atomic_notify_one(&__a_);}
2556    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2557    void notify_all() volatile _NOEXCEPT
2558        {__cxx_atomic_notify_all(&__a_);}
2559    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2560    void notify_all() _NOEXCEPT
2561        {__cxx_atomic_notify_all(&__a_);}
2562
2563#if _LIBCPP_STD_VER > 17
2564    _LIBCPP_INLINE_VISIBILITY constexpr
2565    atomic_flag() _NOEXCEPT : __a_(false) {}
2566#else
2567    _LIBCPP_INLINE_VISIBILITY
2568    atomic_flag() _NOEXCEPT _LIBCPP_DEFAULT
2569#endif
2570
2571    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
2572    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION
2573
2574#ifndef _LIBCPP_CXX03_LANG
2575    atomic_flag(const atomic_flag&) = delete;
2576    atomic_flag& operator=(const atomic_flag&) = delete;
2577    atomic_flag& operator=(const atomic_flag&) volatile = delete;
2578#else
2579private:
2580    _LIBCPP_INLINE_VISIBILITY
2581    atomic_flag(const atomic_flag&);
2582    _LIBCPP_INLINE_VISIBILITY
2583    atomic_flag& operator=(const atomic_flag&);
2584    _LIBCPP_INLINE_VISIBILITY
2585    atomic_flag& operator=(const atomic_flag&) volatile;
2586#endif
2587} atomic_flag;
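
// atomic_flag is the minimal always-lock-free primitive; the classic use is a
// tiny spinlock built from test_and_set/clear. An illustrative sketch (user
// code, not part of this header):
//
//   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
//   void __acquire() { while (__lock.test_and_set(std::memory_order_acquire)) {} }
//   void __release() { __lock.clear(std::memory_order_release); }
//
// With the C++20 members above, the waiter can call __lock.wait(true) instead
// of spinning, and the releasing thread can follow clear() with
// __lock.notify_one().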
2588
2589
2590inline _LIBCPP_INLINE_VISIBILITY
2591bool
2592atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
2593{
2594    return __o->test();
2595}
2596
2597inline _LIBCPP_INLINE_VISIBILITY
2598bool
2599atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
2600{
2601    return __o->test();
2602}
2603
2604inline _LIBCPP_INLINE_VISIBILITY
2605bool
2606atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2607{
2608    return __o->test(__m);
2609}
2610
2611inline _LIBCPP_INLINE_VISIBILITY
2612bool
2613atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
2614{
2615    return __o->test(__m);
2616}
2617
2618inline _LIBCPP_INLINE_VISIBILITY
2619bool
2620atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
2621{
2622    return __o->test_and_set();
2623}
2624
2625inline _LIBCPP_INLINE_VISIBILITY
2626bool
2627atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
2628{
2629    return __o->test_and_set();
2630}
2631
2632inline _LIBCPP_INLINE_VISIBILITY
2633bool
2634atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2635{
2636    return __o->test_and_set(__m);
2637}
2638
2639inline _LIBCPP_INLINE_VISIBILITY
2640bool
2641atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2642{
2643    return __o->test_and_set(__m);
2644}
2645
2646inline _LIBCPP_INLINE_VISIBILITY
2647void
2648atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
2649{
2650    __o->clear();
2651}
2652
2653inline _LIBCPP_INLINE_VISIBILITY
2654void
2655atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
2656{
2657    __o->clear();
2658}
2659
2660inline _LIBCPP_INLINE_VISIBILITY
2661void
2662atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2663{
2664    __o->clear(__m);
2665}
2666
2667inline _LIBCPP_INLINE_VISIBILITY
2668void
2669atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2670{
2671    __o->clear(__m);
2672}
2673
2674inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2675void
2676atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
2677{
2678    __o->wait(__v);
2679}
2680
2681inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2682void
2683atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
2684{
2685    __o->wait(__v);
2686}
2687
2688inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2689void
2690atomic_flag_wait_explicit(const volatile atomic_flag* __o,
2691                          bool __v, memory_order __m) _NOEXCEPT
2692{
2693    __o->wait(__v, __m);
2694}
2695
2696inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2697void
2698atomic_flag_wait_explicit(const atomic_flag* __o,
2699                          bool __v, memory_order __m) _NOEXCEPT
2700{
2701    __o->wait(__v, __m);
2702}
2703
2704inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2705void
2706atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
2707{
2708    __o->notify_one();
2709}
2710
2711inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2712void
2713atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
2714{
2715    __o->notify_one();
2716}
2717
2718inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2719void
2720atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
2721{
2722    __o->notify_all();
2723}
2724
2725inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2726void
2727atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
2728{
2729    __o->notify_all();
2730}
2731
2732// fences
2733
2734inline _LIBCPP_INLINE_VISIBILITY
2735void
2736atomic_thread_fence(memory_order __m) _NOEXCEPT
2737{
2738    __cxx_atomic_thread_fence(__m);
2739}
2740
2741inline _LIBCPP_INLINE_VISIBILITY
2742void
2743atomic_signal_fence(memory_order __m) _NOEXCEPT
2744{
2745    __cxx_atomic_signal_fence(__m);
2746}
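
// atomic_thread_fence orders surrounding relaxed accesses with respect to
// other threads, while atomic_signal_fence only constrains reordering with a
// signal handler on the same thread. A hedged release/acquire pairing (user
// code, not part of this header; __data is a plain int, __flag a
// std::atomic<bool>):
//
//   // thread 1:
//   //   __data = 42;
//   //   std::atomic_thread_fence(std::memory_order_release);
//   //   __flag.store(true, std::memory_order_relaxed);
//   // thread 2:
//   //   while (!__flag.load(std::memory_order_relaxed)) {}
//   //   std::atomic_thread_fence(std::memory_order_acquire);
//   //   assert(__data == 42);   // guaranteed by the paired fences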
2747
2748// Atomics for standard typedef types
2749
2750typedef atomic<bool>               atomic_bool;
2751typedef atomic<char>               atomic_char;
2752typedef atomic<signed char>        atomic_schar;
2753typedef atomic<unsigned char>      atomic_uchar;
2754typedef atomic<short>              atomic_short;
2755typedef atomic<unsigned short>     atomic_ushort;
2756typedef atomic<int>                atomic_int;
2757typedef atomic<unsigned int>       atomic_uint;
2758typedef atomic<long>               atomic_long;
2759typedef atomic<unsigned long>      atomic_ulong;
2760typedef atomic<long long>          atomic_llong;
2761typedef atomic<unsigned long long> atomic_ullong;
2762#ifndef _LIBCPP_HAS_NO_CHAR8_T
2763typedef atomic<char8_t>            atomic_char8_t;
2764#endif
2765typedef atomic<char16_t>           atomic_char16_t;
2766typedef atomic<char32_t>           atomic_char32_t;
2767typedef atomic<wchar_t>            atomic_wchar_t;
2768
2769typedef atomic<int_least8_t>   atomic_int_least8_t;
2770typedef atomic<uint_least8_t>  atomic_uint_least8_t;
2771typedef atomic<int_least16_t>  atomic_int_least16_t;
2772typedef atomic<uint_least16_t> atomic_uint_least16_t;
2773typedef atomic<int_least32_t>  atomic_int_least32_t;
2774typedef atomic<uint_least32_t> atomic_uint_least32_t;
2775typedef atomic<int_least64_t>  atomic_int_least64_t;
2776typedef atomic<uint_least64_t> atomic_uint_least64_t;
2777
2778typedef atomic<int_fast8_t>   atomic_int_fast8_t;
2779typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
2780typedef atomic<int_fast16_t>  atomic_int_fast16_t;
2781typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
2782typedef atomic<int_fast32_t>  atomic_int_fast32_t;
2783typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
2784typedef atomic<int_fast64_t>  atomic_int_fast64_t;
2785typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
2786
2787typedef atomic< int8_t>  atomic_int8_t;
2788typedef atomic<uint8_t>  atomic_uint8_t;
2789typedef atomic< int16_t> atomic_int16_t;
2790typedef atomic<uint16_t> atomic_uint16_t;
2791typedef atomic< int32_t> atomic_int32_t;
2792typedef atomic<uint32_t> atomic_uint32_t;
2793typedef atomic< int64_t> atomic_int64_t;
2794typedef atomic<uint64_t> atomic_uint64_t;
2795
2796typedef atomic<intptr_t>  atomic_intptr_t;
2797typedef atomic<uintptr_t> atomic_uintptr_t;
2798typedef atomic<size_t>    atomic_size_t;
2799typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
2800typedef atomic<intmax_t>  atomic_intmax_t;
2801typedef atomic<uintmax_t> atomic_uintmax_t;
2802
2803// atomic_*_lock_free : prefer the contention type first, then the largest lock-free type
2804
2805#ifdef __cpp_lib_atomic_is_always_lock_free
2806# define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0)
2807#else
2808# define _LIBCPP_CONTENTION_LOCK_FREE false
2809#endif
2810
2811#if ATOMIC_LLONG_LOCK_FREE == 2
2812typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type          __libcpp_signed_lock_free;
2813typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
2814#elif ATOMIC_INT_LOCK_FREE == 2
2815typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type                __libcpp_signed_lock_free;
2816typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type       __libcpp_unsigned_lock_free;
2817#elif ATOMIC_SHORT_LOCK_FREE == 2
2818typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type              __libcpp_signed_lock_free;
2819typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type     __libcpp_unsigned_lock_free;
2820#elif ATOMIC_CHAR_LOCK_FREE == 2
2821typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type               __libcpp_signed_lock_free;
2822typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type      __libcpp_unsigned_lock_free;
2823#else
2824    // No signed/unsigned lock-free types
2825#endif
2826
2827typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free;
2828typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
2829
2830#define ATOMIC_FLAG_INIT {false}
2831#define ATOMIC_VAR_INIT(__v) {__v}
2832
2833_LIBCPP_END_NAMESPACE_STD
2834
2835#endif // _LIBCPP_ATOMIC
2836