1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
5// See https://llvm.org/LICENSE.txt for license information.
6// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7//
8//===----------------------------------------------------------------------===//
9
10#ifndef _LIBCPP_ATOMIC
11#define _LIBCPP_ATOMIC
12
13/*
14    atomic synopsis
15
16namespace std
17{
18
19// feature test macro
20
21#define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
22
23 // order and consistency
24
25 enum memory_order: unspecified // enum class in C++20
26 {
27    relaxed,
28    consume, // load-consume
29    acquire, // load-acquire
30    release, // store-release
31    acq_rel, // store-release load-acquire
32    seq_cst // store-release load-acquire
33 };
34
35 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
36 inline constexpr auto memory_order_consume = memory_order::consume;
37 inline constexpr auto memory_order_acquire = memory_order::acquire;
38 inline constexpr auto memory_order_release = memory_order::release;
39 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
40 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
41
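 Illustrative example (not part of the standard synopsis): a minimal
 release/acquire hand-off between two threads, assuming hypothetical
 variables data and ready.

     int data = 0;
     std::atomic<bool> ready{false};

     // producer thread
     data = 42;                                        // plain write
     ready.store(true, std::memory_order_release);     // publish data

     // consumer thread
     while (!ready.load(std::memory_order_acquire)) {} // after the loop, data == 42
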
42template <class T> T kill_dependency(T y) noexcept;
43
44// lock-free property
45
46#define ATOMIC_BOOL_LOCK_FREE unspecified
47#define ATOMIC_CHAR_LOCK_FREE unspecified
48#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
49#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
50#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
51#define ATOMIC_SHORT_LOCK_FREE unspecified
52#define ATOMIC_INT_LOCK_FREE unspecified
53#define ATOMIC_LONG_LOCK_FREE unspecified
54#define ATOMIC_LLONG_LOCK_FREE unspecified
55#define ATOMIC_POINTER_LOCK_FREE unspecified
56
57template <class T>
58struct atomic
59{
60    using value_type = T;
61
62    static constexpr bool is_always_lock_free;
63    bool is_lock_free() const volatile noexcept;
64    bool is_lock_free() const noexcept;
65
66    atomic() noexcept = default;
67    constexpr atomic(T desr) noexcept;
68    atomic(const atomic&) = delete;
69    atomic& operator=(const atomic&) = delete;
70    atomic& operator=(const atomic&) volatile = delete;
71
72    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
73    T load(memory_order m = memory_order_seq_cst) const noexcept;
74    operator T() const volatile noexcept;
75    operator T() const noexcept;
76    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
77    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
78    T operator=(T) volatile noexcept;
79    T operator=(T) noexcept;
80
81    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
82    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
83    bool compare_exchange_weak(T& expc, T desr,
84                               memory_order s, memory_order f) volatile noexcept;
85    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
86    bool compare_exchange_strong(T& expc, T desr,
87                                 memory_order s, memory_order f) volatile noexcept;
88    bool compare_exchange_strong(T& expc, T desr,
89                                 memory_order s, memory_order f) noexcept;
90    bool compare_exchange_weak(T& expc, T desr,
91                               memory_order m = memory_order_seq_cst) volatile noexcept;
92    bool compare_exchange_weak(T& expc, T desr,
93                               memory_order m = memory_order_seq_cst) noexcept;
94    bool compare_exchange_strong(T& expc, T desr,
95                                memory_order m = memory_order_seq_cst) volatile noexcept;
96    bool compare_exchange_strong(T& expc, T desr,
97                                 memory_order m = memory_order_seq_cst) noexcept;
98
99    void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
100    void wait(T, memory_order = memory_order::seq_cst) const noexcept;
101    void notify_one() volatile noexcept;
102    void notify_one() noexcept;
103    void notify_all() volatile noexcept;
104    void notify_all() noexcept;
105};
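
    Illustrative example (not part of the standard synopsis): a typical
    compare_exchange_weak retry loop on the primary template.

        std::atomic<int> counter{0};
        counter.store(5);                  // seq_cst store
        int expected = counter.load();     // seq_cst load

        // atomically replace the current value with its double, retrying on contention
        while (!counter.compare_exchange_weak(expected, expected * 2)) {
            // on failure, expected is reloaded with the current value
        }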
106
107template <>
108struct atomic<integral>
109{
110    using value_type = integral;
111
112    static constexpr bool is_always_lock_free;
113    bool is_lock_free() const volatile noexcept;
114    bool is_lock_free() const noexcept;
115
116    atomic() noexcept = default;
117    constexpr atomic(integral desr) noexcept;
118    atomic(const atomic&) = delete;
119    atomic& operator=(const atomic&) = delete;
120    atomic& operator=(const atomic&) volatile = delete;
121
122    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
123    integral load(memory_order m = memory_order_seq_cst) const noexcept;
124    operator integral() const volatile noexcept;
125    operator integral() const noexcept;
126    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
127    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
128    integral operator=(integral desr) volatile noexcept;
129    integral operator=(integral desr) noexcept;
130
131    integral exchange(integral desr,
132                      memory_order m = memory_order_seq_cst) volatile noexcept;
133    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134    bool compare_exchange_weak(integral& expc, integral desr,
135                               memory_order s, memory_order f) volatile noexcept;
136    bool compare_exchange_weak(integral& expc, integral desr,
137                               memory_order s, memory_order f) noexcept;
138    bool compare_exchange_strong(integral& expc, integral desr,
139                                 memory_order s, memory_order f) volatile noexcept;
140    bool compare_exchange_strong(integral& expc, integral desr,
141                                 memory_order s, memory_order f) noexcept;
142    bool compare_exchange_weak(integral& expc, integral desr,
143                               memory_order m = memory_order_seq_cst) volatile noexcept;
144    bool compare_exchange_weak(integral& expc, integral desr,
145                               memory_order m = memory_order_seq_cst) noexcept;
146    bool compare_exchange_strong(integral& expc, integral desr,
147                                memory_order m = memory_order_seq_cst) volatile noexcept;
148    bool compare_exchange_strong(integral& expc, integral desr,
149                                 memory_order m = memory_order_seq_cst) noexcept;
150
151    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
152    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
153    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
154    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
155    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
156    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
157    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
158    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
159    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
161
162    integral operator++(int) volatile noexcept;
163    integral operator++(int) noexcept;
164    integral operator--(int) volatile noexcept;
165    integral operator--(int) noexcept;
166    integral operator++() volatile noexcept;
167    integral operator++() noexcept;
168    integral operator--() volatile noexcept;
169    integral operator--() noexcept;
170    integral operator+=(integral op) volatile noexcept;
171    integral operator+=(integral op) noexcept;
172    integral operator-=(integral op) volatile noexcept;
173    integral operator-=(integral op) noexcept;
174    integral operator&=(integral op) volatile noexcept;
175    integral operator&=(integral op) noexcept;
176    integral operator|=(integral op) volatile noexcept;
177    integral operator|=(integral op) noexcept;
178    integral operator^=(integral op) volatile noexcept;
179    integral operator^=(integral op) noexcept;
180
181    void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
182    void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
183    void notify_one() volatile noexcept;
184    void notify_one() noexcept;
185    void notify_all() volatile noexcept;
186    void notify_all() noexcept;
187};
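
    Illustrative example (not part of the standard synopsis): the integral
    specializations add fetch_ operations and compound assignment, so shared
    counters need no explicit CAS loop.

        std::atomic<unsigned> hits{0};
        hits.fetch_add(1, std::memory_order_relaxed); // returns the previous value
        ++hits;                                       // same as fetch_add(1) + 1
        hits |= 0x80u;                                // atomic OR via fetch_or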
188
189template <class T>
190struct atomic<T*>
191{
192    using value_type = T*;
193
194    static constexpr bool is_always_lock_free;
195    bool is_lock_free() const volatile noexcept;
196    bool is_lock_free() const noexcept;
197
198    atomic() noexcept = default;
199    constexpr atomic(T* desr) noexcept;
200    atomic(const atomic&) = delete;
201    atomic& operator=(const atomic&) = delete;
202    atomic& operator=(const atomic&) volatile = delete;
203
204    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
205    T* load(memory_order m = memory_order_seq_cst) const noexcept;
206    operator T*() const volatile noexcept;
207    operator T*() const noexcept;
208    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
209    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
210    T* operator=(T*) volatile noexcept;
211    T* operator=(T*) noexcept;
212
213    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
214    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
215    bool compare_exchange_weak(T*& expc, T* desr,
216                               memory_order s, memory_order f) volatile noexcept;
217    bool compare_exchange_weak(T*& expc, T* desr,
218                               memory_order s, memory_order f) noexcept;
219    bool compare_exchange_strong(T*& expc, T* desr,
220                                 memory_order s, memory_order f) volatile noexcept;
221    bool compare_exchange_strong(T*& expc, T* desr,
222                                 memory_order s, memory_order f) noexcept;
223    bool compare_exchange_weak(T*& expc, T* desr,
224                               memory_order m = memory_order_seq_cst) volatile noexcept;
225    bool compare_exchange_weak(T*& expc, T* desr,
226                               memory_order m = memory_order_seq_cst) noexcept;
227    bool compare_exchange_strong(T*& expc, T* desr,
228                                memory_order m = memory_order_seq_cst) volatile noexcept;
229    bool compare_exchange_strong(T*& expc, T* desr,
230                                 memory_order m = memory_order_seq_cst) noexcept;
231    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
232    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
233    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
234    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
235
236    T* operator++(int) volatile noexcept;
237    T* operator++(int) noexcept;
238    T* operator--(int) volatile noexcept;
239    T* operator--(int) noexcept;
240    T* operator++() volatile noexcept;
241    T* operator++() noexcept;
242    T* operator--() volatile noexcept;
243    T* operator--() noexcept;
244    T* operator+=(ptrdiff_t op) volatile noexcept;
245    T* operator+=(ptrdiff_t op) noexcept;
246    T* operator-=(ptrdiff_t op) volatile noexcept;
247    T* operator-=(ptrdiff_t op) noexcept;
248
249    void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
250    void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
251    void notify_one() volatile noexcept;
252    void notify_one() noexcept;
253    void notify_all() volatile noexcept;
254    void notify_all() noexcept;
255};
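
    Illustrative example (not part of the standard synopsis): the pointer
    specialization does arithmetic in units of the pointee type, assuming a
    hypothetical buffer array shared by several threads.

        int buffer[8] = {};
        std::atomic<int*> cursor{buffer};
        int* slot = cursor.fetch_add(1); // old pointer; cursor advances by sizeof(int)
        *slot = 1;                       // each thread claims a distinct element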
256
257
258template <class T>
259  bool atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
260
261template <class T>
262  bool atomic_is_lock_free(const atomic<T>* obj) noexcept;
263
264template <class T>
265  void atomic_store(volatile atomic<T>* obj, T desr) noexcept;
266
267template <class T>
268  void atomic_store(atomic<T>* obj, T desr) noexcept;
269
270template <class T>
271  void atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
272
273template <class T>
274  void atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
275
276template <class T>
277  T atomic_load(const volatile atomic<T>* obj) noexcept;
278
279template <class T>
280  T atomic_load(const atomic<T>* obj) noexcept;
281
282template <class T>
283  T atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
284
285template <class T>
286  T atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
287
288template <class T>
289  T atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
290
291template <class T>
292  T atomic_exchange(atomic<T>* obj, T desr) noexcept;
293
294template <class T>
295  T atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
296
297template <class T>
298  T atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
299
300template <class T>
301  bool atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
302
303template <class T>
304  bool atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
305
306template <class T>
307  bool atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
308
309template <class T>
310  bool atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
311
312template <class T>
313  bool atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
314                                             T desr,
315                                             memory_order s, memory_order f) noexcept;
316
317template <class T>
318  bool atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
319                                             memory_order s, memory_order f) noexcept;
320
321template <class T>
322  bool atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
323                                               T* expc, T desr,
324                                               memory_order s, memory_order f) noexcept;
325
326template <class T>
327  bool atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
328                                               T desr,
329                                               memory_order s, memory_order f) noexcept;
330
331template <class T>
332  void atomic_wait(const volatile atomic<T>* obj, T old) noexcept;
333
334template <class T>
335  void atomic_wait(const atomic<T>* obj, T old) noexcept;
336
337template <class T>
338  void atomic_wait_explicit(const volatile atomic<T>* obj, T old, memory_order m) noexcept;
339
340template <class T>
341  void atomic_wait_explicit(const atomic<T>* obj, T old, memory_order m) noexcept;
342
343template <class T>
344  void atomic_notify_one(volatile atomic<T>* obj) noexcept;
345
346template <class T>
347  void atomic_notify_one(atomic<T>* obj) noexcept;
348
349template <class T>
350  void atomic_notify_all(volatile atomic<T>* obj) noexcept;
351
352template <class T>
353  void atomic_notify_all(atomic<T>* obj) noexcept;
354
355template <class Integral>
356  Integral atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
357
358template <class Integral>
359  Integral atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
360
361template <class Integral>
362  Integral atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
363                              memory_order m) noexcept;
364template <class Integral>
365  Integral atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
366                              memory_order m) noexcept;
367template <class Integral>
368  Integral atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
369
370template <class Integral>
371  Integral atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
372
373template <class Integral>
374  Integral atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
375                                     memory_order m) noexcept;
376
377template <class Integral>
378  Integral atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
379                                     memory_order m) noexcept;
380
381template <class Integral>
382  Integral atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
383
384template <class Integral>
385  Integral atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
386
387template <class Integral>
388  Integral atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
389                                     memory_order m) noexcept;
390
391template <class Integral>
392  Integral atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
393                                     memory_order m) noexcept;
394
395template <class Integral>
396  Integral atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
397
398template <class Integral>
399  Integral atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
400
401template <class Integral>
402  Integral atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
403                             memory_order m) noexcept;
404
405template <class Integral>
406  Integral atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
407                             memory_order m) noexcept;
408
409template <class Integral>
410  Integral atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
411
412template <class Integral>
413  Integral atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
414
415template <class Integral>
416  Integral atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
417                                     memory_order m) noexcept;
418
419template <class Integral>
420  Integral atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
421                                     memory_order m) noexcept;
422
423template <class T>
424  T* atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
425
426template <class T>
427  T* atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
428
429template <class T>
430  T* atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
431                               memory_order m) noexcept;
432
433template <class T>
434  T* atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
435
436template <class T>
437  T* atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
438
439template <class T>
440  T* atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
441
442template <class T>
443  T* atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
444                               memory_order m) noexcept;
445
446template <class T>
447  T* atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
448
449// Atomics for standard typedef types
450
451typedef atomic<bool>               atomic_bool;
452typedef atomic<char>               atomic_char;
453typedef atomic<signed char>        atomic_schar;
454typedef atomic<unsigned char>      atomic_uchar;
455typedef atomic<short>              atomic_short;
456typedef atomic<unsigned short>     atomic_ushort;
457typedef atomic<int>                atomic_int;
458typedef atomic<unsigned int>       atomic_uint;
459typedef atomic<long>               atomic_long;
460typedef atomic<unsigned long>      atomic_ulong;
461typedef atomic<long long>          atomic_llong;
462typedef atomic<unsigned long long> atomic_ullong;
463typedef atomic<char16_t>           atomic_char16_t;
464typedef atomic<char32_t>           atomic_char32_t;
465typedef atomic<wchar_t>            atomic_wchar_t;
466
467typedef atomic<int_least8_t>   atomic_int_least8_t;
468typedef atomic<uint_least8_t>  atomic_uint_least8_t;
469typedef atomic<int_least16_t>  atomic_int_least16_t;
470typedef atomic<uint_least16_t> atomic_uint_least16_t;
471typedef atomic<int_least32_t>  atomic_int_least32_t;
472typedef atomic<uint_least32_t> atomic_uint_least32_t;
473typedef atomic<int_least64_t>  atomic_int_least64_t;
474typedef atomic<uint_least64_t> atomic_uint_least64_t;
475
476typedef atomic<int_fast8_t>   atomic_int_fast8_t;
477typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
478typedef atomic<int_fast16_t>  atomic_int_fast16_t;
479typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
480typedef atomic<int_fast32_t>  atomic_int_fast32_t;
481typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
482typedef atomic<int_fast64_t>  atomic_int_fast64_t;
483typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
484
485typedef atomic<int8_t>   atomic_int8_t;
486typedef atomic<uint8_t>  atomic_uint8_t;
487typedef atomic<int16_t>  atomic_int16_t;
488typedef atomic<uint16_t> atomic_uint16_t;
489typedef atomic<int32_t>  atomic_int32_t;
490typedef atomic<uint32_t> atomic_uint32_t;
491typedef atomic<int64_t>  atomic_int64_t;
492typedef atomic<uint64_t> atomic_uint64_t;
493
494typedef atomic<intptr_t>  atomic_intptr_t;
495typedef atomic<uintptr_t> atomic_uintptr_t;
496typedef atomic<size_t>    atomic_size_t;
497typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
498typedef atomic<intmax_t>  atomic_intmax_t;
499typedef atomic<uintmax_t> atomic_uintmax_t;
500
501// flag type and operations
502
503typedef struct atomic_flag
504{
505    atomic_flag() noexcept = default;
506    atomic_flag(const atomic_flag&) = delete;
507    atomic_flag& operator=(const atomic_flag&) = delete;
508    atomic_flag& operator=(const atomic_flag&) volatile = delete;
509
510    bool test(memory_order m = memory_order_seq_cst) volatile noexcept;
511    bool test(memory_order m = memory_order_seq_cst) noexcept;
512    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
513    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
514    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
515    void clear(memory_order m = memory_order_seq_cst) noexcept;
516
517    void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
518    void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
519    void notify_one() volatile noexcept;
520    void notify_one() noexcept;
521    void notify_all() volatile noexcept;
522    void notify_all() noexcept;
523} atomic_flag;
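
    Illustrative example (not part of the standard synopsis): atomic_flag as a
    minimal spinlock; a sketch only, real code should prefer std::mutex or at
    least add backoff.

        std::atomic_flag lock = ATOMIC_FLAG_INIT;

        void with_lock() {
            while (lock.test_and_set(std::memory_order_acquire)) {} // spin until acquired
            // ... critical section ...
            lock.clear(std::memory_order_release);                  // release
        }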
524
525bool atomic_flag_test(volatile atomic_flag* obj) noexcept;
526bool atomic_flag_test(atomic_flag* obj) noexcept;
527bool atomic_flag_test_explicit(volatile atomic_flag* obj,
528                               memory_order m) noexcept;
529bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept;
530bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
531bool atomic_flag_test_and_set(atomic_flag* obj) noexcept;
532bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
533                                       memory_order m) noexcept;
534bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
535void atomic_flag_clear(volatile atomic_flag* obj) noexcept;
536void atomic_flag_clear(atomic_flag* obj) noexcept;
537void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
538void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
539
540void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
541void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
542void atomic_flag_wait_explicit(const volatile atomic_flag* obj, bool old, memory_order m) noexcept;
543void atomic_flag_wait_explicit(const atomic_flag* obj, bool old, memory_order m) noexcept;
544void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
545void atomic_flag_notify_one(atomic_flag* obj) noexcept;
546void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
547void atomic_flag_notify_all(atomic_flag* obj) noexcept;
548
549// fences
550
551void atomic_thread_fence(memory_order m) noexcept;
552void atomic_signal_fence(memory_order m) noexcept;
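
    Illustrative example (not part of the standard synopsis): fences can
    replace the release/acquire orderings in the hand-off shown earlier.

        // producer thread
        data = 42;
        std::atomic_thread_fence(std::memory_order_release);
        ready.store(true, std::memory_order_relaxed);

        // consumer thread
        while (!ready.load(std::memory_order_relaxed)) {}
        std::atomic_thread_fence(std::memory_order_acquire);
        // data is now guaranteed to read as 42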
553
554// deprecated
555
556template <class T>
557  void atomic_init(volatile atomic<T>* obj, typename atomic<T>::value_type desr) noexcept;
558
559template <class T>
560  void atomic_init(atomic<T>* obj, typename atomic<T>::value_type desr) noexcept;
561
562#define ATOMIC_VAR_INIT(value) see below
563
564#define ATOMIC_FLAG_INIT see below
565
566}  // std
567
568*/
569
570#include <__config>
571#include <__threading_support>
572#include <cstddef>
573#include <cstdint>
574#include <cstring>
575#include <type_traits>
576#include <version>
577
578#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
579#pragma GCC system_header
580#endif
581
582#ifdef _LIBCPP_HAS_NO_THREADS
583# error <atomic> is not supported on this single-threaded system
584#endif
585#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
586# error <atomic> is not implemented
587#endif
588#ifdef kill_dependency
589# error C++ standard library is incompatible with <stdatomic.h>
590#endif
591
592#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
593  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
594                           __m == memory_order_acquire || \
595                           __m == memory_order_acq_rel,   \
596                        "memory order argument to atomic operation is invalid")
597
598#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
599  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
600                           __m == memory_order_acq_rel,   \
601                        "memory order argument to atomic operation is invalid")
602
603#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
604  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
605                           __f == memory_order_acq_rel,   \
606                        "memory order argument to atomic operation is invalid")
607
608_LIBCPP_BEGIN_NAMESPACE_STD
609
610// Figure out what the underlying type for `memory_order` would be if it were
611// declared as an unscoped enum (accounting for -fshort-enums). Use this result
612// to pin the underlying type in C++20.
613enum __legacy_memory_order {
614    __mo_relaxed,
615    __mo_consume,
616    __mo_acquire,
617    __mo_release,
618    __mo_acq_rel,
619    __mo_seq_cst
620};
621
622typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;
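// Illustrative note: with -fshort-enums the deduced __memory_order_underlying_t
// can be as small as unsigned char, while without it the compiler picks an
// integral type no larger than int; the static_assert further below verifies
// that memory_order ends up with the same underlying type either way.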
623
624#if _LIBCPP_STD_VER > 17
625
626enum class memory_order : __memory_order_underlying_t {
627  relaxed = __mo_relaxed,
628  consume = __mo_consume,
629  acquire = __mo_acquire,
630  release = __mo_release,
631  acq_rel = __mo_acq_rel,
632  seq_cst = __mo_seq_cst
633};
634
635inline constexpr auto memory_order_relaxed = memory_order::relaxed;
636inline constexpr auto memory_order_consume = memory_order::consume;
637inline constexpr auto memory_order_acquire = memory_order::acquire;
638inline constexpr auto memory_order_release = memory_order::release;
639inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
640inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
641
642#else
643
644typedef enum memory_order {
645  memory_order_relaxed = __mo_relaxed,
646  memory_order_consume = __mo_consume,
647  memory_order_acquire = __mo_acquire,
648  memory_order_release = __mo_release,
649  memory_order_acq_rel = __mo_acq_rel,
650  memory_order_seq_cst = __mo_seq_cst,
651} memory_order;
652
653#endif // _LIBCPP_STD_VER > 17
654
655template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
656bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
657    return memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0;
658}
659
660static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
661  "unexpected underlying type for std::memory_order");
662
663#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
664	defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
665
666// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
667// the implicitly-defined copy assignment operator is not volatile-qualified,
668// a byte-by-byte copy is required when the destination or source is volatile.
669template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
670typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
671__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
672  __a_value = __val;
673}
674template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
675typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
676__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
677  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
678  volatile char* __end = __to + sizeof(_Tp);
679  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
680  while (__to != __end)
681    *__to++ = *__from++;
682}
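// Illustrative only: given
//   int volatile __dst = 0;
//   int __src = 42;
// the call __cxx_atomic_assign_volatile(__dst, __src) selects the second,
// byte-by-byte overload, because its parameters are more specialized for
// volatile arguments.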
683
684#endif
685
686#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
687
688template <typename _Tp>
689struct __cxx_atomic_base_impl {
690
691  _LIBCPP_INLINE_VISIBILITY
692#ifndef _LIBCPP_CXX03_LANG
693    __cxx_atomic_base_impl() _NOEXCEPT = default;
694#else
695    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
696#endif // _LIBCPP_CXX03_LANG
697  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
698    : __a_value(value) {}
699  _Tp __a_value;
700};
701
702_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
703  // Use a chain of conditional expressions instead of a switch so this can be a C++11 constexpr function.
704  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
705         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
706          (__order == memory_order_release ? __ATOMIC_RELEASE:
707           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
708            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
709              __ATOMIC_CONSUME))));
710}
711
712_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
713  // Use a chain of conditional expressions instead of a switch so this can be a C++11 constexpr function.
714  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
715         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
716          (__order == memory_order_release ? __ATOMIC_RELAXED:
717           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
718            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
719              __ATOMIC_CONSUME))));
720}
721
722template <typename _Tp>
723_LIBCPP_INLINE_VISIBILITY
724void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
725  __cxx_atomic_assign_volatile(__a->__a_value, __val);
726}
727
728template <typename _Tp>
729_LIBCPP_INLINE_VISIBILITY
730void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
731  __a->__a_value = __val;
732}
733
734_LIBCPP_INLINE_VISIBILITY inline
735void __cxx_atomic_thread_fence(memory_order __order) {
736  __atomic_thread_fence(__to_gcc_order(__order));
737}
738
739_LIBCPP_INLINE_VISIBILITY inline
740void __cxx_atomic_signal_fence(memory_order __order) {
741  __atomic_signal_fence(__to_gcc_order(__order));
742}
743
744template <typename _Tp>
745_LIBCPP_INLINE_VISIBILITY
746void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
747                        memory_order __order) {
748  __atomic_store(&__a->__a_value, &__val,
749                 __to_gcc_order(__order));
750}
751
752template <typename _Tp>
753_LIBCPP_INLINE_VISIBILITY
754void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
755                        memory_order __order) {
756  __atomic_store(&__a->__a_value, &__val,
757                 __to_gcc_order(__order));
758}
759
760template <typename _Tp>
761_LIBCPP_INLINE_VISIBILITY
762_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
763                      memory_order __order) {
764  _Tp __ret;
765  __atomic_load(&__a->__a_value, &__ret,
766                __to_gcc_order(__order));
767  return __ret;
768}
769
770template <typename _Tp>
771_LIBCPP_INLINE_VISIBILITY
772_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
773  _Tp __ret;
774  __atomic_load(&__a->__a_value, &__ret,
775                __to_gcc_order(__order));
776  return __ret;
777}
778
779template <typename _Tp>
780_LIBCPP_INLINE_VISIBILITY
781_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
782                          _Tp __value, memory_order __order) {
783  _Tp __ret;
784  __atomic_exchange(&__a->__a_value, &__value, &__ret,
785                    __to_gcc_order(__order));
786  return __ret;
787}
788
789template <typename _Tp>
790_LIBCPP_INLINE_VISIBILITY
791_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
792                          memory_order __order) {
793  _Tp __ret;
794  __atomic_exchange(&__a->__a_value, &__value, &__ret,
795                    __to_gcc_order(__order));
796  return __ret;
797}
798
799template <typename _Tp>
800_LIBCPP_INLINE_VISIBILITY
801bool __cxx_atomic_compare_exchange_strong(
802    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
803    memory_order __success, memory_order __failure) {
804  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
805                                   false,
806                                   __to_gcc_order(__success),
807                                   __to_gcc_failure_order(__failure));
808}
809
810template <typename _Tp>
811_LIBCPP_INLINE_VISIBILITY
812bool __cxx_atomic_compare_exchange_strong(
813    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
814    memory_order __failure) {
815  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
816                                   false,
817                                   __to_gcc_order(__success),
818                                   __to_gcc_failure_order(__failure));
819}
820
821template <typename _Tp>
822_LIBCPP_INLINE_VISIBILITY
823bool __cxx_atomic_compare_exchange_weak(
824    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
825    memory_order __success, memory_order __failure) {
826  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
827                                   true,
828                                   __to_gcc_order(__success),
829                                   __to_gcc_failure_order(__failure));
830}
831
832template <typename _Tp>
833_LIBCPP_INLINE_VISIBILITY
834bool __cxx_atomic_compare_exchange_weak(
835    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
836    memory_order __failure) {
837  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
838                                   true,
839                                   __to_gcc_order(__success),
840                                   __to_gcc_failure_order(__failure));
841}
842
843template <typename _Tp>
844struct __skip_amt { enum {value = 1}; };
845
846template <typename _Tp>
847struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
848
849// FIXME: It is unclear what the spec says about using arrays with
850// atomic_fetch_add; leave these specializations empty so misuse fails to compile rather than misbehaving.
851template <typename _Tp>
852struct __skip_amt<_Tp[]> { };
853template <typename _Tp, int n>
854struct __skip_amt<_Tp[n]> { };
855
856template <typename _Tp, typename _Td>
857_LIBCPP_INLINE_VISIBILITY
858_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
859                           _Td __delta, memory_order __order) {
860  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
861                            __to_gcc_order(__order));
862}
863
864template <typename _Tp, typename _Td>
865_LIBCPP_INLINE_VISIBILITY
866_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
867                           memory_order __order) {
868  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
869                            __to_gcc_order(__order));
870}
871
872template <typename _Tp, typename _Td>
873_LIBCPP_INLINE_VISIBILITY
874_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
875                           _Td __delta, memory_order __order) {
876  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
877                            __to_gcc_order(__order));
878}
879
880template <typename _Tp, typename _Td>
881_LIBCPP_INLINE_VISIBILITY
882_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
883                           memory_order __order) {
884  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
885                            __to_gcc_order(__order));
886}
887
888template <typename _Tp>
889_LIBCPP_INLINE_VISIBILITY
890_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
891                           _Tp __pattern, memory_order __order) {
892  return __atomic_fetch_and(&__a->__a_value, __pattern,
893                            __to_gcc_order(__order));
894}
895
896template <typename _Tp>
897_LIBCPP_INLINE_VISIBILITY
898_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
899                           _Tp __pattern, memory_order __order) {
900  return __atomic_fetch_and(&__a->__a_value, __pattern,
901                            __to_gcc_order(__order));
902}
903
904template <typename _Tp>
905_LIBCPP_INLINE_VISIBILITY
906_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
907                          _Tp __pattern, memory_order __order) {
908  return __atomic_fetch_or(&__a->__a_value, __pattern,
909                           __to_gcc_order(__order));
910}
911
912template <typename _Tp>
913_LIBCPP_INLINE_VISIBILITY
914_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
915                          memory_order __order) {
916  return __atomic_fetch_or(&__a->__a_value, __pattern,
917                           __to_gcc_order(__order));
918}
919
920template <typename _Tp>
921_LIBCPP_INLINE_VISIBILITY
922_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
923                           _Tp __pattern, memory_order __order) {
924  return __atomic_fetch_xor(&__a->__a_value, __pattern,
925                            __to_gcc_order(__order));
926}
927
928template <typename _Tp>
929_LIBCPP_INLINE_VISIBILITY
930_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
931                           memory_order __order) {
932  return __atomic_fetch_xor(&__a->__a_value, __pattern,
933                            __to_gcc_order(__order));
934}
935
936#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
937
938#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
939
940template <typename _Tp>
941struct __cxx_atomic_base_impl {
942
943  _LIBCPP_INLINE_VISIBILITY
944#ifndef _LIBCPP_CXX03_LANG
945    __cxx_atomic_base_impl() _NOEXCEPT = default;
946#else
947    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
948#endif // _LIBCPP_CXX03_LANG
949  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
950    : __a_value(value) {}
951  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
952};
953
954#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
955
956_LIBCPP_INLINE_VISIBILITY inline
957void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
958    __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
959}
960
961_LIBCPP_INLINE_VISIBILITY inline
962void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
963    __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
964}
965
966template<class _Tp>
967_LIBCPP_INLINE_VISIBILITY
968void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
969    __c11_atomic_init(&__a->__a_value, __val);
970}
971template<class _Tp>
972_LIBCPP_INLINE_VISIBILITY
973void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
974    __c11_atomic_init(&__a->__a_value, __val);
975}
976
977template<class _Tp>
978_LIBCPP_INLINE_VISIBILITY
979void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
980    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
981}
982template<class _Tp>
983_LIBCPP_INLINE_VISIBILITY
984void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
985    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
986}
987
988template<class _Tp>
989_LIBCPP_INLINE_VISIBILITY
990_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
991    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
992    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
993}
994template<class _Tp>
995_LIBCPP_INLINE_VISIBILITY
996_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
997    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
998    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
999}
1000
1001template<class _Tp>
1002_LIBCPP_INLINE_VISIBILITY
1003_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
1004    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
1005}
1006template<class _Tp>
1007_LIBCPP_INLINE_VISIBILITY
1008_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
1009    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
1010}
1011
1012template<class _Tp>
1013_LIBCPP_INLINE_VISIBILITY
1014bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1015    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1016}
1017template<class _Tp>
1018_LIBCPP_INLINE_VISIBILITY
1019bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1020    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1021}
1022
1023template<class _Tp>
1024_LIBCPP_INLINE_VISIBILITY
1025bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1026    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1027}
1028template<class _Tp>
1029_LIBCPP_INLINE_VISIBILITY
1030bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1031    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value,  static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1032}
1033
1034template<class _Tp>
1035_LIBCPP_INLINE_VISIBILITY
1036_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1037    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1038}
1039template<class _Tp>
1040_LIBCPP_INLINE_VISIBILITY
1041_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1042    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1043}
1044
1045template<class _Tp>
1046_LIBCPP_INLINE_VISIBILITY
1047_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1048    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1049}
1050template<class _Tp>
1051_LIBCPP_INLINE_VISIBILITY
1052_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1053    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1054}
1055
1056template<class _Tp>
1057_LIBCPP_INLINE_VISIBILITY
1058_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1059    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1060}
1061template<class _Tp>
1062_LIBCPP_INLINE_VISIBILITY
1063_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1064    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1065}
1066template<class _Tp>
1067_LIBCPP_INLINE_VISIBILITY
1068_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1069    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1070}
1071template<class _Tp>
1072_LIBCPP_INLINE_VISIBILITY
1073_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1074    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1075}
1076
1077template<class _Tp>
1078_LIBCPP_INLINE_VISIBILITY
1079_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1080    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1081}
1082template<class _Tp>
1083_LIBCPP_INLINE_VISIBILITY
1084_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1085    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1086}
1087
1088template<class _Tp>
1089_LIBCPP_INLINE_VISIBILITY
1090_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1091    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1092}
1093template<class _Tp>
1094_LIBCPP_INLINE_VISIBILITY
1095_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1096    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1097}
1098
1099template<class _Tp>
1100_LIBCPP_INLINE_VISIBILITY
1101_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1102    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1103}
1104template<class _Tp>
1105_LIBCPP_INLINE_VISIBILITY
1106_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1107    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1108}
1109
1110#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
1111
1112template <class _Tp>
1113_LIBCPP_INLINE_VISIBILITY
1114_Tp kill_dependency(_Tp __y) _NOEXCEPT
1115{
1116    return __y;
1117}
1118
1119#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
1120# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
1121# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
1122# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
1123# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
1124# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
1125# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
1126# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
1127# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
1128# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
1129# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
1130#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
1131# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
1132# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
1133# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1134# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1135# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1136# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
1137# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
1138# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
1139# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
1140# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
1141#endif
1142
1143#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1144
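// When only the compiler builtins are available, atomics that cannot be
// lock-free are emulated: __cxx_atomic_lock_impl pairs the stored __a_value
// with a per-object spinlock (__a_lock), and each operation below acquires
// the lock, performs the operation non-atomically, then releases it.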
1145template<typename _Tp>
1146struct __cxx_atomic_lock_impl {
1147
1148  _LIBCPP_INLINE_VISIBILITY
1149  __cxx_atomic_lock_impl() _NOEXCEPT
1150    : __a_value(), __a_lock(0) {}
1151  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
1152  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
1153    : __a_value(value), __a_lock(0) {}
1154
1155  _Tp __a_value;
1156  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;
1157
1158  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
1159    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1160        /*spin*/;
1161  }
1162  _LIBCPP_INLINE_VISIBILITY void __lock() const {
1163    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1164        /*spin*/;
1165  }
1166  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
1167    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1168  }
1169  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
1170    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1171  }
1172  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
1173    __lock();
1174    _Tp __old;
1175    __cxx_atomic_assign_volatile(__old, __a_value);
1176    __unlock();
1177    return __old;
1178  }
1179  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
1180    __lock();
1181    _Tp __old = __a_value;
1182    __unlock();
1183    return __old;
1184  }
1185};
1186
1187template <typename _Tp>
1188_LIBCPP_INLINE_VISIBILITY
1189void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
1190  __cxx_atomic_assign_volatile(__a->__a_value, __val);
1191}
1192template <typename _Tp>
1193_LIBCPP_INLINE_VISIBILITY
1194void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
1195  __a->__a_value = __val;
1196}
1197
1198template <typename _Tp>
1199_LIBCPP_INLINE_VISIBILITY
1200void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
1201  __a->__lock();
1202  __cxx_atomic_assign_volatile(__a->__a_value, __val);
1203  __a->__unlock();
1204}
1205template <typename _Tp>
1206_LIBCPP_INLINE_VISIBILITY
1207void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
1208  __a->__lock();
1209  __a->__a_value = __val;
1210  __a->__unlock();
1211}
1212
1213template <typename _Tp>
1214_LIBCPP_INLINE_VISIBILITY
1215_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1216  return __a->__read();
1217}
1218template <typename _Tp>
1219_LIBCPP_INLINE_VISIBILITY
1220_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1221  return __a->__read();
1222}
1223
1224template <typename _Tp>
1225_LIBCPP_INLINE_VISIBILITY
1226_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1227  __a->__lock();
1228  _Tp __old;
1229  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1230  __cxx_atomic_assign_volatile(__a->__a_value, __value);
1231  __a->__unlock();
1232  return __old;
1233}
1234template <typename _Tp>
1235_LIBCPP_INLINE_VISIBILITY
1236_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1237  __a->__lock();
1238  _Tp __old = __a->__a_value;
1239  __a->__a_value = __value;
1240  __a->__unlock();
1241  return __old;
1242}
1243
1244template <typename _Tp>
1245_LIBCPP_INLINE_VISIBILITY
1246bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1247                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1248  __a->__lock();
1249  _Tp __temp;
1250  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1251  bool __ret = __temp == *__expected;
1252  if(__ret)
1253    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1254  else
1255    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1256  __a->__unlock();
1257  return __ret;
1258}
1259template <typename _Tp>
1260_LIBCPP_INLINE_VISIBILITY
1261bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
1262                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1263  __a->__lock();
1264  bool __ret = __a->__a_value == *__expected;
1265  if(__ret)
1266    __a->__a_value = __value;
1267  else
1268    *__expected = __a->__a_value;
1269  __a->__unlock();
1270  return __ret;
1271}
1272
1273template <typename _Tp>
1274_LIBCPP_INLINE_VISIBILITY
1275bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1276                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1277  __a->__lock();
1278  _Tp __temp;
1279  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1280  bool __ret = __temp == *__expected;
1281  if(__ret)
1282    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1283  else
1284    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1285  __a->__unlock();
1286  return __ret;
1287}
1288template <typename _Tp>
1289_LIBCPP_INLINE_VISIBILITY
1290bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
1291                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1292  __a->__lock();
1293  bool __ret = __a->__a_value == *__expected;
1294  if(__ret)
1295    __a->__a_value = __value;
1296  else
1297    *__expected = __a->__a_value;
1298  __a->__unlock();
1299  return __ret;
1300}
1301
1302template <typename _Tp, typename _Td>
1303_LIBCPP_INLINE_VISIBILITY
1304_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1305                           _Td __delta, memory_order) {
1306  __a->__lock();
1307  _Tp __old;
1308  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1309  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
1310  __a->__unlock();
1311  return __old;
1312}
1313template <typename _Tp, typename _Td>
1314_LIBCPP_INLINE_VISIBILITY
1315_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
1316                           _Td __delta, memory_order) {
1317  __a->__lock();
1318  _Tp __old = __a->__a_value;
1319  __a->__a_value += __delta;
1320  __a->__unlock();
1321  return __old;
1322}
1323
1324template <typename _Tp, typename _Td>
1325_LIBCPP_INLINE_VISIBILITY
1326_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
1327                           ptrdiff_t __delta, memory_order) {
1328  __a->__lock();
1329  _Tp* __old;
1330  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1331  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
1332  __a->__unlock();
1333  return __old;
1334}
1335template <typename _Tp, typename _Td>
1336_LIBCPP_INLINE_VISIBILITY
1337_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
1338                           ptrdiff_t __delta, memory_order) {
1339  __a->__lock();
1340  _Tp* __old = __a->__a_value;
1341  __a->__a_value += __delta;
1342  __a->__unlock();
1343  return __old;
1344}
1345
1346template <typename _Tp, typename _Td>
1347_LIBCPP_INLINE_VISIBILITY
1348_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1349                           _Td __delta, memory_order) {
1350  __a->__lock();
1351  _Tp __old;
1352  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1353  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
1354  __a->__unlock();
1355  return __old;
1356}
1357template <typename _Tp, typename _Td>
1358_LIBCPP_INLINE_VISIBILITY
1359_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
1360                           _Td __delta, memory_order) {
1361  __a->__lock();
1362  _Tp __old = __a->__a_value;
1363  __a->__a_value -= __delta;
1364  __a->__unlock();
1365  return __old;
1366}
1367
1368template <typename _Tp>
1369_LIBCPP_INLINE_VISIBILITY
1370_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1371                           _Tp __pattern, memory_order) {
1372  __a->__lock();
1373  _Tp __old;
1374  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1375  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
1376  __a->__unlock();
1377  return __old;
1378}
1379template <typename _Tp>
1380_LIBCPP_INLINE_VISIBILITY
1381_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
1382                           _Tp __pattern, memory_order) {
1383  __a->__lock();
1384  _Tp __old = __a->__a_value;
1385  __a->__a_value &= __pattern;
1386  __a->__unlock();
1387  return __old;
1388}
1389
1390template <typename _Tp>
1391_LIBCPP_INLINE_VISIBILITY
1392_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1393                          _Tp __pattern, memory_order) {
1394  __a->__lock();
1395  _Tp __old;
1396  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1397  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
1398  __a->__unlock();
1399  return __old;
1400}
1401template <typename _Tp>
1402_LIBCPP_INLINE_VISIBILITY
1403_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
1404                          _Tp __pattern, memory_order) {
1405  __a->__lock();
1406  _Tp __old = __a->__a_value;
1407  __a->__a_value |= __pattern;
1408  __a->__unlock();
1409  return __old;
1410}
1411
1412template <typename _Tp>
1413_LIBCPP_INLINE_VISIBILITY
1414_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1415                           _Tp __pattern, memory_order) {
1416  __a->__lock();
1417  _Tp __old;
1418  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1419  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
1420  __a->__unlock();
1421  return __old;
1422}
1423template <typename _Tp>
1424_LIBCPP_INLINE_VISIBILITY
1425_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
1426                           _Tp __pattern, memory_order) {
1427  __a->__lock();
1428  _Tp __old = __a->__a_value;
1429  __a->__a_value ^= __pattern;
1430  __a->__unlock();
1431  return __old;
1432}
1433
1434#ifdef __cpp_lib_atomic_is_always_lock_free
1435
1436template<typename _Tp> struct __cxx_is_always_lock_free {
1437    enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };
1438
1439#else
1440
1441template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
1442// Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
1443template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
1444template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1445template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1446template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1447template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
1448template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
1449template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
1450template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1451template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1452template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1453template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1454template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1455template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1456template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1457template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1458template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1459template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1460
1461#endif //__cpp_lib_atomic_is_always_lock_free
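
// Whether a given specialization ends up lock-free can be observed through the
// public API; the results are target-dependent.  Illustrative sketch only:
//
//     static_assert(std::atomic<int>::is_always_lock_free,    // C++17
//                   "expected on mainstream targets");
//     std::atomic<void*> p{nullptr};
//     bool lock_free_now = p.is_lock_free();                  // runtime query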
1462
1463template <typename _Tp,
1464          typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
1465                                                __cxx_atomic_base_impl<_Tp>,
1466                                                __cxx_atomic_lock_impl<_Tp> >::type>
1467#else
1468template <typename _Tp,
1469          typename _Base = __cxx_atomic_base_impl<_Tp> >
1470#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1471struct __cxx_atomic_impl : public _Base {
1472
1473#if _GNUC_VER >= 501
1474    static_assert(is_trivially_copyable<_Tp>::value,
1475      "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
1476#endif
1477
1478  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT _LIBCPP_DEFAULT
1479  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp value) _NOEXCEPT
1480    : _Base(value) {}
1481};
1482
1483#if defined(__linux__) || (defined(__FreeBSD__) && defined(__mips__))
1484    using __cxx_contention_t = int32_t;
1485#else
1486    using __cxx_contention_t = int64_t;
1487#endif
1488
1489#if _LIBCPP_STD_VER >= 11
1490
1491using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;
1492
1493#ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT
1494
1495_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
1496_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
1497_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
1498_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);
1499
1500_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
1501_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
1502_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
1503_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);
1504
1505template <class _Atp, class _Fn>
1506struct __libcpp_atomic_wait_backoff_impl {
1507    _Atp* __a;
1508    _Fn __test_fn;
1509    _LIBCPP_AVAILABILITY_SYNC
1510    _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
1511    {
1512        if(__elapsed > chrono::microseconds(64))
1513        {
1514            auto const __monitor = __libcpp_atomic_monitor(__a);
1515            if(__test_fn())
1516                return true;
1517            __libcpp_atomic_wait(__a, __monitor);
1518        }
1519        else if(__elapsed > chrono::microseconds(4))
1520            __libcpp_thread_yield();
1521        else
1522            ; // poll
1523        return false;
1524    }
1525};
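
// The backoff policy above escalates in three tiers: for roughly the first
// 4 microseconds the caller simply re-polls, up to about 64 microseconds it
// yields the thread, and beyond that it takes a monitor value, re-checks the
// predicate (to avoid a lost wake-up), and blocks in __libcpp_atomic_wait()
// until notified.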
1526
1527template <class _Atp, class _Fn>
1528_LIBCPP_AVAILABILITY_SYNC
1529_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
1530{
1531    __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
1532    return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
1533}
1534
1535#else // _LIBCPP_HAS_NO_PLATFORM_WAIT
1536
1537template <class _Tp>
1538_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
1539template <class _Tp>
1540_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
1541template <class _Atp, class _Fn>
1542_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
1543{
1544    return __libcpp_thread_poll_with_backoff(__test_fn, __libcpp_timed_backoff_policy());
1545}
1546
1547#endif // _LIBCPP_HAS_NO_PLATFORM_WAIT
1548
1549template <class _Atp, class _Tp>
1550struct __cxx_atomic_wait_test_fn_impl {
1551    _Atp* __a;
1552    _Tp __val;
1553    memory_order __order;
1554    _LIBCPP_INLINE_VISIBILITY bool operator()() const
1555    {
1556        return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
1557    }
1558};
1559
1560template <class _Atp, class _Tp>
1561_LIBCPP_AVAILABILITY_SYNC
1562_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
1563{
1564    __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
1565    return __cxx_atomic_wait(__a, __test_fn);
1566}
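
// The value-based overload above reuses the predicate form: it blocks until a
// fresh load of the atomic no longer compares equal to __val, which is the
// behaviour exposed by the public atomic<T>::wait() members further down.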
1567
1568#endif //_LIBCPP_STD_VER >= 11
1569
1570// general atomic<T>
1571
1572template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
1573struct __atomic_base  // false
1574{
1575    mutable __cxx_atomic_impl<_Tp> __a_;
1576
1577#if defined(__cpp_lib_atomic_is_always_lock_free)
1578  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
1579#endif
1580
1581    _LIBCPP_INLINE_VISIBILITY
1582    bool is_lock_free() const volatile _NOEXCEPT
1583        {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
1584    _LIBCPP_INLINE_VISIBILITY
1585    bool is_lock_free() const _NOEXCEPT
1586        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
1587    _LIBCPP_INLINE_VISIBILITY
1588    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1589      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1590        {__cxx_atomic_store(&__a_, __d, __m);}
1591    _LIBCPP_INLINE_VISIBILITY
1592    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1593      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1594        {__cxx_atomic_store(&__a_, __d, __m);}
1595    _LIBCPP_INLINE_VISIBILITY
1596    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1597      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1598        {return __cxx_atomic_load(&__a_, __m);}
1599    _LIBCPP_INLINE_VISIBILITY
1600    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1601      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1602        {return __cxx_atomic_load(&__a_, __m);}
1603    _LIBCPP_INLINE_VISIBILITY
1604    operator _Tp() const volatile _NOEXCEPT {return load();}
1605    _LIBCPP_INLINE_VISIBILITY
1606    operator _Tp() const _NOEXCEPT          {return load();}
1607    _LIBCPP_INLINE_VISIBILITY
1608    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1609        {return __cxx_atomic_exchange(&__a_, __d, __m);}
1610    _LIBCPP_INLINE_VISIBILITY
1611    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1612        {return __cxx_atomic_exchange(&__a_, __d, __m);}
1613    _LIBCPP_INLINE_VISIBILITY
1614    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1615                               memory_order __s, memory_order __f) volatile _NOEXCEPT
1616      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1617        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1618    _LIBCPP_INLINE_VISIBILITY
1619    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1620                               memory_order __s, memory_order __f) _NOEXCEPT
1621      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1622        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1623    _LIBCPP_INLINE_VISIBILITY
1624    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1625                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
1626      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1627        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1628    _LIBCPP_INLINE_VISIBILITY
1629    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1630                                 memory_order __s, memory_order __f) _NOEXCEPT
1631      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1632        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1633    _LIBCPP_INLINE_VISIBILITY
1634    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1635                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1636        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1637    _LIBCPP_INLINE_VISIBILITY
1638    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1639                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
1640        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1641    _LIBCPP_INLINE_VISIBILITY
1642    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1643                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1644        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1645    _LIBCPP_INLINE_VISIBILITY
1646    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1647                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
1648        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1649
1650    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1651        {__cxx_atomic_wait(&__a_, __v, __m);}
1652    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1653        {__cxx_atomic_wait(&__a_, __v, __m);}
1654    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
1655        {__cxx_atomic_notify_one(&__a_);}
1656    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
1657        {__cxx_atomic_notify_one(&__a_);}
1658    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
1659        {__cxx_atomic_notify_all(&__a_);}
1660    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
1661        {__cxx_atomic_notify_all(&__a_);}
1662
1663    _LIBCPP_INLINE_VISIBILITY
1664    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1665
1666    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
1667    __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
1668
1669#ifndef _LIBCPP_CXX03_LANG
1670    __atomic_base(const __atomic_base&) = delete;
1671    __atomic_base& operator=(const __atomic_base&) = delete;
1672    __atomic_base& operator=(const __atomic_base&) volatile = delete;
1673#else
1674private:
1675    _LIBCPP_INLINE_VISIBILITY
1676    __atomic_base(const __atomic_base&);
1677    _LIBCPP_INLINE_VISIBILITY
1678    __atomic_base& operator=(const __atomic_base&);
1679    _LIBCPP_INLINE_VISIBILITY
1680    __atomic_base& operator=(const __atomic_base&) volatile;
1681#endif
1682};
1683
1684#if defined(__cpp_lib_atomic_is_always_lock_free)
1685template <class _Tp, bool __b>
1686_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
1687#endif
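
// The primary template above supplies the load/store/exchange/compare-exchange
// and wait/notify surface that every std::atomic<T> specialization inherits.
// A minimal illustration of that surface (the names below are illustrative,
// not part of this header):
//
//     struct Point { int x, y; };              // trivially copyable
//     std::atomic<Point> p{Point{0, 0}};
//     p.store(Point{1, 2});                    // memory_order_seq_cst by default
//     Point q = p.load(std::memory_order_acquire);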
1688
1689// atomic<Integral>
1690
1691template <class _Tp>
1692struct __atomic_base<_Tp, true>
1693    : public __atomic_base<_Tp, false>
1694{
1695    typedef __atomic_base<_Tp, false> __base;
1696    _LIBCPP_INLINE_VISIBILITY
1697    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1698    _LIBCPP_INLINE_VISIBILITY
1699    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
1700
1701    _LIBCPP_INLINE_VISIBILITY
1702    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1703        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1704    _LIBCPP_INLINE_VISIBILITY
1705    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1706        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1707    _LIBCPP_INLINE_VISIBILITY
1708    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1709        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1710    _LIBCPP_INLINE_VISIBILITY
1711    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1712        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1713    _LIBCPP_INLINE_VISIBILITY
1714    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1715        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1716    _LIBCPP_INLINE_VISIBILITY
1717    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1718        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1719    _LIBCPP_INLINE_VISIBILITY
1720    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1721        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1722    _LIBCPP_INLINE_VISIBILITY
1723    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1724        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1725    _LIBCPP_INLINE_VISIBILITY
1726    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1727        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1728    _LIBCPP_INLINE_VISIBILITY
1729    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1730        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1731
1732    _LIBCPP_INLINE_VISIBILITY
1733    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
1734    _LIBCPP_INLINE_VISIBILITY
1735    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
1736    _LIBCPP_INLINE_VISIBILITY
1737    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
1738    _LIBCPP_INLINE_VISIBILITY
1739    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
1740    _LIBCPP_INLINE_VISIBILITY
1741    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
1742    _LIBCPP_INLINE_VISIBILITY
1743    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
1744    _LIBCPP_INLINE_VISIBILITY
1745    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
1746    _LIBCPP_INLINE_VISIBILITY
1747    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
1748    _LIBCPP_INLINE_VISIBILITY
1749    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1750    _LIBCPP_INLINE_VISIBILITY
1751    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1752    _LIBCPP_INLINE_VISIBILITY
1753    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1754    _LIBCPP_INLINE_VISIBILITY
1755    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1756    _LIBCPP_INLINE_VISIBILITY
1757    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
1758    _LIBCPP_INLINE_VISIBILITY
1759    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
1760    _LIBCPP_INLINE_VISIBILITY
1761    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
1762    _LIBCPP_INLINE_VISIBILITY
1763    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
1764    _LIBCPP_INLINE_VISIBILITY
1765    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1766    _LIBCPP_INLINE_VISIBILITY
1767    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
1768};
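
// For the integral specialization, each fetch_* operation returns the value
// held before the operation, while the compound-assignment operators return
// the new value (they are defined above as fetch_op(arg) op arg).  For
// illustration:
//
//     std::atomic<unsigned> mask{0x0f};
//     unsigned old_bits = mask.fetch_or(0xf0u);   // old_bits == 0x0f, mask == 0xff
//     unsigned new_bits = (mask &= 0x3cu);        // new_bits == 0x3c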
1769
1770// atomic<T>
1771
1772template <class _Tp>
1773struct atomic
1774    : public __atomic_base<_Tp>
1775{
1776    typedef __atomic_base<_Tp> __base;
1777    typedef _Tp value_type;
1778    _LIBCPP_INLINE_VISIBILITY
1779    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1780    _LIBCPP_INLINE_VISIBILITY
1781    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1782
1783    _LIBCPP_INLINE_VISIBILITY
1784    _Tp operator=(_Tp __d) volatile _NOEXCEPT
1785        {__base::store(__d); return __d;}
1786    _LIBCPP_INLINE_VISIBILITY
1787    _Tp operator=(_Tp __d) _NOEXCEPT
1788        {__base::store(__d); return __d;}
1789};
1790
1791// atomic<T*>
1792
1793template <class _Tp>
1794struct atomic<_Tp*>
1795    : public __atomic_base<_Tp*>
1796{
1797    typedef __atomic_base<_Tp*> __base;
1798    typedef _Tp* value_type;
1799    _LIBCPP_INLINE_VISIBILITY
1800    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1801    _LIBCPP_INLINE_VISIBILITY
1802    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1803
1804    _LIBCPP_INLINE_VISIBILITY
1805    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1806        {__base::store(__d); return __d;}
1807    _LIBCPP_INLINE_VISIBILITY
1808    _Tp* operator=(_Tp* __d) _NOEXCEPT
1809        {__base::store(__d); return __d;}
1810
1811    _LIBCPP_INLINE_VISIBILITY
1812    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1813                                                                        volatile _NOEXCEPT
1814        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1815    _LIBCPP_INLINE_VISIBILITY
1816    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1817        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1818    _LIBCPP_INLINE_VISIBILITY
1819    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1820                                                                        volatile _NOEXCEPT
1821        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1822    _LIBCPP_INLINE_VISIBILITY
1823    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1824        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1825
1826    _LIBCPP_INLINE_VISIBILITY
1827    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
1828    _LIBCPP_INLINE_VISIBILITY
1829    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
1830    _LIBCPP_INLINE_VISIBILITY
1831    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
1832    _LIBCPP_INLINE_VISIBILITY
1833    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
1834    _LIBCPP_INLINE_VISIBILITY
1835    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
1836    _LIBCPP_INLINE_VISIBILITY
1837    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
1838    _LIBCPP_INLINE_VISIBILITY
1839    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
1840    _LIBCPP_INLINE_VISIBILITY
1841    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
1842    _LIBCPP_INLINE_VISIBILITY
1843    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1844    _LIBCPP_INLINE_VISIBILITY
1845    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1846    _LIBCPP_INLINE_VISIBILITY
1847    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1848    _LIBCPP_INLINE_VISIBILITY
1849    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1850};
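
// Arithmetic on atomic<T*> is performed in units of T, mirroring ordinary
// pointer arithmetic.  Illustrative sketch:
//
//     int buf[4] = {0, 1, 2, 3};
//     std::atomic<int*> p{buf};
//     int* prev = p.fetch_add(2);   // prev == buf, p now points at buf + 2
//     ++p;                          // p now points at buf + 3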
1851
1852// atomic_is_lock_free
1853
1854template <class _Tp>
1855_LIBCPP_INLINE_VISIBILITY
1856bool
1857atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1858{
1859    return __o->is_lock_free();
1860}
1861
1862template <class _Tp>
1863_LIBCPP_INLINE_VISIBILITY
1864bool
1865atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1866{
1867    return __o->is_lock_free();
1868}
1869
1870// atomic_init
1871
1872template <class _Tp>
1873_LIBCPP_INLINE_VISIBILITY
1874void
1875atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1876{
1877    __cxx_atomic_init(&__o->__a_, __d);
1878}
1879
1880template <class _Tp>
1881_LIBCPP_INLINE_VISIBILITY
1882void
1883atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1884{
1885    __cxx_atomic_init(&__o->__a_, __d);
1886}
1887
1888// atomic_store
1889
1890template <class _Tp>
1891_LIBCPP_INLINE_VISIBILITY
1892void
1893atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1894{
1895    __o->store(__d);
1896}
1897
1898template <class _Tp>
1899_LIBCPP_INLINE_VISIBILITY
1900void
1901atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1902{
1903    __o->store(__d);
1904}
1905
1906// atomic_store_explicit
1907
1908template <class _Tp>
1909_LIBCPP_INLINE_VISIBILITY
1910void
1911atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1912  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1913{
1914    __o->store(__d, __m);
1915}
1916
1917template <class _Tp>
1918_LIBCPP_INLINE_VISIBILITY
1919void
1920atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1921  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1922{
1923    __o->store(__d, __m);
1924}
1925
1926// atomic_load
1927
1928template <class _Tp>
1929_LIBCPP_INLINE_VISIBILITY
1930_Tp
1931atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1932{
1933    return __o->load();
1934}
1935
1936template <class _Tp>
1937_LIBCPP_INLINE_VISIBILITY
1938_Tp
1939atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1940{
1941    return __o->load();
1942}
1943
1944// atomic_load_explicit
1945
1946template <class _Tp>
1947_LIBCPP_INLINE_VISIBILITY
1948_Tp
1949atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1950  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1951{
1952    return __o->load(__m);
1953}
1954
1955template <class _Tp>
1956_LIBCPP_INLINE_VISIBILITY
1957_Tp
1958atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1959  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1960{
1961    return __o->load(__m);
1962}
1963
1964// atomic_exchange
1965
1966template <class _Tp>
1967_LIBCPP_INLINE_VISIBILITY
1968_Tp
1969atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1970{
1971    return __o->exchange(__d);
1972}
1973
1974template <class _Tp>
1975_LIBCPP_INLINE_VISIBILITY
1976_Tp
1977atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1978{
1979    return __o->exchange(__d);
1980}
1981
1982// atomic_exchange_explicit
1983
1984template <class _Tp>
1985_LIBCPP_INLINE_VISIBILITY
1986_Tp
1987atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1988{
1989    return __o->exchange(__d, __m);
1990}
1991
1992template <class _Tp>
1993_LIBCPP_INLINE_VISIBILITY
1994_Tp
1995atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1996{
1997    return __o->exchange(__d, __m);
1998}
1999
2000// atomic_compare_exchange_weak
2001
2002template <class _Tp>
2003_LIBCPP_INLINE_VISIBILITY
2004bool
2005atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
2006{
2007    return __o->compare_exchange_weak(*__e, __d);
2008}
2009
2010template <class _Tp>
2011_LIBCPP_INLINE_VISIBILITY
2012bool
2013atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
2014{
2015    return __o->compare_exchange_weak(*__e, __d);
2016}
2017
2018// atomic_compare_exchange_strong
2019
2020template <class _Tp>
2021_LIBCPP_INLINE_VISIBILITY
2022bool
2023atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
2024{
2025    return __o->compare_exchange_strong(*__e, __d);
2026}
2027
2028template <class _Tp>
2029_LIBCPP_INLINE_VISIBILITY
2030bool
2031atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
2032{
2033    return __o->compare_exchange_strong(*__e, __d);
2034}
2035
2036// atomic_compare_exchange_weak_explicit
2037
2038template <class _Tp>
2039_LIBCPP_INLINE_VISIBILITY
2040bool
2041atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
2042                                      _Tp __d,
2043                                      memory_order __s, memory_order __f) _NOEXCEPT
2044  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2045{
2046    return __o->compare_exchange_weak(*__e, __d, __s, __f);
2047}
2048
2049template <class _Tp>
2050_LIBCPP_INLINE_VISIBILITY
2051bool
2052atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
2053                                      memory_order __s, memory_order __f) _NOEXCEPT
2054  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2055{
2056    return __o->compare_exchange_weak(*__e, __d, __s, __f);
2057}
2058
2059// atomic_compare_exchange_strong_explicit
2060
2061template <class _Tp>
2062_LIBCPP_INLINE_VISIBILITY
2063bool
2064atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
2065                                        _Tp* __e, _Tp __d,
2066                                        memory_order __s, memory_order __f) _NOEXCEPT
2067  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2068{
2069    return __o->compare_exchange_strong(*__e, __d, __s, __f);
2070}
2071
2072template <class _Tp>
2073_LIBCPP_INLINE_VISIBILITY
2074bool
2075atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
2076                                        _Tp __d,
2077                                        memory_order __s, memory_order __f) _NOEXCEPT
2078  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2079{
2080    return __o->compare_exchange_strong(*__e, __d, __s, __f);
2081}
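
// The free functions mirror the member forms; the expected value is passed by
// pointer and is overwritten with the observed value when the exchange fails.
// A C-style retry loop might look like this (illustrative only):
//
//     std::atomic<long> total{0};
//     long expected = std::atomic_load(&total);
//     while (!std::atomic_compare_exchange_weak(&total, &expected, expected + 1))
//         ; // expected now holds the freshly observed value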
2082
2083// atomic_wait
2084
2085template <class _Tp>
2086_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2087void atomic_wait(const volatile atomic<_Tp>* __o,
2088                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2089{
2090    return __o->wait(__v);
2091}
2092
2093template <class _Tp>
2094_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2095void atomic_wait(const atomic<_Tp>* __o,
2096                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2097{
2098    return __o->wait(__v);
2099}
2100
2101// atomic_wait_explicit
2102
2103template <class _Tp>
2104_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2105void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
2106                          typename atomic<_Tp>::value_type __v,
2107                          memory_order __m) _NOEXCEPT
2108  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2109{
2110    return __o->wait(__v, __m);
2111}
2112
2113template <class _Tp>
2114_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2115void atomic_wait_explicit(const atomic<_Tp>* __o,
2116                          typename atomic<_Tp>::value_type __v,
2117                          memory_order __m) _NOEXCEPT
2118  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2119{
2120    return __o->wait(__v, __m);
2121}
2122
2123// atomic_notify_one
2124
2125template <class _Tp>
2126_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2127void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
2128{
2129    __o->notify_one();
2130}
2131template <class _Tp>
2132_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2133void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
2134{
2135    __o->notify_one();
2136}
2137
2138// atomic_notify_all
2139
2140template <class _Tp>
2141_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2142void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
2143{
2144    __o->notify_all();
2145}
2146template <class _Tp>
2147_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2148void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
2149{
2150    __o->notify_all();
2151}
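
// Together, the wait/notify free functions behave like a futex-style protocol
// on top of an atomic object: a waiter blocks while the observed value is
// unchanged, and a writer stores a new value and then notifies.  Illustrative
// sketch (two threads assumed):
//
//     std::atomic<int> ready{0};
//
//     // waiting thread
//     std::atomic_wait(&ready, 0);          // blocks while ready == 0
//
//     // signalling thread
//     std::atomic_store(&ready, 1);
//     std::atomic_notify_one(&ready);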
2152
2153// atomic_fetch_add
2154
2155template <class _Tp>
2156_LIBCPP_INLINE_VISIBILITY
2157typename enable_if
2158<
2159    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2160    _Tp
2161>::type
2162atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2163{
2164    return __o->fetch_add(__op);
2165}
2166
2167template <class _Tp>
2168_LIBCPP_INLINE_VISIBILITY
2169typename enable_if
2170<
2171    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2172    _Tp
2173>::type
2174atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2175{
2176    return __o->fetch_add(__op);
2177}
2178
2179template <class _Tp>
2180_LIBCPP_INLINE_VISIBILITY
2181_Tp*
2182atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2183{
2184    return __o->fetch_add(__op);
2185}
2186
2187template <class _Tp>
2188_LIBCPP_INLINE_VISIBILITY
2189_Tp*
2190atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2191{
2192    return __o->fetch_add(__op);
2193}
2194
2195// atomic_fetch_add_explicit
2196
2197template <class _Tp>
2198_LIBCPP_INLINE_VISIBILITY
2199typename enable_if
2200<
2201    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2202    _Tp
2203>::type
2204atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2205{
2206    return __o->fetch_add(__op, __m);
2207}
2208
2209template <class _Tp>
2210_LIBCPP_INLINE_VISIBILITY
2211typename enable_if
2212<
2213    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2214    _Tp
2215>::type
2216atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2217{
2218    return __o->fetch_add(__op, __m);
2219}
2220
2221template <class _Tp>
2222_LIBCPP_INLINE_VISIBILITY
2223_Tp*
2224atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
2225                          memory_order __m) _NOEXCEPT
2226{
2227    return __o->fetch_add(__op, __m);
2228}
2229
2230template <class _Tp>
2231_LIBCPP_INLINE_VISIBILITY
2232_Tp*
2233atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
2234{
2235    return __o->fetch_add(__op, __m);
2236}
2237
2238// atomic_fetch_sub
2239
2240template <class _Tp>
2241_LIBCPP_INLINE_VISIBILITY
2242typename enable_if
2243<
2244    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2245    _Tp
2246>::type
2247atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2248{
2249    return __o->fetch_sub(__op);
2250}
2251
2252template <class _Tp>
2253_LIBCPP_INLINE_VISIBILITY
2254typename enable_if
2255<
2256    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2257    _Tp
2258>::type
2259atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2260{
2261    return __o->fetch_sub(__op);
2262}
2263
2264template <class _Tp>
2265_LIBCPP_INLINE_VISIBILITY
2266_Tp*
2267atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2268{
2269    return __o->fetch_sub(__op);
2270}
2271
2272template <class _Tp>
2273_LIBCPP_INLINE_VISIBILITY
2274_Tp*
2275atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2276{
2277    return __o->fetch_sub(__op);
2278}
2279
2280// atomic_fetch_sub_explicit
2281
2282template <class _Tp>
2283_LIBCPP_INLINE_VISIBILITY
2284typename enable_if
2285<
2286    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2287    _Tp
2288>::type
2289atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2290{
2291    return __o->fetch_sub(__op, __m);
2292}
2293
2294template <class _Tp>
2295_LIBCPP_INLINE_VISIBILITY
2296typename enable_if
2297<
2298    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2299    _Tp
2300>::type
2301atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2302{
2303    return __o->fetch_sub(__op, __m);
2304}
2305
2306template <class _Tp>
2307_LIBCPP_INLINE_VISIBILITY
2308_Tp*
2309atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
2310                          memory_order __m) _NOEXCEPT
2311{
2312    return __o->fetch_sub(__op, __m);
2313}
2314
2315template <class _Tp>
2316_LIBCPP_INLINE_VISIBILITY
2317_Tp*
2318atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
2319{
2320    return __o->fetch_sub(__op, __m);
2321}
2322
2323// atomic_fetch_and
2324
2325template <class _Tp>
2326_LIBCPP_INLINE_VISIBILITY
2327typename enable_if
2328<
2329    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2330    _Tp
2331>::type
2332atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2333{
2334    return __o->fetch_and(__op);
2335}
2336
2337template <class _Tp>
2338_LIBCPP_INLINE_VISIBILITY
2339typename enable_if
2340<
2341    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2342    _Tp
2343>::type
2344atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2345{
2346    return __o->fetch_and(__op);
2347}
2348
2349// atomic_fetch_and_explicit
2350
2351template <class _Tp>
2352_LIBCPP_INLINE_VISIBILITY
2353typename enable_if
2354<
2355    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2356    _Tp
2357>::type
2358atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2359{
2360    return __o->fetch_and(__op, __m);
2361}
2362
2363template <class _Tp>
2364_LIBCPP_INLINE_VISIBILITY
2365typename enable_if
2366<
2367    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2368    _Tp
2369>::type
2370atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2371{
2372    return __o->fetch_and(__op, __m);
2373}
2374
2375// atomic_fetch_or
2376
2377template <class _Tp>
2378_LIBCPP_INLINE_VISIBILITY
2379typename enable_if
2380<
2381    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2382    _Tp
2383>::type
2384atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2385{
2386    return __o->fetch_or(__op);
2387}
2388
2389template <class _Tp>
2390_LIBCPP_INLINE_VISIBILITY
2391typename enable_if
2392<
2393    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2394    _Tp
2395>::type
2396atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2397{
2398    return __o->fetch_or(__op);
2399}
2400
2401// atomic_fetch_or_explicit
2402
2403template <class _Tp>
2404_LIBCPP_INLINE_VISIBILITY
2405typename enable_if
2406<
2407    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2408    _Tp
2409>::type
2410atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2411{
2412    return __o->fetch_or(__op, __m);
2413}
2414
2415template <class _Tp>
2416_LIBCPP_INLINE_VISIBILITY
2417typename enable_if
2418<
2419    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2420    _Tp
2421>::type
2422atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2423{
2424    return __o->fetch_or(__op, __m);
2425}
2426
2427// atomic_fetch_xor
2428
2429template <class _Tp>
2430_LIBCPP_INLINE_VISIBILITY
2431typename enable_if
2432<
2433    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2434    _Tp
2435>::type
2436atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2437{
2438    return __o->fetch_xor(__op);
2439}
2440
2441template <class _Tp>
2442_LIBCPP_INLINE_VISIBILITY
2443typename enable_if
2444<
2445    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2446    _Tp
2447>::type
2448atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2449{
2450    return __o->fetch_xor(__op);
2451}
2452
2453// atomic_fetch_xor_explicit
2454
2455template <class _Tp>
2456_LIBCPP_INLINE_VISIBILITY
2457typename enable_if
2458<
2459    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2460    _Tp
2461>::type
2462atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2463{
2464    return __o->fetch_xor(__op, __m);
2465}
2466
2467template <class _Tp>
2468_LIBCPP_INLINE_VISIBILITY
2469typename enable_if
2470<
2471    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2472    _Tp
2473>::type
2474atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2475{
2476    return __o->fetch_xor(__op, __m);
2477}
2478
2479// flag type and operations
2480
2481typedef struct atomic_flag
2482{
2483    __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;
2484
2485    _LIBCPP_INLINE_VISIBILITY
2486    bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2487        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
2488    _LIBCPP_INLINE_VISIBILITY
2489    bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2490        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
2491
2492    _LIBCPP_INLINE_VISIBILITY
2493    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2494        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2495    _LIBCPP_INLINE_VISIBILITY
2496    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2497        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2498    _LIBCPP_INLINE_VISIBILITY
2499    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2500        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2501    _LIBCPP_INLINE_VISIBILITY
2502    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2503        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2504
2505    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2506    void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2507        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
2508    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2509    void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2510        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
2511    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2512    void notify_one() volatile _NOEXCEPT
2513        {__cxx_atomic_notify_one(&__a_);}
2514    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2515    void notify_one() _NOEXCEPT
2516        {__cxx_atomic_notify_one(&__a_);}
2517    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2518    void notify_all() volatile _NOEXCEPT
2519        {__cxx_atomic_notify_all(&__a_);}
2520    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2521    void notify_all() _NOEXCEPT
2522        {__cxx_atomic_notify_all(&__a_);}
2523
2524    _LIBCPP_INLINE_VISIBILITY
2525    atomic_flag() _NOEXCEPT _LIBCPP_DEFAULT
2526
2527    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
2528    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION
2529
2530#ifndef _LIBCPP_CXX03_LANG
2531    atomic_flag(const atomic_flag&) = delete;
2532    atomic_flag& operator=(const atomic_flag&) = delete;
2533    atomic_flag& operator=(const atomic_flag&) volatile = delete;
2534#else
2535private:
2536    _LIBCPP_INLINE_VISIBILITY
2537    atomic_flag(const atomic_flag&);
2538    _LIBCPP_INLINE_VISIBILITY
2539    atomic_flag& operator=(const atomic_flag&);
2540    _LIBCPP_INLINE_VISIBILITY
2541    atomic_flag& operator=(const atomic_flag&) volatile;
2542#endif
2543} atomic_flag;
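
// atomic_flag is the one atomic type guaranteed to be lock-free; a classic use
// is a test-and-set spin lock.  Illustrative sketch only:
//
//     std::atomic_flag lock = ATOMIC_FLAG_INIT;
//     void enter() { while (lock.test_and_set(std::memory_order_acquire)) {} }
//     void leave() { lock.clear(std::memory_order_release); }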
2544
2545
2546inline _LIBCPP_INLINE_VISIBILITY
2547bool
2548atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
2549{
2550    return __o->test();
2551}
2552
2553inline _LIBCPP_INLINE_VISIBILITY
2554bool
2555atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
2556{
2557    return __o->test();
2558}
2559
2560inline _LIBCPP_INLINE_VISIBILITY
2561bool
2562atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2563{
2564    return __o->test(__m);
2565}
2566
2567inline _LIBCPP_INLINE_VISIBILITY
2568bool
2569atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
2570{
2571    return __o->test(__m);
2572}
2573
2574inline _LIBCPP_INLINE_VISIBILITY
2575bool
2576atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
2577{
2578    return __o->test_and_set();
2579}
2580
2581inline _LIBCPP_INLINE_VISIBILITY
2582bool
2583atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
2584{
2585    return __o->test_and_set();
2586}
2587
2588inline _LIBCPP_INLINE_VISIBILITY
2589bool
2590atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2591{
2592    return __o->test_and_set(__m);
2593}
2594
2595inline _LIBCPP_INLINE_VISIBILITY
2596bool
2597atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2598{
2599    return __o->test_and_set(__m);
2600}
2601
2602inline _LIBCPP_INLINE_VISIBILITY
2603void
2604atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
2605{
2606    __o->clear();
2607}
2608
2609inline _LIBCPP_INLINE_VISIBILITY
2610void
2611atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
2612{
2613    __o->clear();
2614}
2615
2616inline _LIBCPP_INLINE_VISIBILITY
2617void
2618atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2619{
2620    __o->clear(__m);
2621}
2622
2623inline _LIBCPP_INLINE_VISIBILITY
2624void
2625atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2626{
2627    __o->clear(__m);
2628}
2629
2630inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2631void
2632atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
2633{
2634    __o->wait(__v);
2635}
2636
2637inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2638void
2639atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
2640{
2641    __o->wait(__v);
2642}
2643
2644inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2645void
2646atomic_flag_wait_explicit(const volatile atomic_flag* __o,
2647                          bool __v, memory_order __m) _NOEXCEPT
2648{
2649    __o->wait(__v, __m);
2650}
2651
2652inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2653void
2654atomic_flag_wait_explicit(const atomic_flag* __o,
2655                          bool __v, memory_order __m) _NOEXCEPT
2656{
2657    __o->wait(__v, __m);
2658}
2659
2660inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2661void
2662atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
2663{
2664    __o->notify_one();
2665}
2666
2667inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2668void
2669atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
2670{
2671    __o->notify_one();
2672}
2673
2674inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2675void
2676atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
2677{
2678    __o->notify_all();
2679}
2680
2681inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2682void
2683atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
2684{
2685    __o->notify_all();
2686}
2687
2688// fences
2689
2690inline _LIBCPP_INLINE_VISIBILITY
2691void
2692atomic_thread_fence(memory_order __m) _NOEXCEPT
2693{
2694    __cxx_atomic_thread_fence(__m);
2695}
2696
2697inline _LIBCPP_INLINE_VISIBILITY
2698void
2699atomic_signal_fence(memory_order __m) _NOEXCEPT
2700{
2701    __cxx_atomic_signal_fence(__m);
2702}
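
// A fence orders surrounding atomic operations without being tied to a single
// object: a release fence before a relaxed store pairs with an acquire fence
// after a relaxed load of the same variable.  Illustrative sketch:
//
//     int data = 0;
//     std::atomic<bool> flag{false};
//
//     // producer thread
//     data = 42;
//     std::atomic_thread_fence(std::memory_order_release);
//     flag.store(true, std::memory_order_relaxed);
//
//     // consumer thread
//     while (!flag.load(std::memory_order_relaxed)) {}
//     std::atomic_thread_fence(std::memory_order_acquire);
//     int r = data;                          // guaranteed to read 42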
2703
2704// Atomics for standard typedef types
2705
2706typedef atomic<bool>               atomic_bool;
2707typedef atomic<char>               atomic_char;
2708typedef atomic<signed char>        atomic_schar;
2709typedef atomic<unsigned char>      atomic_uchar;
2710typedef atomic<short>              atomic_short;
2711typedef atomic<unsigned short>     atomic_ushort;
2712typedef atomic<int>                atomic_int;
2713typedef atomic<unsigned int>       atomic_uint;
2714typedef atomic<long>               atomic_long;
2715typedef atomic<unsigned long>      atomic_ulong;
2716typedef atomic<long long>          atomic_llong;
2717typedef atomic<unsigned long long> atomic_ullong;
2718typedef atomic<char16_t>           atomic_char16_t;
2719typedef atomic<char32_t>           atomic_char32_t;
2720typedef atomic<wchar_t>            atomic_wchar_t;
2721
2722typedef atomic<int_least8_t>   atomic_int_least8_t;
2723typedef atomic<uint_least8_t>  atomic_uint_least8_t;
2724typedef atomic<int_least16_t>  atomic_int_least16_t;
2725typedef atomic<uint_least16_t> atomic_uint_least16_t;
2726typedef atomic<int_least32_t>  atomic_int_least32_t;
2727typedef atomic<uint_least32_t> atomic_uint_least32_t;
2728typedef atomic<int_least64_t>  atomic_int_least64_t;
2729typedef atomic<uint_least64_t> atomic_uint_least64_t;
2730
2731typedef atomic<int_fast8_t>   atomic_int_fast8_t;
2732typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
2733typedef atomic<int_fast16_t>  atomic_int_fast16_t;
2734typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
2735typedef atomic<int_fast32_t>  atomic_int_fast32_t;
2736typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
2737typedef atomic<int_fast64_t>  atomic_int_fast64_t;
2738typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
2739
2740typedef atomic< int8_t>  atomic_int8_t;
2741typedef atomic<uint8_t>  atomic_uint8_t;
2742typedef atomic< int16_t> atomic_int16_t;
2743typedef atomic<uint16_t> atomic_uint16_t;
2744typedef atomic< int32_t> atomic_int32_t;
2745typedef atomic<uint32_t> atomic_uint32_t;
2746typedef atomic< int64_t> atomic_int64_t;
2747typedef atomic<uint64_t> atomic_uint64_t;
2748
2749typedef atomic<intptr_t>  atomic_intptr_t;
2750typedef atomic<uintptr_t> atomic_uintptr_t;
2751typedef atomic<size_t>    atomic_size_t;
2752typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
2753typedef atomic<intmax_t>  atomic_intmax_t;
2754typedef atomic<uintmax_t> atomic_uintmax_t;
2755
2756// atomic_*_lock_free : prefer the contention type first, then the largest lock-free type
2757
2758#ifdef __cpp_lib_atomic_is_always_lock_free
2759# define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0)
2760#else
2761# define _LIBCPP_CONTENTION_LOCK_FREE false
2762#endif
2763
2764#if ATOMIC_LLONG_LOCK_FREE == 2
2765typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type          __libcpp_signed_lock_free;
2766typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
2767#elif ATOMIC_INT_LOCK_FREE == 2
2768typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type                __libcpp_signed_lock_free;
2769typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type       __libcpp_unsigned_lock_free;
2770#elif ATOMIC_SHORT_LOCK_FREE == 2
2771typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type              __libcpp_signed_lock_free;
2772typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type     __libcpp_unsigned_lock_free;
2773#elif ATOMIC_CHAR_LOCK_FREE == 2
2774typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type               __libcpp_signed_lock_free;
2775typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type      __libcpp_unsigned_lock_free;
2776#else
2777    // No signed/unsigned lock-free types
2778#endif
2779
2780typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free;
2781typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
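
// atomic_signed_lock_free and atomic_unsigned_lock_free (C++20) are intended
// to be integral atomics that are both lock-free and efficient to wait on,
// which is why the selection above prefers the contention type used by the
// wait/notify machinery before falling back to the widest lock-free integer.
// Illustrative use:
//
//     std::atomic_unsigned_lock_free generation{0};
//     generation.wait(0);                    // block while the value is still 0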
2782
2783#define ATOMIC_FLAG_INIT {false}
2784#define ATOMIC_VAR_INIT(__v) {__v}
2785
2786_LIBCPP_END_NAMESPACE_STD
2787
2788#endif  // _LIBCPP_ATOMIC
2789