// -*- C++ -*-
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP_ATOMIC
#define _LIBCPP_ATOMIC

/*
    atomic synopsis

namespace std
{

// feature test macro [version.syn]

#define __cpp_lib_atomic_is_always_lock_free
#define __cpp_lib_atomic_flag_test
#define __cpp_lib_atomic_lock_free_type_aliases
#define __cpp_lib_atomic_wait

 // order and consistency

 enum memory_order: unspecified // enum class in C++20
 {
    relaxed,
    consume, // load-consume
    acquire, // load-acquire
    release, // store-release
    acq_rel, // store-release load-acquire
    seq_cst  // store-release load-acquire
 };

 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
 inline constexpr auto memory_order_consume = memory_order::consume;
 inline constexpr auto memory_order_acquire = memory_order::acquire;
 inline constexpr auto memory_order_release = memory_order::release;
 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

template <class T> T kill_dependency(T y) noexcept;

// lock-free property

#define ATOMIC_BOOL_LOCK_FREE unspecified
#define ATOMIC_CHAR_LOCK_FREE unspecified
#define ATOMIC_CHAR8_T_LOCK_FREE unspecified // C++20
#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
#define ATOMIC_SHORT_LOCK_FREE unspecified
#define ATOMIC_INT_LOCK_FREE unspecified
#define ATOMIC_LONG_LOCK_FREE unspecified
#define ATOMIC_LLONG_LOCK_FREE unspecified
#define ATOMIC_POINTER_LOCK_FREE unspecified

template <class T>
struct atomic
{
    using value_type = T;

    static constexpr bool is_always_lock_free;
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

    atomic() noexcept = default; // until C++20
    constexpr atomic() noexcept(is_nothrow_default_constructible_v<T>); // since C++20
    constexpr atomic(T desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T() const volatile noexcept;
    operator T() const noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
    T operator=(T) volatile noexcept;
    T operator=(T) noexcept;

    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(T, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
};

template <>
struct atomic<integral>
{
    using value_type = integral;
    using difference_type = value_type;

    static constexpr bool is_always_lock_free;
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

    atomic() noexcept = default;
    constexpr atomic(integral desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    integral load(memory_order m = memory_order_seq_cst) const noexcept;
    operator integral() const volatile noexcept;
    operator integral() const noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    integral operator=(integral desr) volatile noexcept;
    integral operator=(integral desr) noexcept;

    integral exchange(integral desr,
                      memory_order m = memory_order_seq_cst) volatile noexcept;
    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;

    integral operator++(int) volatile noexcept;
    integral operator++(int) noexcept;
    integral operator--(int) volatile noexcept;
    integral operator--(int) noexcept;
    integral operator++() volatile noexcept;
    integral operator++() noexcept;
    integral operator--() volatile noexcept;
    integral operator--() noexcept;
    integral operator+=(integral op) volatile noexcept;
    integral operator+=(integral op) noexcept;
    integral operator-=(integral op) volatile noexcept;
    integral operator-=(integral op) noexcept;
    integral operator&=(integral op) volatile noexcept;
    integral operator&=(integral op) noexcept;
    integral operator|=(integral op) volatile noexcept;
    integral operator|=(integral op) noexcept;
    integral operator^=(integral op) volatile noexcept;
    integral operator^=(integral op) noexcept;

    void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
};

template <class T>
struct atomic<T*>
{
    using value_type = T*;
    using difference_type = ptrdiff_t;

    static constexpr bool is_always_lock_free;
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

    atomic() noexcept = default; // until C++20
    constexpr atomic() noexcept; // since C++20
    constexpr atomic(T* desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T* load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T*() const volatile noexcept;
    operator T*() const noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    T* operator=(T*) volatile noexcept;
    T* operator=(T*) noexcept;

    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order m = memory_order_seq_cst) noexcept;
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;

    T* operator++(int) volatile noexcept;
    T* operator++(int) noexcept;
    T* operator--(int) volatile noexcept;
    T* operator--(int) noexcept;
    T* operator++() volatile noexcept;
    T* operator++() noexcept;
    T* operator--() volatile noexcept;
    T* operator--() noexcept;
    T* operator+=(ptrdiff_t op) volatile noexcept;
    T* operator+=(ptrdiff_t op) noexcept;
    T* operator-=(ptrdiff_t op) volatile noexcept;
    T* operator-=(ptrdiff_t op) noexcept;

    void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
};


// [atomics.nonmembers], non-member functions
template<class T>
  bool atomic_is_lock_free(const volatile atomic<T>*) noexcept;
template<class T>
  bool atomic_is_lock_free(const atomic<T>*) noexcept;
template<class T>
  void atomic_store(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  void atomic_store(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  void atomic_store_explicit(volatile atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  void atomic_store_explicit(atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  T atomic_load(const volatile atomic<T>*) noexcept;
template<class T>
  T atomic_load(const atomic<T>*) noexcept;
template<class T>
  T atomic_load_explicit(const volatile atomic<T>*, memory_order) noexcept;
template<class T>
  T atomic_load_explicit(const atomic<T>*, memory_order) noexcept;
template<class T>
  T atomic_exchange(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_exchange(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_exchange_explicit(volatile atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  T atomic_exchange_explicit(atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  bool atomic_compare_exchange_weak(volatile atomic<T>*, atomic<T>::value_type*,
                                    atomic<T>::value_type) noexcept;
template<class T>
  bool atomic_compare_exchange_weak(atomic<T>*, atomic<T>::value_type*,
                                    atomic<T>::value_type) noexcept;
template<class T>
  bool atomic_compare_exchange_strong(volatile atomic<T>*, atomic<T>::value_type*,
                                      atomic<T>::value_type) noexcept;
template<class T>
  bool atomic_compare_exchange_strong(atomic<T>*, atomic<T>::value_type*,
                                      atomic<T>::value_type) noexcept;
template<class T>
  bool atomic_compare_exchange_weak_explicit(volatile atomic<T>*, atomic<T>::value_type*,
                                             atomic<T>::value_type,
                                             memory_order, memory_order) noexcept;
template<class T>
  bool atomic_compare_exchange_weak_explicit(atomic<T>*, atomic<T>::value_type*,
                                             atomic<T>::value_type,
                                             memory_order, memory_order) noexcept;
template<class T>
  bool atomic_compare_exchange_strong_explicit(volatile atomic<T>*, atomic<T>::value_type*,
                                               atomic<T>::value_type,
                                               memory_order, memory_order) noexcept;
template<class T>
  bool atomic_compare_exchange_strong_explicit(atomic<T>*, atomic<T>::value_type*,
                                               atomic<T>::value_type,
                                               memory_order, memory_order) noexcept;
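
// Illustrative note (not part of the synopsis): a typical retry loop using the
// two-memory-order compare_exchange overloads above. The failure order must
// not be memory_order_release or memory_order_acq_rel.
//
//   std::atomic<int> counter{0};
//   int expected = counter.load(std::memory_order_relaxed);
//   while (!counter.compare_exchange_weak(expected, expected + 1,
//                                         std::memory_order_acq_rel,
//                                         std::memory_order_relaxed)) {
//     // on failure, 'expected' has been reloaded with the current value
//   }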

template<class T>
  T atomic_fetch_add(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
  T atomic_fetch_add(atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
  T atomic_fetch_add_explicit(volatile atomic<T>*, atomic<T>::difference_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_add_explicit(atomic<T>*, atomic<T>::difference_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_sub(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
  T atomic_fetch_sub(atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
  T atomic_fetch_sub_explicit(volatile atomic<T>*, atomic<T>::difference_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_sub_explicit(atomic<T>*, atomic<T>::difference_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_and(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_and(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_and_explicit(volatile atomic<T>*, atomic<T>::value_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_and_explicit(atomic<T>*, atomic<T>::value_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_or(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_or(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_or_explicit(volatile atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  T atomic_fetch_or_explicit(atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  T atomic_fetch_xor(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_xor(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_xor_explicit(volatile atomic<T>*, atomic<T>::value_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_xor_explicit(atomic<T>*, atomic<T>::value_type,
                              memory_order) noexcept;

template<class T>
  void atomic_wait(const volatile atomic<T>*, atomic<T>::value_type);
template<class T>
  void atomic_wait(const atomic<T>*, atomic<T>::value_type);
template<class T>
  void atomic_wait_explicit(const volatile atomic<T>*, atomic<T>::value_type,
                            memory_order);
template<class T>
  void atomic_wait_explicit(const atomic<T>*, atomic<T>::value_type,
                            memory_order);
template<class T>
  void atomic_notify_one(volatile atomic<T>*);
template<class T>
  void atomic_notify_one(atomic<T>*);
template<class T>
  void atomic_notify_all(volatile atomic<T>*);
template<class T>
  void atomic_notify_all(atomic<T>*);

// Atomics for standard typedef types

typedef atomic<bool> atomic_bool;
typedef atomic<char> atomic_char;
typedef atomic<signed char> atomic_schar;
typedef atomic<unsigned char> atomic_uchar;
typedef atomic<short> atomic_short;
typedef atomic<unsigned short> atomic_ushort;
typedef atomic<int> atomic_int;
typedef atomic<unsigned int> atomic_uint;
typedef atomic<long> atomic_long;
typedef atomic<unsigned long> atomic_ulong;
typedef atomic<long long> atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char8_t> atomic_char8_t; // C++20
typedef atomic<char16_t> atomic_char16_t;
typedef atomic<char32_t> atomic_char32_t;
typedef atomic<wchar_t> atomic_wchar_t;

typedef atomic<int_least8_t> atomic_int_least8_t;
typedef atomic<uint_least8_t> atomic_uint_least8_t;
typedef atomic<int_least16_t> atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t> atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t> atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t> atomic_int_fast8_t;
typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
typedef atomic<int_fast16_t> atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t> atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t> atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<int8_t> atomic_int8_t;
typedef atomic<uint8_t> atomic_uint8_t;
typedef atomic<int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic<int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic<int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

typedef atomic<intptr_t> atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t> atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t> atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// flag type and operations

typedef struct atomic_flag
{
    atomic_flag() noexcept = default; // until C++20
    constexpr atomic_flag() noexcept; // since C++20
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    bool test(memory_order m = memory_order_seq_cst) volatile noexcept;
    bool test(memory_order m = memory_order_seq_cst) noexcept;
    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
    void clear(memory_order m = memory_order_seq_cst) noexcept;

    void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
} atomic_flag;

bool atomic_flag_test(volatile atomic_flag* obj) noexcept;
bool atomic_flag_test(atomic_flag* obj) noexcept;
bool atomic_flag_test_explicit(volatile atomic_flag* obj,
                               memory_order m) noexcept;
bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept;
bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
bool atomic_flag_test_and_set(atomic_flag* obj) noexcept;
bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
                                       memory_order m) noexcept;
bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
void atomic_flag_clear(volatile atomic_flag* obj) noexcept;
void atomic_flag_clear(atomic_flag* obj) noexcept;
void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;

void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait_explicit(const volatile atomic_flag* obj, bool old, memory_order m) noexcept;
void atomic_flag_wait_explicit(const atomic_flag* obj, bool old, memory_order m) noexcept;
void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_one(atomic_flag* obj) noexcept;
void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_all(atomic_flag* obj) noexcept;

// fences

void atomic_thread_fence(memory_order m) noexcept;
void atomic_signal_fence(memory_order m) noexcept;

// deprecated

template <class T>
  void atomic_init(volatile atomic<T>* obj, atomic<T>::value_type desr) noexcept;

template <class T>
  void atomic_init(atomic<T>* obj, atomic<T>::value_type desr) noexcept;

#define ATOMIC_VAR_INIT(value) see below

#define ATOMIC_FLAG_INIT see below

} // std

*/

#include <__assert> // all public C++ headers provide the assertion handler
#include <__availability>
#include <__chrono/duration.h>
#include <__config>
#include <__thread/poll_with_backoff.h>
#include <__thread/timed_backoff_policy.h>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <type_traits>
#include <version>

#ifndef _LIBCPP_HAS_NO_THREADS
# include <__threading_support>
#endif

#ifndef _LIBCPP_REMOVE_TRANSITIVE_INCLUDES
# include <chrono>
#endif

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
# error <atomic> is not implemented
#endif
#ifdef kill_dependency
# error <atomic> is incompatible with <stdatomic.h> before C++23. Please compile with -std=c++23.
#endif

#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)                       \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume ||           \
                           __m == memory_order_acquire ||           \
                           __m == memory_order_acq_rel,             \
                           "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)                        \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release ||           \
                           __m == memory_order_acq_rel,             \
                           "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f)               \
  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release ||           \
                           __f == memory_order_acq_rel,             \
                           "memory order argument to atomic operation is invalid")

_LIBCPP_BEGIN_NAMESPACE_STD

// Figure out what the underlying type for `memory_order` would be if it were
// declared as an unscoped enum (accounting for -fshort-enums). Use this result
// to pin the underlying type in C++20.
enum __legacy_memory_order {
  __mo_relaxed,
  __mo_consume,
  __mo_acquire,
  __mo_release,
  __mo_acq_rel,
  __mo_seq_cst
};

typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;

#if _LIBCPP_STD_VER > 17

enum class memory_order : __memory_order_underlying_t {
  relaxed = __mo_relaxed,
  consume = __mo_consume,
  acquire = __mo_acquire,
  release = __mo_release,
  acq_rel = __mo_acq_rel,
  seq_cst = __mo_seq_cst
};

inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

#else

typedef enum memory_order {
  memory_order_relaxed = __mo_relaxed,
  memory_order_consume = __mo_consume,
  memory_order_acquire = __mo_acquire,
  memory_order_release = __mo_release,
  memory_order_acq_rel = __mo_acq_rel,
  memory_order_seq_cst = __mo_seq_cst,
} memory_order;

#endif // _LIBCPP_STD_VER > 17

template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
  return _VSTD::memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0;
}

static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
  "unexpected underlying type for std::memory_order");

#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
    defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)

// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
// the default operator= in an object is not volatile, a byte-by-byte copy
// is required.
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
  __a_value = __val;
}
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
  volatile char* __end = __to + sizeof(_Tp);
  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
  while (__to != __end)
    *__to++ = *__from++;
}

#endif

#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)

template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
  __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
  __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};

_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELEASE:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
              __ATOMIC_CONSUME))));
}

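// __to_gcc_failure_order maps the failure ordering of a compare-exchange: a
// failed compare-exchange is only a load, so memory_order_release and
// memory_order_acq_rel are demoted to __ATOMIC_RELAXED and __ATOMIC_ACQUIRE,
// matching the requirement that a failure ordering carry no release component.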
_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELAXED:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
              __ATOMIC_CONSUME))));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__to_gcc_order(__order));
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
                      memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
                          memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

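// The weak compare-exchange variants below pass 'true' for the builtin's
// 'weak' parameter, allowing the operation to fail spuriously (return false
// even though *__expected compared equal); callers such as
// atomic<T>::compare_exchange_weak are expected to retry in a loop.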
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                          memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                           memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)

#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)

template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
  __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
  __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp __value) _NOEXCEPT
    : __a_value(__value) {}
  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
};

#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(&__a->__a_value, __val);
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(&__a->__a_value, __val);
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
  return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
  return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}

_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_release ? memory_order_relaxed:
         (__order == memory_order_acq_rel ? memory_order_acquire:
           __order);
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}

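// Note: the __c11_atomic_fetch_* builtins perform pointer arithmetic in units
// of the pointed-to type themselves, so the _Tp* overloads above and below
// forward the ptrdiff_t delta unchanged, unlike the GCC __atomic_fetch_* path
// earlier in this header, which scales by __skip_amt (i.e. sizeof(_Tp)).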
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}

#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp kill_dependency(_Tp __y) _NOEXCEPT
{
  return __y;
}

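// kill_dependency (above) simply returns its argument; per [atomics.order] the
// returned value no longer carries a dependency from a memory_order_consume
// load. Illustrative use only:
//
//   int* p = ptr.load(std::memory_order_consume);
//   int  i = std::kill_dependency(*p); // i does not carry a dependency on p
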
#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
#ifndef _LIBCPP_HAS_NO_CHAR8_T
# define ATOMIC_CHAR8_T_LOCK_FREE   __CLANG_ATOMIC_CHAR8_T_LOCK_FREE
#endif
# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#ifndef _LIBCPP_HAS_NO_CHAR8_T
# define ATOMIC_CHAR8_T_LOCK_FREE   __GCC_ATOMIC_CHAR8_T_LOCK_FREE
#endif
# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
#endif

template <class _Tp>
struct __libcpp_is_always_lock_free {
  // __atomic_always_lock_free is available in all Standard modes
  static const bool __value = __atomic_always_lock_free(sizeof(_Tp), 0);
};

#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS

template<typename _Tp>
struct __cxx_atomic_lock_impl {

  _LIBCPP_INLINE_VISIBILITY
  __cxx_atomic_lock_impl() _NOEXCEPT
    : __a_value(), __a_lock(0) {}
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
    : __a_value(value), __a_lock(0) {}

  _Tp __a_value;
  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;

  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  _LIBCPP_INLINE_VISIBILITY void __lock() const {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
    __lock();
    _Tp __old;
    __cxx_atomic_assign_volatile(__old, __a_value);
    __unlock();
    return __old;
  }
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
    __lock();
    _Tp __old = __a_value;
    __unlock();
    return __old;
  }
};

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
  __a->__lock();
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
  __a->__unlock();
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
  __a->__lock();
  __a->__a_value = __val;
  __a->__unlock();
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __value);
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value = __value;
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  _Tp __temp;
  __a->__lock();
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
  else
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  __a->__unlock();
  return __ret;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  else
    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  __a->__unlock();
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  _Tp __temp;
  __a->__lock();
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
  else
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  __a->__unlock();
  return __ret;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  else
    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  __a->__unlock();
  return __ret;
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
                            ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
                            ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value -= __delta;
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value &= __pattern;
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value |= __pattern;
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value ^= __pattern;
  __a->__unlock();
  return __old;
}

template <typename _Tp,
          typename _Base = typename conditional<__libcpp_is_always_lock_free<_Tp>::__value,
                                                __cxx_atomic_base_impl<_Tp>,
                                                __cxx_atomic_lock_impl<_Tp> >::type>
#else
template <typename _Tp,
          typename _Base = __cxx_atomic_base_impl<_Tp> >
#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
struct __cxx_atomic_impl : public _Base {
  static_assert(is_trivially_copyable<_Tp>::value,
    "std::atomic<T> requires that 'T' be a trivially copyable type");

  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT = default;
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp __value) _NOEXCEPT
    : _Base(__value) {}
};

#if defined(__linux__) || (defined(_AIX) && !defined(__64BIT__)) || \
    (defined(__FreeBSD__) && defined(__mips__))
    using __cxx_contention_t = int32_t;
#else
    using __cxx_contention_t = int64_t;
#endif // __linux__ || (_AIX && !__64BIT__) || (__FreeBSD__ && __mips__)

using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;

#if defined(_LIBCPP_HAS_NO_THREADS)
# define _LIBCPP_HAS_NO_PLATFORM_WAIT
#endif

// TODO:
// _LIBCPP_HAS_NO_PLATFORM_WAIT is currently a "dead" macro, in the sense that
// it is not tied anywhere into the build system or even documented. We should
// clean it up because it is technically never defined except when threads are
// disabled. We should clean it up in its own changeset in case we break "bad"
// users.
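
// When platform waiting is available, waiting and notification funnel through
// the four out-of-line helpers declared below and defined in the built
// library: a waiter obtains a monitor value for the address, re-checks its
// predicate, and then blocks in __libcpp_atomic_wait until a notification on
// the same address arrives. Without platform support (see the #else branch),
// __cxx_atomic_wait falls back to __libcpp_thread_poll_with_backoff using a
// spinning or timed backoff policy.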

#if defined(__linux__) || (defined(_AIX) && !defined(__64BIT__)) || \
    (defined(__FreeBSD__) && defined(__mips__))
  using __cxx_contention_t = int32_t;
#else
  using __cxx_contention_t = int64_t;
#endif // __linux__ || (_AIX && !__64BIT__) || (__FreeBSD__ && __mips__)

using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;

#if defined(_LIBCPP_HAS_NO_THREADS)
# define _LIBCPP_HAS_NO_PLATFORM_WAIT
#endif

// TODO:
// _LIBCPP_HAS_NO_PLATFORM_WAIT is currently a "dead" macro, in the sense that
// it is not tied anywhere into the build system or even documented. We should
// clean it up because it is technically never defined except when threads are
// disabled. We should clean it up in its own changeset in case we break "bad"
// users.

#ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT

_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);

_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);

template <class _Atp, class _Fn>
struct __libcpp_atomic_wait_backoff_impl {
  _Atp* __a;
  _Fn __test_fn;
  _LIBCPP_AVAILABILITY_SYNC
  _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
  {
    if(__elapsed > chrono::microseconds(64))
    {
      auto const __monitor = __libcpp_atomic_monitor(__a);
      if(__test_fn())
        return true;
      __libcpp_atomic_wait(__a, __monitor);
    }
    else if(__elapsed > chrono::microseconds(4))
      __libcpp_thread_yield();
    else
      {} // poll
    return false;
  }
};

template <class _Atp, class _Fn>
_LIBCPP_AVAILABILITY_SYNC
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
{
  __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
  return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
}
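
// The backoff policy above escalates in three phases based on how long the
// caller has already been waiting: for roughly the first 4us it simply lets
// the caller re-poll, up to about 64us it yields the thread, and beyond that
// it takes a monitor snapshot, re-tests the predicate, and blocks in
// __libcpp_atomic_wait until a notification arrives. An illustrative caller
// (not part of this header) passes any predicate that returns true once
// waiting should stop:
//
//     __cxx_atomic_contention_t __c(0);
//     // ... another thread eventually stores a nonzero value and notifies ...
//     __cxx_atomic_wait(&__c, [&] {
//       return __cxx_atomic_load(&__c, memory_order_acquire) != 0;
//     });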

#else // _LIBCPP_HAS_NO_PLATFORM_WAIT

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
template <class _Atp, class _Fn>
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
{
#if defined(_LIBCPP_HAS_NO_THREADS)
  using _Policy = __spinning_backoff_policy;
#else
  using _Policy = __libcpp_timed_backoff_policy;
#endif
  return __libcpp_thread_poll_with_backoff(__test_fn, _Policy());
}

#endif // _LIBCPP_HAS_NO_PLATFORM_WAIT

template <class _Atp, class _Tp>
struct __cxx_atomic_wait_test_fn_impl {
  _Atp* __a;
  _Tp __val;
  memory_order __order;
  _LIBCPP_INLINE_VISIBILITY bool operator()() const
  {
    return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
  }
};

template <class _Atp, class _Tp>
_LIBCPP_AVAILABILITY_SYNC
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
{
  __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
  return __cxx_atomic_wait(__a, __test_fn);
}

// general atomic<T>

template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
  mutable __cxx_atomic_impl<_Tp> __a_;

#if defined(__cpp_lib_atomic_is_always_lock_free)
  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __libcpp_is_always_lock_free<__cxx_atomic_impl<_Tp> >::__value;
#endif

  _LIBCPP_INLINE_VISIBILITY
  bool is_lock_free() const volatile _NOEXCEPT
    {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
  _LIBCPP_INLINE_VISIBILITY
  bool is_lock_free() const _NOEXCEPT
    {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
  _LIBCPP_INLINE_VISIBILITY
  void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
    {__cxx_atomic_store(&__a_, __d, __m);}
  _LIBCPP_INLINE_VISIBILITY
  void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
    {__cxx_atomic_store(&__a_, __d, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
    _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
    {return __cxx_atomic_load(&__a_, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
    _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
    {return __cxx_atomic_load(&__a_, __m);}
  _LIBCPP_INLINE_VISIBILITY
  operator _Tp() const volatile _NOEXCEPT {return load();}
  _LIBCPP_INLINE_VISIBILITY
  operator _Tp() const _NOEXCEPT {return load();}
  _LIBCPP_INLINE_VISIBILITY
  _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_exchange(&__a_, __d, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_exchange(&__a_, __d, __m);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_weak(_Tp& __e, _Tp __d,
                             memory_order __s, memory_order __f) volatile _NOEXCEPT
    _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
    {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_weak(_Tp& __e, _Tp __d,
                             memory_order __s, memory_order __f) _NOEXCEPT
    _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
    {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_strong(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
    _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
    {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_strong(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
    _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
    {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_weak(_Tp& __e, _Tp __d,
                             memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_weak(_Tp& __e, _Tp __d,
                             memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_strong(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_strong(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
    {__cxx_atomic_wait(&__a_, __v, __m);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
    {__cxx_atomic_wait(&__a_, __v, __m);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
    {__cxx_atomic_notify_one(&__a_);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
    {__cxx_atomic_notify_one(&__a_);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
    {__cxx_atomic_notify_all(&__a_);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
    {__cxx_atomic_notify_all(&__a_);}

#if _LIBCPP_STD_VER > 17
  _LIBCPP_INLINE_VISIBILITY constexpr
  __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
#else
  _LIBCPP_INLINE_VISIBILITY
  __atomic_base() _NOEXCEPT = default;
#endif

  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
  __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}

  __atomic_base(const __atomic_base&) = delete;
};

#if defined(__cpp_lib_atomic_is_always_lock_free)
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif
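
// __atomic_base provides the operations shared by every atomic<T>. The
// compare_exchange members defined above are normally used in a retry loop; a
// minimal user-level sketch (illustrative only, using an ordinary
// std::atomic<int>):
//
//     std::atomic<int> __counter(0);
//     int __expected = __counter.load(std::memory_order_relaxed);
//     // On failure compare_exchange_weak reloads __expected with the value
//     // actually observed, so the loop retries with fresh data; spurious
//     // failures are permitted for the weak form.
//     while (!__counter.compare_exchange_weak(__expected, __expected + 1,
//                                             std::memory_order_relaxed))
//       ;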

// atomic<Integral>

template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
  typedef __atomic_base<_Tp, false> __base;

  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17
  __atomic_base() _NOEXCEPT = default;

  _LIBCPP_INLINE_VISIBILITY
  _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}

  _LIBCPP_INLINE_VISIBILITY
  _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
};
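
// Note the difference, visible in the definitions above, between the fetch_*
// members (which return the value held *before* the operation) and the
// compound-assignment operators (which return the value *after* it).
// Illustrative sketch, not part of this header:
//
//     std::atomic<unsigned> __flags(0x1);
//     unsigned __prev = __flags.fetch_or(0x2);   // __prev == 0x1, stored value 0x3
//     unsigned __now  = (__flags |= 0x4);        // __now  == 0x7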

// atomic<T>

template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
  typedef __atomic_base<_Tp> __base;
  typedef _Tp value_type;
  typedef value_type difference_type;

#if _LIBCPP_STD_VER > 17
  _LIBCPP_INLINE_VISIBILITY
  atomic() = default;
#else
  _LIBCPP_INLINE_VISIBILITY
  atomic() _NOEXCEPT = default;
#endif

  _LIBCPP_INLINE_VISIBILITY
  _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

  _LIBCPP_INLINE_VISIBILITY
  _Tp operator=(_Tp __d) volatile _NOEXCEPT
    {__base::store(__d); return __d;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator=(_Tp __d) _NOEXCEPT
    {__base::store(__d); return __d;}

  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;
};
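
// For a general (non-integral, non-pointer) T the interface is limited to the
// members inherited from __atomic_base. A small illustrative use (not part of
// this header; _Point is a made-up trivially copyable type):
//
//     struct _Point { int __x, __y; };
//     std::atomic<_Point> __p({0, 0});
//     __p = _Point{1, 2};                          // operator= does a seq_cst store
//     _Point __q = __p.load(std::memory_order_acquire);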

// atomic<T*>

template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
  typedef __atomic_base<_Tp*> __base;
  typedef _Tp* value_type;
  typedef ptrdiff_t difference_type;

  _LIBCPP_INLINE_VISIBILITY
  atomic() _NOEXCEPT = default;

  _LIBCPP_INLINE_VISIBILITY
  _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
    {__base::store(__d); return __d;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator=(_Tp* __d) _NOEXCEPT
    {__base::store(__d); return __d;}

  _LIBCPP_INLINE_VISIBILITY
  _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    // __atomic_fetch_add accepts function pointers, guard against them.
    static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
    return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
  }

  _LIBCPP_INLINE_VISIBILITY
  _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    // __atomic_fetch_add accepts function pointers, guard against them.
    static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
    return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
  }

  _LIBCPP_INLINE_VISIBILITY
  _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    // __atomic_fetch_sub accepts function pointers, guard against them.
    static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
    return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
  }

  _LIBCPP_INLINE_VISIBILITY
  _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    // __atomic_fetch_sub accepts function pointers, guard against them.
    static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
    return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
  }

  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}

  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;
};
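
// fetch_add/fetch_sub on atomic<T*> advance the pointer by whole elements,
// exactly like ordinary pointer arithmetic, not by bytes. Illustrative use
// (not part of this header):
//
//     int __buf[4] = {0, 1, 2, 3};
//     std::atomic<int*> __cursor(__buf);
//     int* __first  = __cursor.fetch_add(2);  // returns __buf, now points at __buf + 2
//     int* __second = __cursor++;             // returns __buf + 2, now points at __buf + 3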

// atomic_is_lock_free

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
  return __o->is_lock_free();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
  return __o->is_lock_free();
}

// atomic_init

template <class _Tp>
_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
  __cxx_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
  __cxx_atomic_init(&__o->__a_, __d);
}

// atomic_store

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
  __o->store(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
  __o->store(__d);
}

// atomic_store_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
  __o->store(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
  __o->store(__d, __m);
}
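
// The atomic_store/atomic_load free functions (and the *_explicit forms that
// follow) simply forward to the member functions and exist largely for C
// compatibility. A sketch of a release/acquire hand-off using the explicit
// forms (illustrative only, not part of this header):
//
//     std::atomic<int> __data(0), __ready(0);
//     // producer:
//     std::atomic_store_explicit(&__data, 42, std::memory_order_relaxed);
//     std::atomic_store_explicit(&__ready, 1, std::memory_order_release);
//     // consumer:
//     while (std::atomic_load_explicit(&__ready, std::memory_order_acquire) == 0)
//       ;
//     int __v = std::atomic_load_explicit(&__data, std::memory_order_relaxed); // __v == 42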

// atomic_load

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
  return __o->load();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
  return __o->load();
}

// atomic_load_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
  return __o->load(__m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
  return __o->load(__m);
}

// atomic_exchange

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
  return __o->exchange(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
  return __o->exchange(__d);
}

// atomic_exchange_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
{
  return __o->exchange(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
{
  return __o->exchange(__d, __m);
}

// atomic_compare_exchange_weak

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
  return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
  return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
  return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
  return __o->compare_exchange_strong(*__e, __d);
}

// atomic_compare_exchange_weak_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
                                      typename atomic<_Tp>::value_type __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
  return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
  return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
  return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
                                        typename atomic<_Tp>::value_type __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
  return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

// atomic_wait

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait(const volatile atomic<_Tp>* __o,
                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
{
  return __o->wait(__v);
}

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait(const atomic<_Tp>* __o,
                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
{
  return __o->wait(__v);
}

// atomic_wait_explicit

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
                          typename atomic<_Tp>::value_type __v,
                          memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
  return __o->wait(__v, __m);
}

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait_explicit(const atomic<_Tp>* __o,
                          typename atomic<_Tp>::value_type __v,
                          memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
  return __o->wait(__v, __m);
}

// atomic_notify_one

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
{
  __o->notify_one();
}
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
{
  __o->notify_one();
}

// atomic_notify_all

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
{
  __o->notify_all();
}
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
{
  __o->notify_all();
}
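
// atomic_wait blocks until the stored value is observed to differ from the
// argument, and atomic_notify_one/atomic_notify_all wake such waiters. A
// minimal illustrative pairing (not part of this header):
//
//     std::atomic<int> __state(0);
//     // waiting thread:
//     std::atomic_wait(&__state, 0);               // returns once __state != 0
//     // signalling thread:
//     __state.store(1, std::memory_order_release);
//     std::atomic_notify_one(&__state);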

// atomic_fetch_add

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
  return __o->fetch_add(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
  return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
  return __o->fetch_add(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
  return __o->fetch_add(__op, __m);
}

// atomic_fetch_sub

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
  return __o->fetch_sub(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
  return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
  return __o->fetch_sub(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
  return __o->fetch_sub(__op, __m);
}

// atomic_fetch_and

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
  is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
  return __o->fetch_and(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
  is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
  return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
  is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
  return __o->fetch_and(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
  is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
  return __o->fetch_and(__op, __m);
}

// atomic_fetch_or

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
  is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
  return __o->fetch_or(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
  is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
  return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
  is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
  return __o->fetch_or(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
  is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
  return __o->fetch_or(__op, __m);
}

// atomic_fetch_xor

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
  is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
  return __o->fetch_xor(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
  is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
  return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
  is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
  return __o->fetch_xor(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
  is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
  _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
  return __o->fetch_xor(__op, __m);
}

// flag type and operations

typedef struct atomic_flag
{
  __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;

  _LIBCPP_INLINE_VISIBILITY
  bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
    {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
  _LIBCPP_INLINE_VISIBILITY
  bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
    {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}

  _LIBCPP_INLINE_VISIBILITY
  bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
  _LIBCPP_INLINE_VISIBILITY
  bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
  _LIBCPP_INLINE_VISIBILITY
  void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
  _LIBCPP_INLINE_VISIBILITY
  void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}

  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
    {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
    {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  void notify_one() volatile _NOEXCEPT
    {__cxx_atomic_notify_one(&__a_);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  void notify_one() _NOEXCEPT
    {__cxx_atomic_notify_one(&__a_);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  void notify_all() volatile _NOEXCEPT
    {__cxx_atomic_notify_all(&__a_);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
  void notify_all() _NOEXCEPT
    {__cxx_atomic_notify_all(&__a_);}

#if _LIBCPP_STD_VER > 17
  _LIBCPP_INLINE_VISIBILITY constexpr
  atomic_flag() _NOEXCEPT : __a_(false) {}
#else
  _LIBCPP_INLINE_VISIBILITY
  atomic_flag() _NOEXCEPT = default;
#endif

  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
  atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

  atomic_flag(const atomic_flag&) = delete;
  atomic_flag& operator=(const atomic_flag&) = delete;
  atomic_flag& operator=(const atomic_flag&) volatile = delete;

} atomic_flag;
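
// atomic_flag is the one atomic type guaranteed to be lock-free, which makes
// it the natural building block for a spinlock. A minimal sketch (illustrative
// only; _SpinLock is a made-up type, not part of this header):
//
//     struct _SpinLock {
//       std::atomic_flag __f = ATOMIC_FLAG_INIT;
//       void lock() {
//         while (__f.test_and_set(std::memory_order_acquire))
//           ;                                       // spin until the flag was clear
//       }
//       void unlock() { __f.clear(std::memory_order_release); }
//     };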

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
{
  return __o->test();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
{
  return __o->test();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
  return __o->test(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
{
  return __o->test(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
  return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
  return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
  return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
  return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
  __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
  __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
  __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
  __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
{
  __o->wait(__v);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
{
  __o->wait(__v);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait_explicit(const volatile atomic_flag* __o,
                          bool __v, memory_order __m) _NOEXCEPT
{
  __o->wait(__v, __m);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait_explicit(const atomic_flag* __o,
                          bool __v, memory_order __m) _NOEXCEPT
{
  __o->wait(__v, __m);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
{
  __o->notify_one();
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
{
  __o->notify_one();
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
{
  __o->notify_all();
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
{
  __o->notify_all();
}

// fences

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
  __cxx_atomic_thread_fence(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
  __cxx_atomic_signal_fence(__m);
}
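
// atomic_thread_fence lets relaxed operations on different objects be ordered
// by a standalone fence pair instead of per-operation orderings. An
// illustrative release/acquire pairing (not part of this header):
//
//     std::atomic<int> __payload(0), __flag(0);
//     // producer:
//     __payload.store(42, std::memory_order_relaxed);
//     std::atomic_thread_fence(std::memory_order_release);
//     __flag.store(1, std::memory_order_relaxed);
//     // consumer:
//     while (__flag.load(std::memory_order_relaxed) == 0)
//       ;
//     std::atomic_thread_fence(std::memory_order_acquire);
//     int __v = __payload.load(std::memory_order_relaxed);   // __v == 42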

// Atomics for standard typedef types

typedef atomic<bool> atomic_bool;
typedef atomic<char> atomic_char;
typedef atomic<signed char> atomic_schar;
typedef atomic<unsigned char> atomic_uchar;
typedef atomic<short> atomic_short;
typedef atomic<unsigned short> atomic_ushort;
typedef atomic<int> atomic_int;
typedef atomic<unsigned int> atomic_uint;
typedef atomic<long> atomic_long;
typedef atomic<unsigned long> atomic_ulong;
typedef atomic<long long> atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
#ifndef _LIBCPP_HAS_NO_CHAR8_T
typedef atomic<char8_t> atomic_char8_t;
#endif
typedef atomic<char16_t> atomic_char16_t;
typedef atomic<char32_t> atomic_char32_t;
#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
typedef atomic<wchar_t> atomic_wchar_t;
#endif

typedef atomic<int_least8_t> atomic_int_least8_t;
typedef atomic<uint_least8_t> atomic_uint_least8_t;
typedef atomic<int_least16_t> atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t> atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t> atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t> atomic_int_fast8_t;
typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
typedef atomic<int_fast16_t> atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t> atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t> atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic< int8_t> atomic_int8_t;
typedef atomic<uint8_t> atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

typedef atomic<intptr_t> atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t> atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t> atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// atomic_*_lock_free : prefer the contention type most highly, then the largest lock-free type

#ifdef __cpp_lib_atomic_is_always_lock_free
# define _LIBCPP_CONTENTION_LOCK_FREE ::std::__libcpp_is_always_lock_free<__cxx_contention_t>::__value
#else
# define _LIBCPP_CONTENTION_LOCK_FREE false
#endif

#if ATOMIC_LLONG_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_INT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_SHORT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_CHAR_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type __libcpp_unsigned_lock_free;
#else
  // No signed/unsigned lock-free types
#define _LIBCPP_NO_LOCK_FREE_TYPES
#endif

#if !defined(_LIBCPP_NO_LOCK_FREE_TYPES)
typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free;
typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
#endif

#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

#if _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
# if defined(_LIBCPP_CLANG_VER) && _LIBCPP_CLANG_VER >= 1400
#   pragma clang deprecated(ATOMIC_VAR_INIT)
# endif
#endif // _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
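
// atomic_signed_lock_free/atomic_unsigned_lock_free (C++20) name whichever
// lock-free integer type is expected to be most efficient for wait/notify,
// preferring the contention type selected above. Illustrative use (these
// aliases may be absent when no lock-free integral type exists, hence the
// feature-test guard):
//
//     #if defined(__cpp_lib_atomic_lock_free_type_aliases)
//     std::atomic_unsigned_lock_free __generation(0);
//     __generation.fetch_add(1, std::memory_order_release);
//     __generation.notify_all();
//     #endif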

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP_ATOMIC