1// -*- C++ -*- 2//===----------------------------------------------------------------------===// 3// 4// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 5// See https://llvm.org/LICENSE.txt for license information. 6// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 7// 8//===----------------------------------------------------------------------===// 9 10#ifndef _LIBCPP_ATOMIC 11#define _LIBCPP_ATOMIC 12 13/* 14 atomic synopsis 15 16namespace std 17{ 18 19// feature test macro [version.syn] 20 21#define __cpp_lib_atomic_is_always_lock_free 22#define __cpp_lib_atomic_flag_test 23#define __cpp_lib_atomic_lock_free_type_aliases 24#define __cpp_lib_atomic_wait 25 26 // order and consistency 27 28 enum memory_order: unspecified // enum class in C++20 29 { 30 relaxed, 31 consume, // load-consume 32 acquire, // load-acquire 33 release, // store-release 34 acq_rel, // store-release load-acquire 35 seq_cst // store-release load-acquire 36 }; 37 38 inline constexpr auto memory_order_relaxed = memory_order::relaxed; 39 inline constexpr auto memory_order_consume = memory_order::consume; 40 inline constexpr auto memory_order_acquire = memory_order::acquire; 41 inline constexpr auto memory_order_release = memory_order::release; 42 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel; 43 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst; 44 45template <class T> T kill_dependency(T y) noexcept; 46 47// lock-free property 48 49#define ATOMIC_BOOL_LOCK_FREE unspecified 50#define ATOMIC_CHAR_LOCK_FREE unspecified 51#define ATOMIC_CHAR8_T_LOCK_FREE unspecified // C++20 52#define ATOMIC_CHAR16_T_LOCK_FREE unspecified 53#define ATOMIC_CHAR32_T_LOCK_FREE unspecified 54#define ATOMIC_WCHAR_T_LOCK_FREE unspecified 55#define ATOMIC_SHORT_LOCK_FREE unspecified 56#define ATOMIC_INT_LOCK_FREE unspecified 57#define ATOMIC_LONG_LOCK_FREE unspecified 58#define ATOMIC_LLONG_LOCK_FREE unspecified 59#define ATOMIC_POINTER_LOCK_FREE unspecified 60 61template <class T> 62struct atomic 63{ 64 using value_type = T; 65 66 static constexpr bool is_always_lock_free; 67 bool is_lock_free() const volatile noexcept; 68 bool is_lock_free() const noexcept; 69 70 atomic() noexcept = default; // until C++20 71 constexpr atomic() noexcept(is_nothrow_default_constructible_v<T>); // since C++20 72 constexpr atomic(T desr) noexcept; 73 atomic(const atomic&) = delete; 74 atomic& operator=(const atomic&) = delete; 75 atomic& operator=(const atomic&) volatile = delete; 76 77 T load(memory_order m = memory_order_seq_cst) const volatile noexcept; 78 T load(memory_order m = memory_order_seq_cst) const noexcept; 79 operator T() const volatile noexcept; 80 operator T() const noexcept; 81 void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept; 82 void store(T desr, memory_order m = memory_order_seq_cst) noexcept; 83 T operator=(T) volatile noexcept; 84 T operator=(T) noexcept; 85 86 T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept; 87 T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept; 88 bool compare_exchange_weak(T& expc, T desr, 89 memory_order s, memory_order f) volatile noexcept; 90 bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept; 91 bool compare_exchange_strong(T& expc, T desr, 92 memory_order s, memory_order f) volatile noexcept; 93 bool compare_exchange_strong(T& expc, T desr, 94 memory_order s, memory_order f) noexcept; 95 bool compare_exchange_weak(T& expc, T desr, 96 memory_order m = memory_order_seq_cst) volatile noexcept; 97 bool compare_exchange_weak(T& expc, T desr, 98 memory_order m = memory_order_seq_cst) noexcept; 99 bool compare_exchange_strong(T& expc, T desr, 100 memory_order m = memory_order_seq_cst) volatile noexcept; 101 bool compare_exchange_strong(T& expc, T desr, 102 memory_order m = memory_order_seq_cst) noexcept; 103 104 void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept; 105 void wait(T, memory_order = memory_order::seq_cst) const noexcept; 106 void notify_one() volatile noexcept; 107 void notify_one() noexcept; 108 void notify_all() volatile noexcept; 109 void notify_all() noexcept; 110}; 111 112template <> 113struct atomic<integral> 114{ 115 using value_type = integral; 116 using difference_type = value_type; 117 118 static constexpr bool is_always_lock_free; 119 bool is_lock_free() const volatile noexcept; 120 bool is_lock_free() const noexcept; 121 122 atomic() noexcept = default; 123 constexpr atomic(integral desr) noexcept; 124 atomic(const atomic&) = delete; 125 atomic& operator=(const atomic&) = delete; 126 atomic& operator=(const atomic&) volatile = delete; 127 128 integral load(memory_order m = memory_order_seq_cst) const volatile noexcept; 129 integral load(memory_order m = memory_order_seq_cst) const noexcept; 130 operator integral() const volatile noexcept; 131 operator integral() const noexcept; 132 void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept; 133 void store(integral desr, memory_order m = memory_order_seq_cst) noexcept; 134 integral operator=(integral desr) volatile noexcept; 135 integral operator=(integral desr) noexcept; 136 137 integral exchange(integral desr, 138 memory_order m = memory_order_seq_cst) volatile noexcept; 139 integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept; 140 bool compare_exchange_weak(integral& expc, integral desr, 141 memory_order s, memory_order f) volatile noexcept; 142 bool compare_exchange_weak(integral& expc, integral desr, 143 memory_order s, memory_order f) noexcept; 144 bool compare_exchange_strong(integral& expc, integral desr, 145 memory_order s, memory_order f) volatile noexcept; 146 bool compare_exchange_strong(integral& expc, integral desr, 147 memory_order s, memory_order f) noexcept; 148 bool compare_exchange_weak(integral& expc, integral desr, 149 memory_order m = memory_order_seq_cst) volatile noexcept; 150 bool compare_exchange_weak(integral& expc, integral desr, 151 memory_order m = memory_order_seq_cst) noexcept; 152 bool compare_exchange_strong(integral& expc, integral desr, 153 memory_order m = memory_order_seq_cst) volatile noexcept; 154 bool compare_exchange_strong(integral& expc, integral desr, 155 memory_order m = memory_order_seq_cst) noexcept; 156 157 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; 158 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept; 159 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; 160 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept; 161 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; 162 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept; 163 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; 164 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept; 165 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; 166 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept; 167 168 integral operator++(int) volatile noexcept; 169 integral operator++(int) noexcept; 170 integral operator--(int) volatile noexcept; 171 integral operator--(int) noexcept; 172 integral operator++() volatile noexcept; 173 integral operator++() noexcept; 174 integral operator--() volatile noexcept; 175 integral operator--() noexcept; 176 integral operator+=(integral op) volatile noexcept; 177 integral operator+=(integral op) noexcept; 178 integral operator-=(integral op) volatile noexcept; 179 integral operator-=(integral op) noexcept; 180 integral operator&=(integral op) volatile noexcept; 181 integral operator&=(integral op) noexcept; 182 integral operator|=(integral op) volatile noexcept; 183 integral operator|=(integral op) noexcept; 184 integral operator^=(integral op) volatile noexcept; 185 integral operator^=(integral op) noexcept; 186 187 void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept; 188 void wait(integral, memory_order = memory_order::seq_cst) const noexcept; 189 void notify_one() volatile noexcept; 190 void notify_one() noexcept; 191 void notify_all() volatile noexcept; 192 void notify_all() noexcept; 193}; 194 195template <class T> 196struct atomic<T*> 197{ 198 using value_type = T*; 199 using difference_type = ptrdiff_t; 200 201 static constexpr bool is_always_lock_free; 202 bool is_lock_free() const volatile noexcept; 203 bool is_lock_free() const noexcept; 204 205 atomic() noexcept = default; // until C++20 206 constexpr atomic() noexcept; // since C++20 207 constexpr atomic(T* desr) noexcept; 208 atomic(const atomic&) = delete; 209 atomic& operator=(const atomic&) = delete; 210 atomic& operator=(const atomic&) volatile = delete; 211 212 T* load(memory_order m = memory_order_seq_cst) const volatile noexcept; 213 T* load(memory_order m = memory_order_seq_cst) const noexcept; 214 operator T*() const volatile noexcept; 215 operator T*() const noexcept; 216 void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept; 217 void store(T* desr, memory_order m = memory_order_seq_cst) noexcept; 218 T* operator=(T*) volatile noexcept; 219 T* operator=(T*) noexcept; 220 221 T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept; 222 T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept; 223 bool compare_exchange_weak(T*& expc, T* desr, 224 memory_order s, memory_order f) volatile noexcept; 225 bool compare_exchange_weak(T*& expc, T* desr, 226 memory_order s, memory_order f) noexcept; 227 bool compare_exchange_strong(T*& expc, T* desr, 228 memory_order s, memory_order f) volatile noexcept; 229 bool compare_exchange_strong(T*& expc, T* desr, 230 memory_order s, memory_order f) noexcept; 231 bool compare_exchange_weak(T*& expc, T* desr, 232 memory_order m = memory_order_seq_cst) volatile noexcept; 233 bool compare_exchange_weak(T*& expc, T* desr, 234 memory_order m = memory_order_seq_cst) noexcept; 235 bool compare_exchange_strong(T*& expc, T* desr, 236 memory_order m = memory_order_seq_cst) volatile noexcept; 237 bool compare_exchange_strong(T*& expc, T* desr, 238 memory_order m = memory_order_seq_cst) noexcept; 239 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept; 240 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept; 241 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept; 242 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept; 243 244 T* operator++(int) volatile noexcept; 245 T* operator++(int) noexcept; 246 T* operator--(int) volatile noexcept; 247 T* operator--(int) noexcept; 248 T* operator++() volatile noexcept; 249 T* operator++() noexcept; 250 T* operator--() volatile noexcept; 251 T* operator--() noexcept; 252 T* operator+=(ptrdiff_t op) volatile noexcept; 253 T* operator+=(ptrdiff_t op) noexcept; 254 T* operator-=(ptrdiff_t op) volatile noexcept; 255 T* operator-=(ptrdiff_t op) noexcept; 256 257 void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept; 258 void wait(T*, memory_order = memory_order::seq_cst) const noexcept; 259 void notify_one() volatile noexcept; 260 void notify_one() noexcept; 261 void notify_all() volatile noexcept; 262 void notify_all() noexcept; 263}; 264 265 266// [atomics.nonmembers], non-member functions 267template<class T> 268 bool atomic_is_lock_free(const volatile atomic<T>*) noexcept; 269template<class T> 270 bool atomic_is_lock_free(const atomic<T>*) noexcept; 271template<class T> 272 void atomic_store(volatile atomic<T>*, atomic<T>::value_type) noexcept; 273template<class T> 274 void atomic_store(atomic<T>*, atomic<T>::value_type) noexcept; 275template<class T> 276 void atomic_store_explicit(volatile atomic<T>*, atomic<T>::value_type, 277 memory_order) noexcept; 278template<class T> 279 void atomic_store_explicit(atomic<T>*, atomic<T>::value_type, 280 memory_order) noexcept; 281template<class T> 282 T atomic_load(const volatile atomic<T>*) noexcept; 283template<class T> 284 T atomic_load(const atomic<T>*) noexcept; 285template<class T> 286 T atomic_load_explicit(const volatile atomic<T>*, memory_order) noexcept; 287template<class T> 288 T atomic_load_explicit(const atomic<T>*, memory_order) noexcept; 289template<class T> 290 T atomic_exchange(volatile atomic<T>*, atomic<T>::value_type) noexcept; 291template<class T> 292 T atomic_exchange(atomic<T>*, atomic<T>::value_type) noexcept; 293template<class T> 294 T atomic_exchange_explicit(volatile atomic<T>*, atomic<T>::value_type, 295 memory_order) noexcept; 296template<class T> 297 T atomic_exchange_explicit(atomic<T>*, atomic<T>::value_type, 298 memory_order) noexcept; 299template<class T> 300 bool atomic_compare_exchange_weak(volatile atomic<T>*, atomic<T>::value_type*, 301 atomic<T>::value_type) noexcept; 302template<class T> 303 bool atomic_compare_exchange_weak(atomic<T>*, atomic<T>::value_type*, 304 atomic<T>::value_type) noexcept; 305template<class T> 306 bool atomic_compare_exchange_strong(volatile atomic<T>*, atomic<T>::value_type*, 307 atomic<T>::value_type) noexcept; 308template<class T> 309 bool atomic_compare_exchange_strong(atomic<T>*, atomic<T>::value_type*, 310 atomic<T>::value_type) noexcept; 311template<class T> 312 bool atomic_compare_exchange_weak_explicit(volatile atomic<T>*, atomic<T>::value_type*, 313 atomic<T>::value_type, 314 memory_order, memory_order) noexcept; 315template<class T> 316 bool atomic_compare_exchange_weak_explicit(atomic<T>*, atomic<T>::value_type*, 317 atomic<T>::value_type, 318 memory_order, memory_order) noexcept; 319template<class T> 320 bool atomic_compare_exchange_strong_explicit(volatile atomic<T>*, atomic<T>::value_type*, 321 atomic<T>::value_type, 322 memory_order, memory_order) noexcept; 323template<class T> 324 bool atomic_compare_exchange_strong_explicit(atomic<T>*, atomic<T>::value_type*, 325 atomic<T>::value_type, 326 memory_order, memory_order) noexcept; 327 328template<class T> 329 T atomic_fetch_add(volatile atomic<T>*, atomic<T>::difference_type) noexcept; 330template<class T> 331 T atomic_fetch_add(atomic<T>*, atomic<T>::difference_type) noexcept; 332template<class T> 333 T atomic_fetch_add_explicit(volatile atomic<T>*, atomic<T>::difference_type, 334 memory_order) noexcept; 335template<class T> 336 T atomic_fetch_add_explicit(atomic<T>*, atomic<T>::difference_type, 337 memory_order) noexcept; 338template<class T> 339 T atomic_fetch_sub(volatile atomic<T>*, atomic<T>::difference_type) noexcept; 340template<class T> 341 T atomic_fetch_sub(atomic<T>*, atomic<T>::difference_type) noexcept; 342template<class T> 343 T atomic_fetch_sub_explicit(volatile atomic<T>*, atomic<T>::difference_type, 344 memory_order) noexcept; 345template<class T> 346 T atomic_fetch_sub_explicit(atomic<T>*, atomic<T>::difference_type, 347 memory_order) noexcept; 348template<class T> 349 T atomic_fetch_and(volatile atomic<T>*, atomic<T>::value_type) noexcept; 350template<class T> 351 T atomic_fetch_and(atomic<T>*, atomic<T>::value_type) noexcept; 352template<class T> 353 T atomic_fetch_and_explicit(volatile atomic<T>*, atomic<T>::value_type, 354 memory_order) noexcept; 355template<class T> 356 T atomic_fetch_and_explicit(atomic<T>*, atomic<T>::value_type, 357 memory_order) noexcept; 358template<class T> 359 T atomic_fetch_or(volatile atomic<T>*, atomic<T>::value_type) noexcept; 360template<class T> 361 T atomic_fetch_or(atomic<T>*, atomic<T>::value_type) noexcept; 362template<class T> 363 T atomic_fetch_or_explicit(volatile atomic<T>*, atomic<T>::value_type, 364 memory_order) noexcept; 365template<class T> 366 T atomic_fetch_or_explicit(atomic<T>*, atomic<T>::value_type, 367 memory_order) noexcept; 368template<class T> 369 T atomic_fetch_xor(volatile atomic<T>*, atomic<T>::value_type) noexcept; 370template<class T> 371 T atomic_fetch_xor(atomic<T>*, atomic<T>::value_type) noexcept; 372template<class T> 373 T atomic_fetch_xor_explicit(volatile atomic<T>*, atomic<T>::value_type, 374 memory_order) noexcept; 375template<class T> 376 T atomic_fetch_xor_explicit(atomic<T>*, atomic<T>::value_type, 377 memory_order) noexcept; 378 379template<class T> 380 void atomic_wait(const volatile atomic<T>*, atomic<T>::value_type); 381template<class T> 382 void atomic_wait(const atomic<T>*, atomic<T>::value_type); 383template<class T> 384 void atomic_wait_explicit(const volatile atomic<T>*, atomic<T>::value_type, 385 memory_order); 386template<class T> 387 void atomic_wait_explicit(const atomic<T>*, atomic<T>::value_type, 388 memory_order); 389template<class T> 390 void atomic_notify_one(volatile atomic<T>*); 391template<class T> 392 void atomic_notify_one(atomic<T>*); 393template<class T> 394 void atomic_notify_all(volatile atomic<T>*); 395template<class T> 396 void atomic_notify_all(atomic<T>*); 397 398// Atomics for standard typedef types 399 400typedef atomic<bool> atomic_bool; 401typedef atomic<char> atomic_char; 402typedef atomic<signed char> atomic_schar; 403typedef atomic<unsigned char> atomic_uchar; 404typedef atomic<short> atomic_short; 405typedef atomic<unsigned short> atomic_ushort; 406typedef atomic<int> atomic_int; 407typedef atomic<unsigned int> atomic_uint; 408typedef atomic<long> atomic_long; 409typedef atomic<unsigned long> atomic_ulong; 410typedef atomic<long long> atomic_llong; 411typedef atomic<unsigned long long> atomic_ullong; 412typedef atomic<char8_t> atomic_char8_t; // C++20 413typedef atomic<char16_t> atomic_char16_t; 414typedef atomic<char32_t> atomic_char32_t; 415typedef atomic<wchar_t> atomic_wchar_t; 416 417typedef atomic<int_least8_t> atomic_int_least8_t; 418typedef atomic<uint_least8_t> atomic_uint_least8_t; 419typedef atomic<int_least16_t> atomic_int_least16_t; 420typedef atomic<uint_least16_t> atomic_uint_least16_t; 421typedef atomic<int_least32_t> atomic_int_least32_t; 422typedef atomic<uint_least32_t> atomic_uint_least32_t; 423typedef atomic<int_least64_t> atomic_int_least64_t; 424typedef atomic<uint_least64_t> atomic_uint_least64_t; 425 426typedef atomic<int_fast8_t> atomic_int_fast8_t; 427typedef atomic<uint_fast8_t> atomic_uint_fast8_t; 428typedef atomic<int_fast16_t> atomic_int_fast16_t; 429typedef atomic<uint_fast16_t> atomic_uint_fast16_t; 430typedef atomic<int_fast32_t> atomic_int_fast32_t; 431typedef atomic<uint_fast32_t> atomic_uint_fast32_t; 432typedef atomic<int_fast64_t> atomic_int_fast64_t; 433typedef atomic<uint_fast64_t> atomic_uint_fast64_t; 434 435typedef atomic<int8_t> atomic_int8_t; 436typedef atomic<uint8_t> atomic_uint8_t; 437typedef atomic<int16_t> atomic_int16_t; 438typedef atomic<uint16_t> atomic_uint16_t; 439typedef atomic<int32_t> atomic_int32_t; 440typedef atomic<uint32_t> atomic_uint32_t; 441typedef atomic<int64_t> atomic_int64_t; 442typedef atomic<uint64_t> atomic_uint64_t; 443 444typedef atomic<intptr_t> atomic_intptr_t; 445typedef atomic<uintptr_t> atomic_uintptr_t; 446typedef atomic<size_t> atomic_size_t; 447typedef atomic<ptrdiff_t> atomic_ptrdiff_t; 448typedef atomic<intmax_t> atomic_intmax_t; 449typedef atomic<uintmax_t> atomic_uintmax_t; 450 451// flag type and operations 452 453typedef struct atomic_flag 454{ 455 atomic_flag() noexcept = default; // until C++20 456 constexpr atomic_flag() noexcept; // since C++20 457 atomic_flag(const atomic_flag&) = delete; 458 atomic_flag& operator=(const atomic_flag&) = delete; 459 atomic_flag& operator=(const atomic_flag&) volatile = delete; 460 461 bool test(memory_order m = memory_order_seq_cst) volatile noexcept; 462 bool test(memory_order m = memory_order_seq_cst) noexcept; 463 bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept; 464 bool test_and_set(memory_order m = memory_order_seq_cst) noexcept; 465 void clear(memory_order m = memory_order_seq_cst) volatile noexcept; 466 void clear(memory_order m = memory_order_seq_cst) noexcept; 467 468 void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept; 469 void wait(bool, memory_order = memory_order::seq_cst) const noexcept; 470 void notify_one() volatile noexcept; 471 void notify_one() noexcept; 472 void notify_all() volatile noexcept; 473 void notify_all() noexcept; 474} atomic_flag; 475 476bool atomic_flag_test(volatile atomic_flag* obj) noexcept; 477bool atomic_flag_test(atomic_flag* obj) noexcept; 478bool atomic_flag_test_explicit(volatile atomic_flag* obj, 479 memory_order m) noexcept; 480bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept; 481bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept; 482bool atomic_flag_test_and_set(atomic_flag* obj) noexcept; 483bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj, 484 memory_order m) noexcept; 485bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept; 486void atomic_flag_clear(volatile atomic_flag* obj) noexcept; 487void atomic_flag_clear(atomic_flag* obj) noexcept; 488void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept; 489void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept; 490 491void atomic_wait(const volatile atomic_flag* obj, T old) noexcept; 492void atomic_wait(const atomic_flag* obj, T old) noexcept; 493void atomic_wait_explicit(const volatile atomic_flag* obj, T old, memory_order m) noexcept; 494void atomic_wait_explicit(const atomic_flag* obj, T old, memory_order m) noexcept; 495void atomic_one(volatile atomic_flag* obj) noexcept; 496void atomic_one(atomic_flag* obj) noexcept; 497void atomic_all(volatile atomic_flag* obj) noexcept; 498void atomic_all(atomic_flag* obj) noexcept; 499 500// fences 501 502void atomic_thread_fence(memory_order m) noexcept; 503void atomic_signal_fence(memory_order m) noexcept; 504 505// deprecated 506 507template <class T> 508 void atomic_init(volatile atomic<T>* obj, atomic<T>::value_type desr) noexcept; 509 510template <class T> 511 void atomic_init(atomic<T>* obj, atomic<T>::value_type desr) noexcept; 512 513#define ATOMIC_VAR_INIT(value) see below 514 515#define ATOMIC_FLAG_INIT see below 516 517} // std 518 519*/ 520 521#include <__availability> 522#include <__config> 523#include <__thread/poll_with_backoff.h> 524#include <__thread/timed_backoff_policy.h> 525#include <cstddef> 526#include <cstdint> 527#include <cstring> 528#include <type_traits> 529#include <version> 530 531#ifndef _LIBCPP_HAS_NO_THREADS 532# include <__threading_support> 533#endif 534 535#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER) 536#pragma GCC system_header 537#endif 538 539#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER 540# error <atomic> is not implemented 541#endif 542#ifdef kill_dependency 543# error C++ standard library is incompatible with <stdatomic.h> 544#endif 545 546#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \ 547 _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \ 548 __m == memory_order_acquire || \ 549 __m == memory_order_acq_rel, \ 550 "memory order argument to atomic operation is invalid") 551 552#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \ 553 _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \ 554 __m == memory_order_acq_rel, \ 555 "memory order argument to atomic operation is invalid") 556 557#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \ 558 _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \ 559 __f == memory_order_acq_rel, \ 560 "memory order argument to atomic operation is invalid") 561 562_LIBCPP_BEGIN_NAMESPACE_STD 563 564// Figure out what the underlying type for `memory_order` would be if it were 565// declared as an unscoped enum (accounting for -fshort-enums). Use this result 566// to pin the underlying type in C++20. 567enum __legacy_memory_order { 568 __mo_relaxed, 569 __mo_consume, 570 __mo_acquire, 571 __mo_release, 572 __mo_acq_rel, 573 __mo_seq_cst 574}; 575 576typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t; 577 578#if _LIBCPP_STD_VER > 17 579 580enum class memory_order : __memory_order_underlying_t { 581 relaxed = __mo_relaxed, 582 consume = __mo_consume, 583 acquire = __mo_acquire, 584 release = __mo_release, 585 acq_rel = __mo_acq_rel, 586 seq_cst = __mo_seq_cst 587}; 588 589inline constexpr auto memory_order_relaxed = memory_order::relaxed; 590inline constexpr auto memory_order_consume = memory_order::consume; 591inline constexpr auto memory_order_acquire = memory_order::acquire; 592inline constexpr auto memory_order_release = memory_order::release; 593inline constexpr auto memory_order_acq_rel = memory_order::acq_rel; 594inline constexpr auto memory_order_seq_cst = memory_order::seq_cst; 595 596#else 597 598typedef enum memory_order { 599 memory_order_relaxed = __mo_relaxed, 600 memory_order_consume = __mo_consume, 601 memory_order_acquire = __mo_acquire, 602 memory_order_release = __mo_release, 603 memory_order_acq_rel = __mo_acq_rel, 604 memory_order_seq_cst = __mo_seq_cst, 605} memory_order; 606 607#endif // _LIBCPP_STD_VER > 17 608 609template <typename _Tp> _LIBCPP_INLINE_VISIBILITY 610bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) { 611 return _VSTD::memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0; 612} 613 614static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value), 615 "unexpected underlying type for std::memory_order"); 616 617#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \ 618 defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS) 619 620// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because 621// the default operator= in an object is not volatile, a byte-by-byte copy 622// is required. 623template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY 624typename enable_if<is_assignable<_Tp&, _Tv>::value>::type 625__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) { 626 __a_value = __val; 627} 628template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY 629typename enable_if<is_assignable<_Tp&, _Tv>::value>::type 630__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) { 631 volatile char* __to = reinterpret_cast<volatile char*>(&__a_value); 632 volatile char* __end = __to + sizeof(_Tp); 633 volatile const char* __from = reinterpret_cast<volatile const char*>(&__val); 634 while (__to != __end) 635 *__to++ = *__from++; 636} 637 638#endif 639 640#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) 641 642template <typename _Tp> 643struct __cxx_atomic_base_impl { 644 645 _LIBCPP_INLINE_VISIBILITY 646#ifndef _LIBCPP_CXX03_LANG 647 __cxx_atomic_base_impl() _NOEXCEPT = default; 648#else 649 __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {} 650#endif // _LIBCPP_CXX03_LANG 651 _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT 652 : __a_value(value) {} 653 _Tp __a_value; 654}; 655 656_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) { 657 // Avoid switch statement to make this a constexpr. 658 return __order == memory_order_relaxed ? __ATOMIC_RELAXED: 659 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE: 660 (__order == memory_order_release ? __ATOMIC_RELEASE: 661 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST: 662 (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL: 663 __ATOMIC_CONSUME)))); 664} 665 666_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) { 667 // Avoid switch statement to make this a constexpr. 668 return __order == memory_order_relaxed ? __ATOMIC_RELAXED: 669 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE: 670 (__order == memory_order_release ? __ATOMIC_RELAXED: 671 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST: 672 (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE: 673 __ATOMIC_CONSUME)))); 674} 675 676template <typename _Tp> 677_LIBCPP_INLINE_VISIBILITY 678void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val) { 679 __cxx_atomic_assign_volatile(__a->__a_value, __val); 680} 681 682template <typename _Tp> 683_LIBCPP_INLINE_VISIBILITY 684void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) { 685 __a->__a_value = __val; 686} 687 688_LIBCPP_INLINE_VISIBILITY inline 689void __cxx_atomic_thread_fence(memory_order __order) { 690 __atomic_thread_fence(__to_gcc_order(__order)); 691} 692 693_LIBCPP_INLINE_VISIBILITY inline 694void __cxx_atomic_signal_fence(memory_order __order) { 695 __atomic_signal_fence(__to_gcc_order(__order)); 696} 697 698template <typename _Tp> 699_LIBCPP_INLINE_VISIBILITY 700void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val, 701 memory_order __order) { 702 __atomic_store(&__a->__a_value, &__val, 703 __to_gcc_order(__order)); 704} 705 706template <typename _Tp> 707_LIBCPP_INLINE_VISIBILITY 708void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val, 709 memory_order __order) { 710 __atomic_store(&__a->__a_value, &__val, 711 __to_gcc_order(__order)); 712} 713 714template <typename _Tp> 715_LIBCPP_INLINE_VISIBILITY 716_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a, 717 memory_order __order) { 718 _Tp __ret; 719 __atomic_load(&__a->__a_value, &__ret, 720 __to_gcc_order(__order)); 721 return __ret; 722} 723 724template <typename _Tp> 725_LIBCPP_INLINE_VISIBILITY 726_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) { 727 _Tp __ret; 728 __atomic_load(&__a->__a_value, &__ret, 729 __to_gcc_order(__order)); 730 return __ret; 731} 732 733template <typename _Tp> 734_LIBCPP_INLINE_VISIBILITY 735_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a, 736 _Tp __value, memory_order __order) { 737 _Tp __ret; 738 __atomic_exchange(&__a->__a_value, &__value, &__ret, 739 __to_gcc_order(__order)); 740 return __ret; 741} 742 743template <typename _Tp> 744_LIBCPP_INLINE_VISIBILITY 745_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value, 746 memory_order __order) { 747 _Tp __ret; 748 __atomic_exchange(&__a->__a_value, &__value, &__ret, 749 __to_gcc_order(__order)); 750 return __ret; 751} 752 753template <typename _Tp> 754_LIBCPP_INLINE_VISIBILITY 755bool __cxx_atomic_compare_exchange_strong( 756 volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, 757 memory_order __success, memory_order __failure) { 758 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value, 759 false, 760 __to_gcc_order(__success), 761 __to_gcc_failure_order(__failure)); 762} 763 764template <typename _Tp> 765_LIBCPP_INLINE_VISIBILITY 766bool __cxx_atomic_compare_exchange_strong( 767 __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, 768 memory_order __failure) { 769 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value, 770 false, 771 __to_gcc_order(__success), 772 __to_gcc_failure_order(__failure)); 773} 774 775template <typename _Tp> 776_LIBCPP_INLINE_VISIBILITY 777bool __cxx_atomic_compare_exchange_weak( 778 volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, 779 memory_order __success, memory_order __failure) { 780 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value, 781 true, 782 __to_gcc_order(__success), 783 __to_gcc_failure_order(__failure)); 784} 785 786template <typename _Tp> 787_LIBCPP_INLINE_VISIBILITY 788bool __cxx_atomic_compare_exchange_weak( 789 __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, 790 memory_order __failure) { 791 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value, 792 true, 793 __to_gcc_order(__success), 794 __to_gcc_failure_order(__failure)); 795} 796 797template <typename _Tp> 798struct __skip_amt { enum {value = 1}; }; 799 800template <typename _Tp> 801struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; }; 802 803// FIXME: Haven't figured out what the spec says about using arrays with 804// atomic_fetch_add. Force a failure rather than creating bad behavior. 805template <typename _Tp> 806struct __skip_amt<_Tp[]> { }; 807template <typename _Tp, int n> 808struct __skip_amt<_Tp[n]> { }; 809 810template <typename _Tp, typename _Td> 811_LIBCPP_INLINE_VISIBILITY 812_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a, 813 _Td __delta, memory_order __order) { 814 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value, 815 __to_gcc_order(__order)); 816} 817 818template <typename _Tp, typename _Td> 819_LIBCPP_INLINE_VISIBILITY 820_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta, 821 memory_order __order) { 822 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value, 823 __to_gcc_order(__order)); 824} 825 826template <typename _Tp, typename _Td> 827_LIBCPP_INLINE_VISIBILITY 828_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a, 829 _Td __delta, memory_order __order) { 830 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value, 831 __to_gcc_order(__order)); 832} 833 834template <typename _Tp, typename _Td> 835_LIBCPP_INLINE_VISIBILITY 836_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta, 837 memory_order __order) { 838 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value, 839 __to_gcc_order(__order)); 840} 841 842template <typename _Tp> 843_LIBCPP_INLINE_VISIBILITY 844_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a, 845 _Tp __pattern, memory_order __order) { 846 return __atomic_fetch_and(&__a->__a_value, __pattern, 847 __to_gcc_order(__order)); 848} 849 850template <typename _Tp> 851_LIBCPP_INLINE_VISIBILITY 852_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a, 853 _Tp __pattern, memory_order __order) { 854 return __atomic_fetch_and(&__a->__a_value, __pattern, 855 __to_gcc_order(__order)); 856} 857 858template <typename _Tp> 859_LIBCPP_INLINE_VISIBILITY 860_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a, 861 _Tp __pattern, memory_order __order) { 862 return __atomic_fetch_or(&__a->__a_value, __pattern, 863 __to_gcc_order(__order)); 864} 865 866template <typename _Tp> 867_LIBCPP_INLINE_VISIBILITY 868_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, 869 memory_order __order) { 870 return __atomic_fetch_or(&__a->__a_value, __pattern, 871 __to_gcc_order(__order)); 872} 873 874template <typename _Tp> 875_LIBCPP_INLINE_VISIBILITY 876_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a, 877 _Tp __pattern, memory_order __order) { 878 return __atomic_fetch_xor(&__a->__a_value, __pattern, 879 __to_gcc_order(__order)); 880} 881 882template <typename _Tp> 883_LIBCPP_INLINE_VISIBILITY 884_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, 885 memory_order __order) { 886 return __atomic_fetch_xor(&__a->__a_value, __pattern, 887 __to_gcc_order(__order)); 888} 889 890#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0) 891 892#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP) 893 894template <typename _Tp> 895struct __cxx_atomic_base_impl { 896 897 _LIBCPP_INLINE_VISIBILITY 898#ifndef _LIBCPP_CXX03_LANG 899 __cxx_atomic_base_impl() _NOEXCEPT = default; 900#else 901 __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {} 902#endif // _LIBCPP_CXX03_LANG 903 _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT 904 : __a_value(value) {} 905 _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value; 906}; 907 908#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s) 909 910_LIBCPP_INLINE_VISIBILITY inline 911void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT { 912 __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order)); 913} 914 915_LIBCPP_INLINE_VISIBILITY inline 916void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT { 917 __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order)); 918} 919 920template<class _Tp> 921_LIBCPP_INLINE_VISIBILITY 922void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT { 923 __c11_atomic_init(&__a->__a_value, __val); 924} 925template<class _Tp> 926_LIBCPP_INLINE_VISIBILITY 927void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT { 928 __c11_atomic_init(&__a->__a_value, __val); 929} 930 931template<class _Tp> 932_LIBCPP_INLINE_VISIBILITY 933void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT { 934 __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order)); 935} 936template<class _Tp> 937_LIBCPP_INLINE_VISIBILITY 938void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT { 939 __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order)); 940} 941 942template<class _Tp> 943_LIBCPP_INLINE_VISIBILITY 944_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT { 945 using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*; 946 return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order)); 947} 948template<class _Tp> 949_LIBCPP_INLINE_VISIBILITY 950_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT { 951 using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*; 952 return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order)); 953} 954 955template<class _Tp> 956_LIBCPP_INLINE_VISIBILITY 957_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT { 958 return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order)); 959} 960template<class _Tp> 961_LIBCPP_INLINE_VISIBILITY 962_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT { 963 return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order)); 964} 965 966_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) { 967 // Avoid switch statement to make this a constexpr. 968 return __order == memory_order_release ? memory_order_relaxed: 969 (__order == memory_order_acq_rel ? memory_order_acquire: 970 __order); 971} 972 973template<class _Tp> 974_LIBCPP_INLINE_VISIBILITY 975bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT { 976 return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure))); 977} 978template<class _Tp> 979_LIBCPP_INLINE_VISIBILITY 980bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT { 981 return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure))); 982} 983 984template<class _Tp> 985_LIBCPP_INLINE_VISIBILITY 986bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT { 987 return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure))); 988} 989template<class _Tp> 990_LIBCPP_INLINE_VISIBILITY 991bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT { 992 return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure))); 993} 994 995template<class _Tp> 996_LIBCPP_INLINE_VISIBILITY 997_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT { 998 return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order)); 999} 1000template<class _Tp> 1001_LIBCPP_INLINE_VISIBILITY 1002_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT { 1003 return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order)); 1004} 1005 1006template<class _Tp> 1007_LIBCPP_INLINE_VISIBILITY 1008_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT { 1009 return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order)); 1010} 1011template<class _Tp> 1012_LIBCPP_INLINE_VISIBILITY 1013_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT { 1014 return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order)); 1015} 1016 1017template<class _Tp> 1018_LIBCPP_INLINE_VISIBILITY 1019_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT { 1020 return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order)); 1021} 1022template<class _Tp> 1023_LIBCPP_INLINE_VISIBILITY 1024_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT { 1025 return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order)); 1026} 1027template<class _Tp> 1028_LIBCPP_INLINE_VISIBILITY 1029_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT { 1030 return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order)); 1031} 1032template<class _Tp> 1033_LIBCPP_INLINE_VISIBILITY 1034_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT { 1035 return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order)); 1036} 1037 1038template<class _Tp> 1039_LIBCPP_INLINE_VISIBILITY 1040_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT { 1041 return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order)); 1042} 1043template<class _Tp> 1044_LIBCPP_INLINE_VISIBILITY 1045_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT { 1046 return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order)); 1047} 1048 1049template<class _Tp> 1050_LIBCPP_INLINE_VISIBILITY 1051_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT { 1052 return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order)); 1053} 1054template<class _Tp> 1055_LIBCPP_INLINE_VISIBILITY 1056_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT { 1057 return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order)); 1058} 1059 1060template<class _Tp> 1061_LIBCPP_INLINE_VISIBILITY 1062_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT { 1063 return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order)); 1064} 1065template<class _Tp> 1066_LIBCPP_INLINE_VISIBILITY 1067_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT { 1068 return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order)); 1069} 1070 1071#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP 1072 1073template <class _Tp> 1074_LIBCPP_INLINE_VISIBILITY 1075_Tp kill_dependency(_Tp __y) _NOEXCEPT 1076{ 1077 return __y; 1078} 1079 1080#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE) 1081# define ATOMIC_BOOL_LOCK_FREE __CLANG_ATOMIC_BOOL_LOCK_FREE 1082# define ATOMIC_CHAR_LOCK_FREE __CLANG_ATOMIC_CHAR_LOCK_FREE 1083#ifndef _LIBCPP_HAS_NO_CHAR8_T 1084# define ATOMIC_CHAR8_T_LOCK_FREE __CLANG_ATOMIC_CHAR8_T_LOCK_FREE 1085#endif 1086# define ATOMIC_CHAR16_T_LOCK_FREE __CLANG_ATOMIC_CHAR16_T_LOCK_FREE 1087# define ATOMIC_CHAR32_T_LOCK_FREE __CLANG_ATOMIC_CHAR32_T_LOCK_FREE 1088# define ATOMIC_WCHAR_T_LOCK_FREE __CLANG_ATOMIC_WCHAR_T_LOCK_FREE 1089# define ATOMIC_SHORT_LOCK_FREE __CLANG_ATOMIC_SHORT_LOCK_FREE 1090# define ATOMIC_INT_LOCK_FREE __CLANG_ATOMIC_INT_LOCK_FREE 1091# define ATOMIC_LONG_LOCK_FREE __CLANG_ATOMIC_LONG_LOCK_FREE 1092# define ATOMIC_LLONG_LOCK_FREE __CLANG_ATOMIC_LLONG_LOCK_FREE 1093# define ATOMIC_POINTER_LOCK_FREE __CLANG_ATOMIC_POINTER_LOCK_FREE 1094#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE) 1095# define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE 1096# define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE 1097#ifndef _LIBCPP_HAS_NO_CHAR8_T 1098# define ATOMIC_CHAR8_T_LOCK_FREE __GCC_ATOMIC_CHAR8_T_LOCK_FREE 1099#endif 1100# define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE 1101# define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE 1102# define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE 1103# define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE 1104# define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE 1105# define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE 1106# define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE 1107# define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE 1108#endif 1109 1110#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS 1111 1112template<typename _Tp> 1113struct __cxx_atomic_lock_impl { 1114 1115 _LIBCPP_INLINE_VISIBILITY 1116 __cxx_atomic_lock_impl() _NOEXCEPT 1117 : __a_value(), __a_lock(0) {} 1118 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit 1119 __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT 1120 : __a_value(value), __a_lock(0) {} 1121 1122 _Tp __a_value; 1123 mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock; 1124 1125 _LIBCPP_INLINE_VISIBILITY void __lock() const volatile { 1126 while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire)) 1127 /*spin*/; 1128 } 1129 _LIBCPP_INLINE_VISIBILITY void __lock() const { 1130 while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire)) 1131 /*spin*/; 1132 } 1133 _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile { 1134 __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release); 1135 } 1136 _LIBCPP_INLINE_VISIBILITY void __unlock() const { 1137 __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release); 1138 } 1139 _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile { 1140 __lock(); 1141 _Tp __old; 1142 __cxx_atomic_assign_volatile(__old, __a_value); 1143 __unlock(); 1144 return __old; 1145 } 1146 _LIBCPP_INLINE_VISIBILITY _Tp __read() const { 1147 __lock(); 1148 _Tp __old = __a_value; 1149 __unlock(); 1150 return __old; 1151 } 1152}; 1153 1154template <typename _Tp> 1155_LIBCPP_INLINE_VISIBILITY 1156void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) { 1157 __cxx_atomic_assign_volatile(__a->__a_value, __val); 1158} 1159template <typename _Tp> 1160_LIBCPP_INLINE_VISIBILITY 1161void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) { 1162 __a->__a_value = __val; 1163} 1164 1165template <typename _Tp> 1166_LIBCPP_INLINE_VISIBILITY 1167void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) { 1168 __a->__lock(); 1169 __cxx_atomic_assign_volatile(__a->__a_value, __val); 1170 __a->__unlock(); 1171} 1172template <typename _Tp> 1173_LIBCPP_INLINE_VISIBILITY 1174void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) { 1175 __a->__lock(); 1176 __a->__a_value = __val; 1177 __a->__unlock(); 1178} 1179 1180template <typename _Tp> 1181_LIBCPP_INLINE_VISIBILITY 1182_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) { 1183 return __a->__read(); 1184} 1185template <typename _Tp> 1186_LIBCPP_INLINE_VISIBILITY 1187_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) { 1188 return __a->__read(); 1189} 1190 1191template <typename _Tp> 1192_LIBCPP_INLINE_VISIBILITY 1193_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) { 1194 __a->__lock(); 1195 _Tp __old; 1196 __cxx_atomic_assign_volatile(__old, __a->__a_value); 1197 __cxx_atomic_assign_volatile(__a->__a_value, __value); 1198 __a->__unlock(); 1199 return __old; 1200} 1201template <typename _Tp> 1202_LIBCPP_INLINE_VISIBILITY 1203_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) { 1204 __a->__lock(); 1205 _Tp __old = __a->__a_value; 1206 __a->__a_value = __value; 1207 __a->__unlock(); 1208 return __old; 1209} 1210 1211template <typename _Tp> 1212_LIBCPP_INLINE_VISIBILITY 1213bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a, 1214 _Tp* __expected, _Tp __value, memory_order, memory_order) { 1215 _Tp __temp; 1216 __a->__lock(); 1217 __cxx_atomic_assign_volatile(__temp, __a->__a_value); 1218 bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0); 1219 if(__ret) 1220 __cxx_atomic_assign_volatile(__a->__a_value, __value); 1221 else 1222 __cxx_atomic_assign_volatile(*__expected, __a->__a_value); 1223 __a->__unlock(); 1224 return __ret; 1225} 1226template <typename _Tp> 1227_LIBCPP_INLINE_VISIBILITY 1228bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a, 1229 _Tp* __expected, _Tp __value, memory_order, memory_order) { 1230 __a->__lock(); 1231 bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0); 1232 if(__ret) 1233 _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp)); 1234 else 1235 _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp)); 1236 __a->__unlock(); 1237 return __ret; 1238} 1239 1240template <typename _Tp> 1241_LIBCPP_INLINE_VISIBILITY 1242bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a, 1243 _Tp* __expected, _Tp __value, memory_order, memory_order) { 1244 _Tp __temp; 1245 __a->__lock(); 1246 __cxx_atomic_assign_volatile(__temp, __a->__a_value); 1247 bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0); 1248 if(__ret) 1249 __cxx_atomic_assign_volatile(__a->__a_value, __value); 1250 else 1251 __cxx_atomic_assign_volatile(*__expected, __a->__a_value); 1252 __a->__unlock(); 1253 return __ret; 1254} 1255template <typename _Tp> 1256_LIBCPP_INLINE_VISIBILITY 1257bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a, 1258 _Tp* __expected, _Tp __value, memory_order, memory_order) { 1259 __a->__lock(); 1260 bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0); 1261 if(__ret) 1262 _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp)); 1263 else 1264 _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp)); 1265 __a->__unlock(); 1266 return __ret; 1267} 1268 1269template <typename _Tp, typename _Td> 1270_LIBCPP_INLINE_VISIBILITY 1271_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a, 1272 _Td __delta, memory_order) { 1273 __a->__lock(); 1274 _Tp __old; 1275 __cxx_atomic_assign_volatile(__old, __a->__a_value); 1276 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta)); 1277 __a->__unlock(); 1278 return __old; 1279} 1280template <typename _Tp, typename _Td> 1281_LIBCPP_INLINE_VISIBILITY 1282_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a, 1283 _Td __delta, memory_order) { 1284 __a->__lock(); 1285 _Tp __old = __a->__a_value; 1286 __a->__a_value += __delta; 1287 __a->__unlock(); 1288 return __old; 1289} 1290 1291template <typename _Tp, typename _Td> 1292_LIBCPP_INLINE_VISIBILITY 1293_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a, 1294 ptrdiff_t __delta, memory_order) { 1295 __a->__lock(); 1296 _Tp* __old; 1297 __cxx_atomic_assign_volatile(__old, __a->__a_value); 1298 __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta); 1299 __a->__unlock(); 1300 return __old; 1301} 1302template <typename _Tp, typename _Td> 1303_LIBCPP_INLINE_VISIBILITY 1304_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a, 1305 ptrdiff_t __delta, memory_order) { 1306 __a->__lock(); 1307 _Tp* __old = __a->__a_value; 1308 __a->__a_value += __delta; 1309 __a->__unlock(); 1310 return __old; 1311} 1312 1313template <typename _Tp, typename _Td> 1314_LIBCPP_INLINE_VISIBILITY 1315_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a, 1316 _Td __delta, memory_order) { 1317 __a->__lock(); 1318 _Tp __old; 1319 __cxx_atomic_assign_volatile(__old, __a->__a_value); 1320 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta)); 1321 __a->__unlock(); 1322 return __old; 1323} 1324template <typename _Tp, typename _Td> 1325_LIBCPP_INLINE_VISIBILITY 1326_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a, 1327 _Td __delta, memory_order) { 1328 __a->__lock(); 1329 _Tp __old = __a->__a_value; 1330 __a->__a_value -= __delta; 1331 __a->__unlock(); 1332 return __old; 1333} 1334 1335template <typename _Tp> 1336_LIBCPP_INLINE_VISIBILITY 1337_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a, 1338 _Tp __pattern, memory_order) { 1339 __a->__lock(); 1340 _Tp __old; 1341 __cxx_atomic_assign_volatile(__old, __a->__a_value); 1342 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern)); 1343 __a->__unlock(); 1344 return __old; 1345} 1346template <typename _Tp> 1347_LIBCPP_INLINE_VISIBILITY 1348_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a, 1349 _Tp __pattern, memory_order) { 1350 __a->__lock(); 1351 _Tp __old = __a->__a_value; 1352 __a->__a_value &= __pattern; 1353 __a->__unlock(); 1354 return __old; 1355} 1356 1357template <typename _Tp> 1358_LIBCPP_INLINE_VISIBILITY 1359_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a, 1360 _Tp __pattern, memory_order) { 1361 __a->__lock(); 1362 _Tp __old; 1363 __cxx_atomic_assign_volatile(__old, __a->__a_value); 1364 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern)); 1365 __a->__unlock(); 1366 return __old; 1367} 1368template <typename _Tp> 1369_LIBCPP_INLINE_VISIBILITY 1370_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a, 1371 _Tp __pattern, memory_order) { 1372 __a->__lock(); 1373 _Tp __old = __a->__a_value; 1374 __a->__a_value |= __pattern; 1375 __a->__unlock(); 1376 return __old; 1377} 1378 1379template <typename _Tp> 1380_LIBCPP_INLINE_VISIBILITY 1381_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a, 1382 _Tp __pattern, memory_order) { 1383 __a->__lock(); 1384 _Tp __old; 1385 __cxx_atomic_assign_volatile(__old, __a->__a_value); 1386 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern)); 1387 __a->__unlock(); 1388 return __old; 1389} 1390template <typename _Tp> 1391_LIBCPP_INLINE_VISIBILITY 1392_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a, 1393 _Tp __pattern, memory_order) { 1394 __a->__lock(); 1395 _Tp __old = __a->__a_value; 1396 __a->__a_value ^= __pattern; 1397 __a->__unlock(); 1398 return __old; 1399} 1400 1401#ifdef __cpp_lib_atomic_is_always_lock_free 1402 1403template<typename _Tp> struct __cxx_is_always_lock_free { 1404 enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; }; 1405 1406#else 1407 1408template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; }; 1409// Implementations must match the C ATOMIC_*_LOCK_FREE macro values. 1410template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; }; 1411template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; }; 1412template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; }; 1413template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; }; 1414#ifndef _LIBCPP_HAS_NO_CHAR8_T 1415template<> struct __cxx_is_always_lock_free<char8_t> { enum { __value = 2 == ATOMIC_CHAR8_T_LOCK_FREE }; }; 1416#endif 1417template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; }; 1418template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; }; 1419#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS 1420template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; }; 1421#endif 1422template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; }; 1423template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; }; 1424template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; }; 1425template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; }; 1426template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; }; 1427template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; }; 1428template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; }; 1429template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; }; 1430template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; }; 1431template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; }; 1432 1433#endif //__cpp_lib_atomic_is_always_lock_free 1434 1435template <typename _Tp, 1436 typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value, 1437 __cxx_atomic_base_impl<_Tp>, 1438 __cxx_atomic_lock_impl<_Tp> >::type> 1439#else 1440template <typename _Tp, 1441 typename _Base = __cxx_atomic_base_impl<_Tp> > 1442#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS 1443struct __cxx_atomic_impl : public _Base { 1444 static_assert(is_trivially_copyable<_Tp>::value, 1445 "std::atomic<T> requires that 'T' be a trivially copyable type"); 1446 1447 _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT = default; 1448 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp value) _NOEXCEPT 1449 : _Base(value) {} 1450}; 1451 1452#if defined(__linux__) || (defined(__FreeBSD__) && defined(__mips__)) 1453 using __cxx_contention_t = int32_t; 1454#else 1455 using __cxx_contention_t = int64_t; 1456#endif 1457 1458using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>; 1459 1460#if defined(_LIBCPP_HAS_NO_THREADS) 1461# define _LIBCPP_HAS_NO_PLATFORM_WAIT 1462#endif 1463 1464// TODO: 1465// _LIBCPP_HAS_NO_PLATFORM_WAIT is currently a "dead" macro, in the sense that 1466// it is not tied anywhere into the build system or even documented. We should 1467// clean it up because it is technically never defined except when threads are 1468// disabled. We should clean it up in its own changeset in case we break "bad" 1469// users. 1470#ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT 1471 1472_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*); 1473_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*); 1474_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*); 1475_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t); 1476 1477_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*); 1478_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*); 1479_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*); 1480_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t); 1481 1482template <class _Atp, class _Fn> 1483struct __libcpp_atomic_wait_backoff_impl { 1484 _Atp* __a; 1485 _Fn __test_fn; 1486 _LIBCPP_AVAILABILITY_SYNC 1487 _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const 1488 { 1489 if(__elapsed > chrono::microseconds(64)) 1490 { 1491 auto const __monitor = __libcpp_atomic_monitor(__a); 1492 if(__test_fn()) 1493 return true; 1494 __libcpp_atomic_wait(__a, __monitor); 1495 } 1496 else if(__elapsed > chrono::microseconds(4)) 1497 __libcpp_thread_yield(); 1498 else 1499 {} // poll 1500 return false; 1501 } 1502}; 1503 1504template <class _Atp, class _Fn> 1505_LIBCPP_AVAILABILITY_SYNC 1506_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn) 1507{ 1508 __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn}; 1509 return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn); 1510} 1511 1512#else // _LIBCPP_HAS_NO_PLATFORM_WAIT 1513 1514template <class _Tp> 1515_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { } 1516template <class _Tp> 1517_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { } 1518template <class _Atp, class _Fn> 1519_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn) 1520{ 1521#if defined(_LIBCPP_HAS_NO_THREADS) 1522 using _Policy = __spinning_backoff_policy; 1523#else 1524 using _Policy = __libcpp_timed_backoff_policy; 1525#endif 1526 return __libcpp_thread_poll_with_backoff(__test_fn, _Policy()); 1527} 1528 1529#endif // _LIBCPP_HAS_NO_PLATFORM_WAIT 1530 1531template <class _Atp, class _Tp> 1532struct __cxx_atomic_wait_test_fn_impl { 1533 _Atp* __a; 1534 _Tp __val; 1535 memory_order __order; 1536 _LIBCPP_INLINE_VISIBILITY bool operator()() const 1537 { 1538 return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val); 1539 } 1540}; 1541 1542template <class _Atp, class _Tp> 1543_LIBCPP_AVAILABILITY_SYNC 1544_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order) 1545{ 1546 __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order}; 1547 return __cxx_atomic_wait(__a, __test_fn); 1548} 1549 1550// general atomic<T> 1551 1552template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value> 1553struct __atomic_base // false 1554{ 1555 mutable __cxx_atomic_impl<_Tp> __a_; 1556 1557#if defined(__cpp_lib_atomic_is_always_lock_free) 1558 static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0); 1559#endif 1560 1561 _LIBCPP_INLINE_VISIBILITY 1562 bool is_lock_free() const volatile _NOEXCEPT 1563 {return __cxx_atomic_is_lock_free(sizeof(_Tp));} 1564 _LIBCPP_INLINE_VISIBILITY 1565 bool is_lock_free() const _NOEXCEPT 1566 {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();} 1567 _LIBCPP_INLINE_VISIBILITY 1568 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 1569 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) 1570 {__cxx_atomic_store(&__a_, __d, __m);} 1571 _LIBCPP_INLINE_VISIBILITY 1572 void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT 1573 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) 1574 {__cxx_atomic_store(&__a_, __d, __m);} 1575 _LIBCPP_INLINE_VISIBILITY 1576 _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT 1577 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) 1578 {return __cxx_atomic_load(&__a_, __m);} 1579 _LIBCPP_INLINE_VISIBILITY 1580 _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT 1581 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) 1582 {return __cxx_atomic_load(&__a_, __m);} 1583 _LIBCPP_INLINE_VISIBILITY 1584 operator _Tp() const volatile _NOEXCEPT {return load();} 1585 _LIBCPP_INLINE_VISIBILITY 1586 operator _Tp() const _NOEXCEPT {return load();} 1587 _LIBCPP_INLINE_VISIBILITY 1588 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 1589 {return __cxx_atomic_exchange(&__a_, __d, __m);} 1590 _LIBCPP_INLINE_VISIBILITY 1591 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT 1592 {return __cxx_atomic_exchange(&__a_, __d, __m);} 1593 _LIBCPP_INLINE_VISIBILITY 1594 bool compare_exchange_weak(_Tp& __e, _Tp __d, 1595 memory_order __s, memory_order __f) volatile _NOEXCEPT 1596 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) 1597 {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);} 1598 _LIBCPP_INLINE_VISIBILITY 1599 bool compare_exchange_weak(_Tp& __e, _Tp __d, 1600 memory_order __s, memory_order __f) _NOEXCEPT 1601 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) 1602 {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);} 1603 _LIBCPP_INLINE_VISIBILITY 1604 bool compare_exchange_strong(_Tp& __e, _Tp __d, 1605 memory_order __s, memory_order __f) volatile _NOEXCEPT 1606 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) 1607 {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);} 1608 _LIBCPP_INLINE_VISIBILITY 1609 bool compare_exchange_strong(_Tp& __e, _Tp __d, 1610 memory_order __s, memory_order __f) _NOEXCEPT 1611 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) 1612 {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);} 1613 _LIBCPP_INLINE_VISIBILITY 1614 bool compare_exchange_weak(_Tp& __e, _Tp __d, 1615 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 1616 {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);} 1617 _LIBCPP_INLINE_VISIBILITY 1618 bool compare_exchange_weak(_Tp& __e, _Tp __d, 1619 memory_order __m = memory_order_seq_cst) _NOEXCEPT 1620 {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);} 1621 _LIBCPP_INLINE_VISIBILITY 1622 bool compare_exchange_strong(_Tp& __e, _Tp __d, 1623 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 1624 {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);} 1625 _LIBCPP_INLINE_VISIBILITY 1626 bool compare_exchange_strong(_Tp& __e, _Tp __d, 1627 memory_order __m = memory_order_seq_cst) _NOEXCEPT 1628 {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);} 1629 1630 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT 1631 {__cxx_atomic_wait(&__a_, __v, __m);} 1632 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT 1633 {__cxx_atomic_wait(&__a_, __v, __m);} 1634 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT 1635 {__cxx_atomic_notify_one(&__a_);} 1636 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT 1637 {__cxx_atomic_notify_one(&__a_);} 1638 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT 1639 {__cxx_atomic_notify_all(&__a_);} 1640 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT 1641 {__cxx_atomic_notify_all(&__a_);} 1642 1643#if _LIBCPP_STD_VER > 17 1644 _LIBCPP_INLINE_VISIBILITY constexpr 1645 __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {} 1646#else 1647 _LIBCPP_INLINE_VISIBILITY 1648 __atomic_base() _NOEXCEPT = default; 1649#endif 1650 1651 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR 1652 __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {} 1653 1654#ifndef _LIBCPP_CXX03_LANG 1655 __atomic_base(const __atomic_base&) = delete; 1656#else 1657private: 1658 _LIBCPP_INLINE_VISIBILITY 1659 __atomic_base(const __atomic_base&); 1660#endif 1661}; 1662 1663#if defined(__cpp_lib_atomic_is_always_lock_free) 1664template <class _Tp, bool __b> 1665_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free; 1666#endif 1667 1668// atomic<Integral> 1669 1670template <class _Tp> 1671struct __atomic_base<_Tp, true> 1672 : public __atomic_base<_Tp, false> 1673{ 1674 typedef __atomic_base<_Tp, false> __base; 1675 1676 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17 1677 __atomic_base() _NOEXCEPT = default; 1678 1679 _LIBCPP_INLINE_VISIBILITY 1680 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {} 1681 1682 _LIBCPP_INLINE_VISIBILITY 1683 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 1684 {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);} 1685 _LIBCPP_INLINE_VISIBILITY 1686 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT 1687 {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);} 1688 _LIBCPP_INLINE_VISIBILITY 1689 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 1690 {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);} 1691 _LIBCPP_INLINE_VISIBILITY 1692 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT 1693 {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);} 1694 _LIBCPP_INLINE_VISIBILITY 1695 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 1696 {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);} 1697 _LIBCPP_INLINE_VISIBILITY 1698 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT 1699 {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);} 1700 _LIBCPP_INLINE_VISIBILITY 1701 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 1702 {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);} 1703 _LIBCPP_INLINE_VISIBILITY 1704 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT 1705 {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);} 1706 _LIBCPP_INLINE_VISIBILITY 1707 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 1708 {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);} 1709 _LIBCPP_INLINE_VISIBILITY 1710 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT 1711 {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);} 1712 1713 _LIBCPP_INLINE_VISIBILITY 1714 _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));} 1715 _LIBCPP_INLINE_VISIBILITY 1716 _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));} 1717 _LIBCPP_INLINE_VISIBILITY 1718 _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));} 1719 _LIBCPP_INLINE_VISIBILITY 1720 _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));} 1721 _LIBCPP_INLINE_VISIBILITY 1722 _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);} 1723 _LIBCPP_INLINE_VISIBILITY 1724 _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);} 1725 _LIBCPP_INLINE_VISIBILITY 1726 _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);} 1727 _LIBCPP_INLINE_VISIBILITY 1728 _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);} 1729 _LIBCPP_INLINE_VISIBILITY 1730 _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;} 1731 _LIBCPP_INLINE_VISIBILITY 1732 _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;} 1733 _LIBCPP_INLINE_VISIBILITY 1734 _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;} 1735 _LIBCPP_INLINE_VISIBILITY 1736 _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;} 1737 _LIBCPP_INLINE_VISIBILITY 1738 _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;} 1739 _LIBCPP_INLINE_VISIBILITY 1740 _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;} 1741 _LIBCPP_INLINE_VISIBILITY 1742 _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;} 1743 _LIBCPP_INLINE_VISIBILITY 1744 _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;} 1745 _LIBCPP_INLINE_VISIBILITY 1746 _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;} 1747 _LIBCPP_INLINE_VISIBILITY 1748 _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;} 1749}; 1750 1751// atomic<T> 1752 1753template <class _Tp> 1754struct atomic 1755 : public __atomic_base<_Tp> 1756{ 1757 typedef __atomic_base<_Tp> __base; 1758 typedef _Tp value_type; 1759 typedef value_type difference_type; 1760 1761#if _LIBCPP_STD_VER > 17 1762 _LIBCPP_INLINE_VISIBILITY 1763 atomic() = default; 1764#else 1765 _LIBCPP_INLINE_VISIBILITY 1766 atomic() _NOEXCEPT = default; 1767#endif 1768 1769 _LIBCPP_INLINE_VISIBILITY 1770 _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {} 1771 1772 _LIBCPP_INLINE_VISIBILITY 1773 _Tp operator=(_Tp __d) volatile _NOEXCEPT 1774 {__base::store(__d); return __d;} 1775 _LIBCPP_INLINE_VISIBILITY 1776 _Tp operator=(_Tp __d) _NOEXCEPT 1777 {__base::store(__d); return __d;} 1778 1779 atomic& operator=(const atomic&) = delete; 1780 atomic& operator=(const atomic&) volatile = delete; 1781}; 1782 1783// atomic<T*> 1784 1785template <class _Tp> 1786struct atomic<_Tp*> 1787 : public __atomic_base<_Tp*> 1788{ 1789 typedef __atomic_base<_Tp*> __base; 1790 typedef _Tp* value_type; 1791 typedef ptrdiff_t difference_type; 1792 1793 _LIBCPP_INLINE_VISIBILITY 1794 atomic() _NOEXCEPT = default; 1795 1796 _LIBCPP_INLINE_VISIBILITY 1797 _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {} 1798 1799 _LIBCPP_INLINE_VISIBILITY 1800 _Tp* operator=(_Tp* __d) volatile _NOEXCEPT 1801 {__base::store(__d); return __d;} 1802 _LIBCPP_INLINE_VISIBILITY 1803 _Tp* operator=(_Tp* __d) _NOEXCEPT 1804 {__base::store(__d); return __d;} 1805 1806 _LIBCPP_INLINE_VISIBILITY 1807 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT { 1808 // __atomic_fetch_add accepts function pointers, guard against them. 1809 static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed"); 1810 return __cxx_atomic_fetch_add(&this->__a_, __op, __m); 1811 } 1812 1813 _LIBCPP_INLINE_VISIBILITY 1814 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT { 1815 // __atomic_fetch_add accepts function pointers, guard against them. 1816 static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed"); 1817 return __cxx_atomic_fetch_add(&this->__a_, __op, __m); 1818 } 1819 1820 _LIBCPP_INLINE_VISIBILITY 1821 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT { 1822 // __atomic_fetch_add accepts function pointers, guard against them. 1823 static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed"); 1824 return __cxx_atomic_fetch_sub(&this->__a_, __op, __m); 1825 } 1826 1827 _LIBCPP_INLINE_VISIBILITY 1828 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT { 1829 // __atomic_fetch_add accepts function pointers, guard against them. 1830 static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed"); 1831 return __cxx_atomic_fetch_sub(&this->__a_, __op, __m); 1832 } 1833 1834 _LIBCPP_INLINE_VISIBILITY 1835 _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);} 1836 _LIBCPP_INLINE_VISIBILITY 1837 _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);} 1838 _LIBCPP_INLINE_VISIBILITY 1839 _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);} 1840 _LIBCPP_INLINE_VISIBILITY 1841 _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);} 1842 _LIBCPP_INLINE_VISIBILITY 1843 _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;} 1844 _LIBCPP_INLINE_VISIBILITY 1845 _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;} 1846 _LIBCPP_INLINE_VISIBILITY 1847 _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;} 1848 _LIBCPP_INLINE_VISIBILITY 1849 _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;} 1850 _LIBCPP_INLINE_VISIBILITY 1851 _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;} 1852 _LIBCPP_INLINE_VISIBILITY 1853 _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;} 1854 _LIBCPP_INLINE_VISIBILITY 1855 _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;} 1856 _LIBCPP_INLINE_VISIBILITY 1857 _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;} 1858 1859 atomic& operator=(const atomic&) = delete; 1860 atomic& operator=(const atomic&) volatile = delete; 1861}; 1862 1863// atomic_is_lock_free 1864 1865template <class _Tp> 1866_LIBCPP_INLINE_VISIBILITY 1867bool 1868atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT 1869{ 1870 return __o->is_lock_free(); 1871} 1872 1873template <class _Tp> 1874_LIBCPP_INLINE_VISIBILITY 1875bool 1876atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT 1877{ 1878 return __o->is_lock_free(); 1879} 1880 1881// atomic_init 1882 1883template <class _Tp> 1884_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY 1885void 1886atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT 1887{ 1888 __cxx_atomic_init(&__o->__a_, __d); 1889} 1890 1891template <class _Tp> 1892_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY 1893void 1894atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT 1895{ 1896 __cxx_atomic_init(&__o->__a_, __d); 1897} 1898 1899// atomic_store 1900 1901template <class _Tp> 1902_LIBCPP_INLINE_VISIBILITY 1903void 1904atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT 1905{ 1906 __o->store(__d); 1907} 1908 1909template <class _Tp> 1910_LIBCPP_INLINE_VISIBILITY 1911void 1912atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT 1913{ 1914 __o->store(__d); 1915} 1916 1917// atomic_store_explicit 1918 1919template <class _Tp> 1920_LIBCPP_INLINE_VISIBILITY 1921void 1922atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT 1923 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) 1924{ 1925 __o->store(__d, __m); 1926} 1927 1928template <class _Tp> 1929_LIBCPP_INLINE_VISIBILITY 1930void 1931atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT 1932 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) 1933{ 1934 __o->store(__d, __m); 1935} 1936 1937// atomic_load 1938 1939template <class _Tp> 1940_LIBCPP_INLINE_VISIBILITY 1941_Tp 1942atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT 1943{ 1944 return __o->load(); 1945} 1946 1947template <class _Tp> 1948_LIBCPP_INLINE_VISIBILITY 1949_Tp 1950atomic_load(const atomic<_Tp>* __o) _NOEXCEPT 1951{ 1952 return __o->load(); 1953} 1954 1955// atomic_load_explicit 1956 1957template <class _Tp> 1958_LIBCPP_INLINE_VISIBILITY 1959_Tp 1960atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT 1961 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) 1962{ 1963 return __o->load(__m); 1964} 1965 1966template <class _Tp> 1967_LIBCPP_INLINE_VISIBILITY 1968_Tp 1969atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT 1970 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) 1971{ 1972 return __o->load(__m); 1973} 1974 1975// atomic_exchange 1976 1977template <class _Tp> 1978_LIBCPP_INLINE_VISIBILITY 1979_Tp 1980atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT 1981{ 1982 return __o->exchange(__d); 1983} 1984 1985template <class _Tp> 1986_LIBCPP_INLINE_VISIBILITY 1987_Tp 1988atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT 1989{ 1990 return __o->exchange(__d); 1991} 1992 1993// atomic_exchange_explicit 1994 1995template <class _Tp> 1996_LIBCPP_INLINE_VISIBILITY 1997_Tp 1998atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT 1999{ 2000 return __o->exchange(__d, __m); 2001} 2002 2003template <class _Tp> 2004_LIBCPP_INLINE_VISIBILITY 2005_Tp 2006atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT 2007{ 2008 return __o->exchange(__d, __m); 2009} 2010 2011// atomic_compare_exchange_weak 2012 2013template <class _Tp> 2014_LIBCPP_INLINE_VISIBILITY 2015bool 2016atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT 2017{ 2018 return __o->compare_exchange_weak(*__e, __d); 2019} 2020 2021template <class _Tp> 2022_LIBCPP_INLINE_VISIBILITY 2023bool 2024atomic_compare_exchange_weak(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT 2025{ 2026 return __o->compare_exchange_weak(*__e, __d); 2027} 2028 2029// atomic_compare_exchange_strong 2030 2031template <class _Tp> 2032_LIBCPP_INLINE_VISIBILITY 2033bool 2034atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT 2035{ 2036 return __o->compare_exchange_strong(*__e, __d); 2037} 2038 2039template <class _Tp> 2040_LIBCPP_INLINE_VISIBILITY 2041bool 2042atomic_compare_exchange_strong(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT 2043{ 2044 return __o->compare_exchange_strong(*__e, __d); 2045} 2046 2047// atomic_compare_exchange_weak_explicit 2048 2049template <class _Tp> 2050_LIBCPP_INLINE_VISIBILITY 2051bool 2052atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, 2053 typename atomic<_Tp>::value_type __d, 2054 memory_order __s, memory_order __f) _NOEXCEPT 2055 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) 2056{ 2057 return __o->compare_exchange_weak(*__e, __d, __s, __f); 2058} 2059 2060template <class _Tp> 2061_LIBCPP_INLINE_VISIBILITY 2062bool 2063atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d, 2064 memory_order __s, memory_order __f) _NOEXCEPT 2065 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) 2066{ 2067 return __o->compare_exchange_weak(*__e, __d, __s, __f); 2068} 2069 2070// atomic_compare_exchange_strong_explicit 2071 2072template <class _Tp> 2073_LIBCPP_INLINE_VISIBILITY 2074bool 2075atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o, 2076 typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d, 2077 memory_order __s, memory_order __f) _NOEXCEPT 2078 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) 2079{ 2080 return __o->compare_exchange_strong(*__e, __d, __s, __f); 2081} 2082 2083template <class _Tp> 2084_LIBCPP_INLINE_VISIBILITY 2085bool 2086atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, 2087 typename atomic<_Tp>::value_type __d, 2088 memory_order __s, memory_order __f) _NOEXCEPT 2089 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) 2090{ 2091 return __o->compare_exchange_strong(*__e, __d, __s, __f); 2092} 2093 2094// atomic_wait 2095 2096template <class _Tp> 2097_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2098void atomic_wait(const volatile atomic<_Tp>* __o, 2099 typename atomic<_Tp>::value_type __v) _NOEXCEPT 2100{ 2101 return __o->wait(__v); 2102} 2103 2104template <class _Tp> 2105_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2106void atomic_wait(const atomic<_Tp>* __o, 2107 typename atomic<_Tp>::value_type __v) _NOEXCEPT 2108{ 2109 return __o->wait(__v); 2110} 2111 2112// atomic_wait_explicit 2113 2114template <class _Tp> 2115_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2116void atomic_wait_explicit(const volatile atomic<_Tp>* __o, 2117 typename atomic<_Tp>::value_type __v, 2118 memory_order __m) _NOEXCEPT 2119 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) 2120{ 2121 return __o->wait(__v, __m); 2122} 2123 2124template <class _Tp> 2125_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2126void atomic_wait_explicit(const atomic<_Tp>* __o, 2127 typename atomic<_Tp>::value_type __v, 2128 memory_order __m) _NOEXCEPT 2129 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) 2130{ 2131 return __o->wait(__v, __m); 2132} 2133 2134// atomic_notify_one 2135 2136template <class _Tp> 2137_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2138void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT 2139{ 2140 __o->notify_one(); 2141} 2142template <class _Tp> 2143_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2144void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT 2145{ 2146 __o->notify_one(); 2147} 2148 2149// atomic_notify_one 2150 2151template <class _Tp> 2152_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2153void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT 2154{ 2155 __o->notify_all(); 2156} 2157template <class _Tp> 2158_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2159void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT 2160{ 2161 __o->notify_all(); 2162} 2163 2164// atomic_fetch_add 2165 2166template <class _Tp> 2167_LIBCPP_INLINE_VISIBILITY 2168_Tp 2169atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT 2170{ 2171 return __o->fetch_add(__op); 2172} 2173 2174template <class _Tp> 2175_LIBCPP_INLINE_VISIBILITY 2176_Tp 2177atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT 2178{ 2179 return __o->fetch_add(__op); 2180} 2181 2182// atomic_fetch_add_explicit 2183 2184template <class _Tp> 2185_LIBCPP_INLINE_VISIBILITY 2186_Tp atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT 2187{ 2188 return __o->fetch_add(__op, __m); 2189} 2190 2191template <class _Tp> 2192_LIBCPP_INLINE_VISIBILITY 2193_Tp atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT 2194{ 2195 return __o->fetch_add(__op, __m); 2196} 2197 2198// atomic_fetch_sub 2199 2200template <class _Tp> 2201_LIBCPP_INLINE_VISIBILITY 2202_Tp atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT 2203{ 2204 return __o->fetch_sub(__op); 2205} 2206 2207template <class _Tp> 2208_LIBCPP_INLINE_VISIBILITY 2209_Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT 2210{ 2211 return __o->fetch_sub(__op); 2212} 2213 2214// atomic_fetch_sub_explicit 2215 2216template <class _Tp> 2217_LIBCPP_INLINE_VISIBILITY 2218_Tp atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT 2219{ 2220 return __o->fetch_sub(__op, __m); 2221} 2222 2223template <class _Tp> 2224_LIBCPP_INLINE_VISIBILITY 2225_Tp atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT 2226{ 2227 return __o->fetch_sub(__op, __m); 2228} 2229 2230// atomic_fetch_and 2231 2232template <class _Tp> 2233_LIBCPP_INLINE_VISIBILITY 2234typename enable_if 2235< 2236 is_integral<_Tp>::value && !is_same<_Tp, bool>::value, 2237 _Tp 2238>::type 2239atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT 2240{ 2241 return __o->fetch_and(__op); 2242} 2243 2244template <class _Tp> 2245_LIBCPP_INLINE_VISIBILITY 2246typename enable_if 2247< 2248 is_integral<_Tp>::value && !is_same<_Tp, bool>::value, 2249 _Tp 2250>::type 2251atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT 2252{ 2253 return __o->fetch_and(__op); 2254} 2255 2256// atomic_fetch_and_explicit 2257 2258template <class _Tp> 2259_LIBCPP_INLINE_VISIBILITY 2260typename enable_if 2261< 2262 is_integral<_Tp>::value && !is_same<_Tp, bool>::value, 2263 _Tp 2264>::type 2265atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT 2266{ 2267 return __o->fetch_and(__op, __m); 2268} 2269 2270template <class _Tp> 2271_LIBCPP_INLINE_VISIBILITY 2272typename enable_if 2273< 2274 is_integral<_Tp>::value && !is_same<_Tp, bool>::value, 2275 _Tp 2276>::type 2277atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT 2278{ 2279 return __o->fetch_and(__op, __m); 2280} 2281 2282// atomic_fetch_or 2283 2284template <class _Tp> 2285_LIBCPP_INLINE_VISIBILITY 2286typename enable_if 2287< 2288 is_integral<_Tp>::value && !is_same<_Tp, bool>::value, 2289 _Tp 2290>::type 2291atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT 2292{ 2293 return __o->fetch_or(__op); 2294} 2295 2296template <class _Tp> 2297_LIBCPP_INLINE_VISIBILITY 2298typename enable_if 2299< 2300 is_integral<_Tp>::value && !is_same<_Tp, bool>::value, 2301 _Tp 2302>::type 2303atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT 2304{ 2305 return __o->fetch_or(__op); 2306} 2307 2308// atomic_fetch_or_explicit 2309 2310template <class _Tp> 2311_LIBCPP_INLINE_VISIBILITY 2312typename enable_if 2313< 2314 is_integral<_Tp>::value && !is_same<_Tp, bool>::value, 2315 _Tp 2316>::type 2317atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT 2318{ 2319 return __o->fetch_or(__op, __m); 2320} 2321 2322template <class _Tp> 2323_LIBCPP_INLINE_VISIBILITY 2324typename enable_if 2325< 2326 is_integral<_Tp>::value && !is_same<_Tp, bool>::value, 2327 _Tp 2328>::type 2329atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT 2330{ 2331 return __o->fetch_or(__op, __m); 2332} 2333 2334// atomic_fetch_xor 2335 2336template <class _Tp> 2337_LIBCPP_INLINE_VISIBILITY 2338typename enable_if 2339< 2340 is_integral<_Tp>::value && !is_same<_Tp, bool>::value, 2341 _Tp 2342>::type 2343atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT 2344{ 2345 return __o->fetch_xor(__op); 2346} 2347 2348template <class _Tp> 2349_LIBCPP_INLINE_VISIBILITY 2350typename enable_if 2351< 2352 is_integral<_Tp>::value && !is_same<_Tp, bool>::value, 2353 _Tp 2354>::type 2355atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT 2356{ 2357 return __o->fetch_xor(__op); 2358} 2359 2360// atomic_fetch_xor_explicit 2361 2362template <class _Tp> 2363_LIBCPP_INLINE_VISIBILITY 2364typename enable_if 2365< 2366 is_integral<_Tp>::value && !is_same<_Tp, bool>::value, 2367 _Tp 2368>::type 2369atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT 2370{ 2371 return __o->fetch_xor(__op, __m); 2372} 2373 2374template <class _Tp> 2375_LIBCPP_INLINE_VISIBILITY 2376typename enable_if 2377< 2378 is_integral<_Tp>::value && !is_same<_Tp, bool>::value, 2379 _Tp 2380>::type 2381atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT 2382{ 2383 return __o->fetch_xor(__op, __m); 2384} 2385 2386// flag type and operations 2387 2388typedef struct atomic_flag 2389{ 2390 __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_; 2391 2392 _LIBCPP_INLINE_VISIBILITY 2393 bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT 2394 {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);} 2395 _LIBCPP_INLINE_VISIBILITY 2396 bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT 2397 {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);} 2398 2399 _LIBCPP_INLINE_VISIBILITY 2400 bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 2401 {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);} 2402 _LIBCPP_INLINE_VISIBILITY 2403 bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT 2404 {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);} 2405 _LIBCPP_INLINE_VISIBILITY 2406 void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 2407 {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);} 2408 _LIBCPP_INLINE_VISIBILITY 2409 void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT 2410 {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);} 2411 2412 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2413 void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT 2414 {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);} 2415 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2416 void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT 2417 {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);} 2418 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2419 void notify_one() volatile _NOEXCEPT 2420 {__cxx_atomic_notify_one(&__a_);} 2421 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2422 void notify_one() _NOEXCEPT 2423 {__cxx_atomic_notify_one(&__a_);} 2424 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2425 void notify_all() volatile _NOEXCEPT 2426 {__cxx_atomic_notify_all(&__a_);} 2427 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY 2428 void notify_all() _NOEXCEPT 2429 {__cxx_atomic_notify_all(&__a_);} 2430 2431#if _LIBCPP_STD_VER > 17 2432 _LIBCPP_INLINE_VISIBILITY constexpr 2433 atomic_flag() _NOEXCEPT : __a_(false) {} 2434#else 2435 _LIBCPP_INLINE_VISIBILITY 2436 atomic_flag() _NOEXCEPT = default; 2437#endif 2438 2439 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR 2440 atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION 2441 2442#ifndef _LIBCPP_CXX03_LANG 2443 atomic_flag(const atomic_flag&) = delete; 2444 atomic_flag& operator=(const atomic_flag&) = delete; 2445 atomic_flag& operator=(const atomic_flag&) volatile = delete; 2446#else 2447private: 2448 _LIBCPP_INLINE_VISIBILITY 2449 atomic_flag(const atomic_flag&); 2450 _LIBCPP_INLINE_VISIBILITY 2451 atomic_flag& operator=(const atomic_flag&); 2452 _LIBCPP_INLINE_VISIBILITY 2453 atomic_flag& operator=(const atomic_flag&) volatile; 2454#endif 2455} atomic_flag; 2456 2457 2458inline _LIBCPP_INLINE_VISIBILITY 2459bool 2460atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT 2461{ 2462 return __o->test(); 2463} 2464 2465inline _LIBCPP_INLINE_VISIBILITY 2466bool 2467atomic_flag_test(const atomic_flag* __o) _NOEXCEPT 2468{ 2469 return __o->test(); 2470} 2471 2472inline _LIBCPP_INLINE_VISIBILITY 2473bool 2474atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT 2475{ 2476 return __o->test(__m); 2477} 2478 2479inline _LIBCPP_INLINE_VISIBILITY 2480bool 2481atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT 2482{ 2483 return __o->test(__m); 2484} 2485 2486inline _LIBCPP_INLINE_VISIBILITY 2487bool 2488atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT 2489{ 2490 return __o->test_and_set(); 2491} 2492 2493inline _LIBCPP_INLINE_VISIBILITY 2494bool 2495atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT 2496{ 2497 return __o->test_and_set(); 2498} 2499 2500inline _LIBCPP_INLINE_VISIBILITY 2501bool 2502atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT 2503{ 2504 return __o->test_and_set(__m); 2505} 2506 2507inline _LIBCPP_INLINE_VISIBILITY 2508bool 2509atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT 2510{ 2511 return __o->test_and_set(__m); 2512} 2513 2514inline _LIBCPP_INLINE_VISIBILITY 2515void 2516atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT 2517{ 2518 __o->clear(); 2519} 2520 2521inline _LIBCPP_INLINE_VISIBILITY 2522void 2523atomic_flag_clear(atomic_flag* __o) _NOEXCEPT 2524{ 2525 __o->clear(); 2526} 2527 2528inline _LIBCPP_INLINE_VISIBILITY 2529void 2530atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT 2531{ 2532 __o->clear(__m); 2533} 2534 2535inline _LIBCPP_INLINE_VISIBILITY 2536void 2537atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT 2538{ 2539 __o->clear(__m); 2540} 2541 2542inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC 2543void 2544atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT 2545{ 2546 __o->wait(__v); 2547} 2548 2549inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC 2550void 2551atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT 2552{ 2553 __o->wait(__v); 2554} 2555 2556inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC 2557void 2558atomic_flag_wait_explicit(const volatile atomic_flag* __o, 2559 bool __v, memory_order __m) _NOEXCEPT 2560{ 2561 __o->wait(__v, __m); 2562} 2563 2564inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC 2565void 2566atomic_flag_wait_explicit(const atomic_flag* __o, 2567 bool __v, memory_order __m) _NOEXCEPT 2568{ 2569 __o->wait(__v, __m); 2570} 2571 2572inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC 2573void 2574atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT 2575{ 2576 __o->notify_one(); 2577} 2578 2579inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC 2580void 2581atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT 2582{ 2583 __o->notify_one(); 2584} 2585 2586inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC 2587void 2588atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT 2589{ 2590 __o->notify_all(); 2591} 2592 2593inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC 2594void 2595atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT 2596{ 2597 __o->notify_all(); 2598} 2599 2600// fences 2601 2602inline _LIBCPP_INLINE_VISIBILITY 2603void 2604atomic_thread_fence(memory_order __m) _NOEXCEPT 2605{ 2606 __cxx_atomic_thread_fence(__m); 2607} 2608 2609inline _LIBCPP_INLINE_VISIBILITY 2610void 2611atomic_signal_fence(memory_order __m) _NOEXCEPT 2612{ 2613 __cxx_atomic_signal_fence(__m); 2614} 2615 2616// Atomics for standard typedef types 2617 2618typedef atomic<bool> atomic_bool; 2619typedef atomic<char> atomic_char; 2620typedef atomic<signed char> atomic_schar; 2621typedef atomic<unsigned char> atomic_uchar; 2622typedef atomic<short> atomic_short; 2623typedef atomic<unsigned short> atomic_ushort; 2624typedef atomic<int> atomic_int; 2625typedef atomic<unsigned int> atomic_uint; 2626typedef atomic<long> atomic_long; 2627typedef atomic<unsigned long> atomic_ulong; 2628typedef atomic<long long> atomic_llong; 2629typedef atomic<unsigned long long> atomic_ullong; 2630#ifndef _LIBCPP_HAS_NO_CHAR8_T 2631typedef atomic<char8_t> atomic_char8_t; 2632#endif 2633typedef atomic<char16_t> atomic_char16_t; 2634typedef atomic<char32_t> atomic_char32_t; 2635#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS 2636typedef atomic<wchar_t> atomic_wchar_t; 2637#endif 2638 2639typedef atomic<int_least8_t> atomic_int_least8_t; 2640typedef atomic<uint_least8_t> atomic_uint_least8_t; 2641typedef atomic<int_least16_t> atomic_int_least16_t; 2642typedef atomic<uint_least16_t> atomic_uint_least16_t; 2643typedef atomic<int_least32_t> atomic_int_least32_t; 2644typedef atomic<uint_least32_t> atomic_uint_least32_t; 2645typedef atomic<int_least64_t> atomic_int_least64_t; 2646typedef atomic<uint_least64_t> atomic_uint_least64_t; 2647 2648typedef atomic<int_fast8_t> atomic_int_fast8_t; 2649typedef atomic<uint_fast8_t> atomic_uint_fast8_t; 2650typedef atomic<int_fast16_t> atomic_int_fast16_t; 2651typedef atomic<uint_fast16_t> atomic_uint_fast16_t; 2652typedef atomic<int_fast32_t> atomic_int_fast32_t; 2653typedef atomic<uint_fast32_t> atomic_uint_fast32_t; 2654typedef atomic<int_fast64_t> atomic_int_fast64_t; 2655typedef atomic<uint_fast64_t> atomic_uint_fast64_t; 2656 2657typedef atomic< int8_t> atomic_int8_t; 2658typedef atomic<uint8_t> atomic_uint8_t; 2659typedef atomic< int16_t> atomic_int16_t; 2660typedef atomic<uint16_t> atomic_uint16_t; 2661typedef atomic< int32_t> atomic_int32_t; 2662typedef atomic<uint32_t> atomic_uint32_t; 2663typedef atomic< int64_t> atomic_int64_t; 2664typedef atomic<uint64_t> atomic_uint64_t; 2665 2666typedef atomic<intptr_t> atomic_intptr_t; 2667typedef atomic<uintptr_t> atomic_uintptr_t; 2668typedef atomic<size_t> atomic_size_t; 2669typedef atomic<ptrdiff_t> atomic_ptrdiff_t; 2670typedef atomic<intmax_t> atomic_intmax_t; 2671typedef atomic<uintmax_t> atomic_uintmax_t; 2672 2673// atomic_*_lock_free : prefer the contention type most highly, then the largest lock-free type 2674 2675#ifdef __cpp_lib_atomic_is_always_lock_free 2676# define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0) 2677#else 2678# define _LIBCPP_CONTENTION_LOCK_FREE false 2679#endif 2680 2681#if ATOMIC_LLONG_LOCK_FREE == 2 2682typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type __libcpp_signed_lock_free; 2683typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free; 2684#elif ATOMIC_INT_LOCK_FREE == 2 2685typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type __libcpp_signed_lock_free; 2686typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type __libcpp_unsigned_lock_free; 2687#elif ATOMIC_SHORT_LOCK_FREE == 2 2688typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type __libcpp_signed_lock_free; 2689typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type __libcpp_unsigned_lock_free; 2690#elif ATOMIC_CHAR_LOCK_FREE == 2 2691typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type __libcpp_signed_lock_free; 2692typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type __libcpp_unsigned_lock_free; 2693#else 2694 // No signed/unsigned lock-free types 2695#define _LIBCPP_NO_LOCK_FREE_TYPES 2696#endif 2697 2698#if !defined(_LIBCPP_NO_LOCK_FREE_TYPES) 2699typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free; 2700typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free; 2701#endif 2702 2703#define ATOMIC_FLAG_INIT {false} 2704#define ATOMIC_VAR_INIT(__v) {__v} 2705 2706#if _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS) 2707# if defined(_LIBCPP_CLANG_VER) && _LIBCPP_CLANG_VER >= 1400 2708# pragma clang deprecated(ATOMIC_FLAG_INIT) 2709# pragma clang deprecated(ATOMIC_VAR_INIT) 2710# endif 2711#endif // _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS) 2712 2713_LIBCPP_END_NAMESPACE_STD 2714 2715#endif // _LIBCPP_ATOMIC 2716