
Searched refs:memory_order_acquire (Results 1 – 25 of 82) sorted by relevance

/freebsd/contrib/llvm-project/libcxx/include/__atomic/
to_gcc_order.h 28 : (__order == memory_order_acquire in __to_gcc_order()
41 : (__order == memory_order_acquire in __to_gcc_failure_order()
memory_order.h 45 inline constexpr auto memory_order_acquire = memory_order::acquire; variable
55 memory_order_acquire = __mo_acquire, enumerator
check_memory_order.h 19 …_LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || __m == memory_order_acquire || __m == memo…
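
The three libc++ headers above define memory_order_acquire and translate it to the compiler's built-in ordering constants. A minimal sketch of that translation step (illustrative only, not the libc++ implementation; __ATOMIC_ACQUIRE and the other __ATOMIC_* names are the GCC/Clang built-in macros):

    #include <atomic>

    // Map a std::memory_order value onto the matching __ATOMIC_* constant,
    // the same job to_gcc_order.h performs for libc++'s atomic operations.
    constexpr int to_gcc_order(std::memory_order order) {
      return order == std::memory_order_relaxed ? __ATOMIC_RELAXED
           : order == std::memory_order_acquire ? __ATOMIC_ACQUIRE
           : order == std::memory_order_release ? __ATOMIC_RELEASE
           : order == std::memory_order_acq_rel ? __ATOMIC_ACQ_REL
           : order == std::memory_order_seq_cst ? __ATOMIC_SEQ_CST
           :                                      __ATOMIC_CONSUME;
    }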
/freebsd/contrib/llvm-project/compiler-rt/lib/xray/
xray_interface.cpp 211 memory_order_acquire)) in patchFunction()
266 memory_order_acquire)) in controlPatching()
416 memory_order_acquire)) { in __xray_set_handler()
429 memory_order_acquire)) { in __xray_set_customevent_handler()
441 memory_order_acquire)) { in __xray_set_typedevent_handler()
492 memory_order_acquire)) in __xray_set_handler_arg1()
xray_init.cpp 66 if (atomic_load(&XRayInitialized, memory_order_acquire)) in __xray_init()
72 if (!atomic_load(&XRayFlagsInitialized, memory_order_acquire)) { in __xray_init()
xray_basic_logging.cpp 94 if (atomic_load(&UseRealTSC, memory_order_acquire)) in getLog()
106 Header.CycleFrequency = atomic_load(&CycleFrequency, memory_order_acquire); in getLog()
424 atomic_load(&TicksPerSec, memory_order_acquire) * in basicLoggingInit()
427 __xray_set_handler_arg1(atomic_load(&UseRealTSC, memory_order_acquire) in basicLoggingInit()
430 __xray_set_handler(atomic_load(&UseRealTSC, memory_order_acquire) in basicLoggingInit()
xray_fdr_logging.cpp 249 atomic_thread_fence(memory_order_acquire); in fdrIterator()
250 auto BufferSize = atomic_load(It->Extents, memory_order_acquire); in fdrIterator()
277 if (atomic_load(&LoggingStatus, memory_order_acquire) != in fdrLoggingFlush()
363 auto BufferExtents = atomic_load(B.Extents, memory_order_acquire); in fdrLoggingFlush()
445 auto Status = atomic_load(&LoggingStatus, memory_order_acquire); in setupTLD()
xray_buffer_queue.h 233 return atomic_load(&Finalizing, memory_order_acquire); in finalizing()
237 return atomic_load(&Generation, memory_order_acquire); in generation()
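
Most of the XRay hits are acquire loads of one-shot flags such as XRayInitialized: readers acquire-load the flag, and the initializer release-stores it only after the state it guards is fully written. A minimal sketch of that pattern with std::atomic (the XRay code uses compiler-rt's own atomic wrappers; the names below are invented):

    #include <atomic>

    static std::atomic<bool> initialized{false};  // hypothetical stand-in for XRayInitialized
    static int shared_config = 0;                 // plain state guarded by the flag

    void init_once() {
      shared_config = 42;                                   // write the guarded state first
      initialized.store(true, std::memory_order_release);   // then publish it
    }

    bool use_if_ready() {
      if (!initialized.load(std::memory_order_acquire))     // pairs with the release store
        return false;                                       // not initialized yet
      return shared_config == 42;                           // guaranteed visible here
    }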
/freebsd/contrib/llvm-project/compiler-rt/lib/asan/
asan_allocator.cpp 175 return atomic_load(&magic, memory_order_acquire) == kAllocBegMagic in Get()
238 memory_order_acquire)) { in Recycle()
405 if (ac && atomic_load(&ac->chunk_state, memory_order_acquire) == in RePoisonChunk()
447 options->min_redzone = atomic_load(&min_redzone, memory_order_acquire); in GetOptions()
448 options->max_redzone = atomic_load(&max_redzone, memory_order_acquire); in GetOptions()
451 atomic_load(&alloc_dealloc_mismatch, memory_order_acquire); in GetOptions()
466 u32 min_log = RZSize2Log(atomic_load(&min_redzone, memory_order_acquire)); in ComputeRZLog()
467 u32 max_log = RZSize2Log(atomic_load(&max_redzone, memory_order_acquire)); in ComputeRZLog()
519 if (atomic_load(&m->chunk_state, memory_order_acquire) != CHUNK_ALLOCATED) in UpdateAllocationStack()
670 memory_order_acquire)) { in AtomicallySetQuarantineFlagIfAllocated()
[all …]
/freebsd/contrib/llvm-project/compiler-rt/lib/sanitizer_common/
sanitizer_tls_get_addr.h 67 (DTLS::DTVBlock *)atomic_load(&dtls->dtv_block, memory_order_acquire); in ForEachDVT()
71 block = (DTLS::DTVBlock *)atomic_load(&block->next, memory_order_acquire); in ForEachDVT()
sanitizer_atomic.h 27 memory_order_acquire = __ATOMIC_ACQUIRE, enumerator
34 memory_order_acquire = 1 << 2,
sanitizer_lfstack.h 48 u64 cmp = atomic_load(&head_, memory_order_acquire); in Pop()
57 memory_order_acquire)) in Pop()
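
The lfstack hits show the usual lock-free Pop shape: acquire-load the head, then retry a compare-exchange until it succeeds. A simplified sketch of that shape follows (illustrative only; it ignores the ABA problem, which sanitizer_lfstack.h avoids by packing a generation counter into the 64-bit head word):

    #include <atomic>

    template <class T> struct Node { T value; Node *next; };

    template <class T>
    Node<T> *pop(std::atomic<Node<T> *> &head) {
      // Acquire so that the node contents written before the matching
      // release-push are visible once the CAS succeeds.
      Node<T> *cur = head.load(std::memory_order_acquire);
      while (cur && !head.compare_exchange_weak(cur, cur->next,
                                                std::memory_order_acquire,
                                                std::memory_order_acquire))
        ;  // cur is reloaded by the failed CAS; retry
      return cur;
    }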
sanitizer_libignore.h 91 const uptr n = atomic_load(&ignored_ranges_count_, memory_order_acquire); in IsIgnored()
105 const uptr n = atomic_load(&instrumented_ranges_count_, memory_order_acquire); in IsPcInstrumented()
sanitizer_mutex.h 36 return atomic_exchange(&state_, 1, memory_order_acquire) == 0; in TryLock()
190 memory_order_acquire))) in Lock()
218 &state_, &state, state | kWriterLock, memory_order_acquire))) { in TryLock()
274 memory_order_acquire))) in ReadLock()
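
The mutex hits are the acquire half of a test-and-set lock: the exchange (or compare-exchange) that takes the lock uses memory_order_acquire so the critical section cannot be reordered before it, and the matching unlock uses memory_order_release. A minimal sketch of TryLock in that style (not the sanitizer implementation):

    #include <atomic>

    struct SpinMutex {
      std::atomic<unsigned> state_{0};

      bool try_lock() {
        // Exchanging in a 1 with acquire both claims the lock and makes every
        // write the previous holder did before its release-unlock visible here.
        return state_.exchange(1, std::memory_order_acquire) == 0;
      }

      void unlock() {
        state_.store(0, std::memory_order_release);  // publish the critical section
      }
    };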
sanitizer_addrhashmap.h 121 uptr addr1 = atomic_load(&c->addr, memory_order_acquire); in ForEach()
128 (AddBucket *)atomic_load(&bucket->add, memory_order_acquire)) { in ForEach()
131 uptr addr1 = atomic_load(&c->addr, memory_order_acquire); in ForEach()
226 uptr addr1 = atomic_load(&c->addr, memory_order_acquire); in acquire()
sanitizer_tls_get_addr.cpp 47 uptr v = atomic_load(cur, memory_order_acquire); in DTLS_NextBlock()
85 (DTLS::DTVBlock *)atomic_load(&block->next, memory_order_acquire); in DTLS_Destroy()
/freebsd/contrib/llvm-project/libcxx/include/__stop_token/
stop_state.h 92 return (__state_.load(std::memory_order_acquire) & __stop_requested_bit) != 0; in __stop_requested()
99 __state_t __curent_state = __state_.load(std::memory_order_acquire); in __stop_possible_for_stop_token()
180 __cb->__completed_.wait(false, std::memory_order_acquire); in __remove_callback()
atomic_unique_lock.h 46 __is_locked_ = __lock_impl(__give_up_locking, __set_locked_bit, std::memory_order_acquire); in __atomic_unique_lock()
74 // std::memory_order_acquire because we'd like to make sure that all the read operations after the lock can read the in __lock()
76 __lock_impl(__never_give_up_locking, __set_locked_bit, std::memory_order_acquire); in __lock()
125 __locked_ordering, // sucessful exchange order. Usually it should be std::memory_order_acquire. in __lock_impl()
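
In the stop_token code the acquire loads read a packed state word, and __remove_callback blocks on a C++20 atomic wait until the running callback flips a completed flag. A minimal sketch of that last pattern (hypothetical names, not the libc++ code):

    #include <atomic>

    std::atomic<bool> completed{false};   // stand-in for __cb->__completed_

    void finish_callback() {
      completed.store(true, std::memory_order_release);  // callback's work is done
      completed.notify_all();                            // wake the waiting destructor
    }

    void wait_for_callback() {
      // Blocks while the value is still false; the acquire load makes the
      // callback's side effects visible once the wait returns.
      completed.wait(false, std::memory_order_acquire);
    }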
/freebsd/contrib/llvm-project/lldb/source/Utility/
Timer.cpp 141 uint64_t nanos = i->m_nanos.load(std::memory_order_acquire); in DumpCategoryTimes()
143 uint64_t nanos_total = i->m_nanos_total.load(std::memory_order_acquire); in DumpCategoryTimes()
144 uint64_t count = i->m_count.load(std::memory_order_acquire); in DumpCategoryTimes()
/freebsd/contrib/llvm-project/compiler-rt/lib/scudo/standalone/
atomic_helpers.h 19 memory_order_acquire = 2, enumerator
26 static_assert(memory_order_acquire == __ATOMIC_ACQUIRE, "");
linux.cpp 113 memory_order_acquire) == Unlocked; in lockSlow()
119 memory_order_acquire); in lockSlow()
123 V = atomic_exchange(&M, Sleeping, memory_order_acquire);
127 V = atomic_exchange(&M, Sleeping, memory_order_acquire); in unlock()
140 CHECK(atomic_load(&M, memory_order_acquire) != Unlocked); in getMonotonicTime()
/freebsd/tools/test/stress2/misc/
vm_reserv_populate.sh 99 memory_order_acquire = 1 << 2,
116 | memory_order_acquire | memory_order_seq_cst));
/freebsd/contrib/llvm-project/llvm/include/llvm/Support/
ManagedStatic.h 87 void *Tmp = Ptr.load(std::memory_order_acquire);
97 void *Tmp = Ptr.load(std::memory_order_acquire);
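
ManagedStatic is the classic double-checked initialization: a fast-path acquire load of the pointer, and only if it is null does the caller take a lock, re-check, construct, and release-store. A minimal sketch of that pattern (illustrative; the names are invented):

    #include <atomic>
    #include <mutex>

    struct Widget {};                    // hypothetical lazily created object

    std::atomic<Widget *> ptr{nullptr};
    std::mutex init_lock;

    Widget *get() {
      Widget *tmp = ptr.load(std::memory_order_acquire);  // fast path
      if (!tmp) {
        std::lock_guard<std::mutex> guard(init_lock);
        tmp = ptr.load(std::memory_order_relaxed);        // re-check under the lock
        if (!tmp) {
          tmp = new Widget();
          ptr.store(tmp, std::memory_order_release);      // publish fully built object
        }
      }
      return tmp;
    }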
/freebsd/sys/dev/qcom_rnd/
qcom_rnd.c 124 sc, memory_order_release, memory_order_acquire)) { in qcom_rnd_attach()
173 atomic_load_explicit(&g_qcom_rnd_softc, memory_order_acquire) == sc, in qcom_rnd_detach()
/freebsd/contrib/llvm-project/libcxx/include/
latch 98 auto __value = __a_.load(memory_order_acquire);
103 …__a_, [this](ptrdiff_t& __value) -> bool { return try_wait_impl(__value); }, memory_order_acquire);
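
The latch hits show the waiting side: an acquire load of the counter pairs with the release decrement performed by count_down(), so everything done before the final count_down() is visible to threads that return from wait(). A minimal sketch of that counter protocol (spinning instead of blocking, unlike the real latch):

    #include <atomic>
    #include <cstddef>

    struct SimpleLatch {
      std::atomic<std::ptrdiff_t> counter_;
      explicit SimpleLatch(std::ptrdiff_t n) : counter_(n) {}

      void count_down() {
        counter_.fetch_sub(1, std::memory_order_release);      // publish this thread's work
      }

      bool try_wait() const {
        return counter_.load(std::memory_order_acquire) == 0;  // pairs with the decrements
      }

      void wait() const {
        while (!try_wait()) {
          // busy-wait for brevity; std::latch blocks via atomic wait/notify
        }
      }
    };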
