/freebsd/contrib/llvm-project/compiler-rt/lib/sanitizer_common/
sanitizer_addrhashmap.h
  121: uptr addr1 = atomic_load(&c->addr, memory_order_acquire);  // in ForEach()
  128: (AddBucket *)atomic_load(&bucket->add, memory_order_acquire)) {  // in ForEach()
  131: uptr addr1 = atomic_load(&c->addr, memory_order_acquire);  // in ForEach()
  226: uptr addr1 = atomic_load(&c->addr, memory_order_acquire);  // in acquire()
  234: if (atomic_load(&b->add, memory_order_relaxed)) {  // in acquire()
  236: AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);  // in acquire()
  239: uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  // in acquire()
  255: uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  // in acquire()
  267: AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);  // in acquire()
  271: uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  // in acquire()
  [all …]

sanitizer_mutex.h
  44:  CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);  // in CheckLocked()
  186: state = atomic_load(&state_, memory_order_relaxed);  // in Lock()
  206: state = atomic_load(&state_, memory_order_relaxed);  // in Lock()
  270: state = atomic_load(&state_, memory_order_relaxed);  // in ReadLock()
  286: state = atomic_load(&state_, memory_order_relaxed);  // in ReadLock()
  317: CHECK(atomic_load(&state_, memory_order_relaxed) & kWriterLock);  // in CheckWriteLocked()
  323: CHECK(atomic_load(&state_, memory_order_relaxed) & kReaderLockMask);  // in CheckReadLocked()

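Every hit in sanitizer_mutex.h is a relaxed load: the spin loops only watch for the lock to become free, and the Check*Locked() assertions are debug-only. A minimal sketch of that pattern, with illustrative names (not the compiler-rt implementation):

    #include <atomic>
    #include <cassert>

    // The spin loop re-reads state_ with a relaxed load; only the CAS
    // that actually takes the lock needs acquire semantics.
    class SpinMutex {
      std::atomic<unsigned> state_{0};

     public:
      void Lock() {
        for (;;) {
          unsigned expected = 0;
          if (state_.compare_exchange_weak(expected, 1,
                                           std::memory_order_acquire,
                                           std::memory_order_relaxed))
            return;
          // No ordering is needed just to watch the lock; spin relaxed.
          while (state_.load(std::memory_order_relaxed) != 0) {
          }
        }
      }
      void Unlock() { state_.store(0, std::memory_order_release); }
      void CheckLocked() const {
        // Debug assertion only, so a relaxed load is enough.
        assert(state_.load(std::memory_order_relaxed) == 1);
      }
    };
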
sanitizer_lfstack.h
  32: return (atomic_load(&head_, memory_order_relaxed) & kPtrMask) == 0;  // in Empty()
  36: u64 cmp = atomic_load(&head_, memory_order_relaxed);  // in Push()
  48: u64 cmp = atomic_load(&head_, memory_order_acquire);  // in Pop()

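These three hits outline a lock-free (Treiber) stack whose 64-bit head_ packs a node pointer with an ABA generation counter, hence kPtrMask. A sketch under that assumption; names mirror the snippets but the code is illustrative, not the compiler-rt source, and it assumes nodes come from a pool that is never unmapped:

    #include <atomic>
    #include <cstdint>

    template <typename T>  // T needs a `T *next;` field
    struct LFStack {
      static constexpr uint64_t kPtrBits = 48;
      static constexpr uint64_t kPtrMask = (1ULL << kPtrBits) - 1;
      static constexpr uint64_t kCounterInc = 1ULL << kPtrBits;

      std::atomic<uint64_t> head_{0};

      bool Empty() const {
        // Relaxed: we only ask whether a pointer is present right now.
        return (head_.load(std::memory_order_relaxed) & kPtrMask) == 0;
      }

      void Push(T *p) {
        uint64_t cmp = head_.load(std::memory_order_relaxed);
        for (;;) {
          p->next = reinterpret_cast<T *>(cmp & kPtrMask);
          uint64_t xch = reinterpret_cast<uint64_t>(p) |
                         ((cmp & ~kPtrMask) + kCounterInc);
          // Release publishes p->next (and the node's payload) to poppers.
          if (head_.compare_exchange_weak(cmp, xch,
                                          std::memory_order_release,
                                          std::memory_order_relaxed))
            return;
        }
      }

      T *Pop() {
        // Acquire pairs with Push()'s release, making p->next safe to read.
        uint64_t cmp = head_.load(std::memory_order_acquire);
        for (;;) {
          T *p = reinterpret_cast<T *>(cmp & kPtrMask);
          if (!p)
            return nullptr;
          uint64_t xch = reinterpret_cast<uint64_t>(p->next) |
                         (cmp & ~kPtrMask);
          if (head_.compare_exchange_weak(cmp, xch,
                                          std::memory_order_acquire,
                                          std::memory_order_acquire))
            return p;
        }
      }
    };

Empty() and the initial Push() read can be relaxed because the CAS revalidates head_; only Pop() needs acquire before dereferencing the node, and the generation counter defeats ABA when a node is popped and re-pushed concurrently.
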
sanitizer_tls_get_addr.cpp
  47:  uptr v = atomic_load(cur, memory_order_acquire);  // in DTLS_NextBlock()
  85:  (DTLS::DTVBlock *)atomic_load(&block->next, memory_order_acquire);  // in DTLS_Destroy()
  128: atomic_load(&number_of_live_dtls, memory_order_relaxed));  // in DTLS_on_tls_get_addr()
  164: return atomic_load(&dtls->dtv_block, memory_order_relaxed) ==  // in DTLSInDestruction()

sanitizer_allocator.cpp
  38:  if (atomic_load(&internal_allocator_initialized, memory_order_acquire) == 0) {  // in internal_allocator()
  40:  if (atomic_load(&internal_allocator_initialized, memory_order_relaxed) ==  // in internal_allocator()
  190: return atomic_load(&allocator_may_return_null, memory_order_relaxed);  // in AllocatorMayReturnNull()
  206: return atomic_load(&rss_limit_exceeded, memory_order_relaxed);  // in IsRssLimitExceeded()

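Lines 38 and 40 are the two halves of a double-checked initialization: an acquire load on the fast path, then a relaxed re-check under a lock. A minimal sketch, assuming illustrative names (InternalAllocator and the mutex are stand-ins, not the real compiler-rt definitions):

    #include <atomic>
    #include <mutex>
    #include <new>

    struct InternalAllocator { /* ... allocator state ... */ };

    static std::atomic<int> internal_allocator_initialized{0};
    static std::mutex internal_alloc_init_mu;
    alignas(InternalAllocator) static unsigned char
        placement[sizeof(InternalAllocator)];

    InternalAllocator *internal_allocator() {
      // Fast path: one acquire load pairs with the release store below.
      if (internal_allocator_initialized.load(std::memory_order_acquire) == 0) {
        std::lock_guard<std::mutex> lock(internal_alloc_init_mu);
        // Under the lock a relaxed re-check suffices; the mutex already
        // orders the critical sections against each other.
        if (internal_allocator_initialized.load(std::memory_order_relaxed) == 0) {
          new (placement) InternalAllocator();  // construct exactly once
          internal_allocator_initialized.store(1, std::memory_order_release);
        }
      }
      return reinterpret_cast<InternalAllocator *>(placement);
    }
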
sanitizer_stackdepotbase.h
  95:  u32 cmp = atomic_load(p, memory_order_relaxed);  // in lock()
  123: u32 v = atomic_load(p, memory_order_consume);  // in Put()
  187: uptr s = atomic_load(p, memory_order_relaxed);  // in PrintAll()
  197: u32 s = atomic_load(p, memory_order_consume) & kUnlockMask;

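The lock()/Put() pair suggests a per-bucket bit lock: the bucket word holds a list head plus a lock flag, stripped off with kUnlockMask. A sketch with assumed constants (kLockBit is illustrative); note that memory_order_consume, seen in the Put() hit, is promoted to acquire by current compilers:

    #include <atomic>
    #include <cstdint>

    constexpr uint32_t kLockBit = 1u << 31;
    constexpr uint32_t kUnlockMask = ~kLockBit;

    uint32_t lock(std::atomic<uint32_t> *p) {
      for (;;) {
        // Spin on a relaxed load; claim the word with an acquire CAS.
        uint32_t cmp = p->load(std::memory_order_relaxed);
        if ((cmp & kLockBit) == 0 &&
            p->compare_exchange_weak(cmp, cmp | kLockBit,
                                     std::memory_order_acquire))
          return cmp;  // pre-lock value, lock bit clear
      }
    }

    void unlock(std::atomic<uint32_t> *p, uint32_t v) {
      // Release publishes any list mutation done while holding the bit.
      p->store(v & kUnlockMask, std::memory_order_release);
    }
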
sanitizer_tls_get_addr.h
  67: (DTLS::DTVBlock *)atomic_load(&dtls->dtv_block, memory_order_acquire);  // in ForEachDVT()
  71: block = (DTLS::DTVBlock *)atomic_load(&block->next, memory_order_acquire);  // in ForEachDVT()

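ForEachDVT() walks a singly linked list whose links are published with release stores, so each step is an acquire load. A simplified sketch (the struct and callback shape are illustrative, not the DTLS definitions):

    #include <atomic>

    struct DTVBlock {
      std::atomic<DTVBlock *> next{nullptr};
      // ... DTV entries ...
    };

    template <typename Fn>
    void ForEachBlock(std::atomic<DTVBlock *> &dtv_block, Fn fn) {
      // Each acquire load pairs with the appender's release store,
      // so the block's contents are safe to read once we see the link.
      for (DTVBlock *block = dtv_block.load(std::memory_order_acquire);
           block != nullptr;
           block = block->next.load(std::memory_order_acquire))
        fn(block);
    }
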
sanitizer_libignore.h
  91:  const uptr n = atomic_load(&ignored_ranges_count_, memory_order_acquire);  // in IsIgnored()
  105: const uptr n = atomic_load(&instrumented_ranges_count_, memory_order_acquire);  // in IsPcInstrumented()

sanitizer_mutex.cpp
  25: if (atomic_load(&state_, memory_order_relaxed) == 0 &&  // in LockSlow()
  32: u32 count = atomic_load(&state_, memory_order_relaxed);  // in Wait()
  36: count = atomic_load(&state_, memory_order_relaxed);  // in Wait()

sanitizer_libignore.cpp
  82:  atomic_load(&ignored_ranges_count_, memory_order_relaxed);  // in OnLibraryLoaded()
  111: atomic_load(&instrumented_ranges_count_, memory_order_relaxed);  // in OnLibraryLoaded()

/freebsd/tools/regression/include/stdatomic/
logic.c
  57: T v1 = atomic_load(a); \
  60: assert(atomic_load(a) == (T)(result)); \
  64: T v1 = atomic_load(a); \
  71: assert(atomic_load(a) == v3); \
  78: assert(atomic_load(a) == v1); \
  83: assert(atomic_load(a) == v1); \

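These hits come from assertion macros that exercise atomic read-modify-write operations: load the old value, apply the op, then check both the returned value and the stored result. The shape of one such check, rendered in C++ for consistency with the other sketches here (the original is a C11 <stdatomic.h> macro stamped out over many integer types):

    #include <atomic>
    #include <cassert>

    void test_fetch_add(std::atomic<int> *a) {
      int v1 = std::atomic_load(a);
      int v2 = std::atomic_fetch_add(a, 5);
      assert(v2 == v1);                       // RMW returns the old value
      assert(std::atomic_load(a) == v1 + 5);  // and stores old + operand
    }
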
/freebsd/contrib/llvm-project/compiler-rt/lib/asan/
asan_allocator.cpp
  66:  u64 context = atomic_load(atomic_context, memory_order_relaxed);  // in AtomicContextLoad()
  175: return atomic_load(&magic, memory_order_acquire) == kAllocBegMagic  // in Get()
  405: if (ac && atomic_load(&ac->chunk_state, memory_order_acquire) ==  // in RePoisonChunk()
  447: options->min_redzone = atomic_load(&min_redzone, memory_order_acquire);  // in GetOptions()
  448: options->max_redzone = atomic_load(&max_redzone, memory_order_acquire);  // in GetOptions()
  451: atomic_load(&alloc_dealloc_mismatch, memory_order_acquire);  // in GetOptions()
  466: u32 min_log = RZSize2Log(atomic_load(&min_redzone, memory_order_acquire));  // in ComputeRZLog()
  467: u32 max_log = RZSize2Log(atomic_load(&max_redzone, memory_order_acquire));  // in ComputeRZLog()
  494: u8 left_state = atomic_load(&left_chunk->chunk_state, memory_order_relaxed);  // in ChooseChunk()
  496: atomic_load(&right_chunk->chunk_state, memory_order_relaxed);  // in ChooseChunk()
  [all …]

/freebsd/contrib/llvm-project/compiler-rt/lib/tsan/rtl/
tsan_fd.cpp
  64:  if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1)  // in ref()
  70:  if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) {  // in unref()
  84:  uptr l1 = atomic_load(pl1, memory_order_consume);  // in fddesc()
  112: atomic_load(&d->aux_sync, memory_order_relaxed)));  // in init()
  150: FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);  // in FdOnFork()
  162: FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);  // in FdLocation()
  198: if (uptr aux_sync = atomic_load(&d->aux_sync, memory_order_acquire))  // in FdRelease()
  246: atomic_load(&d->aux_sync, memory_order_relaxed)));  // in FdClose()
  319: if (atomic_load(&d->aux_sync, memory_order_relaxed))  // in FdPollAdd()

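fddesc() reads the first level of a lazily populated two-level descriptor table. A sketch of that idiom under assumed sizes (FdDesc, kL1Size, and kL2Size are illustrative, not the tsan definitions); losers of the installation race free their block and use the winner's:

    #include <atomic>
    #include <cstdlib>

    struct FdDesc { /* ... per-descriptor state ... */ };
    constexpr int kL1Size = 1024, kL2Size = 1024;
    static std::atomic<FdDesc *> tab[kL1Size];

    FdDesc *fddesc(int fd) {
      std::atomic<FdDesc *> *pl1 = &tab[fd / kL2Size];
      // consume is treated as acquire by compilers; it orders reads of
      // the freshly published L2 block after the read of the pointer.
      FdDesc *l1 = pl1->load(std::memory_order_consume);
      if (!l1) {
        FdDesc *fresh =
            static_cast<FdDesc *>(calloc(kL2Size, sizeof(FdDesc)));
        if (pl1->compare_exchange_strong(l1, fresh,
                                         std::memory_order_acq_rel))
          l1 = fresh;     // we installed the block
        else
          free(fresh);    // another thread won; l1 now holds its block
      }
      return &l1[fd % kL2Size];
    }
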
tsan_external.cpp
  35:  if (tag >= atomic_load(&used_tags, memory_order_relaxed)) return nullptr;  // in GetTagData()
  50:  uptr tag_count = atomic_load(&used_tags, memory_order_relaxed);  // in TagFromShadowStackFrame()
  69:  CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));  // in ExternalAccess()
  108: CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));  // in __tsan_external_assign_tag()

tsan_dense_alloc.h
  134: u64 cmp = atomic_load(&freelist_, memory_order_acquire);  // in Refill()
  162: u64 cmp = atomic_load(&freelist_, memory_order_acquire);  // in Drain()

/freebsd/contrib/llvm-project/compiler-rt/lib/xray/
xray_basic_logging.cpp
  94:  if (atomic_load(&UseRealTSC, memory_order_acquire))  // in getLog()
  106: Header.CycleFrequency = atomic_load(&CycleFrequency, memory_order_acquire);  // in getLog()
  219: if (Delta < atomic_load(&ThresholdTicks, memory_order_relaxed)) {  // in InMemoryRawLog()
  392: if (!atomic_load(&UseRealTSC, memory_order_relaxed) && Verbosity())  // in basicLoggingInit()
  424: atomic_load(&TicksPerSec, memory_order_acquire) *  // in basicLoggingInit()
  427: __xray_set_handler_arg1(atomic_load(&UseRealTSC, memory_order_acquire)  // in basicLoggingInit()
  430: __xray_set_handler(atomic_load(&UseRealTSC, memory_order_acquire)  // in basicLoggingInit()

xray_interface.cpp
  210: if (!atomic_load(&XRayInitialized,  // in patchFunction()
  265: if (!atomic_load(&XRayInitialized,  // in controlPatching()
  415: if (atomic_load(&XRayInitialized,  // in __xray_set_handler()
  428: if (atomic_load(&XRayInitialized,  // in __xray_set_customevent_handler()
  440: if (atomic_load(&XRayInitialized,  // in __xray_set_typedevent_handler()
  491: if (!atomic_load(&XRayInitialized,  // in __xray_set_handler_arg1()

xray_init.cpp
  66: if (atomic_load(&XRayInitialized, memory_order_acquire))  // in __xray_init()
  72: if (!atomic_load(&XRayFlagsInitialized, memory_order_acquire)) {  // in __xray_init()

xray_buffer_queue.h
  233: return atomic_load(&Finalizing, memory_order_acquire);  // in finalizing()
  237: return atomic_load(&Generation, memory_order_acquire);  // in generation()

xray_fdr_logging.cpp
  250: auto BufferSize = atomic_load(It->Extents, memory_order_acquire);  // in fdrIterator()
  277: if (atomic_load(&LoggingStatus, memory_order_acquire) !=  // in fdrLoggingFlush()
  363: auto BufferExtents = atomic_load(B.Extents, memory_order_acquire);  // in fdrLoggingFlush()
  445: auto Status = atomic_load(&LoggingStatus, memory_order_acquire);  // in setupTLD()

xray_buffer_queue.cpp
  162: if (atomic_load(&Finalizing, memory_order_acquire))  // in getBuffer()
  212: atomic_store(B->Buff.Extents, atomic_load(Buf.Extents, memory_order_acquire),  // in releaseBuffer()

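The Extents loads here and in xray_fdr_logging.cpp are a message-passing handoff: the writer publishes how many bytes of a buffer are valid with a release store, and readers acquire-load the count before touching the bytes. A minimal sketch (Buffer and both functions are illustrative, not the XRay API):

    #include <atomic>
    #include <cstddef>
    #include <cstring>

    struct Buffer {
      char Data[4096];
      std::atomic<size_t> Extents{0};
    };

    void writeRecord(Buffer &B, const char *Rec, size_t N) {
      // Only the owning writer thread updates Extents, so its own
      // read can be relaxed.
      size_t Off = B.Extents.load(std::memory_order_relaxed);
      std::memcpy(B.Data + Off, Rec, N);
      // Release: the bytes above must be visible before the new count.
      B.Extents.store(Off + N, std::memory_order_release);
    }

    size_t flushBuffer(const Buffer &B, char *Out) {
      // Acquire pairs with the release store in writeRecord().
      size_t N = B.Extents.load(std::memory_order_acquire);
      std::memcpy(Out, B.Data, N);
      return N;
    }
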
/freebsd/tools/test/stress2/misc/
fstat.sh
  106: while (atomic_load(&share[SYNC]) != PARALLEL)
  127: while (atomic_load(&share[DONE]) == 0)
  131: while (atomic_load(&share[DONE]) == 1)

/freebsd/contrib/llvm-project/compiler-rt/lib/scudo/standalone/
atomic_helpers.h
  63:  inline typename T::Type atomic_load(const volatile T *A, memory_order MO) {  // atomic_load() definition
  127: return atomic_load(A, memory_order_relaxed);  // in atomic_load_relaxed()

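Line 63 is the definition site: scudo wraps each atomic in a struct carrying its value type and dispatches through compiler builtins. A condensed sketch of that wrapper, assuming GCC/Clang __atomic builtins as the real header uses (only one type shown; enum values are illustrative aliases for the builtin constants):

    #include <cstdint>

    enum memory_order {
      memory_order_relaxed = __ATOMIC_RELAXED,
      memory_order_acquire = __ATOMIC_ACQUIRE,
      memory_order_release = __ATOMIC_RELEASE,
    };

    struct atomic_u32 {
      typedef uint32_t Type;
      volatile Type ValDoNotUse;  // touch only through the helpers
    };

    template <typename T>
    inline typename T::Type atomic_load(const volatile T *A,
                                        memory_order MO) {
      return __atomic_load_n(&A->ValDoNotUse, MO);
    }

    template <typename T>
    inline typename T::Type atomic_load_relaxed(const volatile T *A) {
      // Convenience wrapper matching the second hit above.
      return atomic_load(A, memory_order_relaxed);
    }
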
mem_map_fuchsia.cpp
  43: uptr Result = atomic_load(&CachedResult, memory_order_acquire);  // in getRootVmarBase()
  63: zx_handle_t Vmo = atomic_load(&StoredVmo, memory_order_acquire);  // in getPlaceholderVmo()

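getRootVmarBase() caches an expensive, idempotent query in an atomic: acquire-load the cache, compute on a miss, release-store the result. Racing initializers are harmless because every thread computes the same value. A sketch where computeBase() is a hypothetical stand-in for the Fuchsia VMAR query:

    #include <atomic>
    #include <cstdint>

    static std::atomic<uintptr_t> CachedResult{0};

    static uintptr_t computeBase() {
      return 0x1000;  // placeholder for the real, idempotent query
    }

    uintptr_t getRootVmarBase() {
      uintptr_t Result = CachedResult.load(std::memory_order_acquire);
      if (Result == 0) {  // assumes 0 is never a valid base address
        Result = computeBase();
        // First store wins; losers overwrite with the identical value.
        CachedResult.store(Result, std::memory_order_release);
      }
      return Result;
    }
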
/freebsd/contrib/llvm-project/compiler-rt/lib/memprof/
memprof_stack.cpp
  26: return atomic_load(&malloc_context_size, memory_order_acquire);  // in GetMallocContextSize()
