
Searched refs: atomic_load_int (Results 1 – 25 of 87), sorted by relevance

/freebsd/sys/sys/
refcount.h
    69    return (atomic_load_int(count));  in refcount_load()
    103   old = atomic_load_int(count);  in refcount_acquire_checked()
    122   old = atomic_load_int(count);  in refcount_acquire_if_gt()
    181   old = atomic_load_int(count); \
_blockcount.h
    47    return (_BLOCKCOUNT_COUNT(atomic_load_int(&count->__count)));  in blockcount_read()
smr.h
    92    return (atomic_load_int(&s->s_wr.seq));  in smr_shared_current()
atomic_common.h
    100   #define atomic_load_int(p) __atomic_load_generic(p, int, u_int, int)  (macro definition)
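
The refcount.h hits above all follow the same shape: take an unordered snapshot of the counter with atomic_load_int(), then update it conditionally in a compare-and-swap loop. Below is a minimal sketch of that shape, assuming only the atomic(9) primitives atomic_load_int() and atomic_fcmpset_int(); the example_* names are illustrative and this is not the actual refcount.h code.

#include <sys/types.h>
#include <machine/atomic.h>
#ifndef _KERNEL
#include <stdbool.h>            /* userland needs stdbool.h for bool */
#endif

static __inline bool
example_acquire_if_gt(volatile u_int *count, u_int n)
{
        u_int old;

        /* Unordered snapshot; atomic_load_int() is a relaxed load. */
        old = atomic_load_int(count);
        for (;;) {
                if (old <= n)
                        return (false);
                /* On failure, atomic_fcmpset_int() refreshes 'old'. */
                if (atomic_fcmpset_int(count, &old, old + 1))
                        return (true);
        }
}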
/freebsd/sys/dev/vmware/vmci/
vmci_driver.c
    57    "event (type=%d).\n", atomic_load_int(&vm_context_id),  in vmci_util_cid_update()
    389   if (atomic_load_int(&vm_context_id) == VMCI_INVALID_ID) {  in vmci_get_context_id()
    399   return (atomic_load_int(&vm_context_id));  in vmci_get_context_id()
/freebsd/tools/test/stress2/misc/
fork2.sh
    116   while ((atomic_load_int(&share[SYNC])) > MX)
    124   while (atomic_load_int(&share[SYNC]) <= MX)
/freebsd/sys/riscv/riscv/
mp_machdep.c
    172   while (!atomic_load_int(&aps_ready))  in init_secondary()
    399   naps = atomic_load_int(&aps_started);  in cpu_init_fdt()
    407   while (atomic_load_int(&aps_started) < naps + 1)  in cpu_init_fdt()
/freebsd/sys/netinet/
tcp_hostcache.c
    355   if (atomic_load_int(&hc_entry->hc_expire) !=  in tcp_hc_lookup()
    472   if (atomic_load_int(&hc_entry->hc_expire) !=  in tcp_hc_update()
    492   atomic_load_int(&V_tcp_hostcache.cache_count) >=  in tcp_hc_update()
    644   len = (atomic_load_int(&V_tcp_hostcache.cache_count) + 1) *  in sysctl_tcp_hc_list()
    772   atomic_load_int(&hc_entry->hc_expire) <= 0) {  in tcp_hc_purge_internal()
/freebsd/lib/libc/amd64/string/
amd64_archlevel.c
    204   islevel = atomic_load_int(&amd64_archlevel);  in archlevel()
    222   return (atomic_load_int(&amd64_archlevel));  in archlevel()
/freebsd/sys/vm/
vm_object.c
    198    KASSERT(atomic_load_int(&object->shadow_count) == 0,  in vm_object_zdtor()
    200    object, atomic_load_int(&object->shadow_count)));  in vm_object_zdtor()
    578    atomic_load_int(&backing_object->shadow_count) == 1,  in vm_object_deallocate_anon()
    581    atomic_load_int(&backing_object->shadow_count)));  in vm_object_deallocate_anon()
    665    atomic_load_int(&object->shadow_count) == 0) {  in vm_object_deallocate()
    1845   object->ref_count > atomic_load_int(&object->shadow_count),  in vm_object_collapse()
    1847   object->ref_count, atomic_load_int(&object->shadow_count)));  in vm_object_collapse()
    1861   KASSERT(atomic_load_int(&backing_object->shadow_count)  in vm_object_collapse()
    1864   atomic_load_int(&backing_object->shadow_count)));  in vm_object_collapse()
    2413   return (obj->ref_count > atomic_load_int(&obj->shadow_count));  in vm_object_is_active()
[all …]
/freebsd/sys/x86/x86/
cpu_machdep.c
    275   KASSERT(atomic_load_int(state) == STATE_SLEEPING,  in acpi_cpu_idle_mwait()
    286   if (atomic_load_int(state) == STATE_MWAIT)  in acpi_cpu_idle_mwait()
    533   KASSERT(atomic_load_int(statep) == STATE_RUNNING,  in cpu_idle_enter()
    534   ("%s: state %d", __func__, atomic_load_int(statep)));  in cpu_idle_enter()
    617   if (atomic_load_int(state) == STATE_MWAIT)  in cpu_idle_mwait()
    703   switch (atomic_load_int(state)) {  in cpu_idle_wakeup()
    942   while (atomic_load_int(&hp->running) != 0)  in nmi_remove_handler()
stack_machdep.c
    133   cpuid = atomic_load_int(&td->td_oncpu);  in stack_save_td()
/freebsd/sys/i386/include/
pmap_nopae.h
    86    #define pte_load(ptep) atomic_load_int(ptep)
/freebsd/sys/arm64/arm64/
mp_machdep.c
    233   while (!atomic_load_int(&aps_ready))  in init_secondary()
    488   naps = atomic_load_int(&aps_started);  in start_cpu()
    518   while (atomic_load_int(&aps_started) < naps + 1)  in start_cpu()
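
Both mp_machdep.c files (this one and the RISC-V one above) use the same polling handshake: one CPU advances a counter with an atomic add, the other snapshots it with atomic_load_int() and spins until it moves. Here is a minimal sketch of that handshake with illustrative example_* names; note that atomic_load_int() only keeps the compiler from caching the value across loop iterations, it does not by itself order surrounding memory accesses (the acq/rel variants do).

#include <sys/types.h>
#include <machine/atomic.h>
#include <machine/cpu.h>                /* cpu_spinwait() */

static volatile u_int example_started;

/* Waiter: remember the current count and spin until it advances. */
static void
example_wait_for_next(void)
{
        u_int snap;

        snap = atomic_load_int(&example_started);
        while (atomic_load_int(&example_started) < snap + 1)
                cpu_spinwait();         /* CPU-friendly busy-wait hint */
}

/* Other side: announce progress by bumping the shared counter. */
static void
example_announce(void)
{
        atomic_add_int(&example_started, 1);
}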
/freebsd/sys/kern/
subr_smr.c
    373   c_seq = atomic_load_int(&c->c_seq);  in smr_poll_cpu()
    453   s_rd_seq = atomic_load_int(&s->s_rd_seq);  in smr_poll_scan()
vfs_default.c
    1161  return ((int)atomic_load_int(&ap->a_vp->v_writecount) < 0);  in vop_stdis_text()
    1173  n = atomic_load_int(&vp->v_writecount);  in vop_stdset_text()
    1216  n = atomic_load_int(&vp->v_writecount);  in vop_stdunset_text()
    1263  n = atomic_load_int(&vp->v_writecount);  in vop_stdadd_writecount_impl()
subr_syscall.c
    66    if (__predict_false(td->td_cowgen != atomic_load_int(&p->p_cowgen)))  in syscallenter()
tty_info.c
    369   kstacks_val = atomic_load_int(&tty_info_kstacks);  in tty_info()
subr_trap.c
    201   if (td->td_cowgen != atomic_load_int(&td->td_proc->p_cowgen))  in ast_prep()
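
The subr_syscall.c and subr_trap.c hits show another common pattern: a per-thread cached generation number is compared against a shared counter read with atomic_load_int(), and a mismatch means shared state changed and must be refreshed. A hedged sketch with illustrative names, not the actual p_cowgen code:

#include <sys/types.h>
#include <machine/atomic.h>
#ifndef _KERNEL
#include <stdbool.h>
#endif

static volatile u_int example_gen;      /* bumped whenever shared state changes */

struct example_thread {
        u_int   cached_gen;             /* generation this thread last synced to */
};

static __inline bool
example_needs_refresh(struct example_thread *t)
{
        /* Lock-free check; the expensive refresh path runs only on mismatch. */
        return (t->cached_gen != atomic_load_int(&example_gen));
}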
/freebsd/sys/netinet6/
frag6.c
    539   else if (atomic_load_int(&frag6_nfrags) >= (u_int)ip6_maxfrags)  in frag6_input()
    594   atomic_load_int(&V_frag6_nfragpackets) >=  in frag6_input()
    940   if (atomic_load_int(&frag6_nfrags) == 0)  in frag6_slowtimo()
    983   atomic_load_int(&V_frag6_nfragpackets) >  in frag6_slowtimo()
/freebsd/sys/compat/linuxkpi/common/include/linux/
seqlock.h
    102   return (atomic_load_int(seqcp));  in lkpi_seqprop_sequence()
/freebsd/sys/dev/cxgbe/
t4_netmap.c
    527   nm_state = atomic_load_int(&nm_rxq->nm_state);  in cxgbe_netmap_simple_rss()
    546   nm_state = atomic_load_int(&nm_rxq->nm_state);  in cxgbe_netmap_simple_rss()
    596   nm_state = atomic_load_int(&nm_rxq->nm_state);  in cxgbe_netmap_split_rss()
    628   nm_state = atomic_load_int(&nm_rxq[j].nm_state);  in cxgbe_netmap_split_rss()
    646   nm_state = atomic_load_int(&nm_rxq[j].nm_state);  in cxgbe_netmap_split_rss()
    819   nm_state = atomic_load_int(&nm_rxq->nm_state);  in cxgbe_netmap_off()
/freebsd/lib/libc/gen/
dlfcn.c
    330   r = atomic_load_int(&ret);  in _rtld_get_stack_prot()
/freebsd/sys/contrib/openzfs/lib/libspl/include/
atomic.h
    253   #define atomic_load_int(p) (*(volatile uint_t *)(p))  (macro definition)
/freebsd/sys/geom/eli/
g_eli_privacy.c
    312   batch = atomic_load_int(&g_eli_batch) != 0;  in g_eli_crypto_run()
