| /linux/include/asm-generic/barrier.h |
|  102 | #ifndef smp_rmb |
|  103 | #define smp_rmb() do { kcsan_rmb(); __smp_rmb(); } while (0) |
|  116 | #ifndef smp_rmb |
|  117 | #define smp_rmb() barrier() |
|  232 | #define smp_acquire__after_ctrl_dep() smp_rmb() |
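
This entry is the generic fallback layer: on SMP builds smp_rmb() wraps the architecture's __smp_rmb() plus a KCSAN hook, on !SMP builds it collapses to a compiler barrier(), and smp_acquire__after_ctrl_dep() is simply smp_rmb(). A hedged sketch of how that last macro is typically used (variable and function names are illustrative, not taken from barrier.h):

    /*
     * Spinning on a flag creates only a control dependency, which
     * orders later stores but not later loads.
     * smp_acquire__after_ctrl_dep(), i.e. smp_rmb(), upgrades it so the
     * data load below cannot be satisfied before the flag load.
     */
    static int flag, data;

    int wait_and_read(void)
    {
            while (!READ_ONCE(flag))
                    cpu_relax();
            smp_acquire__after_ctrl_dep();  /* expands to smp_rmb() */
            return READ_ONCE(data);
    }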
|
| /linux/arch/x86/kvm/irq.h |
|   81 | smp_rmb();  (in irqchip_full()) |
|  102 | smp_rmb();  (in irqchip_split()) |
|  111 | smp_rmb();  (in irqchip_in_kernel()) |
|
| /linux/rust/kernel/sync/barrier.rs |
|   54 | pub fn smp_rmb() {  (in smp_rmb()) |
|   57 | unsafe { bindings::smp_rmb() };  (in smp_rmb()) |
|
| /linux/tools/include/asm/barrier.h |
|   38 | #ifndef smp_rmb |
|   39 | # define smp_rmb() rmb() |
|
| /linux/tools/memory-model/litmus-tests/MP+fencewmbonceonce+fencermbonceonce.litmus |
|    6 | * This litmus test demonstrates that smp_wmb() and smp_rmb() provide |
|   26 | smp_rmb(); |
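
The litmus test encodes the classic message-passing pattern. A hedged C rendition (the variable names are illustrative, the .litmus file uses its own):

    /*
     * If the reader sees flag == 1, the smp_wmb()/smp_rmb() pair
     * guarantees it also sees data == 1.
     */
    static int data, flag;

    void writer(void)
    {
            WRITE_ONCE(data, 1);
            smp_wmb();              /* order data store before flag store */
            WRITE_ONCE(flag, 1);
    }

    int reader(void)
    {
            int f = READ_ONCE(flag);

            smp_rmb();              /* order flag load before data load */
            return f ? READ_ONCE(data) : -1;
    }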
|
| /linux/arch/sparc/include/asm/vvar.h |
|   51 | smp_rmb(); /* Finish all reads before we return seq */  (in vvar_read_begin()) |
|   58 | smp_rmb(); /* Finish all reads before checking the value of seq */  (in vvar_read_retry()) |
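
The comments in this entry describe the read side of a sequence-counter protocol; the Hyper-V TSC page and generic vDSO entries below follow the same shape. A hedged sketch of that reader loop (struct and field names are illustrative):

    /*
     * Seqcount-style read loop behind vvar_read_begin()/vvar_read_retry()
     * and the vdso/hyperv readers below.
     */
    struct vdata {
            unsigned int seq;       /* odd while the writer updates */
            u64 cycle_last;
    };

    static u64 read_sample(const struct vdata *vd)
    {
            unsigned int seq;
            u64 val;

            do {
                    while ((seq = READ_ONCE(vd->seq)) & 1)
                            cpu_relax();    /* writer in progress */
                    smp_rmb();      /* finish the seq read before the data reads */
                    val = vd->cycle_last;
                    smp_rmb();      /* finish the data reads before re-checking seq */
            } while (seq != READ_ONCE(vd->seq));

            return val;
    }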
|
| /linux/include/clocksource/hyperv_timer.h |
|   74 | smp_rmb();  (in hv_read_tsc_page_tsc()) |
|   84 | smp_rmb();  (in hv_read_tsc_page_tsc()) |
|
| /linux/include/vdso/helpers.h |
|   17 | smp_rmb();  (in vdso_read_begin()) |
|   26 | smp_rmb();  (in vdso_read_retry()) |
|
| /linux/include/drm/spsc_queue.h |
|   96 | smp_rmb();  (in spsc_queue_pop()) |
|  113 | smp_rmb();  (in spsc_queue_pop()) |
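
spsc_queue_pop() is a lockless single-producer/single-consumer pop; its read barriers keep the consumer from reading a node's contents ahead of the pointer that published it. A generic publication sketch, not the drm code (the real pop also has slow-path handling not shown here):

    /*
     * The producer fills a node and then publishes it; the consumer
     * orders the pointer load before the payload load with smp_rmb().
     */
    struct node {
            int payload;
    };

    void publish(struct node **slot, struct node *n, int value)
    {
            n->payload = value;
            smp_wmb();              /* payload visible before the pointer */
            WRITE_ONCE(*slot, n);
    }

    int consume(struct node **slot)
    {
            struct node *n = READ_ONCE(*slot);

            if (!n)
                    return -1;
            smp_rmb();              /* pointer load ordered before payload load */
            return n->payload;
    }

When the second load is address-dependent on the published pointer, READ_ONCE() already provides that ordering on the architectures the kernel supports, so smp_rmb() is the conservative, always-sufficient choice here.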
|
| /linux/arch/xtensa/kernel/process.c |
|  126 | smp_rmb();  (in coprocessor_release_all()) |
|  154 | smp_rmb();  (in coprocessor_flush_all()) |
|  171 | smp_rmb();  (in coprocessor_flush_release_all()) |
|
| /linux/arch/mips/kernel/rtlx.c |
|  125 | smp_rmb();  (in rtlx_open()) |
|  134 | smp_rmb();  (in rtlx_open()) |
|  254 | smp_rmb();  (in rtlx_read()) |
|  297 | smp_rmb();  (in rtlx_write()) |
|
| /linux/Documentation/RCU/rculist_nulls.rst |
|   59 | but a version with an additional memory barrier (smp_rmb()) |
|   67 | pos && ({ next = pos->next; smp_rmb(); prefetch(next); 1; }) && |
|   75 | And note the traditional hlist_for_each_entry_rcu() misses this smp_rmb():: |
|  140 | Avoiding extra smp_rmb() |
|  143 | With hlist_nulls we can avoid extra smp_rmb() in lockless_lookup(). |
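
The document describes lockless_lookup(), an hlist_for_each_entry_rcu() variant with an extra smp_rmb() so the ->next pointer is fetched and ordered before the entry is examined; hlist_nulls then removes the need for that barrier. A sketch reconstructed from the fragment quoted above, so treat the details as approximate:

    /*
     * hlist_for_each_entry_rcu() plus an smp_rmb() that orders the
     * ->next fetch before the entry is inspected; "next" is a caller
     * provided variable.
     */
    #define lockless_lookup(tpos, pos, head, member)                          \
            for (pos = rcu_dereference((head)->first);                        \
                 pos && ({ next = pos->next; smp_rmb(); prefetch(next); 1; }) && \
                 ({ tpos = hlist_entry(pos, typeof(*tpos), member); 1; });    \
                 pos = rcu_dereference(next))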
|
| /linux/tools/testing/selftests/bpf/bpf_atomic.h |
|   68 | #define smp_rmb() \ |
|   85 | #define smp_acquire__after_ctrl_dep() ({ smp_rmb(); }) |
|
| /linux/tools/testing/selftests/kvm/rseq_test.c |
|  286 | smp_rmb();  (in main()) |
|  291 | smp_rmb();  (in main()) |
|
| /linux/rust/helpers/barrier.c |
|   17 | smp_rmb();  (in rust_helper_smp_rmb()) |
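
smp_rmb() is a macro, so bindgen cannot expose it to Rust directly; this helper file wraps it in a real function that the Rust smp_rmb() wrapper (rust/kernel/sync/barrier.rs above) calls through bindings. A sketch of that helper, matching the one-line body shown in the listing (the include is an assumption):

    /* A C function body that bindgen can see, wrapping the macro. */
    #include <asm/barrier.h>

    void rust_helper_smp_rmb(void)
    {
            smp_rmb();
    }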
|
| /linux/Documentation/translations/zh_CN/core-api/local_ops.rst |
|  126 | ... CPUs, use explicit ``smp_wmb()`` and ``smp_rmb()`` memory barriers respectively. If you use |
|  129 | ... there should also be an ``smp_rmb()``. |
|
| /linux/arch/powerpc/include/asm/book3s/64/hash-64k.h |
|  104 | smp_rmb();  (in __real_pte()) |
|  210 | smp_rmb();  (in get_hpte_slot_array()) |
|
| /linux/drivers/cpuidle/coupled.c |
|  304 | smp_rmb();  (in cpuidle_coupled_get_state()) |
|  487 | smp_rmb();  (in cpuidle_enter_state_coupled()) |
|  568 | smp_rmb();  (in cpuidle_enter_state_coupled()) |
|
| /linux/tools/include/linux/ring_buffer.h |
|   63 | smp_rmb();  (in ring_buffer_read_head()) |
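
ring_buffer_read_head() gives the perf mmap reader acquire semantics on data_head; the smp_rmb() here is the generic spelling of that acquire. A hedged sketch of the shape (the wrapper name is illustrative, the struct and field names follow perf_event_mmap_page):

    /*
     * READ_ONCE() + smp_rmb() acts as a load-acquire of data_head:
     * the record reads that follow cannot be satisfied before the
     * head load.
     */
    static inline __u64 read_data_head(struct perf_event_mmap_page *pg)
    {
            __u64 head = READ_ONCE(pg->data_head);

            smp_rmb();      /* record reads must not pass the head load */
            return head;
    }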
|
| /linux/tools/arch/x86/include/asm/barrier.h |
|   27 | #define smp_rmb() barrier() |
|
| /linux/arch/arm/mach-versatile/platsmp.c |
|   93 | smp_rmb();  (in versatile_boot_secondary()) |
|
| /linux/include/linux/cnt32_to_63.h |
|   97 | smp_rmb(); \ |
|
| /linux/kernel/livepatch/patch.c |
|   84 | smp_rmb();  (in klp_ftrace_handler()) |
|   94 | smp_rmb();  (in klp_ftrace_handler()) |
|
| /linux/lib/is_single_threaded.c |
|   46 | smp_rmb();  (in current_is_single_threaded()) |
|
| /linux/tools/arch/riscv/include/asm/barrier.h |
|   23 | #define smp_rmb() RISCV_FENCE(r, r) |
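
The last two entries show how differently architectures implement the same barrier: x86 (above) needs only a compiler barrier() because its memory model never reorders loads against loads, while RISC-V emits a real fence instruction. A hedged sketch of what RISCV_FENCE(r, r) expands to:

    /*
     * RISCV_FENCE(p, s) emits a "fence" instruction with predecessor
     * and successor sets p and s, so RISCV_FENCE(r, r) orders earlier
     * reads against later reads.
     */
    #define RISCV_FENCE(p, s) \
            __asm__ __volatile__ ("fence " #p "," #s : : : "memory")

    #define smp_rmb()       RISCV_FENCE(r, r)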
|