
Searched refs: smp_rmb (Results 1 – 25 of 286), sorted by relevance


/linux-5.19.10/include/asm-generic/
barrier.h:94   #ifndef smp_rmb
barrier.h:95   #define smp_rmb() do { kcsan_rmb(); __smp_rmb(); } while (0)
barrier.h:108  #ifndef smp_rmb
barrier.h:109  #define smp_rmb() barrier()
barrier.h:226  #define smp_acquire__after_ctrl_dep() smp_rmb()
/linux-5.19.10/arch/x86/kvm/
irq.h:72  smp_rmb();  in irqchip_split()
irq.h:81  smp_rmb();  in irqchip_kernel()
irq.h:95  smp_rmb();  in irqchip_in_kernel()
/linux-5.19.10/arch/arm64/include/asm/vdso/
compat_barrier.h:28  #undef smp_rmb
compat_barrier.h:32  #define smp_rmb() aarch32_smp_rmb()
/linux-5.19.10/tools/include/asm/
barrier.h:36  #ifndef smp_rmb
barrier.h:37  # define smp_rmb() rmb()
/linux-5.19.10/tools/memory-model/litmus-tests/
MP+fencewmbonceonce+fencermbonceonce.litmus:6   * This litmus test demonstrates that smp_wmb() and smp_rmb() provide
MP+fencewmbonceonce+fencermbonceonce.litmus:26  smp_rmb();
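For reference, the shape of such a message-passing litmus test, as run by the herd7 tool in tools/memory-model, is sketched below (this is a sketch of the pattern, not the file's exact contents; buf and flag are illustrative names):

    C MP-sketch

    {}

    P0(int *buf, int *flag)
    {
            WRITE_ONCE(*buf, 1);    /* write the payload */
            smp_wmb();              /* order payload store before flag store */
            WRITE_ONCE(*flag, 1);   /* publish */
    }

    P1(int *buf, int *flag)
    {
            int r0;
            int r1;

            r0 = READ_ONCE(*flag);  /* observe the flag */
            smp_rmb();              /* order flag load before payload load */
            r1 = READ_ONCE(*buf);   /* read the payload */
    }

    exists (1:r0=1 /\ 1:r1=0)       /* must never happen with these fences */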
/linux-5.19.10/arch/sparc/include/asm/
vvar.h:51  smp_rmb(); /* Finish all reads before we return seq */  in vvar_read_begin()
vvar.h:58  smp_rmb(); /* Finish all reads before checking the value of seq */  in vvar_read_retry()
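Both helpers implement the usual sequence-counter read loop: sample the counter, read the data, then recheck the counter, with smp_rmb() keeping the data loads between the two counter reads. A minimal sketch of that pattern, assuming the standard kernel primitives (the names seq, read_begin and read_retry are illustrative, not taken from vvar.h):

    static unsigned int read_begin(const unsigned int *seq)
    {
            unsigned int s;

            /* Wait out an in-progress writer (odd count). */
            while ((s = READ_ONCE(*seq)) & 1)
                    cpu_relax();
            smp_rmb();      /* order the seq load before the data loads */
            return s;
    }

    static bool read_retry(const unsigned int *seq, unsigned int start)
    {
            smp_rmb();      /* finish all data loads before rechecking seq */
            return READ_ONCE(*seq) != start;        /* true => retry */
    }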
/linux-5.19.10/include/vdso/
helpers.h:16  smp_rmb();  in vdso_read_begin()
helpers.h:25  smp_rmb();  in vdso_read_retry()
/linux-5.19.10/include/clocksource/
hyperv_timer.h:67  smp_rmb();  in hv_read_tsc_page_tsc()
hyperv_timer.h:77  smp_rmb();  in hv_read_tsc_page_tsc()
/linux-5.19.10/include/drm/
spsc_queue.h:94   smp_rmb();  in spsc_queue_pop()
spsc_queue.h:111  smp_rmb();  in spsc_queue_pop()
/linux-5.19.10/arch/xtensa/kernel/
process.c:125  smp_rmb();  in coprocessor_release_all()
process.c:153  smp_rmb();  in coprocessor_flush_all()
process.c:170  smp_rmb();  in coprocessor_flush_release_all()
/linux-5.19.10/arch/mips/kernel/
rtlx.c:125  smp_rmb();  in rtlx_open()
rtlx.c:134  smp_rmb();  in rtlx_open()
rtlx.c:254  smp_rmb();  in rtlx_read()
rtlx.c:297  smp_rmb();  in rtlx_write()
/linux-5.19.10/Documentation/RCU/
rculist_nulls.rst:47   but a version with an additional memory barrier (smp_rmb())
rculist_nulls.rst:55   pos && ({ next = pos->next; smp_rmb(); prefetch(next); 1; }) &&
rculist_nulls.rst:63   And note the traditional hlist_for_each_entry_rcu() misses this smp_rmb()::
rculist_nulls.rst:132  Avoiding extra smp_rmb()
rculist_nulls.rst:135  With hlist_nulls we can avoid extra smp_rmb() in lockless_lookup()
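Line 55 quoted above is the loop condition of that lockless_lookup() variant; reassembled around the quoted fragment, the macro looks roughly like this (a sketch; tpos, pos, next, head and member are the conventional hlist iterator names):

    /*
     * Like hlist_for_each_entry_rcu(), but ->next is loaded and ordered
     * with smp_rmb() before the entry's fields are read in the loop body.
     */
    #define lockless_lookup(tpos, pos, head, member)                          \
            for (pos = rcu_dereference((head)->first);                        \
                 pos && ({ next = pos->next; smp_rmb(); prefetch(next); 1; }) && \
                 ({ tpos = hlist_entry(pos, typeof(*tpos), member); 1; });    \
                 pos = rcu_dereference(next))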
/linux-5.19.10/tools/testing/selftests/kvm/
rseq_test.c:261  smp_rmb();  in main()
rseq_test.c:264  smp_rmb();  in main()
/linux-5.19.10/arch/powerpc/kvm/
irq.h:18  smp_rmb();  in irqchip_in_kernel()
/linux-5.19.10/Documentation/translations/zh_CN/core-api/
local_ops.rst:126  ... use explicit ``smp_wmb()`` and ``smp_rmb()`` memory barriers on the respective CPUs. If you ...
local_ops.rst:129  ... there should also be an ``smp_rmb()``.
/linux-5.19.10/tools/include/linux/
ring_buffer.h:63  smp_rmb();  in ring_buffer_read_head()
/linux-5.19.10/drivers/cpuidle/
coupled.c:304  smp_rmb();  in cpuidle_coupled_get_state()
coupled.c:492  smp_rmb();  in cpuidle_enter_state_coupled()
coupled.c:573  smp_rmb();  in cpuidle_enter_state_coupled()
/linux-5.19.10/tools/arch/x86/include/asm/
barrier.h:27  #define smp_rmb() barrier()
/linux-5.19.10/arch/arm/mach-versatile/
platsmp.c:93  smp_rmb();  in versatile_boot_secondary()
/linux-5.19.10/include/linux/
cnt32_to_63.h:97  smp_rmb(); \
/linux-5.19.10/kernel/livepatch/
patch.c:84  smp_rmb();  in klp_ftrace_handler()
patch.c:94  smp_rmb();  in klp_ftrace_handler()
/linux-5.19.10/arch/powerpc/include/asm/book3s/64/
hash-64k.h:104  smp_rmb();  in __real_pte()
hash-64k.h:210  smp_rmb();  in get_hpte_slot_array()
/linux-5.19.10/lib/
is_single_threaded.c:46  smp_rmb();  in current_is_single_threaded()
/linux-5.19.10/drivers/firmware/tegra/
ivc.c:282  smp_rmb();  in tegra_ivc_read_get_next_frame()
ivc.c:432  smp_rmb();  in tegra_ivc_notified()
ivc.c:471  smp_rmb();  in tegra_ivc_notified()
ivc.c:511  smp_rmb();  in tegra_ivc_notified()
/linux-5.19.10/tools/memory-model/Documentation/
recipes.txt:291  and to use smp_load_acquire() instead of smp_rmb(). However, the older
recipes.txt:292  smp_wmb() and smp_rmb() APIs are still heavily used, so it is important
recipes.txt:306  smp_rmb();
recipes.txt:311  smp_rmb() macro orders prior loads against later loads. Therefore, if
recipes.txt:326  smp_rmb();
recipes.txt:335  * (A) smp_rmb() (C)
recipes.txt:342  write side and smp_rmb() on the read side.
recipes.txt:345  or smp_rmb(), any code fragment that would work with smp_rmb() and
recipes.txt:390  * (A) smp_rmb() (C)
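The recipe these lines come from pairs smp_wmb() on the write side with smp_rmb() on the read side; a minimal kernel-style sketch of that pairing, assuming the usual WRITE_ONCE()/READ_ONCE() accessors (buf, flag and the function names are illustrative, not taken from recipes.txt):

    static int buf, flag;

    static void writer(void)
    {
            WRITE_ONCE(buf, 1);     /* store the payload */
            smp_wmb();              /* order payload store before flag store */
            WRITE_ONCE(flag, 1);    /* signal that the payload is ready */
    }

    static void reader(void)
    {
            if (READ_ONCE(flag)) {  /* flag seen set ...                     */
                    smp_rmb();      /* ... so order the payload load after it */
                    WARN_ON(READ_ONCE(buf) != 1);   /* payload must be visible */
            }
    }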
