/arch/arc/include/asm/ |
D | barrier.h | 27 #define rmb() asm volatile("dmb 1\n" : : : "memory") macro 45 #define rmb() asm volatile (".word %0" : : "i"(CTOP_INST_SCHD_RD) : "memory") macro
|
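
The two ARC definitions above are alternate implementations of the same reader-side primitive: rmb() keeps later reads from being satisfied before earlier ones. A minimal userspace sketch of the pattern it exists for, using GCC's __atomic_thread_fence() builtins as stand-ins for the kernel's smp_wmb()/smp_rmb() pairing (the kernel primitives themselves are not usable outside the kernel):

    /* If the reader sees ready == 1, the paired fences guarantee it also
     * sees data == 42.  __atomic_thread_fence() stands in for the kernel's
     * smp_wmb() (writer side) and smp_rmb() (reader side). */
    #include <pthread.h>
    #include <stdio.h>

    static int data;
    static int ready;

    static void *writer(void *arg)
    {
            data = 42;
            __atomic_thread_fence(__ATOMIC_RELEASE);   /* kernel: smp_wmb() */
            __atomic_store_n(&ready, 1, __ATOMIC_RELAXED);
            return NULL;
    }

    static void *reader(void *arg)
    {
            while (!__atomic_load_n(&ready, __ATOMIC_RELAXED))
                    ;                                  /* spin until published */
            __atomic_thread_fence(__ATOMIC_ACQUIRE);   /* kernel: smp_rmb() */
            printf("data = %d\n", data);               /* always prints 42 */
            return NULL;
    }

    int main(void)
    {
            pthread_t w, r;
            pthread_create(&w, NULL, writer, NULL);
            pthread_create(&r, NULL, reader, NULL);
            pthread_join(w, NULL);
            pthread_join(r, NULL);
            return 0;
    }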
/arch/x86/um/asm/ |
D | barrier.h | 15 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
  |           | 21 #define rmb() asm volatile("lfence" : : : "memory") macro
|
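
alternative() here selects the barrier at boot: CPUs with SSE2 (X86_FEATURE_XMM2) get a real lfence, older ones fall back to a locked add to the stack, which also serializes loads. A hedged illustration of the same selection written as an ordinary runtime branch (my_rmb is a hypothetical name; the kernel does this by patching the instruction in place, not by branching). x86 only:

    /* Conceptual illustration, not the kernel's self-patching mechanism:
     * pick lfence when the CPU advertises SSE2, else a locked RMW on the
     * stack, mirroring the two sides of the alternative() above. */
    #include <stdio.h>

    static inline void my_rmb(void)        /* hypothetical helper */
    {
            if (__builtin_cpu_supports("sse2")) {
                    asm volatile("lfence" ::: "memory");
            } else {
    #ifdef __x86_64__
                    asm volatile("lock; addl $0,0(%%rsp)" ::: "memory", "cc");
    #else
                    asm volatile("lock; addl $0,0(%%esp)" ::: "memory", "cc");
    #endif
            }
    }

    int main(void)
    {
            my_rmb();
            puts(__builtin_cpu_supports("sse2") ? "used lfence"
                                                : "used lock; addl");
            return 0;
    }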
/arch/csky/include/asm/ |
D | io.h | 22 #define readb(c) ({ u8 __v = readb_relaxed(c); rmb(); __v; })
  |      | 23 #define readw(c) ({ u16 __v = readw_relaxed(c); rmb(); __v; })
  |      | 24 #define readl(c) ({ u32 __v = readl_relaxed(c); rmb(); __v; })
|
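
csky builds its ordered MMIO readers from the relaxed ones plus rmb(), so a data-register read cannot be hoisted above the status-register read that made it valid. A sketch of the access pattern this protects, with hypothetical register offsets (STATUS, DATA, STAT_READY are illustrative, not from the source) and a stand-in readl() so the file compiles outside the kernel:

    /* Why readl() embeds rmb(): the DATA read must not be speculated
     * ahead of the STATUS read that says the data is valid.  readl()
     * here is a volatile load plus an acquire fence standing in for the
     * kernel accessor. */
    #include <stdint.h>

    #define STATUS     0x00        /* hypothetical register offsets */
    #define DATA       0x04
    #define STAT_READY 0x01

    static inline uint32_t readl(const volatile void *addr)
    {
            uint32_t v = *(const volatile uint32_t *)addr;
            __atomic_thread_fence(__ATOMIC_ACQUIRE);   /* stands in for rmb() */
            return v;
    }

    uint32_t read_when_ready(volatile uint8_t *base)
    {
            while (!(readl(base + STATUS) & STAT_READY))
                    ;                       /* poll until the device has data */
            return readl(base + DATA);      /* ordered after the STATUS read */
    }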
/arch/s390/include/asm/ |
D | barrier.h | 26 #define rmb() barrier() macro
  |           | 31 #define __smp_rmb() rmb()
|
/arch/powerpc/lib/ |
D | locks.c | 33 rmb(); in splpar_spin_yield()
  |         | 59 rmb(); in splpar_rw_yield()
|
/arch/x86/include/asm/ |
D | barrier.h | 17 #define rmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence", \ macro
  |           | 23 #define rmb() asm volatile("lfence":::"memory") macro
|
/arch/parisc/include/asm/ |
D | barrier.h | 13 #define rmb() mb() macro
  |           | 19 #define rmb() barrier() macro
|
/arch/nds32/include/asm/ |
D | barrier.h | 9 #define rmb() asm volatile("msync all":::"memory") macro
|
D | io.h | 55 #define __iormb() rmb()
|
/arch/arm/include/asm/ |
D | barrier.h | 65 #define rmb() dsb() macro
  |           | 71 #define rmb() barrier() macro
|
/arch/xtensa/include/asm/ |
D | barrier.h | 15 #define rmb() barrier() macro
|
/arch/alpha/include/asm/ |
D | barrier.h | 8 #define rmb() __asm__ __volatile__("mb": : :"memory") macro
|
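
Alpha's mb instruction backs the weakest memory model in the tree: even an address-dependent load (read p, then p->val) can observe stale data without a barrier, which is why the kernel folds ordering into READ_ONCE() on Alpha. A hedged userspace sketch of the pointer-publication pattern this affects, using GCC acquire/release builtins in place of the kernel primitives:

    /* Pointer publication: the writer initializes the object before
     * publishing the pointer; the reader's acquire load guarantees the
     * dereference sees the initialized value, even on Alpha, where the
     * dependent load alone is not enough. */
    #include <pthread.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct obj { int val; };

    static struct obj *gp;                 /* published pointer */

    static void *writer(void *arg)
    {
            struct obj *p = malloc(sizeof(*p));
            p->val = 42;                            /* init before publish */
            __atomic_store_n(&gp, p, __ATOMIC_RELEASE);
            return NULL;
    }

    static void *reader(void *arg)
    {
            struct obj *p;
            while (!(p = __atomic_load_n(&gp, __ATOMIC_ACQUIRE)))
                    ;                               /* wait for publication */
            printf("val = %d\n", p->val);           /* always 42 */
            return NULL;
    }

    int main(void)
    {
            pthread_t w, r;
            pthread_create(&w, NULL, writer, NULL);
            pthread_create(&r, NULL, reader, NULL);
            pthread_join(w, NULL);
            pthread_join(r, NULL);
            free(gp);
            return 0;
    }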
/arch/x86/kernel/ |
D | pvclock.c | 124 rmb(); /* fetch version before time */ in pvclock_read_wallclock()
  |           | 134 rmb(); /* fetch time before checking version */ in pvclock_read_wallclock()
|
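
The two rmb() calls in pvclock_read_wallclock() implement a seqcount-style retry loop: read the version, fence, read the time fields, fence, re-read the version, and retry if it changed or was odd (a writer was mid-update). A userspace sketch of that protocol (struct wall and its fields are hypothetical stand-ins; the fences stand in for rmb()):

    /* Seqcount-style read: the two fences keep the payload reads between
     * the two version reads; an odd or changed version means a writer was
     * mid-update, so retry. */
    #include <stdint.h>

    struct wall {
            uint32_t version;       /* odd while the writer is updating */
            uint64_t sec;
            uint32_t nsec;
    };

    void read_wallclock(volatile struct wall *w, uint64_t *sec, uint32_t *nsec)
    {
            uint32_t v1, v2;

            do {
                    v1 = w->version;
                    __atomic_thread_fence(__ATOMIC_ACQUIRE); /* rmb(): version before time */
                    *sec  = w->sec;
                    *nsec = w->nsec;
                    __atomic_thread_fence(__ATOMIC_ACQUIRE); /* rmb(): time before recheck */
                    v2 = w->version;
            } while ((v1 & 1) || v1 != v2);
    }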
/arch/sh/include/asm/ |
D | barrier.h | 29 #define rmb() mb() macro
|
D | io.h | 50 #define readb(a) ({ u8 r_ = readb_relaxed(a); rmb(); r_; })
  |      | 51 #define readw(a) ({ u16 r_ = readw_relaxed(a); rmb(); r_; })
  |      | 52 #define readl(a) ({ u32 r_ = readl_relaxed(a); rmb(); r_; })
  |      | 53 #define readq(a) ({ u64 r_ = readq_relaxed(a); rmb(); r_; })
|
/arch/ia64/include/asm/ |
D | barrier.h | 40 #define rmb() mb() macro
|
/arch/sparc/include/asm/ |
D | barrier_64.h | 38 #define rmb() __asm__ __volatile__("":::"memory") macro
|
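
sparc64 runs in TSO mode, where the hardware never reorders a load with a later load, so rmb() only needs to restrain the compiler; the empty asm with a "memory" clobber does exactly that. A small sketch of what such a compiler-only barrier changes (barrier() is a local stand-in for the kernel macro of the same name):

    /* What a compiler-only barrier buys: without it, the compiler may keep
     * "flag" cached in a register and spin forever; the "memory" clobber
     * forces a fresh load on every iteration. */
    #define barrier() __asm__ __volatile__("" ::: "memory")

    extern int flag;        /* set by another thread or interrupt handler */

    void wait_for_flag(void)
    {
            while (!flag)
                    barrier();   /* forces the load of flag to be re-issued */
    }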
/arch/arm/vfp/ |
D | vfp.h | 73 u64 rh, rma, rmb, rl; in mul64to128() local
  |       | 83 rmb = (u64)nl * mh; in mul64to128()
  |       | 84 rma += rmb; in mul64to128()
  |       | 87 rh += ((u64)(rma < rmb) << 32) + (rma >> 32); in mul64to128()
|
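
The vfp.h hits are a false positive for the barrier: rmb there is a local u64 holding one of the cross partial products in mul64to128(), a schoolbook 64x64-to-128-bit multiply built from 32-bit halves. A free-standing reimplementation of the same carry logic (not the kernel's code), checked against the compiler's __int128 (GCC/Clang, 64-bit targets):

    #include <stdint.h>
    #include <stdio.h>

    static void mul64to128(uint64_t *resh, uint64_t *resl, uint64_t n, uint64_t m)
    {
            uint32_t nh = (uint32_t)(n >> 32), nl = (uint32_t)n;
            uint32_t mh = (uint32_t)(m >> 32), ml = (uint32_t)m;
            uint64_t rh, rma, rmb, rl;

            rl  = (uint64_t)nl * ml;          /* low  partial product    */
            rma = (uint64_t)nh * ml;          /* cross terms, weight 2^32 */
            rmb = (uint64_t)nl * mh;
            rh  = (uint64_t)nh * mh;          /* high partial product    */

            rma += rmb;
            /* a carry out of the cross sum is worth 2^96, i.e. 1<<32 in rh */
            rh += ((uint64_t)(rma < rmb) << 32) + (rma >> 32);

            rma <<= 32;
            rl += rma;
            rh += (rl < rma);                 /* carry from the low half */

            *resh = rh;
            *resl = rl;
    }

    int main(void)
    {
            uint64_t a = 0xdeadbeefcafef00dULL, b = 0x123456789abcdef1ULL;
            uint64_t h, l;

            mul64to128(&h, &l, a, b);
            unsigned __int128 ref = (unsigned __int128)a * b;
            printf("%s\n", (h == (uint64_t)(ref >> 64) && l == (uint64_t)ref)
                           ? "match" : "MISMATCH");
            return 0;
    }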
/arch/powerpc/kernel/ |
D | smp-tbsync.c | 54 rmb(); in smp_generic_take_timebase()
  |              | 60 rmb(); in smp_generic_take_timebase()
|
/arch/riscv/include/asm/ |
D | barrier.h | 22 #define rmb() RISCV_FENCE(ir,ir) macro
|
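
RISCV_FENCE(p, s) stringifies its arguments into a single fence instruction, so rmb() becomes "fence ir,ir": device input and memory reads before the fence complete before any after it. A sketch of the expansion, assuming the macro's shape from arch/riscv/include/asm/fence.h (compiles only for RISC-V targets):

    /* The predecessor and successor sets are pasted straight into one
     * fence instruction; "ir" covers device input (i) and memory reads (r). */
    #define RISCV_FENCE(p, s) \
            __asm__ __volatile__ ("fence " #p "," #s : : : "memory")

    #define my_rmb() RISCV_FENCE(ir, ir)   /* expands to "fence ir,ir" */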
/arch/powerpc/include/asm/ |
D | barrier.h | 37 #define rmb() __asm__ __volatile__ ("sync" : : : "memory") macro
|
/arch/x86/kernel/cpu/resctrl/ |
D | pseudo_lock.c | 491 rmb(); in pseudo_lock_fn()
  |               | 502 rmb(); in pseudo_lock_fn()
  |               | 1004 rmb(); in measure_residency_fn()
  |               | 1011 rmb(); in measure_residency_fn()
  |               | 1017 rmb(); in measure_residency_fn()
  |               | 1027 rmb(); in measure_residency_fn()
  |               | 1034 rmb(); in measure_residency_fn()
|
/arch/mips/include/asm/ |
D | barrier.h | 196 #define rmb() fast_rmb() macro
|
D | io.h | 91 #define iobarrier_r() rmb()
  |      | 392 rmb(); \
  |      | 434 rmb(); \
|
/arch/sparc/kernel/ |
D | smp_64.c | 137 rmb(); in smp_callin()
  |          | 184 rmb(); in get_delta()
  |          | 219 rmb(); in smp_synchronize_tick_client()
  |          | 270 rmb(); in smp_synchronize_one_tick()
  |          | 280 rmb(); in smp_synchronize_one_tick()
  |          | 1148 rmb(); in smp_capture()
  |          | 1185 rmb(); in smp_penguin_jailcell()
|