
Searched refs:rmb (Results 1 – 25 of 38) sorted by relevance

/arch/x86/um/asm/
barrier.h:22  #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2)  macro
barrier.h:28  #define rmb() asm volatile("lfence" : : : "memory")  macro
barrier.h:34  #define dma_rmb() rmb()
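
dma_rmb() (mapped to rmb() here) is the variant drivers use between checking a DMA descriptor's ownership/status flag and reading the fields the device filled in. A minimal sketch of that pattern, assuming a hypothetical descriptor layout; the struct, RX_DESC_DONE flag and consume() helper are illustrative, not taken from these files:

    /* Hypothetical DMA descriptor; field names are illustrative. */
    struct rx_desc {
            u32 status;     /* device sets RX_DESC_DONE when it hands the descriptor back */
            u32 len;
            u64 addr;
    };

    if (READ_ONCE(desc->status) & RX_DESC_DONE) {
            dma_rmb();                        /* read status before len/addr         */
            consume(desc->addr, desc->len);   /* the device's writes to len/addr are
                                                 now guaranteed to be visible        */
    }
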
/arch/arc/include/asm/
barrier.h:30  #define rmb() asm volatile("dmb 1\n" : : : "memory")  macro
barrier.h:48  #define rmb() asm volatile (".word %0" : : "i"(CTOP_INST_SCHD_RD) : "memory")  macro
/arch/x86/include/asm/
barrier.h:17  #define rmb() asm volatile(ALTERNATIVE("lock; addl $0,0(%%esp)", "lfence", \  macro
barrier.h:23  #define rmb() asm volatile("lfence":::"memory")  macro
barrier.h:56  #define dma_rmb() rmb()
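
Whatever the per-arch implementation (lfence, dsb, sync, or a plain compiler barrier), rmb() is the read side of a write/read barrier pair. A minimal kernel-style sketch of the classic pairing, assuming the usual <asm/barrier.h> definitions; compute(), use(), data and flag are placeholders, not code from the files above:

    static int data;
    static int flag;

    void producer(void)
    {
            data = compute();               /* 1: store the payload                */
            wmb();                          /* 2: order the data store ...         */
            WRITE_ONCE(flag, 1);            /*    ... before the flag store        */
    }

    void consumer(void)
    {
            while (!READ_ONCE(flag))        /* 1: wait until the flag is observed  */
                    cpu_relax();
            rmb();                          /* 2: order the flag load ...          */
            use(data);                      /*    ... before the data load         */
    }
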
/arch/parisc/include/asm/
barrier.h:13  #define rmb() mb()  macro
barrier.h:19  #define rmb() barrier()  macro
/arch/powerpc/lib/
locks.c:38  rmb();  in __spin_yield()
locks.c:64  rmb();  in __rw_yield()
/arch/s390/include/asm/
barrier.h:26  #define rmb() barrier()  macro
barrier.h:31  #define __smp_rmb() rmb()
/arch/frv/include/asm/
barrier.h:18  #define rmb() asm volatile ("membar" : : :"memory")  macro
/arch/xtensa/include/asm/
barrier.h:13  #define rmb() barrier()  macro
/arch/arm/include/asm/
barrier.h:65  #define rmb() dsb()  macro
barrier.h:71  #define rmb() barrier()  macro
/arch/alpha/include/asm/
barrier.h:8  #define rmb() __asm__ __volatile__("mb": : :"memory")  macro
/arch/blackfin/include/asm/
barrier.h:23  # define rmb() do { barrier(); smp_check_barrier(); } while (0)  macro
/arch/x86/kernel/
pvclock.c:135  rmb(); /* fetch version before time */  in pvclock_read_wallclock()
pvclock.c:138  rmb(); /* fetch time before checking version */  in pvclock_read_wallclock()
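
The two pvclock_read_wallclock() comments describe a version-counter (seqlock-style) read loop: snapshot the version, read the time fields, then re-check the version and retry if an update was in progress. A sketch of that loop, assuming a pvclock-like shared structure; the field names below are illustrative, not copied from pvclock.c:

    u32 version, sec, nsec;

    do {
            version = wall_clock->version;   /* snapshot the version counter       */
            rmb();                           /* fetch version before time          */
            sec  = wall_clock->sec;
            nsec = wall_clock->nsec;
            rmb();                           /* fetch time before checking version */
    } while ((wall_clock->version & 1) || wall_clock->version != version);
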
/arch/sh/include/asm/
barrier.h:29  #define rmb() mb()  macro
io.h:49  #define readb(a) ({ u8 r_ = readb_relaxed(a); rmb(); r_; })
io.h:50  #define readw(a) ({ u16 r_ = readw_relaxed(a); rmb(); r_; })
io.h:51  #define readl(a) ({ u32 r_ = readl_relaxed(a); rmb(); r_; })
io.h:52  #define readq(a) ({ u64 r_ = readq_relaxed(a); rmb(); r_; })
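
The sh io.h entries build the ordered MMIO readers from the relaxed ones plus rmb(): the barrier keeps the device register read ordered before any later memory accesses that depend on it. A hypothetical driver fragment (the STATUS offset, DONE bit and dma_buf are made up) relying on that ordering:

    u8 status = readb(regs + STATUS);   /* expands to readb_relaxed() + rmb()     */

    if (status & DONE)
            process(dma_buf);           /* the rmb() inside readb() orders the
                                           status read before these buffer reads  */
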
/arch/ia64/include/asm/
barrier.h:40  #define rmb() mb()  macro
/arch/sparc/include/asm/
barrier_64.h:38  #define rmb() __asm__ __volatile__("":::"memory")  macro
/arch/arm/vfp/
vfp.h:76  u64 rh, rma, rmb, rl;  in mul64to128() local
vfp.h:86  rmb = (u64)nl * mh;  in mul64to128()
vfp.h:87  rma += rmb;  in mul64to128()
vfp.h:90  rh += ((u64)(rma < rmb) << 32) + (rma >> 32);  in mul64to128()
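
Note that the vfp.h hits are a different rmb: a local u64 holding one of the two cross products in mul64to128()'s schoolbook 64x64-to-128-bit multiply, not the memory barrier. A standalone reconstruction of that arithmetic (variable names mirror the listing, but the body below is an illustrative rewrite, not the kernel source):

    #include <stdint.h>

    /* Schoolbook 64x64 -> 128-bit multiply using 32-bit halves.
     * rh:rl is the 128-bit result; rma and rmb are the middle
     * (cross) products seen at vfp.h lines 86-87 above. */
    static void mul64to128(uint64_t *resh, uint64_t *resl, uint64_t n, uint64_t m)
    {
            uint32_t nh = n >> 32, nl = (uint32_t)n;
            uint32_t mh = m >> 32, ml = (uint32_t)m;
            uint64_t rh, rma, rmb, rl;

            rl  = (uint64_t)nl * ml;
            rma = (uint64_t)nh * ml;
            rmb = (uint64_t)nl * mh;
            rh  = (uint64_t)nh * mh;

            rma += rmb;                                          /* sum the cross products    */
            rh  += ((uint64_t)(rma < rmb) << 32) + (rma >> 32);  /* their carry and high half */

            rma <<= 32;
            rl  += rma;
            rh  += (rl < rma);                                   /* carry out of the low word */

            *resh = rh;
            *resl = rl;
    }
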
/arch/tile/include/asm/
barrier.h:71  #define rmb() fast_rmb()  macro
/arch/powerpc/kernel/
smp-tbsync.c:54  rmb();  in smp_generic_take_timebase()
smp-tbsync.c:60  rmb();  in smp_generic_take_timebase()
/arch/metag/include/asm/
barrier.h:45  #define rmb() barrier()  macro
/arch/powerpc/include/asm/
barrier.h:35  #define rmb() __asm__ __volatile__ ("sync" : : : "memory")  macro
/arch/mips/include/asm/
barrier.h:182  #define rmb() fast_rmb()  macro
/arch/arm64/include/asm/
barrier.h:37  #define rmb() dsb(ld)  macro
/arch/sparc/kernel/
smp_64.c:137  rmb();  in smp_callin()
smp_64.c:184  rmb();  in get_delta()
smp_64.c:219  rmb();  in smp_synchronize_tick_client()
smp_64.c:270  rmb();  in smp_synchronize_one_tick()
smp_64.c:280  rmb();  in smp_synchronize_one_tick()
smp_64.c:1185  rmb();  in smp_capture()
smp_64.c:1222  rmb();  in smp_penguin_jailcell()
/arch/mips/kernel/
syscall.c:170  rmb();  in mips_atomic_set()
