Home
last modified time | relevance | path

Searched defs:mb (Results 1 – 25 of 30) sorted by relevance

Page: 1 2

/arch/arc/include/asm/
    barrier.h:29 — #define mb() asm volatile("dmb 3\n" : : : "memory") [macro]
    barrier.h:43 — #define mb() asm volatile("sync\n" : : : "memory") [macro]
/arch/arm64/include/asm/
    atomic_lse.h:78 — #define ATOMIC_OP_ADD_RETURN(name, mb, cl...) \ [argument]
    atomic_lse.h:139 — #define ATOMIC_OP_SUB_RETURN(name, mb, cl...) \ [argument]
    atomic_lse.h:219 — #define ATOMIC64_OP_ADD_RETURN(name, mb, cl...) \ [argument]
    atomic_lse.h:280 — #define ATOMIC64_OP_SUB_RETURN(name, mb, cl...) \ [argument]
    atomic_lse.h:342 — #define __CMPXCHG_CASE(w, sz, name, mb, cl...) \ [argument]
    atomic_lse.h:389 — #define __CMPXCHG_DBL(name, mb, cl...) \ [argument]
    atomic_ll_sc.h:58 — #define ATOMIC_OP_RETURN(name, mb, acq, rel, cl, op, asm_op) \ [argument]
    atomic_ll_sc.h:121 — #define ATOMIC64_OP_RETURN(name, mb, acq, rel, cl, op, asm_op) \ [argument]
    atomic_ll_sc.h:189 — #define __CMPXCHG_CASE(w, sz, name, mb, acq, rel, cl) \ [argument]
    atomic_ll_sc.h:235 — #define __CMPXCHG_DBL(name, mb, rel, cl) \ [argument]
    barrier.h:34 — #define mb() dsb(sy) [macro]
/arch/parisc/include/asm/
    barrier.h:12 — #define mb() do { synchronize_caches(); } while (0) [macro]
    barrier.h:18 — #define mb() barrier() [macro]
/arch/x86/um/asm/
    barrier.h:20 — #define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2) [macro]
    barrier.h:26 — #define mb() asm volatile("mfence" : : : "memory") [macro]
/arch/openrisc/include/asm/
    barrier.h:5 — #define mb() asm volatile ("l.msync" ::: "memory") [macro]
/arch/arm/mach-realview/include/mach/
    barriers.h:6 — #define mb() dsb() [macro]
/arch/frv/include/asm/
    barrier.h:17 — #define mb() asm volatile ("membar" : : :"memory") [macro]
/arch/xtensa/include/asm/
    barrier.h:12 — #define mb() ({ __asm__ __volatile__("memw" : : : "memory"); }) [macro]
/arch/alpha/include/asm/
    barrier.h:6 — #define mb() __asm__ __volatile__("mb": : :"memory") [macro]
/arch/blackfin/include/asm/
    barrier.h:22 — # define mb() do { barrier(); smp_check_barrier(); smp_mark_barrier(); } while (0) [macro]
/arch/sh/include/asm/
    barrier.h:27 — #define mb() __asm__ __volatile__ ("synco": : :"memory") [macro]
/arch/x86/include/asm/
    barrier.h:18 — #define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2) [macro]
    barrier.h:22 — #define mb() asm volatile("mfence":::"memory") [macro]
/arch/mips/include/asm/
    barrier.h:78 — #define mb() wbflush() [macro]
    barrier.h:83 — #define mb() fast_mb() [macro]
/arch/tile/include/asm/
    barrier.h:72 — #define mb() fast_mb() [macro]
/arch/ia64/include/asm/
    barrier.h:38 — #define mb() ia64_mf() [macro]
/arch/sparc/include/asm/
    barrier_64.h:36 — #define mb() membar_safe("#StoreLoad") [macro]
/arch/arm/include/asm/
    barrier.h:65 — #define mb() __arm_heavy_mb() [macro]
    barrier.h:71 — #define mb() barrier() [macro]
/arch/s390/include/asm/
    barrier.h:23 — #define mb() do { asm volatile(__ASM_BARRIER : : : "memory"); } while (0) [macro]
/arch/powerpc/include/asm/
    barrier.h:33 — #define mb() __asm__ __volatile__ ("sync" : : : "memory") [macro]
/arch/metag/include/asm/
    barrier.h:43 — #define mb() wr_fence() [macro]
/arch/powerpc/lib/
    sstep.c:623 — #define MASK32(mb, me) ((0xffffffffUL >> (mb)) + \ [argument]
    sstep.c:626 — #define MASK64_L(mb) (~0UL >> (mb)) [argument]
    sstep.c:628 — #define MASK64(mb, me) (MASK64_L(mb) + MASK64_R(me) + ((me) >= (mb))) [argument]
    sstep.c:648 — unsigned int mb, me, sh; in analyse_instr() [local]
/arch/powerpc/mm/
    ppc_mmu_32.c:182 — unsigned int hmask, mb, mb2; in MMU_init_hw() [local]
/arch/x86/mm/
    numa.c:481 — struct numa_memblk *mb = &numa_meminfo.blk[i]; in numa_clear_kernel_node_hotplug() [local]
    numa.c:523 — struct numa_memblk *mb = &mi->blk[i]; in numa_register_memblks() [local]

Page: 1 2