/arch/arc/include/asm/barrier.h
    26: #define mb() asm volatile("dmb 3\n" : : : "memory")    (macro)
    38: #define mb() asm volatile("sync\n" : : : "memory")    (macro)

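Aside: every barrier.h hit in this listing defines mb() as that
architecture's strongest (full) memory barrier. A minimal usage
sketch, with a hypothetical device and made-up MYDEV_* register
offsets (real MMIO drivers would often get away with the lighter
wmb() or dma_wmb()):

    #include <linux/io.h>
    #include <asm/barrier.h>

    /* Hypothetical device; register offsets are illustrative only. */
    struct mydev {
        void __iomem *regs;
    };
    #define MYDEV_DESC      0x00
    #define MYDEV_DOORBELL  0x08

    static void mydev_kick(struct mydev *dev, u32 desc)
    {
        writel(desc, dev->regs + MYDEV_DESC);   /* publish the descriptor */
        mb();   /* full barrier: descriptor store ordered before doorbell */
        writel(1, dev->regs + MYDEV_DOORBELL);  /* notify the device */
    }
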
/arch/x86/um/asm/barrier.h
    15: #define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)    (macro)
    21: #define mb() asm volatile("mfence" : : : "memory")    (macro)

/arch/arm64/include/asm/atomic_lse.h
    36: #define ATOMIC_FETCH_OP(name, mb, op, asm_op, cl...) \    (argument)
    106: #define ATOMIC_FETCH_OP_AND(name, mb, cl...) \    (argument)
    143: #define ATOMIC64_FETCH_OP(name, mb, op, asm_op, cl...) \    (argument)
    213: #define ATOMIC64_FETCH_OP_AND(name, mb, cl...) \    (argument)
    248: #define __CMPXCHG_CASE(w, sfx, name, sz, mb, cl...) \    (argument)
    284: #define __CMPXCHG128(name, mb, cl...) \    (argument)

/arch/arm64/include/asm/atomic_ll_sc.h
    42: #define ATOMIC_OP_RETURN(name, mb, acq, rel, cl, op, asm_op, constraint)\    (argument)
    63: #define ATOMIC_FETCH_OP(name, mb, acq, rel, cl, op, asm_op, constraint) \    (argument)
    138: #define ATOMIC64_OP_RETURN(name, mb, acq, rel, cl, op, asm_op, constraint)\    (argument)
    159: #define ATOMIC64_FETCH_OP(name, mb, acq, rel, cl, op, asm_op, constraint)\    (argument)
    239: #define __CMPXCHG_CASE(w, sfx, name, sz, mb, acq, rel, cl, constraint) \    (argument)
    304: #define __CMPXCHG128(name, mb, rel, cl...) \    (argument)

/arch/arm64/include/asm/cmpxchg.h
    21: #define __XCHG_CASE(w, sfx, name, sz, mb, nop_lse, acq, acq_lse, rel, cl) \    (argument)

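Note that in the arm64 hits above, mb is a macro argument rather than
the barrier itself: it carries the ordering suffix ("", "a", "l",
"al") that gets spliced into the LSE mnemonic, so one template stamps
out the _relaxed/_acquire/_release/full variants. A simplified sketch
of the pattern, not the kernel source (assumes the kernel's atomic_t
and an ARMv8.1+ toolchain with LSE atomics):

    /*
     * `name` picks the entry-point suffix and `mb` the instruction's
     * ordering suffix; the real ATOMIC_FETCH_OP() also threads the
     * operation and a per-variant clobber list. The unconditional
     * "memory" clobber here is conservative.
     */
    #define FETCH_ADD_SKETCH(name, mb)                                \
    static inline int my_atomic_fetch_add##name(int i, atomic_t *v)   \
    {                                                                 \
        int old;                                                      \
        asm volatile("ldadd" #mb " %w[i], %w[old], %[v]"              \
                     : [old] "=r" (old), [v] "+Q" (v->counter)        \
                     : [i] "r" (i)                                    \
                     : "memory");                                     \
        return old;                                                   \
    }

    FETCH_ADD_SKETCH(_relaxed,   )  /* ldadd:   no ordering   */
    FETCH_ADD_SKETCH(_acquire, a )  /* ldadda:  acquire       */
    FETCH_ADD_SKETCH(_release, l )  /* ldaddl:  release       */
    FETCH_ADD_SKETCH(        , al)  /* ldaddal: fully ordered */
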
/arch/parisc/include/asm/barrier.h
    16: #define mb() do { synchronize_caches(); } while (0)    (macro)
    22: #define mb() barrier()    (macro)

/arch/openrisc/include/asm/barrier.h
    5: #define mb() asm volatile ("l.msync" ::: "memory")    (macro)

/arch/microblaze/include/asm/barrier.h
    9: #define mb() __asm__ __volatile__ ("mbar 1" : : : "memory")    (macro)

/arch/alpha/include/asm/barrier.h
    5: #define mb() __asm__ __volatile__("mb": : :"memory")    (macro)

/arch/mips/include/asm/barrier.h
    71: #define mb() wbflush()    (macro)
    76: #define mb() fast_mb()    (macro)

/arch/sh/include/asm/barrier.h
    28: #define mb() __asm__ __volatile__ ("synco": : :"memory")    (macro)

/arch/sparc/include/asm/barrier_64.h
    37: #define mb() membar_safe("#StoreLoad")    (macro)

/arch/ia64/include/asm/barrier.h
    39: #define mb() ia64_mf()    (macro)

/arch/arm/include/asm/barrier.h
    64: #define mb() __arm_heavy_mb()    (macro)
    70: #define mb() barrier()    (macro)

/arch/riscv/include/asm/barrier.h
    23: #define mb() RISCV_FENCE(iorw,iorw)    (macro)

/arch/csky/include/asm/barrier.h
    76: #define mb() asm volatile ("sync\n":::"memory")    (macro)

/arch/x86/include/asm/barrier.h
    15: #define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \    (macro)

/arch/x86/mm/numa.c
    507: struct numa_memblk *mb = numa_meminfo.blk + i;    (local in numa_clear_kernel_node_hotplug())
    539: struct numa_memblk *mb = numa_meminfo.blk + i;    (local in numa_clear_kernel_node_hotplug())
    559: struct numa_memblk *mb = &mi->blk[i];    (local in numa_register_memblks())
    964: const struct numa_memblk *mb = *(const struct numa_memblk **)b;    (local in cmp_memblk())

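The numa.c hits are plain locals: mb is just a cursor over the
numa_meminfo block array. A hypothetical helper showing the shape of
those loops (struct numa_memblk carries start, end and nid; each
block covers [start, end) in bytes):

    static u64 numa_meminfo_bytes(const struct numa_meminfo *mi)
    {
        u64 bytes = 0;
        int i;

        for (i = 0; i < mi->nr_blks; i++) {
            const struct numa_memblk *mb = &mi->blk[i];

            bytes += mb->end - mb->start;   /* size of this block */
        }
        return bytes;
    }
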
/arch/loongarch/include/asm/barrier.h
    44: #define mb() c_sync()    (macro)

/arch/arc/plat-axs10x/axs10x.c
    94: char mb[32];    (local in axs10x_early_init())

/arch/powerpc/lib/sstep.c
    1332: #define MASK32(mb, me) ((0xffffffffUL >> (mb)) + \    (argument)
    1335: #define MASK64_L(mb) (~0UL >> (mb))    (argument)
    1337: #define MASK64(mb, me) (MASK64_L(mb) + MASK64_R(me) + ((me) >= (mb)))    (argument)
    1364: unsigned int mb, me, sh;    (local in analyse_instr())

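In the sstep.c hits, mb and me are rotate-mask bounds in PowerPC bit
numbering (bit 0 is the MSB of the word). MASK32(mb, me) produces the
rlwinm-style mask with bits mb..me set, wrapping around when me < mb;
the kernel computes it branch-free with the add-and-carry trick begun
at line 1332. An equivalent, more readable userspace re-derivation
(not the kernel's form):

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t mask32(unsigned int mb, unsigned int me)
    {
        uint32_t left  = 0xffffffffu >> mb;        /* bits mb..31 set */
        uint32_t right = 0xffffffffu << (31 - me); /* bits 0..me set  */

        return me >= mb ? left & right : left | right;
    }

    int main(void)
    {
        printf("%08x\n", mask32(16, 31));   /* 0000ffff: low halfword  */
        printf("%08x\n", mask32(0, 15));    /* ffff0000: high halfword */
        printf("%08x\n", mask32(24, 7));    /* ff0000ff: wrapped mask  */
        return 0;
    }
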
/arch/loongarch/kernel/numa.c
    287: struct numa_memblk *mb = &numa_meminfo.blk[i];    (local in add_numamem_region())

/arch/x86/kernel/fpu/regset.c
    418: struct membuf mb = { .p = &fxsave, .left = sizeof(fxsave) };    (local in fpregs_get())

/arch/x86/kernel/fpu/core.c
    376: struct membuf mb = { .p = buf, .left = size };    (local in fpu_copy_guest_fpstate_to_uabi())

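In the fpu hits, mb is a struct membuf from <linux/regset.h>: a write
cursor (p) plus the space remaining (left), advanced by each helper
call. A sketch of the pattern with a hypothetical helper
(membuf_write() and membuf_zero() are the real regset helpers):

    #include <linux/regset.h>

    /* Copy a payload into a user-visible buffer, zero-fill the rest. */
    static void fill_uabi_buf(void *buf, size_t size,
                              const void *payload, size_t payload_size)
    {
        struct membuf mb = { .p = buf, .left = size };

        membuf_write(&mb, payload, payload_size); /* advances p, shrinks left */
        membuf_zero(&mb, mb.left);                /* pad remainder with zeroes */
    }
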
/arch/x86/kernel/e820.c
    1208: unsigned long mb = pos >> 20;    (local in ram_alignment())

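The e820.c hit is plain arithmetic: pos >> 20 converts a byte address
into whole MiB, which ram_alignment() then uses to choose an alignment
granularity. For example:

    unsigned long mb = 0x2500000UL >> 20;   /* 0x2500000 bytes == 37 MiB */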