/arch/arc/include/asm/ |
D | barrier.h | 29 #define mb() asm volatile("dmb 3\n" : : : "memory") macro 41 #define mb() asm volatile("sync\n" : : : "memory") macro 47 #define mb() asm volatile (".word %0" : : "i"(CTOP_INST_SCHD_RW) : "memory") macro
|
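
The entries throughout this listing are the per-architecture definitions of the kernel's full memory barrier, mb(). As a hedged illustration of the ordering contract every one of these definitions has to provide (not code from any of the files listed), the sketch below uses __sync_synchronize() as a userspace stand-in for mb(); the producer/consumer functions and the payload/ready variables are made-up names.

/*
 * Hedged sketch (not kernel code): the ordering every mb() above must
 * provide.  __sync_synchronize() stands in for the kernel macro.
 */
#include <pthread.h>
#include <stdio.h>

static int payload;
static volatile int ready;

static void *producer(void *arg)
{
	payload = 42;			/* 1: write the data           */
	__sync_synchronize();		/* 2: full barrier, like mb()  */
	ready = 1;			/* 3: publish the flag         */
	return NULL;
}

static void *consumer(void *arg)
{
	while (!ready)			/* spin until the flag is seen */
		;
	__sync_synchronize();		/* read-side barrier before the data */
	printf("payload = %d\n", payload);	/* must print 42 */
	return NULL;
}

int main(void)
{
	pthread_t p, c;

	pthread_create(&c, NULL, consumer, NULL);
	pthread_create(&p, NULL, producer, NULL);
	pthread_join(p, NULL);
	pthread_join(c, NULL);
	return 0;
}

Build with -pthread; on a weakly ordered machine, dropping either barrier makes it legal for the consumer to observe ready == 1 before payload == 42.
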
/arch/arm64/include/asm/ |
D | atomic_lse.h | 49 #define ATOMIC_FETCH_OP(name, mb, op, asm_op, cl...) \ in ATOMIC_OP() argument
  |              | 81 #define ATOMIC_OP_ADD_RETURN(name, mb, cl...) \ argument
  |              | 125 #define ATOMIC_FETCH_OP_AND(name, mb, cl...) \ argument
  |              | 169 #define ATOMIC_OP_SUB_RETURN(name, mb, cl...) \ argument
  |              | 197 #define ATOMIC_FETCH_OP_SUB(name, mb, cl...) \ argument
  |              | 246 #define ATOMIC64_FETCH_OP(name, mb, op, asm_op, cl...) \ in ATOMIC64_OP() argument
  |              | 278 #define ATOMIC64_OP_ADD_RETURN(name, mb, cl...) \ argument
  |              | 322 #define ATOMIC64_FETCH_OP_AND(name, mb, cl...) \ argument
  |              | 366 #define ATOMIC64_OP_SUB_RETURN(name, mb, cl...) \ argument
  |              | 394 #define ATOMIC64_FETCH_OP_SUB(name, mb, cl...) \ argument
  |              | [all …]
|
D | atomic_ll_sc.h | 58 #define ATOMIC_OP_RETURN(name, mb, acq, rel, cl, op, asm_op) \ argument
  |                | 80 #define ATOMIC_FETCH_OP(name, mb, acq, rel, cl, op, asm_op) \ argument
  |                | 152 #define ATOMIC64_OP_RETURN(name, mb, acq, rel, cl, op, asm_op) \ argument
  |                | 174 #define ATOMIC64_FETCH_OP(name, mb, acq, rel, cl, op, asm_op) \ argument
  |                | 251 #define __CMPXCHG_CASE(w, sz, name, mb, acq, rel, cl) \ argument
  |                | 296 #define __CMPXCHG_DBL(name, mb, rel, cl) \ argument
|
D | barrier.h | 36 #define mb() dsb(sy) macro
|
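
The atomic_lse.h and atomic_ll_sc.h entries above use mb as a macro parameter: each template is instantiated once per ordering variant, with mb, acq and rel filled in with the barrier or acquire/release flavour spliced into the inline assembly, and cl carrying the clobber list. Below is a minimal sketch of that generation pattern, assuming nothing beyond the __atomic compiler builtins; GEN_FETCH_ADD and the my_fetch_add* functions are illustrative names, not the kernel's.

/*
 * Hedged sketch of the name/mb template pattern: one body, expanded
 * once per ordering variant.  The kernel splices assembly text; the
 * `order` argument plays that role here.
 */
#define GEN_FETCH_ADD(name, order)				\
static inline int my_fetch_add##name(int *v, int i)		\
{								\
	return __atomic_fetch_add(v, i, order);			\
}

GEN_FETCH_ADD(_relaxed, __ATOMIC_RELAXED)	/* no ordering                */
GEN_FETCH_ADD(_acquire, __ATOMIC_ACQUIRE)	/* acquire ("a") flavour      */
GEN_FETCH_ADD(_release, __ATOMIC_RELEASE)	/* release ("l") flavour      */
GEN_FETCH_ADD(,         __ATOMIC_SEQ_CST)	/* fully ordered: the mb slot */

int main(void)
{
	int v = 0;

	my_fetch_add_relaxed(&v, 1);
	my_fetch_add(&v, 1);		/* the fully ordered variant */
	return v == 2 ? 0 : 1;
}
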
/arch/x86/um/asm/ |
D | barrier.h | 21 #define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2) macro
  |           | 27 #define mb() asm volatile("mfence" : : : "memory") macro
|
/arch/parisc/include/asm/ |
D | barrier.h | 12 #define mb() do { synchronize_caches(); } while (0) macro
  |           | 18 #define mb() barrier() macro
|
/arch/frv/include/asm/ |
D | barrier.h | 17 #define mb() asm volatile ("membar" : : :"memory") macro
|
/arch/xtensa/include/asm/ |
D | barrier.h | 12 #define mb() ({ __asm__ __volatile__("memw" : : : "memory"); }) macro
|
/arch/alpha/include/asm/ |
D | barrier.h | 7 #define mb() __asm__ __volatile__("mb": : :"memory") macro
|
/arch/x86/include/asm/ |
D | barrier.h | 15 #define mb() asm volatile(ALTERNATIVE("lock; addl $0,0(%%esp)", "mfence", \ macro
  |           | 22 #define mb() asm volatile("mfence":::"memory") macro
|
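
The x86 definition above picks between a locked add to the stack and mfence through the kernel's self-patching ALTERNATIVE() mechanism, keyed on X86_FEATURE_XMM2. A hedged userspace analogue of "choose the barrier once, based on a CPU feature bit" is sketched below using a function pointer and __builtin_cpu_supports(); the mb_* names are made up, and the kernel patches the instruction in place rather than paying for an indirect call.

/*
 * Hedged sketch: runtime selection of a barrier implementation,
 * a portable stand-in for x86's ALTERNATIVE()-based mb() above.
 */
static void mb_lock_addl(void)
{
	/* fallback: any locked read-modify-write is a full barrier on x86 */
#ifdef __x86_64__
	asm volatile("lock; addl $0,0(%%rsp)" ::: "memory", "cc");
#else
	asm volatile("lock; addl $0,0(%%esp)" ::: "memory", "cc");
#endif
}

static void mb_mfence(void)
{
	asm volatile("mfence" ::: "memory");
}

static void (*mb_impl)(void) = mb_lock_addl;

static void mb_select(void)
{
	if (__builtin_cpu_supports("sse2"))	/* roughly X86_FEATURE_XMM2 */
		mb_impl = mb_mfence;
}

int main(void)
{
	mb_select();
	mb_impl();		/* one full barrier, whichever was chosen */
	return 0;
}
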
/arch/blackfin/include/asm/ |
D | barrier.h | 22 # define mb() do { barrier(); smp_check_barrier(); smp_mark_barrier(); } while (0) macro
|
/arch/mips/include/asm/ |
D | barrier.h | 171 #define mb() wbflush() macro
  |           | 176 #define mb() fast_mb() macro
|
/arch/sh/include/asm/ |
D | barrier.h | 28 #define mb() __asm__ __volatile__ ("synco": : :"memory") macro
|
/arch/sparc/include/asm/ |
D | barrier_64.h | 37 #define mb() membar_safe("#StoreLoad") macro
|
/arch/ia64/include/asm/ |
D | barrier.h | 39 #define mb() ia64_mf() macro
|
/arch/arm/include/asm/ |
D | barrier.h | 64 #define mb() __arm_heavy_mb() macro
  |           | 70 #define mb() barrier() macro
|
/arch/s390/include/asm/ |
D | barrier.h | 24 #define mb() do { asm volatile(__ASM_BARRIER : : : "memory"); } while (0) macro
|
/arch/tile/include/asm/ |
D | barrier.h | 72 #define mb() fast_mb() macro
|
/arch/metag/include/asm/ |
D | barrier.h | 44 #define mb() wr_fence() macro
|
/arch/powerpc/include/asm/ |
D | barrier.h | 34 #define mb() __asm__ __volatile__ ("sync" : : : "memory") macro
|
/arch/x86/mm/ |
D | numa.c | 492 struct numa_memblk *mb = numa_meminfo.blk + i; in numa_clear_kernel_node_hotplug() local
  |        | 522 struct numa_memblk *mb = numa_meminfo.blk + i; in numa_clear_kernel_node_hotplug() local
  |        | 543 struct numa_memblk *mb = &mi->blk[i]; in numa_register_memblks() local
|
/arch/powerpc/mm/ |
D | ppc_mmu_32.c | 182 unsigned int hmask, mb, mb2; in MMU_init_hw() local
|
/arch/powerpc/lib/ |
D | sstep.c | 1130 #define MASK32(mb, me) ((0xffffffffUL >> (mb)) + \ argument
  |         | 1133 #define MASK64_L(mb) (~0UL >> (mb)) argument
  |         | 1135 #define MASK64(mb, me) (MASK64_L(mb) + MASK64_R(me) + ((me) >= (mb))) argument
  |         | 1159 unsigned int mb, me, sh; in analyse_instr() local
|
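
The MASK32/MASK64 macros in sstep.c build, with pure arithmetic, the mask that a rotate-and-mask instruction (rlwinm, rldicl, ...) applies between its MB and ME fields. Below is a sketch of the same mask written out the long way, assuming IBM bit numbering (bit 0 is the most significant bit); rlwinm_mask is an illustrative helper, not the kernel's macro.

/*
 * Hedged sketch: the 32-bit rotate-and-mask mask, bits MB..ME
 * inclusive in IBM numbering, wrapping around when ME < MB.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t rlwinm_mask(unsigned int mb, unsigned int me)
{
	uint32_t left  = 0xffffffffu >> mb;		/* ones in bits mb..31 */
	uint32_t right = 0xffffffffu << (31 - me);	/* ones in bits 0..me  */

	/* contiguous mask when mb <= me, wrap-around mask otherwise */
	return (mb <= me) ? (left & right) : (left | right);
}

int main(void)
{
	/* rlwinm rD,rS,0,24,31 keeps the low byte: prints 000000ff */
	printf("%08x\n", rlwinm_mask(24, 31));
	/* ME < MB wraps around both ends: prints f000000f */
	printf("%08x\n", rlwinm_mask(28, 3));
	return 0;
}
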
/arch/powerpc/net/ |
D | bpf_jit.h | 154 #define PPC_RLWINM(d, a, i, mb, me) EMIT(PPC_INST_RLWINM | ___PPC_RA(d) | \ argument
  |           | 157 #define PPC_RLWIMI(d, a, i, mb, me) EMIT(PPC_INST_RLWIMI | ___PPC_RA(d) | \ argument
  |           | 160 #define PPC_RLDICL(d, a, i, mb) EMIT(PPC_INST_RLDICL | ___PPC_RA(d) | \ argument
|
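
The emitter macros above OR register numbers, the shift amount and the MB/ME mask bounds into a single instruction word for the BPF JIT. Below is a hedged sketch of what a macro like PPC_RLWINM reduces to, assuming the Power ISA M-form field layout (OPCD 21 for rlwinm); ppc_rlwinm is an illustrative helper, while the kernel composes the word from EMIT() and the ___PPC_*() field macros instead.

/*
 * Hedged sketch: pack the M-form fields of rlwinm into one word.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t ppc_rlwinm(unsigned int ra, unsigned int rs,
			   unsigned int sh, unsigned int mb,
			   unsigned int me)
{
	return (21u << 26)		/* OPCD: rlwinm              */
	     | ((rs & 31) << 21)	/* RS: source register       */
	     | ((ra & 31) << 16)	/* RA: destination register  */
	     | ((sh & 31) << 11)	/* SH: rotate amount         */
	     | ((mb & 31) << 6)		/* MB: first mask bit        */
	     | ((me & 31) << 1);	/* ME: last mask bit         */
}

int main(void)
{
	/* encode rlwinm r3,r4,0,24,31, i.e. clrlwi r3,r4,24 */
	printf("%08x\n", ppc_rlwinm(3, 4, 0, 24, 31));
	return 0;
}
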
/arch/arc/plat-axs10x/ |
D | axs10x.c | 104 char mb[32]; in axs10x_early_init() local
|