
Searched defs:mb (Results 1 – 25 of 26) sorted by relevance


/arch/arc/include/asm/
barrier.h:26  #define mb() asm volatile("dmb 3\n" : : : "memory")  macro
barrier.h:38  #define mb() asm volatile("sync\n" : : : "memory")  macro
barrier.h:44  #define mb() asm volatile (".word %0" : : "i"(CTOP_INST_SCHD_RW) : "memory")  macro
/arch/arm64/include/asm/
atomic_lse.h:30   #define ATOMIC_FETCH_OP(name, mb, op, asm_op, cl...) \  in ATOMIC_OP()  argument
atomic_lse.h:57   #define ATOMIC_OP_ADD_RETURN(name, mb, cl...) \  argument
atomic_lse.h:90   #define ATOMIC_FETCH_OP_AND(name, mb, cl...) \  argument
atomic_lse.h:121  #define ATOMIC_OP_SUB_RETURN(name, mb, cl...) \  argument
atomic_lse.h:145  #define ATOMIC_FETCH_OP_SUB(name, mb, cl...) \  argument
atomic_lse.h:183  #define ATOMIC64_FETCH_OP(name, mb, op, asm_op, cl...) \  in ATOMIC64_OP()  argument
atomic_lse.h:210  #define ATOMIC64_OP_ADD_RETURN(name, mb, cl...) \  argument
atomic_lse.h:243  #define ATOMIC64_FETCH_OP_AND(name, mb, cl...) \  argument
atomic_lse.h:274  #define ATOMIC64_OP_SUB_RETURN(name, mb, cl...) \  argument
atomic_lse.h:298  #define ATOMIC64_FETCH_OP_SUB(name, mb, cl...) \  argument
[all …]
atomic_ll_sc.h:56   #define ATOMIC_OP_RETURN(name, mb, acq, rel, cl, op, asm_op, constraint)\  argument
atomic_ll_sc.h:78   #define ATOMIC_FETCH_OP(name, mb, acq, rel, cl, op, asm_op, constraint) \  argument
atomic_ll_sc.h:155  #define ATOMIC64_OP_RETURN(name, mb, acq, rel, cl, op, asm_op, constraint)\  argument
atomic_ll_sc.h:177  #define ATOMIC64_FETCH_OP(name, mb, acq, rel, cl, op, asm_op, constraint)\  argument
atomic_ll_sc.h:259  #define __CMPXCHG_CASE(w, sfx, name, sz, mb, acq, rel, cl, constraint) \  argument
atomic_ll_sc.h:318  #define __CMPXCHG_DBL(name, mb, rel, cl) \  argument
barrier.h:32  #define mb() dsb(sy)  macro
cmpxchg.h:21  #define __XCHG_CASE(w, sfx, name, sz, mb, nop_lse, acq, acq_lse, rel, cl) \  argument
/arch/x86/um/asm/
barrier.h:14  #define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)  macro
barrier.h:20  #define mb() asm volatile("mfence" : : : "memory")  macro
/arch/csky/include/asm/
barrier.h:31  #define mb() asm volatile ("sync.s\n":::"memory")  macro
barrier.h:42  #define mb() asm volatile ("sync\n":::"memory")  macro
/arch/parisc/include/asm/
barrier.h:12  #define mb() do { synchronize_caches(); } while (0)  macro
barrier.h:18  #define mb() barrier()  macro
/arch/nds32/include/asm/
barrier.h:8  #define mb() asm volatile("msync all":::"memory")  macro
/arch/x86/include/asm/
barrier.h:15  #define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \  macro
barrier.h:22  #define mb() asm volatile("mfence":::"memory")  macro
/arch/xtensa/include/asm/
barrier.h:14  #define mb() ({ __asm__ __volatile__("memw" : : : "memory"); })  macro
/arch/alpha/include/asm/
barrier.h:7  #define mb() __asm__ __volatile__("mb": : :"memory")  macro
/arch/sh/include/asm/
barrier.h:28  #define mb() __asm__ __volatile__ ("synco": : :"memory")  macro
/arch/sparc/include/asm/
barrier_64.h:37  #define mb() membar_safe("#StoreLoad")  macro
/arch/ia64/include/asm/
barrier.h:39  #define mb() ia64_mf()  macro
/arch/mips/include/asm/
barrier.h:185  #define mb() wbflush()  macro
barrier.h:190  #define mb() fast_mb()  macro
/arch/riscv/include/asm/
barrier.h:21  #define mb() RISCV_FENCE(iorw,iorw)  macro
/arch/arm/include/asm/
barrier.h:64  #define mb() __arm_heavy_mb()  macro
barrier.h:70  #define mb() barrier()  macro
/arch/s390/include/asm/
barrier.h:24  #define mb() do { asm volatile(__ASM_BARRIER : : : "memory"); } while (0)  macro
/arch/powerpc/include/asm/
barrier.h:36  #define mb() __asm__ __volatile__ ("sync" : : : "memory")  macro
/arch/x86/mm/
numa.c:488  struct numa_memblk *mb = numa_meminfo.blk + i;  in numa_clear_kernel_node_hotplug()  local
numa.c:518  struct numa_memblk *mb = numa_meminfo.blk + i;  in numa_clear_kernel_node_hotplug()  local
numa.c:539  struct numa_memblk *mb = &mi->blk[i];  in numa_register_memblks()  local
/arch/powerpc/net/
bpf_jit.h:154  #define PPC_RLWINM(d, a, i, mb, me) EMIT(PPC_INST_RLWINM | ___PPC_RA(d) | \  argument
bpf_jit.h:157  #define PPC_RLWINM_DOT(d, a, i, mb, me) EMIT(PPC_INST_RLWINM_DOT | \  argument
bpf_jit.h:161  #define PPC_RLWIMI(d, a, i, mb, me) EMIT(PPC_INST_RLWIMI | ___PPC_RA(d) | \  argument
bpf_jit.h:164  #define PPC_RLDICL(d, a, i, mb) EMIT(PPC_INST_RLDICL | ___PPC_RA(d) | \  argument
/arch/powerpc/lib/
sstep.c:1142  #define MASK32(mb, me) ((0xffffffffUL >> (mb)) + \  argument
sstep.c:1145  #define MASK64_L(mb) (~0UL >> (mb))  argument
sstep.c:1147  #define MASK64(mb, me) (MASK64_L(mb) + MASK64_R(me) + ((me) >= (mb)))  argument
sstep.c:1171  unsigned int mb, me, sh;  in analyse_instr()  local
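
Aside: in the powerpc hits above, mb is not a barrier at all but the "mask begin" bit of the rotate-and-mask instructions (rlwinm, rldicl, …), paired with me ("mask end") in IBM bit numbering, where bit 0 is the most significant. The sstep.c macros expand such a pair into the actual 64-bit mask. A minimal standalone check of the 64-bit variants, assuming a 64-bit long and the usual MASK64_R counterpart, ~0UL << (63 - (me)), which is not among the matched lines:

#include <stdio.h>

/* MASK64_L/MASK64 copied from the sstep.c hits above;
 * MASK64_R is an assumed reconstruction mirroring MASK64_L. */
#define MASK64_L(mb)    (~0UL >> (mb))
#define MASK64_R(me)    (~0UL << (63 - (me)))
#define MASK64(mb, me)  (MASK64_L(mb) + MASK64_R(me) + ((me) >= (mb)))

int main(void)
{
        /* contiguous case: bits 8..63 set */
        printf("%016lx\n", MASK64(8, 63));   /* 00ffffffffffffff */
        /* me < mb wraps around: bits 60..63 and 0..3 set */
        printf("%016lx\n", MASK64(60, 3));   /* f00000000000000f */
        return 0;
}

The same mb/me naming is what the PPC_RLWINM/PPC_RLDICL emitters in bpf_jit.h above take as arguments.
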
/arch/arc/plat-axs10x/
axs10x.c:95  char mb[32];  in axs10x_early_init()  local
/arch/x86/kernel/
e820.c:1157  unsigned long mb = pos >> 20;  in ram_alignment()  local
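
The barrier.h hits collected above are the per-architecture definitions of the kernel's full memory barrier mb(). For orientation, here is a minimal sketch of the ordering pattern such a barrier enforces; this is an illustration only, assuming kernel context with <asm/barrier.h> available, and the data/ready variables and both functions are hypothetical, not taken from any file listed here:

static int data;
static int ready;

static void writer(void)
{
        data = 42;      /* publish the payload ...                  */
        mb();           /* ... strictly before raising the flag     */
        ready = 1;
}

static int reader(void)
{
        if (!ready)
                return -1;
        mb();           /* order the flag load before the data load */
        return data;    /* sees 42 whenever ready was observed as 1 */
}

Modern kernel code would usually express this with smp_store_release()/smp_load_acquire() or smp_wmb()/smp_rmb(); mb() is the strongest form, which is why the definitions above map to each architecture's heaviest fence (mfence, sync, dsb(sy), membar, and so on).
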
