1 #ifndef __ASM_BARRIER_H
2 #define __ASM_BARRIER_H
3 
4 #ifndef __ASSEMBLY__
5 
/*
 * Architectural no-op, encoded as "mov r0, r0" so it assembles on every
 * ARM ISA level.  NOTE(review): the trailing ';' inside the definition is
 * historical — nop() already expands to a complete statement, so callers
 * writing "nop();" get an extra empty statement.
 */
#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");

/*
 * SEV/WFE/WFI instructions are available from ARMv6K onward (and on all
 * of ARMv7).  Only wfe() gets a no-op fallback for older cores; sev()
 * and wfi() presumably have no pre-v6K users — TODO confirm.
 */
#if __LINUX_ARM_ARCH__ >= 7 ||		\
	(__LINUX_ARM_ARCH__ == 6 && defined(CONFIG_CPU_32v6K))
#define sev()	__asm__ __volatile__ ("sev" : : : "memory")
#define wfe()	__asm__ __volatile__ ("wfe" : : : "memory")
#define wfi()	__asm__ __volatile__ ("wfi" : : : "memory")
#else
#define wfe()	do { } while (0)
#endif
16 
/*
 * isb()/dsb()/dmb() — instruction synchronization, data synchronization
 * and data memory barriers.
 *
 * ARMv7+: native barrier instructions.  The macro argument is stringized
 * and appended as the instruction qualifier, e.g. dmb(ish) expands to
 * "dmb ish".  CSDB (speculation barrier) is emitted as a raw encoding
 * via .inst, with separate Thumb-2 and ARM encodings.
 */
#if __LINUX_ARM_ARCH__ >= 7
#define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")
#define dsb(option) __asm__ __volatile__ ("dsb " #option : : : "memory")
#define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")
#ifdef CONFIG_THUMB2_KERNEL
#define CSDB	".inst.w 0xf3af8014"
#else
#define CSDB	".inst	0xe320f014"
#endif
#define csdb() __asm__ __volatile__(CSDB : : : "memory")
/*
 * ARMv6 / XScale3: equivalent CP15 c7 operations; the macro argument is
 * accepted (so call sites stay uniform) but ignored.
 */
#elif defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ == 6
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
				    : : "r" (0) : "memory")
/* FA526: CP15 isb/dsb exist, but there is no dmb — a compiler barrier
 * stands in. */
#elif defined(CONFIG_CPU_FA526)
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
/* Everything older: CP15 c7,c10,4 (drain write buffer) for dsb();
 * isb()/dmb() degrade to compiler-only barriers. */
#else
#define isb(x) __asm__ __volatile__ ("" : : : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#endif

/* Fallback definitions so CSDB / csdb() are always defined — no-ops on
 * CPUs without a speculation barrier. */
#ifndef CSDB
#define CSDB
#endif
#ifndef csdb
#define csdb()
#endif
53 
#ifdef CONFIG_ARM_HEAVY_MB
/*
 * "Heavy" barrier: on CONFIG_ARM_HEAVY_MB platforms a dsb alone is not a
 * sufficient full barrier, so arm_heavy_mb() — defined elsewhere — is
 * invoked afterwards, presumably dispatching to the SoC-specific soc_mb
 * hook.  NOTE(review): soc_mb semantics live in SoC code not visible here.
 */
extern void (*soc_mb)(void);
extern void arm_heavy_mb(void);
#define __arm_heavy_mb(x...) do { dsb(x); arm_heavy_mb(); } while (0)
#else
/* Without the heavy-barrier requirement, a plain dsb is enough. */
#define __arm_heavy_mb(x...) dsb(x)
#endif
61 
/*
 * Mandatory and DMA barriers:
 *  - a machine may supply its own definitions via <mach/barriers.h>;
 *  - with bufferable DMA memory or SMP, real barriers are required —
 *    mb()/wmb() use the heavy variant, dma_*() use outer-shareable dmb;
 *  - otherwise (UP, non-bufferable) a compiler barrier suffices.
 */
#ifdef CONFIG_ARCH_HAS_BARRIERS
#include <mach/barriers.h>
#elif defined(CONFIG_ARM_DMA_MEM_BUFFERABLE) || defined(CONFIG_SMP)
#define mb()		__arm_heavy_mb()
#define rmb()		dsb()
#define wmb()		__arm_heavy_mb(st)
#define dma_rmb()	dmb(osh)
#define dma_wmb()	dmb(oshst)
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif

/*
 * SMP barriers: compiler-only on UP builds; inner-shareable dmb on SMP
 * (ishst for the store-only variant).
 */
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#else
#define smp_mb()	dmb(ish)
#define smp_rmb()	smp_mb()
#define smp_wmb()	dmb(ishst)
#endif
87 
/*
 * smp_store_release()/smp_load_acquire(): pre-ARMv8 ARM has no dedicated
 * release/acquire instructions, so both are synthesized from a plain
 * access plus a full smp_mb().  The compile-time assert rejects types
 * that cannot be stored/loaded atomically.
 */
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	___p1;								\
})

/* Read-dependency barriers are no-ops here — NOTE(review): ARM orders
 * dependent loads in hardware; confirm against memory-barriers.txt. */
#define read_barrier_depends()		do { } while(0)
#define smp_read_barrier_depends()	do { } while(0)

/* Store the value, then issue a full SMP barrier. */
#define smp_store_mb(var, value)	do { WRITE_ONCE(var, value); smp_mb(); } while (0)

/* Full barriers surrounding atomic RMW operations. */
#define smp_mb__before_atomic()	smp_mb()
#define smp_mb__after_atomic()	smp_mb()
110 
#ifdef CONFIG_CPU_SPECTRE
/*
 * Spectre-v1 mitigation: return ~0UL when 0 <= idx < sz, 0 otherwise,
 * without a branch the CPU could mispredict.
 *
 * "cmp %1, %2" computes idx - sz, setting carry iff idx >= sz
 * (unsigned); "sbc %0, %1, %1" then yields idx - idx - !carry, i.e.
 * 0 when idx >= sz and ~0 when idx < sz.  The trailing CSDB prevents
 * speculation from racing ahead of the mask computation.
 */
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
		"cmp	%1, %2\n"
	"	sbc	%0, %1, %1\n"
	CSDB
	: "=r" (mask)
	: "r" (idx), "Ir" (sz)
	: "cc");	/* clobbers condition flags via cmp/sbc */

	return mask;
}
#define array_index_mask_nospec array_index_mask_nospec
#endif
129 
130 #include <asm-generic/barrier.h>
131 
132 #endif /* !__ASSEMBLY__ */
133 #endif /* __ASM_BARRIER_H */