/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/kasan-checks.h>

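/*
 * __nops(n) expands to an assembler .rept block emitting n NOP
 * instructions, for use inside inline asm strings; nops(n) emits the
 * same sequence directly as a statement.
 */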
#define __nops(n)	".rept " #n "\nnop\n.endr\n"
#define nops(n)		asm volatile(__nops(n))

#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")

#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

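/*
 * PSB CSYNC (hint #17) and CSDB (hint #20) are emitted via their HINT
 * encodings, so assemblers that lack the named mnemonics still accept
 * them.
 */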
#define psb_csync()	asm volatile("hint #17" : : : "memory")
#define csdb()		asm volatile("hint #20" : : : "memory")

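/*
 * spec_bar() is a full speculation barrier: the SB instruction where
 * ARM64_HAS_SB is detected, with DSB NSH + ISB as the fallback
 * sequence.
 */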
#define spec_bar()	asm volatile(ALTERNATIVE("dsb nsh\nisb\n",		\
						 SB_BARRIER_INSN"nop\n",	\
						 ARM64_HAS_SB))

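/*
 * On systems using pseudo-NMIs, pmr_sync() issues a dsb(sy) after a
 * write to the GIC priority mask register when the GIC requires one
 * (signalled by the gic_pmr_sync static key); otherwise it compiles
 * away to nothing.
 */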
#ifdef CONFIG_ARM64_PSEUDO_NMI
#define pmr_sync()						\
	do {							\
		extern struct static_key_false gic_pmr_sync;	\
								\
		if (static_branch_unlikely(&gic_pmr_sync))	\
			dsb(sy);				\
	} while(0)
#else
#define pmr_sync()	do {} while (0)
#endif

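/*
 * The mandatory barriers below use full-system DSBs; the dma_*()
 * variants only need to order accesses against observers in the outer
 * shareable domain, and the __smp_*() variants further down only
 * against the inner shareable domain.
 */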
#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

#define dma_mb()	dmb(osh)
#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)

/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
	"	cmp	%1, %2\n"
	"	sbc	%0, xzr, xzr\n"
	: "=r" (mask)
	: "r" (idx), "Ir" (sz)
	: "cc");

	csdb();
	return mask;
}
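
/*
 * The generic array_index_nospec() helper in <linux/nospec.h> ANDs the
 * index with this mask, e.g. (sketch):
 *
 *	idx = array_index_nospec(idx, ARRAY_SIZE(array));
 *	val = array[idx];
 *
 * so a speculatively out-of-bounds idx is clamped to 0 instead of
 * steering a dependent load at attacker-controlled memory.
 */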

/*
 * Ensure that reads of the counter are treated the same as memory reads
 * for the purposes of ordering by subsequent memory barriers.
 *
 * This insanity brought to you by speculative system register reads,
 * out-of-order memory accesses, sequence locks and Thomas Gleixner.
 *
 * http://lists.infradead.org/pipermail/linux-arm-kernel/2019-February/631195.html
 */
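/*
 * The trick below: EOR the counter value with itself to obtain zero
 * while keeping a register dependency on it, add that zero to SP, and
 * load from the result into XZR. The counter read thus feeds the
 * address of a (dummy) memory access, which subsequent barriers can
 * order against.
 */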
#define arch_counter_enforce_ordering(val) do {				\
	u64 tmp, _val = (val);						\
									\
	asm volatile(							\
	"	eor	%0, %1, %1\n"					\
	"	add	%0, sp, %0\n"					\
	"	ldr	xzr, [%0]"					\
	: "=r" (tmp) : "r" (_val));					\
} while (0)

#define __smp_mb()	dmb(ish)
#define __smp_rmb()	dmb(ishld)
#define __smp_wmb()	dmb(ishst)

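/*
 * __smp_store_release()/__smp_load_acquire() map directly onto the
 * STLR/LDAR families of instructions, so the release/acquire ordering
 * is attached to the access itself rather than requiring a separate
 * DMB. A minimal (hypothetical) message-passing sketch using the
 * generic wrappers layered on top of these by <asm-generic/barrier.h>:
 *
 *	WRITE_ONCE(data, 42);
 *	smp_store_release(&flag, 1);		// writer: publish
 *
 *	if (smp_load_acquire(&flag))		// reader: consume
 *		val = READ_ONCE(data);		// guaranteed to see 42
 */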
#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u =	\
		{ .__val = (__force __unqual_scalar_typeof(*p)) (v) };	\
	compiletime_assert_atomic_type(*p);				\
	kasan_check_write(__p, sizeof(*p));				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u8 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u16 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u32 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u64 *)__u.__c)		\
				: "memory");				\
		break;							\
	}								\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u;	\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	kasan_check_read(__p, sizeof(*p));				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	}								\
	(typeof(*p))__u.__val;						\
})

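/*
 * The smp_cond_load_*() implementations below replace the generic
 * busy-wait with __cmpwait_relaxed() (see <asm/cmpxchg.h>), which arms
 * the exclusive monitor on the watched location and executes WFE,
 * letting the CPU sleep until that location is written (or another
 * event is signalled) instead of spinning on READ_ONCE().
 */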
#define smp_cond_load_relaxed(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	__unqual_scalar_typeof(*ptr) VAL;				\
	for (;;) {							\
		VAL = READ_ONCE(*__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	(typeof(*ptr))VAL;						\
})

#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	__unqual_scalar_typeof(*ptr) VAL;				\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	(typeof(*ptr))VAL;						\
})

#include <asm-generic/barrier.h>

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */