/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020 Loongson Technology Corporation Limited
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#include <asm/addrspace.h>

/*
 * Hint encoding:
 *
 * Bit4: ordering or completion (0: completion, 1: ordering)
 * Bit3: barrier for previous read (0: true, 1: false)
 * Bit2: barrier for previous write (0: true, 1: false)
 * Bit1: barrier for succeeding read (0: true, 1: false)
 * Bit0: barrier for succeeding write (0: true, 1: false)
 *
 * Hint 0x700: barrier for "read after read" from the same address
 */
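/*
 * Worked example of the encoding above: hint 0b10100 sets Bit4 (ordering)
 * and Bit2 (no barrier for previous writes), with Bit3/Bit1/Bit0 clear,
 * i.e. it orders previous reads against succeeding reads and writes -- the
 * load-acquire flavour named or_rw below. Hint 0b00000 has every bit
 * clear: a full completion barrier covering all four combinations (crwrw).
 */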

#define DBAR(hint) __asm__ __volatile__("dbar %0 " : : "I"(hint) : "memory")
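/*
 * Illustrative only: DBAR(or_rw) expands to "dbar 0x14" (0b10100 == 20).
 * The "I" constraint requires a compile-time-constant hint, and the
 * "memory" clobber also stops the compiler itself from reordering memory
 * accesses across the barrier.
 */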

#define crwrw		0b00000
#define cr_r_		0b00101
#define c_w_w		0b01010

#define orwrw		0b10000
#define or_r_		0b10101
#define o_w_w		0b11010

#define orw_w		0b10010
#define or_rw		0b10100
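/*
 * Naming convention, matching the encoding above: the leading letter picks
 * completion ('c') or ordering ('o'); the next two characters name the
 * previous accesses covered ('r', 'w', or '_' for none); the final two
 * name the succeeding accesses covered. c_w_w, for example, is a
 * completion barrier ordering previous writes against succeeding writes.
 */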

#define c_sync()	DBAR(crwrw)
#define c_rsync()	DBAR(cr_r_)
#define c_wsync()	DBAR(c_w_w)

#define o_sync()	DBAR(orwrw)
#define o_rsync()	DBAR(or_r_)
#define o_wsync()	DBAR(o_w_w)

#define ldacq_mb()	DBAR(or_rw)
#define strel_mb()	DBAR(orw_w)
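/*
 * ldacq_mb() and strel_mb() are the acquire and release flavours:
 * ldacq_mb() (or_rw) orders a previous read against all succeeding
 * accesses, and strel_mb() (orw_w) orders all previous accesses against a
 * succeeding write. They implement __smp_load_acquire() and
 * __smp_store_release() below.
 */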

#define mb()		c_sync()
#define rmb()		c_rsync()
#define wmb()		c_wsync()
#define iob()		c_sync()
#define wbflush()	c_sync()

#define __smp_mb()	o_sync()
#define __smp_rmb()	o_rsync()
#define __smp_wmb()	o_wsync()
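/*
 * The mandatory barriers (mb() and friends) use the heavyweight completion
 * hints because they must also be usable for device/DMA memory, while the
 * __smp_*() variants only have to order accesses as seen by other CPUs and
 * can use the cheaper ordering hints.
 */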

#ifdef CONFIG_SMP
#define __WEAK_LLSC_MB		"	dbar 0x700	\n"
#else
#define __WEAK_LLSC_MB		"	\n"
#endif
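/*
 * A minimal sketch (illustrative operands) of where __WEAK_LLSC_MB gets
 * spliced in -- at the tail of an LL/SC sequence, so that the hint-0x700
 * barrier orders the ll.w read against a later read of the same address:
 *
 *	"1:	ll.w	%0, %1		\n"
 *	"	move	%2, %0		\n"
 *	"	sc.w	%2, %1		\n"
 *	"	beqz	%2, 1b		\n"
 *	__WEAK_LLSC_MB
 */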

#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()
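/*
 * Only compiler barriers are needed here: making these plain barrier()
 * calls implies that the atomic RMW operations themselves already provide
 * full ordering, so no extra dbar is required before or after them.
 */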

/**
 * array_index_mask_nospec() - generate a ~0 mask when index < size, 0 otherwise
 * @index: array element index
 * @size: number of elements in array
 *
 * Returns:
 *     0 - (@index < @size)
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long index,
						    unsigned long size)
{
	unsigned long mask;

	__asm__ __volatile__(
		"sltu	%0, %1, %2\n\t"
#if (__SIZEOF_LONG__ == 4)
		"sub.w	%0, $r0, %0\n\t"
#elif (__SIZEOF_LONG__ == 8)
		"sub.d	%0, $r0, %0\n\t"
#endif
		: "=r" (mask)
		: "r" (index), "r" (size)
		:);

	return mask;
}
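/*
 * sltu sets mask to 1 when index < size and to 0 otherwise; subtracting it
 * from $r0 (the zero register) then yields all-ones or all-zeroes. A
 * hypothetical caller masks the index before using it, which is what the
 * generic array_index_nospec() wrapper does with this helper:
 *
 *	idx = array_index_nospec(idx, ARRAY_SIZE(arr));
 *	val = arr[idx];		// idx stays in bounds even when misspeculated
 */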

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	ldacq_mb();							\
	___p1;								\
})

#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	strel_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)
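/*
 * Illustrative producer/consumer pairing (all names hypothetical):
 *
 *	// producer			// consumer
 *	data = 42;			while (!smp_load_acquire(&ready))
 *	smp_store_release(&ready, 1);		;
 *					r = data;	// observes 42
 *
 * Note the asymmetry above: the release barrier is issued before the
 * WRITE_ONCE(), and the acquire barrier after the READ_ONCE().
 */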

#define __smp_store_mb(p, v)						\
do {									\
	union { typeof(p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(p)) (v) };			\
	unsigned long __tmp;						\
	switch (sizeof(p)) {						\
	case 1:								\
		*(volatile __u8 *)&p = *(__u8 *)__u.__c;		\
		__smp_mb();						\
		break;							\
	case 2:								\
		*(volatile __u16 *)&p = *(__u16 *)__u.__c;		\
		__smp_mb();						\
		break;							\
	case 4:								\
		__asm__ __volatile__(					\
		"amswap_db.w %[tmp], %[val], %[mem]	\n"		\
		: [mem] "+ZB" (*(u32 *)&p), [tmp] "=&r" (__tmp)		\
		: [val] "r" (*(__u32 *)__u.__c)				\
		: );							\
		break;							\
	case 8:								\
		__asm__ __volatile__(					\
		"amswap_db.d %[tmp], %[val], %[mem]	\n"		\
		: [mem] "+ZB" (*(u64 *)&p), [tmp] "=&r" (__tmp)		\
		: [val] "r" (*(__u64 *)__u.__c)				\
		: );							\
		break;							\
	}								\
} while (0)
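/*
 * For the 32- and 64-bit cases the store and the full barrier are fused
 * into one amswap_db instruction: an atomic swap whose _db suffix carries
 * full barrier semantics, with the old value discarded via the scratch
 * register. The 8- and 16-bit cases fall back to a plain volatile store
 * followed by __smp_mb(), since the byte/half-word AM forms are a later
 * LoongArch extension and are not relied upon here.
 */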

#include <asm-generic/barrier.h>

#endif /* __ASM_BARRIER_H */