/*
 * Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
 * Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this list of
 *    conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice, this list
 *    of conditions and the following disclaimer in the documentation and/or other materials
 *    provided with the distribution.
 *
 * 3. Neither the name of the copyright holder nor the names of its contributors may be used
 *    to endorse or promote products derived from this software without specific prior written
 *    permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _LOS_ARCH_ATOMIC_H
#define _LOS_ARCH_ATOMIC_H

#include "los_compiler.h"

#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */

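/*
 * Note: this CSKY port implements the atomic primitives below by masking
 * local interrupts with LOS_IntLock()/LOS_IntRestore() around a plain
 * load/modify/store sequence, which provides atomicity on a single-core
 * target.
 */
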
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
    INT32 val;
    UINT32 intSave;

    intSave = LOS_IntLock();
    __asm__ __volatile__("ldw %0, (%1)\n"
                         : "=&r"(val)
                         : "r"(v)
                         : "cc");
    LOS_IntRestore(intSave);

    return val;
}

STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
    UINT32 intSave;

    intSave = LOS_IntLock();
    __asm__ __volatile__("stw %1, (%0, 0)"
                         :
                         : "r"(v), "r"(setVal)
                         : "cc");
    LOS_IntRestore(intSave);
}

/* Atomically adds addVal to *v and returns the new value. */
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
    INT32 val;
    UINT32 intSave;

    intSave = LOS_IntLock();

    __asm__ __volatile__("ldw %0, (%1)\n"
                         "add %0, %0, %2\n"
                         "stw %0, (%1, 0)"
                         : "=&r"(val)
                         : "r"(v), "r"(addVal)
                         : "cc");
    LOS_IntRestore(intSave);

    return val;
}

/* Atomically subtracts subVal from *v and returns the new value. */
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
    INT32 val;
    UINT32 intSave;

    intSave = LOS_IntLock();

    __asm__ __volatile__("ldw %0, (%1)\n"
                         "sub %0, %2\n"
                         "stw %0, (%1, 0)"
                         : "=&r"(val)
                         : "r"(v), "r"(subVal)
                         : "cc");
    LOS_IntRestore(intSave);

    return val;
}

STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
    (VOID)ArchAtomicAdd(v, 1);
}

STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
    (VOID)ArchAtomicSub(v, 1);
}

/* Atomically increments *v and returns the new value. */
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
    return ArchAtomicAdd(v, 1);
}

/* Atomically decrements *v and returns the new value. */
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
    return ArchAtomicSub(v, 1);
}

/**
 * @ingroup  los_arch_atomic
 * @brief Atomic exchange for a 32-bit variable.
 *
 * @par Description:
 * This API is used to implement the atomic exchange for a 32-bit variable
 * and return the previous value of the atomic variable.
 * @attention
 * <ul><li>The pointer v must not be NULL.</li></ul>
 *
 * @param  v       [IN] The variable pointer.
 * @param  val     [IN] The exchange value.
 *
 * @retval #INT32       The previous value of the atomic variable.
 * @par Dependency:
 * <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
 * @see
 */
STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
{
    INT32 prevVal = 0;
    UINT32 intSave;

    intSave = LOS_IntLock();
    __asm__ __volatile__("ldw %0, (%1)\n"
                         "stw %2, (%1)"
                         : "=&r"(prevVal)
                         : "r"(v), "r"(val)
                         : "cc");
    LOS_IntRestore(intSave);

    return prevVal;
}

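/*
 * Illustrative usage sketch (not part of this header): claiming a flag by
 * exchange. The variable g_flag is hypothetical.
 *
 *     volatile INT32 g_flag = 0;
 *
 *     if (ArchAtomicXchg32bits(&g_flag, 1) == 0) {
 *         // The flag was previously clear; this caller set it first.
 *     }
 */
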
/**
 * @ingroup  los_arch_atomic
 * @brief Atomic compare-and-exchange for a 32-bit variable.
 *
 * @par Description:
 * This API is used to implement the atomic exchange for a 32-bit variable:
 * the variable is set to val only if its current value equals oldVal.
 * @attention
 * <ul><li>The pointer v must not be NULL.</li></ul>
 *
 * @param  v       [IN] The variable pointer.
 * @param  val     [IN] The new value.
 * @param  oldVal  [IN] The expected old value.
 *
 * @retval TRUE  The previous value of the atomic variable is not equal to oldVal; the exchange was not performed.
 * @retval FALSE The previous value of the atomic variable is equal to oldVal; the exchange was performed.
 * @par Dependency:
 * <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
 * @see
 */
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 oldVal)
{
    INT32 prevVal = 0;
    UINT32 intSave;

    intSave = LOS_IntLock();

    __asm__ __volatile__("ldw %0, (%1)\n"
                         "cmpne %0, %2\n"
                         "bt 1f\n"
                         "stw %3, (%1)\n"
                         "1:"
                         : "=&r"(prevVal)
                         : "r"(v), "r"(oldVal), "r"(val)
                         : "cc");
    LOS_IntRestore(intSave);

    return prevVal != oldVal;
}
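
/*
 * Illustrative usage sketch (not part of this header): a retry loop built on
 * the compare-and-exchange primitive. Note the inverted return convention:
 * FALSE (exchange performed) terminates the loop. The counter variable is
 * hypothetical.
 *
 *     volatile INT32 counter = 0;
 *     INT32 old;
 *
 *     do {
 *         old = ArchAtomicRead(&counter);
 *     } while (ArchAtomicCmpXchg32bits(&counter, old + 1, old));
 */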

/*
 * The 64-bit atomics below are implemented in plain C rather than assembly;
 * the interrupt lock alone makes the multi-word access atomic.
 */
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
    INT64 val;
    UINT32 intSave;

    intSave = LOS_IntLock();
    val = *v;
    LOS_IntRestore(intSave);

    return val;
}

STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
    UINT32 intSave;

    intSave = LOS_IntLock();
    *v = setVal;
    LOS_IntRestore(intSave);
}

/* Atomically adds addVal to *v and returns the new value. */
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
    INT64 val;
    UINT32 intSave;

    intSave = LOS_IntLock();
    *v += addVal;
    val = *v;
    LOS_IntRestore(intSave);

    return val;
}

/* Atomically subtracts subVal from *v and returns the new value. */
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
    INT64 val;
    UINT32 intSave;

    intSave = LOS_IntLock();
    *v -= subVal;
    val = *v;
    LOS_IntRestore(intSave);

    return val;
}

STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
    (VOID)ArchAtomic64Add(v, 1);
}

STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
    return ArchAtomic64Add(v, 1);
}

STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
    (VOID)ArchAtomic64Sub(v, 1);
}

STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
    return ArchAtomic64Sub(v, 1);
}

STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
    INT64 prevVal;
    UINT32 intSave;

    intSave = LOS_IntLock();
    prevVal = *v;
    *v = val;
    LOS_IntRestore(intSave);

    return prevVal;
}

STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
    INT64 prevVal;
    UINT32 intSave;

    intSave = LOS_IntLock();
    prevVal = *v;
    if (prevVal == oldVal) {
        *v = val;
    }
    LOS_IntRestore(intSave);

    return prevVal != oldVal;
}

#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */

#endif /* _LOS_ARCH_ATOMIC_H */