1 /*
2  * Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
3  * Copyright (c) 2020-2022 Huawei Device Co., Ltd. All rights reserved.
4  *
5  * Redistribution and use in source and binary forms, with or without modification,
6  * are permitted provided that the following conditions are met:
7  *
8  * 1. Redistributions of source code must retain the above copyright notice, this list of
9  *    conditions and the following disclaimer.
10  *
11  * 2. Redistributions in binary form must reproduce the above copyright notice, this list
12  *    of conditions and the following disclaimer in the documentation and/or other materials
13  *    provided with the distribution.
14  *
15  * 3. Neither the name of the copyright holder nor the names of its contributors may be used
16  *    to endorse or promote products derived from this software without specific prior written
17  *    permission.
18  *
19  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20  * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21  * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
23  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
26  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
27  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
28  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
29  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30  */
31 
32 #ifndef _LOS_ARCH_ATOMIC_H
33 #define _LOS_ARCH_ATOMIC_H
34 
35 #include "los_compiler.h"
36 #include "los_interrupt.h"
37 
38 #ifdef __cplusplus
39 #if __cplusplus
40 extern "C" {
41 #endif /* __cplusplus */
42 #endif /* __cplusplus */
43 
/**
 * @brief Atomically read a 32-bit variable.
 *
 * Atomicity is achieved by masking interrupts around the load; no
 * hardware atomic instruction is used.
 * NOTE(review): this strategy protects only against interrupt-driven
 * preemption — assumes a single-core target; confirm for SMP.
 *
 * @param  v [IN] The variable pointer; must not be NULL.
 * @retval #INT32 The current value of the atomic variable.
 */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
    INT32 val;
    UINT32 intSave;

    intSave = LOS_IntLock();
    /* C-SKY: load the word at address %1 (v) into %0 (val). */
    __asm__ __volatile__("ldw %0, (%1)\n"
                         : "=&r"(val)
                         : "r"(v)
                         : "cc");
    LOS_IntRestore(intSave);

    return val;
}
58 
/**
 * @brief Atomically write a 32-bit variable.
 *
 * Interrupts are masked around the store to make the write atomic
 * with respect to interrupt handlers on this core.
 *
 * @param  v      [IN] The variable pointer; must not be NULL.
 * @param  setVal [IN] The value to store.
 */
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
    UINT32 intSave;

    intSave = LOS_IntLock();
    /* C-SKY: store %1 (setVal) to address %0 + offset 0 (v). */
    __asm__ __volatile__("stw %1, (%0, 0)"
                         :
                         : "r"(v), "r"(setVal)
                         : "cc");
    LOS_IntRestore(intSave);
}
70 
/**
 * @brief Atomically add a value to a 32-bit variable.
 *
 * Performs a load / add / store sequence with interrupts masked.
 *
 * @param  v      [IN] The variable pointer; must not be NULL.
 * @param  addVal [IN] The value to add.
 * @retval #INT32 The NEW value of the variable (after the addition).
 */
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
    INT32 val;
    UINT32 intSave;

    intSave = LOS_IntLock();

    /* C-SKY: val = *v; val += addVal; *v = val. */
    __asm__ __volatile__("ldw %0, (%1)\n"
                         "add %0, %0, %2\n"
                         "stw %0, (%1, 0)"
                         : "=&r"(val)
                         : "r"(v), "r"(addVal)
                         : "cc");
    LOS_IntRestore(intSave);

    return val;
}
88 
/**
 * @brief Atomically subtract a value from a 32-bit variable.
 *
 * Performs a load / subtract / store sequence with interrupts masked.
 * NOTE(review): uses the two-operand "sub %0, %2" form, unlike the
 * three-operand "add" in ArchAtomicAdd — presumably equivalent on
 * C-SKY (dest is also first source); verify against the ISA manual.
 *
 * @param  v      [IN] The variable pointer; must not be NULL.
 * @param  subVal [IN] The value to subtract.
 * @retval #INT32 The NEW value of the variable (after the subtraction).
 */
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
    INT32 val;
    UINT32 intSave;

    intSave = LOS_IntLock();

    /* C-SKY: val = *v; val -= subVal; *v = val. */
    __asm__ __volatile__("ldw %0, (%1)\n"
                         "sub %0, %2\n"
                         "stw %0, (%1, 0)"
                         : "=&r"(val)
                         : "r"(v), "r"(subVal)
                         : "cc");
    LOS_IntRestore(intSave);

    return val;
}
106 
ArchAtomicInc(Atomic * v)107 STATIC INLINE VOID ArchAtomicInc(Atomic *v)
108 {
109     (VOID)ArchAtomicAdd(v, 1);
110 }
111 
ArchAtomicDec(Atomic * v)112 STATIC INLINE VOID ArchAtomicDec(Atomic *v)
113 {
114     (VOID)ArchAtomicSub(v, 1);
115 }
116 
ArchAtomicIncRet(Atomic * v)117 STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
118 {
119     return ArchAtomicAdd(v, 1);
120 }
121 
ArchAtomicDecRet(Atomic * v)122 STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
123 {
124     return ArchAtomicSub(v, 1);
125 }
126 
127 /**
128  * @ingroup  los_arch_atomic
129  * @brief Atomic exchange for 32-bit variable.
130  *
131  * @par Description:
132  * This API is used to implement the atomic exchange for 32-bit variable
133  * and return the previous value of the atomic variable.
134  * @attention
135  * <ul>The pointer v must not be NULL.</ul>
136  *
137  * @param  v       [IN] The variable pointer.
138  * @param  val     [IN] The exchange value.
139  *
140  * @retval #INT32       The previous value of the atomic variable
141  * @par Dependency:
142  * <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
143  * @see
144  */
STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
{
    INT32 prevVal = 0;
    UINT32 intSave;

    intSave = LOS_IntLock();
    /* C-SKY: prevVal = *v; *v = val — atomic via interrupt masking. */
    __asm__ __volatile__("ldw %0, (%1)\n"
                         "stw %2, (%1)"
                         : "=&r"(prevVal)
                         : "r"(v), "r"(val)
                         : "cc");
    LOS_IntRestore(intSave);

    return prevVal;
}
160 
161 /**
162  * @ingroup  los_arch_atomic
163  * @brief Atomic exchange for 32-bit variable with compare.
164  *
165  * @par Description:
166  * This API is used to implement the atomic exchange for 32-bit variable, if the value of variable is equal to oldVal.
167  * @attention
168  * <ul>The pointer v must not be NULL.</ul>
169  *
170  * @param  v       [IN] The variable pointer.
171  * @param  val     [IN] The new value.
172  * @param  oldVal  [IN] The old value.
173  *
174  * @retval TRUE  The previous value of the atomic variable is not equal to oldVal.
175  * @retval FALSE The previous value of the atomic variable is equal to oldVal.
176  * @par Dependency:
177  * <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
178  * @see
179  */
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 oldVal)
{
    INT32 prevVal = 0;
    UINT32 intSave;

    intSave = LOS_IntLock();

    /*
     * C-SKY: prevVal = *v; if (prevVal == oldVal) *v = val.
     * "cmpne" sets the condition bit when the values differ, and
     * "bt" (branch if true) then skips the store to label 1.
     */
    __asm__ __volatile__("ldw %0, (%1)\n"
                         "cmpne %0, %2\n"
                         "bt 1f\n"
                         "stw %3, (%1)\n"
                         "1:"
                         : "=&r"(prevVal)
                         : "r"(v), "r"(oldVal), "r"(val)
                         : "cc");
    LOS_IntRestore(intSave);

    /* TRUE means the swap did NOT happen (previous value != oldVal). */
    return prevVal != oldVal;
}
199 
ArchAtomic64Read(const Atomic64 * v)200 STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
201 {
202     INT64 val;
203     UINT32 intSave;
204 
205     intSave = LOS_IntLock();
206     val = *v;
207     LOS_IntRestore(intSave);
208 
209     return val;
210 }
211 
ArchAtomic64Set(Atomic64 * v,INT64 setVal)212 STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
213 {
214     UINT32 intSave;
215 
216     intSave = LOS_IntLock();
217     *v = setVal;
218     LOS_IntRestore(intSave);
219 }
220 
ArchAtomic64Add(Atomic64 * v,INT64 addVal)221 STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
222 {
223     INT64 val;
224     UINT32 intSave;
225 
226     intSave = LOS_IntLock();
227     *v += addVal;
228     val = *v;
229     LOS_IntRestore(intSave);
230 
231     return val;
232 }
233 
ArchAtomic64Sub(Atomic64 * v,INT64 subVal)234 STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
235 {
236     INT64 val;
237     UINT32 intSave;
238 
239     intSave = LOS_IntLock();
240     *v -= subVal;
241     val = *v;
242     LOS_IntRestore(intSave);
243 
244     return val;
245 }
246 
ArchAtomic64Inc(Atomic64 * v)247 STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
248 {
249     (VOID)ArchAtomic64Add(v, 1);
250 }
251 
ArchAtomic64IncRet(Atomic64 * v)252 STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
253 {
254     return ArchAtomic64Add(v, 1);
255 }
256 
ArchAtomic64Dec(Atomic64 * v)257 STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
258 {
259     (VOID)ArchAtomic64Sub(v, 1);
260 }
261 
ArchAtomic64DecRet(Atomic64 * v)262 STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
263 {
264     return ArchAtomic64Sub(v, 1);
265 }
266 
ArchAtomicXchg64bits(Atomic64 * v,INT64 val)267 STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
268 {
269     INT64 prevVal;
270     UINT32 intSave;
271 
272     intSave = LOS_IntLock();
273     prevVal = *v;
274     *v = val;
275     LOS_IntRestore(intSave);
276 
277     return prevVal;
278 }
279 
ArchAtomicCmpXchg64bits(Atomic64 * v,INT64 val,INT64 oldVal)280 STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
281 {
282     INT64 prevVal;
283     UINT32 intSave;
284 
285     intSave = LOS_IntLock();
286     prevVal = *v;
287     if (prevVal == oldVal) {
288         *v = val;
289     }
290     LOS_IntRestore(intSave);
291 
292     return prevVal != oldVal;
293 }
294 
295 #ifdef __cplusplus
296 #if __cplusplus
297 }
298 #endif /* __cplusplus */
299 #endif /* __cplusplus */
300 
301 #endif /* _LOS_ARCH_ATOMIC_H */
302