1 /*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ANDROID_CUTILS_ATOMIC_ARM_H
18 #define ANDROID_CUTILS_ATOMIC_ARM_H
19
20 #include <stdint.h>
21
22 #ifndef ANDROID_ATOMIC_INLINE
23 #define ANDROID_ATOMIC_INLINE inline __attribute__((always_inline))
24 #endif
25
/*
 * Compiler-only barrier: the "memory" clobber forbids the compiler from
 * reordering or caching memory accesses across this point.  The asm body
 * is empty, so no machine instruction is emitted.
 */
extern ANDROID_ATOMIC_INLINE void android_compiler_barrier()
{
    __asm__ __volatile__ ("" : : : "memory");
}
30
/*
 * Full memory barrier.  On uniprocessor builds (ANDROID_SMP == 0) only the
 * compiler must be constrained, so a compiler barrier suffices; on SMP
 * builds an ARM DMB (data memory barrier) instruction is issued as well.
 */
extern ANDROID_ATOMIC_INLINE void android_memory_barrier()
{
#if ANDROID_SMP == 0
    android_compiler_barrier();
#else
    __asm__ __volatile__ ("dmb" : : : "memory");
#endif
}
39
40 extern ANDROID_ATOMIC_INLINE
android_atomic_acquire_load(volatile const int32_t * ptr)41 int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
42 {
43 int32_t value = *ptr;
44 android_memory_barrier();
45 return value;
46 }
47
48 extern ANDROID_ATOMIC_INLINE
android_atomic_release_load(volatile const int32_t * ptr)49 int32_t android_atomic_release_load(volatile const int32_t *ptr)
50 {
51 android_memory_barrier();
52 return *ptr;
53 }
54
55 extern ANDROID_ATOMIC_INLINE
android_atomic_acquire_store(int32_t value,volatile int32_t * ptr)56 void android_atomic_acquire_store(int32_t value, volatile int32_t *ptr)
57 {
58 *ptr = value;
59 android_memory_barrier();
60 }
61
62 extern ANDROID_ATOMIC_INLINE
android_atomic_release_store(int32_t value,volatile int32_t * ptr)63 void android_atomic_release_store(int32_t value, volatile int32_t *ptr)
64 {
65 android_memory_barrier();
66 *ptr = value;
67 }
68
/*
 * Compare-and-swap without ordering: atomically set *ptr to new_value iff
 * *ptr == old_value, using an LDREX/STREX exclusive-access loop.
 *
 * Returns 0 on success (the swap happened), nonzero on failure (*ptr did
 * not equal old_value).  No memory barrier is implied; see the
 * acquire/release variants below.
 */
extern ANDROID_ATOMIC_INLINE
int android_atomic_cas(int32_t old_value, int32_t new_value,
                       volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        /*
         * prev   <- exclusive load of *ptr
         * status <- 0, then STREXEQ sets it to 1 if the exclusive store
         *           is attempted (prev == old_value) but loses the
         *           reservation; it stays 0 when no store is attempted.
         */
        __asm__ __volatile__ ("ldrex %0, [%3]\n"
                              "mov %1, #0\n"
                              "teq %0, %4\n"
#ifdef __thumb2__
                              /* Thumb-2 requires an IT block before the
                               * conditional STREXEQ. */
                              "it eq\n"
#endif
                              "strexeq %1, %5, [%3]"
                              : "=&r" (prev), "=&r" (status), "+m"(*ptr)
                              : "r" (ptr), "Ir" (old_value), "r" (new_value)
                              : "cc");
    /* Retry only when the exclusive store was attempted and lost. */
    } while (__builtin_expect(status != 0, 0));
    return prev != old_value;
}
88
89 extern ANDROID_ATOMIC_INLINE
android_atomic_acquire_cas(int32_t old_value,int32_t new_value,volatile int32_t * ptr)90 int android_atomic_acquire_cas(int32_t old_value, int32_t new_value,
91 volatile int32_t *ptr)
92 {
93 int status = android_atomic_cas(old_value, new_value, ptr);
94 android_memory_barrier();
95 return status;
96 }
97
98 extern ANDROID_ATOMIC_INLINE
android_atomic_release_cas(int32_t old_value,int32_t new_value,volatile int32_t * ptr)99 int android_atomic_release_cas(int32_t old_value, int32_t new_value,
100 volatile int32_t *ptr)
101 {
102 android_memory_barrier();
103 return android_atomic_cas(old_value, new_value, ptr);
104 }
105
/*
 * Atomically add increment to *ptr via an LDREX/STREX loop and return the
 * value *ptr held before the addition.  A memory barrier is issued before
 * the operation.
 */
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_add(int32_t increment, volatile int32_t *ptr)
{
    int32_t prev, tmp, status;
    android_memory_barrier();
    do {
        /*
         * prev   <- exclusive load of *ptr
         * tmp    <- prev + increment
         * status <- 0 if the exclusive store succeeded, 1 if it lost the
         *           reservation (then retry).
         */
        __asm__ __volatile__ ("ldrex %0, [%4]\n"
                              "add %1, %0, %5\n"
                              "strex %2, %1, [%4]"
                              : "=&r" (prev), "=&r" (tmp),
                                "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "Ir" (increment)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
122
android_atomic_inc(volatile int32_t * addr)123 extern ANDROID_ATOMIC_INLINE int32_t android_atomic_inc(volatile int32_t *addr)
124 {
125 return android_atomic_add(1, addr);
126 }
127
android_atomic_dec(volatile int32_t * addr)128 extern ANDROID_ATOMIC_INLINE int32_t android_atomic_dec(volatile int32_t *addr)
129 {
130 return android_atomic_add(-1, addr);
131 }
132
/*
 * Atomically AND value into *ptr via an LDREX/STREX loop and return the
 * value *ptr held before the operation.  A memory barrier is issued before
 * the operation.
 */
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_and(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, tmp, status;
    android_memory_barrier();
    do {
        /*
         * prev   <- exclusive load of *ptr
         * tmp    <- prev & value
         * status <- 0 if the exclusive store succeeded, 1 on a lost
         *           reservation (then retry).
         */
        __asm__ __volatile__ ("ldrex %0, [%4]\n"
                              "and %1, %0, %5\n"
                              "strex %2, %1, [%4]"
                              : "=&r" (prev), "=&r" (tmp),
                                "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "Ir" (value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
149
/*
 * Atomically OR value into *ptr via an LDREX/STREX loop and return the
 * value *ptr held before the operation.  A memory barrier is issued before
 * the operation.
 */
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, tmp, status;
    android_memory_barrier();
    do {
        /*
         * prev   <- exclusive load of *ptr
         * tmp    <- prev | value
         * status <- 0 if the exclusive store succeeded, 1 on a lost
         *           reservation (then retry).
         */
        __asm__ __volatile__ ("ldrex %0, [%4]\n"
                              "orr %1, %0, %5\n"
                              "strex %2, %1, [%4]"
                              : "=&r" (prev), "=&r" (tmp),
                                "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "Ir" (value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
166
167 #endif /* ANDROID_CUTILS_ATOMIC_ARM_H */
168