// Copyright (c) 2012 Google Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//    * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//    * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//    * Neither the name of Google Inc. nor the name Chromium Embedded
// Framework nor the names of its contributors may be used to endorse
// or promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Do not include this header file directly. Use base/cef_atomicops.h
// instead.
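//
// A minimal usage sketch (hypothetical caller code; the include path
// follows the comment above):
//
//   #include "base/cef_atomicops.h"
//
//   base::subtle::Atomic32 counter = 0;
//   base::subtle::Barrier_AtomicIncrement(&counter, 1);   // counter == 1
//   base::subtle::Atomic32 value = base::subtle::Acquire_Load(&counter);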

#ifndef CEF_INCLUDE_BASE_INTERNAL_CEF_ATOMICOPS_MAC_H_
#define CEF_INCLUDE_BASE_INTERNAL_CEF_ATOMICOPS_MAC_H_

#include <libkern/OSAtomic.h>

namespace base {
namespace subtle {

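// Atomically compare *ptr with old_value and, if they match, store
// new_value. Returns the value of *ptr observed before the operation
// (old_value on success); retries until the CAS either succeeds or *ptr
// is seen to differ from old_value. No memory barrier is implied.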
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32(old_value, new_value,
                                 const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

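// Atomically store new_value into *ptr, returning the value that was
// previously there. Implemented as a compare-and-swap retry loop.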
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                         Atomic32 new_value) {
  Atomic32 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap32(old_value, new_value,
                                     const_cast<Atomic32*>(ptr)));
  return old_value;
}

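// Atomically add increment to *ptr, returning the new (incremented)
// value. The Barrier_ variant additionally acts as a full memory barrier.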
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                          Atomic32 increment) {
  return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
}

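// Full memory barrier: no loads or stores may be reordered across this
// point.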
inline void MemoryBarrier() {
  OSMemoryBarrier();
}

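// Same as NoBarrier_CompareAndSwap, but also acts as a memory barrier
// (via OSAtomicCompareAndSwap32Barrier).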
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
                                        const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

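// The libkern interface does not distinguish between acquire and release
// memory barriers; they are equivalent, so Release_CompareAndSwap simply
// forwards to Acquire_CompareAndSwap (the 64-bit version below notes the
// same).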
inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}

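// Plain and barrier-ordered stores. Acquire_Store performs the store and
// then a full barrier; Release_Store performs a full barrier and then the
// store.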
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}

inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}

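// Plain and barrier-ordered loads. Acquire_Load performs the load and
// then a full barrier; Release_Load performs a full barrier and then the
// load.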
inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}

inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  MemoryBarrier();
  return *ptr;
}

#ifdef __LP64__

// 64-bit implementation on 64-bit platform

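// The Atomic64 operations below mirror the 32-bit versions above, using
// the OSAtomic*64 primitives and casting through volatile int64_t*.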
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64(old_value, new_value,
                                 reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
                                         Atomic64 new_value) {
  Atomic64 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap64(old_value, new_value,
                                     reinterpret_cast<volatile int64_t*>(ptr)));
  return old_value;
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                          Atomic64 increment) {
  return OSAtomicAdd64(increment, reinterpret_cast<volatile int64_t*>(ptr));
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
                                        Atomic64 increment) {
  return OSAtomicAdd64Barrier(increment,
                              reinterpret_cast<volatile int64_t*>(ptr));
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64Barrier(
            old_value, new_value, reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  // The lib kern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}

inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}

inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}

inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  Atomic64 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
  MemoryBarrier();
  return *ptr;
}

#endif  // defined(__LP64__)

}  // namespace base::subtle
}  // namespace base

#endif  // CEF_INCLUDE_BASE_INTERNAL_CEF_ATOMICOPS_MAC_H_