// Protocol Buffers - Google's data interchange format
// Copyright 2012 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// This file is an internal atomic implementation, use atomicops.h instead.

#ifndef GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_MACOSX_H_
#define GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_MACOSX_H_

#include <libkern/OSAtomic.h>

namespace google {
namespace protobuf {
namespace internal {

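// Atomically execute:
//   result = *ptr;
//   if (result == old_value)
//     *ptr = new_value;
//   return result;
// I.e. replace *ptr with new_value if *ptr held old_value, and always return
// the value *ptr held before the operation. Implies no memory barriers. The
// retry loop is needed because OSAtomicCompareAndSwap32 reports only success
// or failure: after a failure the code re-reads *ptr until it either succeeds
// or observes a value different from old_value that it can return.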
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32(old_value, new_value,
                                 const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

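// Atomically store new_value into *ptr, returning the value that *ptr held
// beforehand. Implies no memory barriers. Implemented as a CAS loop since the
// OSAtomic interface offers no plain exchange primitive.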
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                         Atomic32 new_value) {
  Atomic32 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap32(old_value, new_value,
                                     const_cast<Atomic32*>(ptr)));
  return old_value;
}

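// Atomically increment *ptr by "increment" and return the new value of *ptr.
// The NoBarrier_ version implies no memory barriers; Barrier_AtomicIncrement
// performs the same update with a full barrier.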
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                          Atomic32 increment) {
  return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
}

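// A full barrier: no load or store issued before the call may be reordered
// after it, and none issued after may be reordered before it.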
inline void MemoryBarrier() {
  OSMemoryBarrier();
}

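// CompareAndSwap with acquire semantics: no memory access issued after the
// operation may be reordered ahead of it.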
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
                                        const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

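// CompareAndSwap with release semantics: no memory access issued before the
// operation may be reordered after it. As the 64-bit version below notes, the
// libkern interface does not distinguish acquire from release barriers, so
// the acquire implementation is reused here.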
inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}

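// Plain, acquire, and release loads and stores. On the architectures macOS
// targets, an aligned 32-bit load or store is already atomic, so the
// NoBarrier_ versions are plain assignments; the Acquire_/Release_ variants
// add a full MemoryBarrier() on the appropriate side of the access to
// enforce ordering against surrounding operations.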
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}

inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}

inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  MemoryBarrier();
  return *ptr;
}

#ifdef __LP64__

// 64-bit implementation on 64-bit platform

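// The 64-bit variants below mirror the 32-bit semantics above, using the
// OSAtomic*64 entry points. The reinterpret_casts to volatile int64_t* rely
// on Atomic64 sharing the representation of int64_t on this platform.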
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64(old_value, new_value,
                                 reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
                                         Atomic64 new_value) {
  Atomic64 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap64(old_value, new_value,
                                     reinterpret_cast<volatile int64_t*>(ptr)));
  return old_value;
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                          Atomic64 increment) {
  return OSAtomicAdd64(increment, reinterpret_cast<volatile int64_t*>(ptr));
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
                                        Atomic64 increment) {
  return OSAtomicAdd64Barrier(increment,
                              reinterpret_cast<volatile int64_t*>(ptr));
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64Barrier(
        old_value, new_value, reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  // The libkern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}

inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}

inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}

inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  Atomic64 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
  MemoryBarrier();
  return *ptr;
}

#endif  // defined(__LP64__)

}  // namespace internal
}  // namespace protobuf
}  // namespace google

#endif  // GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_MACOSX_H_