// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is an internal atomic implementation, use base/atomicops.h instead.

#ifndef BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_
#define BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_
#pragma once

#include <libkern/OSAtomic.h>

namespace base {
namespace subtle {

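// Atomically compare *ptr to old_value and, if they match, replace it with
// new_value; returns the value of *ptr observed before the operation, with
// no memory-ordering guarantees. OSAtomicCompareAndSwap32() only reports
// success or failure, so on failure we re-read *ptr to recover a definitive
// previous value, retrying while it still equals old_value.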
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32(old_value, new_value,
                                 const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

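// Atomically store new_value into *ptr and return the value it replaced.
// Implemented as a compare-and-swap retry loop, since the exchange must
// return the previous value atomically.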
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
                                         Atomic32 new_value) {
  Atomic32 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap32(old_value, new_value,
                                     const_cast<Atomic32*>(ptr)));
  return old_value;
}

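// Atomically add increment to *ptr and return the new (incremented) value;
// OSAtomicAdd32() returns the value after the addition.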
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
                                          Atomic32 increment) {
  return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
}

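// Same as NoBarrier_AtomicIncrement, but the Barrier variant of the libkern
// primitive also issues a memory barrier around the operation.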
inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
                                        Atomic32 increment) {
  return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
}

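// Full hardware memory barrier: neither loads nor stores may be reordered
// across this point.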
inline void MemoryBarrier() {
  OSMemoryBarrier();
}

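// Compare-and-swap with acquire semantics. The Barrier variant of the
// libkern primitive provides a full barrier, which is strictly stronger
// than acquire.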
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
                                        const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
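  // The lib kern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent.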
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}

inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}

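// A plain store followed by a full barrier: later memory operations cannot
// be reordered to occur before the store.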
inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}

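// A full barrier followed by a plain store: all prior memory operations are
// made visible before the new value is published.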
inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}

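// A plain load followed by a full barrier: subsequent memory operations
// cannot be reordered to occur before this load.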
inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}

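// A full barrier followed by a plain load: all prior memory operations
// complete before the load is performed.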
inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
  MemoryBarrier();
  return *ptr;
}

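// Illustrative sketch (hypothetical names, not part of this header): the
// Release_Store/Acquire_Load pair implements the classic message-passing
// pattern between two threads.
//
//   Atomic32 g_ready = 0;   // flag
//   int g_payload = 0;      // data published under the flag
//
//   // Producer:
//   g_payload = 42;
//   Release_Store(&g_ready, 1);  // payload write ordered before the flag
//
//   // Consumer:
//   if (Acquire_Load(&g_ready)) {
//     // g_payload == 42 is guaranteed to be visible here.
//   }
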
#ifdef __LP64__

// 64-bit implementation on 64-bit platform

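// The routines below mirror the 32-bit functions above, substituting the
// OSAtomic*64 primitives and Atomic64 operands.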
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64(old_value, new_value,
                                 reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
                                         Atomic64 new_value) {
  Atomic64 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap64(old_value, new_value,
                                     reinterpret_cast<volatile int64_t*>(ptr)));
  return old_value;
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
                                          Atomic64 increment) {
  return OSAtomicAdd64(increment, reinterpret_cast<volatile int64_t*>(ptr));
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
                                        Atomic64 increment) {
  return OSAtomicAdd64Barrier(increment,
                              reinterpret_cast<volatile int64_t*>(ptr));
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64Barrier(
        old_value, new_value, reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  // The lib kern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}

inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}

inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}

inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
  Atomic64 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
  MemoryBarrier();
  return *ptr;
}

#endif  // defined(__LP64__)

}   // namespace base::subtle
}   // namespace base

#endif  // BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_