/*
 * Copyright © 2007 Chris Wilson
 * Copyright © 2009,2010 Red Hat, Inc.
 * Copyright © 2011,2012 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Contributor(s):
 *	Chris Wilson <chris@chris-wilson.co.uk>
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_ATOMIC_HH
#define HB_ATOMIC_HH

#include "hb.hh"


/*
 * Atomic integers and pointers.
 */


/* We need external help for these */

#if defined(hb_atomic_int_impl_add) \
 && defined(hb_atomic_ptr_impl_get) \
 && defined(hb_atomic_ptr_impl_cmpexch)

/* Defined externally, i.e. in config.h. */


#elif !defined(HB_NO_MT) && defined(__ATOMIC_ACQUIRE)

/* C++11-style GCC primitives. */

#define _hb_memory_barrier() __sync_synchronize ()

#define hb_atomic_int_impl_add(AI, V) __atomic_fetch_add ((AI), (V), __ATOMIC_ACQ_REL)
#define hb_atomic_int_impl_set_relaxed(AI, V) __atomic_store_n ((AI), (V), __ATOMIC_RELAXED)
#define hb_atomic_int_impl_set(AI, V) __atomic_store_n ((AI), (V), __ATOMIC_RELEASE)
#define hb_atomic_int_impl_get_relaxed(AI) __atomic_load_n ((AI), __ATOMIC_RELAXED)
#define hb_atomic_int_impl_get(AI) __atomic_load_n ((AI), __ATOMIC_ACQUIRE)

#define hb_atomic_ptr_impl_set_relaxed(P, V) __atomic_store_n ((P), (V), __ATOMIC_RELAXED)
#define hb_atomic_ptr_impl_get_relaxed(P) __atomic_load_n ((P), __ATOMIC_RELAXED)
#define hb_atomic_ptr_impl_get(P) __atomic_load_n ((P), __ATOMIC_ACQUIRE)
static inline bool
_hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
{
  const void *O = O_; // Need lvalue
  return __atomic_compare_exchange_n ((void **) P, (void **) &O, (void *) N, true, __ATOMIC_ACQ_REL, __ATOMIC_RELAXED);
}
#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_atomic_ptr_impl_cmplexch ((const void **) (P), (O), (N))
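/* Note: passing `true` to __atomic_compare_exchange_n selects the *weak*
 * compare-exchange, which may fail spuriously on some architectures;
 * callers of hb_atomic_ptr_impl_cmpexch are expected to retry in a loop. */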

#elif !defined(HB_NO_MT) && __cplusplus >= 201103L

/* C++11 atomics. */

#include <atomic>

#define _hb_memory_barrier() std::atomic_thread_fence(std::memory_order_acq_rel)
#define _hb_memory_r_barrier() std::atomic_thread_fence(std::memory_order_acquire)
#define _hb_memory_w_barrier() std::atomic_thread_fence(std::memory_order_release)

#define hb_atomic_int_impl_add(AI, V) (reinterpret_cast<std::atomic<int> *> (AI)->fetch_add ((V), std::memory_order_acq_rel))
#define hb_atomic_int_impl_set_relaxed(AI, V) (reinterpret_cast<std::atomic<int> *> (AI)->store ((V), std::memory_order_relaxed))
#define hb_atomic_int_impl_set(AI, V) (reinterpret_cast<std::atomic<int> *> (AI)->store ((V), std::memory_order_release))
#define hb_atomic_int_impl_get_relaxed(AI) (reinterpret_cast<std::atomic<int> *> (AI)->load (std::memory_order_relaxed))
#define hb_atomic_int_impl_get(AI) (reinterpret_cast<std::atomic<int> *> (AI)->load (std::memory_order_acquire))

#define hb_atomic_ptr_impl_set_relaxed(P, V) (reinterpret_cast<std::atomic<void*> *> (P)->store ((V), std::memory_order_relaxed))
#define hb_atomic_ptr_impl_get_relaxed(P) (reinterpret_cast<std::atomic<void*> *> (P)->load (std::memory_order_relaxed))
#define hb_atomic_ptr_impl_get(P) (reinterpret_cast<std::atomic<void*> *> (P)->load (std::memory_order_acquire))
static inline bool
_hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
{
  const void *O = O_; // Need lvalue
  return reinterpret_cast<std::atomic<const void*> *> (P)->compare_exchange_weak (O, N, std::memory_order_acq_rel, std::memory_order_relaxed);
}
#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_atomic_ptr_impl_cmplexch ((const void **) (P), (O), (N))
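/* The reinterpret_casts above assume std::atomic<T> has the same size and
 * layout as a plain T.  The C++ standard does not guarantee that, but it
 * holds on the mainstream toolchains this branch targets.  As in the GCC
 * branch, compare_exchange_weak may fail spuriously; callers retry. */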


#elif !defined(HB_NO_MT) && defined(_WIN32)

#include <windows.h>

static inline void _hb_memory_barrier ()
{
#if !defined(MemoryBarrier)
  /* MinGW has a convoluted history of supporting MemoryBarrier. */
  LONG dummy = 0;
  InterlockedExchange (&dummy, 1);
#else
  MemoryBarrier ();
#endif
}
#define _hb_memory_barrier() _hb_memory_barrier ()

#define hb_atomic_int_impl_add(AI, V) InterlockedExchangeAdd ((LONG *) (AI), (V))
static_assert ((sizeof (LONG) == sizeof (int)), "");

#define hb_atomic_ptr_impl_cmpexch(P,O,N) (InterlockedCompareExchangePointer ((P), (N), (O)) == (O))
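/* InterlockedExchangeAdd returns the *previous* value and implies a full
 * memory barrier, matching the fetch-and-add semantics of the other
 * backends.  InterlockedCompareExchangePointer also returns the initial
 * value, so comparing it against (O) tells whether the swap took place. */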


#elif !defined(HB_NO_MT) && defined(HAVE_INTEL_ATOMIC_PRIMITIVES)

#define _hb_memory_barrier() __sync_synchronize ()

#define hb_atomic_int_impl_add(AI, V) __sync_fetch_and_add ((AI), (V))

#define hb_atomic_ptr_impl_cmpexch(P,O,N) __sync_bool_compare_and_swap ((P), (O), (N))
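/* The legacy __sync builtins each imply a full memory barrier, so no
 * explicit fences are needed around them. */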


#elif !defined(HB_NO_MT) && defined(HAVE_SOLARIS_ATOMIC_OPS)

#include <atomic.h>
#include <mbarrier.h>

#define _hb_memory_r_barrier() __machine_r_barrier ()
#define _hb_memory_w_barrier() __machine_w_barrier ()
#define _hb_memory_barrier() __machine_rw_barrier ()

static inline int _hb_fetch_and_add (int *AI, int V)
{
  _hb_memory_w_barrier ();
  int result = atomic_add_int_nv ((uint_t *) AI, V) - V;
  _hb_memory_r_barrier ();
  return result;
}
static inline bool _hb_compare_and_swap_ptr (void **P, void *O, void *N)
{
  _hb_memory_w_barrier ();
  bool result = atomic_cas_ptr (P, O, N) == O;
  _hb_memory_r_barrier ();
  return result;
}

#define hb_atomic_int_impl_add(AI, V) _hb_fetch_and_add ((AI), (V))

#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_compare_and_swap_ptr ((P), (O), (N))
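/* The Solaris atomic_ops(3C) routines are atomic but provide no memory
 * ordering by themselves, hence the explicit barriers around each call.
 * atomic_add_int_nv returns the *new* value, so we subtract V to recover
 * the fetch-and-add (old-value) semantics the rest of the code expects. */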


#elif !defined(HB_NO_MT) && defined(__APPLE__)

#include <libkern/OSAtomic.h>
#ifdef __MAC_OS_X_MIN_REQUIRED
#include <AvailabilityMacros.h>
#elif defined(__IPHONE_OS_MIN_REQUIRED)
#include <Availability.h>
#endif

#define _hb_memory_barrier() OSMemoryBarrier ()

#define hb_atomic_int_impl_add(AI, V) (OSAtomicAdd32Barrier ((V), (AI)) - (V))

#if (MAC_OS_X_VERSION_MIN_REQUIRED > MAC_OS_X_VERSION_10_4 || __IPHONE_VERSION_MIN_REQUIRED >= 20100)
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwapPtrBarrier ((O), (N), (P))
#else
#if __ppc64__ || __x86_64__ || __aarch64__
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap64Barrier ((int64_t) (O), (int64_t) (N), (int64_t*) (P))
#else
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap32Barrier ((int32_t) (O), (int32_t) (N), (int32_t*) (P))
#endif
#endif
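/* OSAtomicAdd32Barrier returns the *new* value, hence the `- (V)` above to
 * recover fetch-and-add semantics; the ...Barrier variants include a full
 * memory barrier.  OSAtomicCompareAndSwapPtrBarrier is unavailable on the
 * oldest targets, hence the version check with width-specific fallbacks.
 * Apple deprecated the OSAtomic API in macOS 10.12 in favor of C11
 * <stdatomic.h>. */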


#elif !defined(HB_NO_MT) && defined(_AIX) && defined(__IBMCPP__)

#include <builtins.h>

#define _hb_memory_barrier() __lwsync ()

static inline int _hb_fetch_and_add (int *AI, int V)
{
  _hb_memory_barrier ();
  int result = __fetch_and_add (AI, V);
  _hb_memory_barrier ();
  return result;
}
static inline bool _hb_compare_and_swaplp (long *P, long O, long N)
{
  _hb_memory_barrier ();
  bool result = __compare_and_swaplp (P, &O, N);
  _hb_memory_barrier ();
  return result;
}

#define hb_atomic_int_impl_add(AI, V) _hb_fetch_and_add ((AI), (V))

#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_compare_and_swaplp ((long *) (P), (long) (O), (long) (N))
static_assert ((sizeof (long) == sizeof (void *)), "");
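/* The xlC builtin __compare_and_swaplp operates on `long`, which is why
 * pointers are funneled through `long` casts; the static_assert above
 * guards that cast.  __lwsync emits the PowerPC lightweight sync, which
 * suffices for acquire/release ordering. */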


#elif !defined(HB_NO_MT)

#define HB_ATOMIC_INT_NIL 1 /* Warn that fallback implementation is in use. */

#define _hb_memory_barrier()

#define hb_atomic_int_impl_add(AI, V) ((*(AI) += (V)) - (V))

#define hb_atomic_ptr_impl_cmpexch(P,O,N) (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)
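/* This fallback is NOT thread-safe: plain loads, stores, and a non-atomic
 * read-modify-write with no barriers.  It exists only so the library still
 * builds on unknown platforms; concurrent use through it is undefined. */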


#else /* HB_NO_MT */

#define hb_atomic_int_impl_add(AI, V) ((*(AI) += (V)) - (V))

#define _hb_memory_barrier()

#define hb_atomic_ptr_impl_cmpexch(P,O,N) (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)


#endif


#ifndef _hb_memory_r_barrier
#define _hb_memory_r_barrier() _hb_memory_barrier ()
#endif
#ifndef _hb_memory_w_barrier
#define _hb_memory_w_barrier() _hb_memory_barrier ()
#endif
#ifndef hb_atomic_int_impl_set_relaxed
#define hb_atomic_int_impl_set_relaxed(AI, V) (*(AI) = (V))
#endif
#ifndef hb_atomic_int_impl_get_relaxed
#define hb_atomic_int_impl_get_relaxed(AI) (*(AI))
#endif

#ifndef hb_atomic_ptr_impl_set_relaxed
#define hb_atomic_ptr_impl_set_relaxed(P, V) (*(P) = (V))
#endif
#ifndef hb_atomic_ptr_impl_get_relaxed
#define hb_atomic_ptr_impl_get_relaxed(P) (*(P))
#endif
#ifndef hb_atomic_int_impl_set
inline void hb_atomic_int_impl_set (int *AI, int v) { _hb_memory_w_barrier (); *AI = v; }
#endif
#ifndef hb_atomic_int_impl_get
inline int hb_atomic_int_impl_get (const int *AI) { int v = *AI; _hb_memory_r_barrier (); return v; }
#endif
#ifndef hb_atomic_ptr_impl_get
inline void *hb_atomic_ptr_impl_get (void ** const P) { void *v = *P; _hb_memory_r_barrier (); return v; }
#endif


#define HB_ATOMIC_INT_INIT(V) {V}
struct hb_atomic_int_t
{
  void set_relaxed (int v_) { hb_atomic_int_impl_set_relaxed (&v, v_); }
  void set (int v_) { hb_atomic_int_impl_set (&v, v_); }
  int get_relaxed () const { return hb_atomic_int_impl_get_relaxed (&v); }
  int get () const { return hb_atomic_int_impl_get (&v); }
  int inc () { return hb_atomic_int_impl_add (&v, 1); }
  int dec () { return hb_atomic_int_impl_add (&v, -1); }

  int v;
};
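/* Illustrative sketch (hypothetical; not part of HarfBuzz's API): a minimal
 * reference count built on hb_atomic_int_t.  inc()/dec() return the *old*
 * value, so dec() returning 1 means this caller just dropped the last
 * reference and should destroy the object. */
struct hb_example_reference_count_t
{
  void init () { ref_count.set_relaxed (1); }
  void ref () { ref_count.inc (); }
  /* Returns true when the object should be destroyed. */
  bool unref () { return ref_count.dec () == 1; }

  hb_atomic_int_t ref_count;
};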


#define HB_ATOMIC_PTR_INIT(V) {V}
template <typename P>
struct hb_atomic_ptr_t
{
  typedef typename hb_remove_pointer (P) T;

  void init (T* v_ = nullptr) { set_relaxed (v_); }
  void set_relaxed (T* v_) { hb_atomic_ptr_impl_set_relaxed (&v, v_); }
  T *get_relaxed () const { return (T *) hb_atomic_ptr_impl_get_relaxed (&v); }
  T *get () const { return (T *) hb_atomic_ptr_impl_get ((void **) &v); }
  bool cmpexch (const T *old, T *new_) const { return hb_atomic_ptr_impl_cmpexch ((void **) &v, (void *) old, (void *) new_); }

  T * operator -> () const { return get (); }
  template <typename C> operator C * () const { return get (); }

  T *v;
};
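/* Illustrative sketch (hypothetical; not part of HarfBuzz's API): the
 * once-only lazy-initialization pattern cmpexch() enables.  If the
 * compare-exchange is lost to another thread (or fails spuriously), the
 * loser frees its copy and retries, so exactly one object is published. */
struct hb_example_lazy_t { int data; };
static hb_atomic_ptr_t<hb_example_lazy_t *> hb_example_singleton = HB_ATOMIC_PTR_INIT (nullptr);

static inline hb_example_lazy_t *
hb_example_get_singleton ()
{
retry:
  hb_example_lazy_t *p = hb_example_singleton.get ();
  if (p) return p;
  p = new hb_example_lazy_t ();
  if (!hb_example_singleton.cmpexch (nullptr, p))
  {
    delete p;
    goto retry;
  }
  return p;
}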


#endif /* HB_ATOMIC_HH */