/*
 * Copyright © 2011 Ryan Lortie
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 *
 * Author: Ryan Lortie <desrt@desrt.ca>
 */

#ifndef __G_ATOMIC_H__
#define __G_ATOMIC_H__

#if !defined (__GLIB_H_INSIDE__) && !defined (GLIB_COMPILATION)
#error "Only <glib.h> can be included directly."
#endif

#include <glib/gtypes.h>

G_BEGIN_DECLS

GLIB_AVAILABLE_IN_ALL
gint                    g_atomic_int_get                      (const volatile gint *atomic);
GLIB_AVAILABLE_IN_ALL
void                    g_atomic_int_set                      (volatile gint  *atomic,
                                                               gint            newval);
GLIB_AVAILABLE_IN_ALL
void                    g_atomic_int_inc                      (volatile gint  *atomic);
GLIB_AVAILABLE_IN_ALL
gboolean                g_atomic_int_dec_and_test             (volatile gint  *atomic);
GLIB_AVAILABLE_IN_ALL
gboolean                g_atomic_int_compare_and_exchange     (volatile gint  *atomic,
                                                               gint            oldval,
                                                               gint            newval);
GLIB_AVAILABLE_IN_ALL
gint                    g_atomic_int_add                      (volatile gint  *atomic,
                                                               gint            val);
GLIB_AVAILABLE_IN_2_30
guint                   g_atomic_int_and                      (volatile guint *atomic,
                                                               guint           val);
GLIB_AVAILABLE_IN_2_30
guint                   g_atomic_int_or                       (volatile guint *atomic,
                                                               guint           val);
GLIB_AVAILABLE_IN_ALL
guint                   g_atomic_int_xor                      (volatile guint *atomic,
                                                               guint           val);

GLIB_AVAILABLE_IN_ALL
gpointer                g_atomic_pointer_get                  (const volatile void *atomic);
GLIB_AVAILABLE_IN_ALL
void                    g_atomic_pointer_set                  (volatile void  *atomic,
                                                               gpointer        newval);
GLIB_AVAILABLE_IN_ALL
gboolean                g_atomic_pointer_compare_and_exchange (volatile void  *atomic,
                                                               gpointer        oldval,
                                                               gpointer        newval);
GLIB_AVAILABLE_IN_ALL
gssize                  g_atomic_pointer_add                  (volatile void  *atomic,
                                                               gssize          val);
GLIB_AVAILABLE_IN_2_30
gsize                   g_atomic_pointer_and                  (volatile void  *atomic,
                                                               gsize           val);
GLIB_AVAILABLE_IN_2_30
gsize                   g_atomic_pointer_or                   (volatile void  *atomic,
                                                               gsize           val);
GLIB_AVAILABLE_IN_ALL
gsize                   g_atomic_pointer_xor                  (volatile void  *atomic,
                                                               gsize           val);

GLIB_DEPRECATED_IN_2_30_FOR(g_atomic_int_add)
gint                    g_atomic_int_exchange_and_add         (volatile gint  *atomic,
                                                               gint            val);

G_END_DECLS

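/* A minimal usage sketch of the API declared above.  The MyObject type,
 * its ref_count field and the helper names are illustrative only (not part
 * of GLib); g_new0() and g_free() come from the rest of GLib via <glib.h>.
 *
 *   typedef struct { gint ref_count; gchar *data; } MyObject;
 *
 *   static MyObject *
 *   my_object_ref (MyObject *obj)
 *   {
 *     g_atomic_int_inc (&obj->ref_count);
 *     return obj;
 *   }
 *
 *   static void
 *   my_object_unref (MyObject *obj)
 *   {
 *     // g_atomic_int_dec_and_test() returns TRUE only for the thread that
 *     // drops the count to zero, so exactly one thread frees the object.
 *     if (g_atomic_int_dec_and_test (&obj->ref_count))
 *       g_free (obj);
 *   }
 *
 *   static gpointer singleton;  // one shared MyObject*, initially NULL
 *
 *   static MyObject *
 *   my_object_get_default (void)
 *   {
 *     if (g_atomic_pointer_get (&singleton) == NULL)
 *       {
 *         MyObject *candidate = g_new0 (MyObject, 1);
 *         candidate->ref_count = 1;
 *
 *         // Publish the candidate only if no other thread won the race;
 *         // otherwise discard it and use the already-published instance.
 *         if (!g_atomic_pointer_compare_and_exchange (&singleton,
 *                                                     NULL, candidate))
 *           g_free (candidate);
 *       }
 *
 *     return g_atomic_pointer_get (&singleton);
 *   }
 */
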
#if defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)

/* The (void) (0 ? ... : 1) statements in the macros below are compile-time
 * type checks that generate no code: the integer variants only compile if
 * the arguments have an integer type, and the pointer variants use a cast
 * to gpointer for the same purpose. */

/* We prefer the new C11-style atomic extension of GCC if available */
#if defined(__ATOMIC_SEQ_CST) && !defined(__clang__)

/* This assumes sizeof(int) is 4: gatomic.c statically
 * asserts that (using G_STATIC_ASSERT at top-level in a header was
 * problematic, see #730932) */

#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    (gint) __atomic_load_4 ((atomic), __ATOMIC_SEQ_CST);                     \
  }))
#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) : 1);                                   \
    __atomic_store_4 ((atomic), (newval), __ATOMIC_SEQ_CST);                 \
  }))

#if GLIB_SIZEOF_VOID_P == 8

#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    guint64 gapg_temp = __atomic_load_8 ((atomic), __ATOMIC_SEQ_CST);        \
    (gpointer) gapg_temp;                                                    \
  }))
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __atomic_store_8 ((atomic), (gsize) (newval), __ATOMIC_SEQ_CST);         \
  }))

#else /* GLIB_SIZEOF_VOID_P == 8 */

/* This assumes that if sizeof(void *) is not 8, then it is 4:
 * gatomic.c statically asserts that (using G_STATIC_ASSERT
 * at top-level in a header was problematic, see #730932) */

#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    guint32 gapg_temp = __atomic_load_4 ((atomic), __ATOMIC_SEQ_CST);        \
    (gpointer) gapg_temp;                                                    \
  }))
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __atomic_store_4 ((atomic), (gsize) (newval), __ATOMIC_SEQ_CST);         \
  }))

#endif /* GLIB_SIZEOF_VOID_P == 8 */

#else /* defined(__ATOMIC_SEQ_CST) */

#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    __sync_synchronize ();                                                   \
    (gint) *(atomic);                                                        \
  }))
#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) : 1);                                   \
    *(atomic) = (newval);                                                    \
    __sync_synchronize ();                                                   \
  }))
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    __sync_synchronize ();                                                   \
    (gpointer) *(atomic);                                                    \
  }))
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    *(atomic) = (__typeof__ (*(atomic))) (gsize) (newval);                   \
    __sync_synchronize ();                                                   \
  }))

#endif /* !defined(__ATOMIC_SEQ_CST) */

#define g_atomic_int_inc(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    (void) __sync_fetch_and_add ((atomic), 1);                               \
  }))
#define g_atomic_int_dec_and_test(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    __sync_fetch_and_sub ((atomic), 1) == 1;                                 \
  }))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1);                        \
    __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
  }))
#define g_atomic_int_add(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (gint) __sync_fetch_and_add ((atomic), (val));                           \
  }))
#define g_atomic_int_and(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __sync_fetch_and_and ((atomic), (val));                          \
  }))
#define g_atomic_int_or(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __sync_fetch_and_or ((atomic), (val));                           \
  }))
#define g_atomic_int_xor(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __sync_fetch_and_xor ((atomic), (val));                          \
  }))

#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
  }))
#define g_atomic_pointer_add(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gssize) __sync_fetch_and_add ((atomic), (val));                         \
  }))
#define g_atomic_pointer_and(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gsize) __sync_fetch_and_and ((atomic), (val));                          \
  }))
#define g_atomic_pointer_or(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gsize) __sync_fetch_and_or ((atomic), (val));                           \
  }))
#define g_atomic_pointer_xor(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gsize) __sync_fetch_and_xor ((atomic), (val));                          \
  }))

#else /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */

/* The GCC builtins above cannot be used here, so forward to the function
 * implementations in gatomic.c, casting the arguments so that callers can
 * pass the same range of argument types as with the macros above. */

#define g_atomic_int_get(atomic) \
  (g_atomic_int_get ((gint *) (atomic)))
#define g_atomic_int_set(atomic, newval) \
  (g_atomic_int_set ((gint *) (atomic), (gint) (newval)))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_int_compare_and_exchange ((gint *) (atomic), (oldval), (newval)))
#define g_atomic_int_add(atomic, val) \
  (g_atomic_int_add ((gint *) (atomic), (val)))
#define g_atomic_int_and(atomic, val) \
  (g_atomic_int_and ((guint *) (atomic), (val)))
#define g_atomic_int_or(atomic, val) \
  (g_atomic_int_or ((guint *) (atomic), (val)))
#define g_atomic_int_xor(atomic, val) \
  (g_atomic_int_xor ((guint *) (atomic), (val)))
#define g_atomic_int_inc(atomic) \
  (g_atomic_int_inc ((gint *) (atomic)))
#define g_atomic_int_dec_and_test(atomic) \
  (g_atomic_int_dec_and_test ((gint *) (atomic)))

#define g_atomic_pointer_get(atomic) \
  (g_atomic_pointer_get (atomic))
#define g_atomic_pointer_set(atomic, newval) \
  (g_atomic_pointer_set ((atomic), (gpointer) (newval)))
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_pointer_compare_and_exchange ((atomic), (gpointer) (oldval), (gpointer) (newval)))
#define g_atomic_pointer_add(atomic, val) \
  (g_atomic_pointer_add ((atomic), (gssize) (val)))
#define g_atomic_pointer_and(atomic, val) \
  (g_atomic_pointer_and ((atomic), (gsize) (val)))
#define g_atomic_pointer_or(atomic, val) \
  (g_atomic_pointer_or ((atomic), (gsize) (val)))
#define g_atomic_pointer_xor(atomic, val) \
  (g_atomic_pointer_xor ((atomic), (gsize) (val)))

#endif /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */

#endif /* __G_ATOMIC_H__ */