/*
 * Copyright © 2011 Ryan Lortie
 *
 * SPDX-License-Identifier: LGPL-2.1-or-later
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 *
 * Author: Ryan Lortie <desrt@desrt.ca>
 */

#ifndef __G_ATOMIC_H__
#define __G_ATOMIC_H__

#if !defined (__GLIB_H_INSIDE__) && !defined (GLIB_COMPILATION)
#error "Only <glib.h> can be included directly."
#endif

#include <glib/gtypes.h>
#include <glib/glib-typeof.h>

G_BEGIN_DECLS

/* Out-of-line implementations of the atomic operations.  When the compiler
 * provides lock-free atomics, the macros later in this header redefine these
 * names to use the intrinsics directly; the function declarations remain so
 * that the names also exist as real symbols. */

GLIB_AVAILABLE_IN_ALL
gint g_atomic_int_get (const volatile gint *atomic);
GLIB_AVAILABLE_IN_ALL
void g_atomic_int_set (volatile gint *atomic,
                       gint newval);
GLIB_AVAILABLE_IN_ALL
void g_atomic_int_inc (volatile gint *atomic);
GLIB_AVAILABLE_IN_ALL
gboolean g_atomic_int_dec_and_test (volatile gint *atomic);
GLIB_AVAILABLE_IN_ALL
gboolean g_atomic_int_compare_and_exchange (volatile gint *atomic,
                                            gint oldval,
                                            gint newval);
/* The _full variants additionally report the value observed before the
 * operation via *preval (see the macro implementations below). */
GLIB_AVAILABLE_IN_2_74
gboolean g_atomic_int_compare_and_exchange_full (gint *atomic,
                                                 gint oldval,
                                                 gint newval,
                                                 gint *preval);
GLIB_AVAILABLE_IN_2_74
gint g_atomic_int_exchange (gint *atomic,
                            gint newval);
GLIB_AVAILABLE_IN_ALL
gint g_atomic_int_add (volatile gint *atomic,
                       gint val);
/* The bitwise operations return the value *atomic held before the
 * operation (fetch-and-op semantics, matching the macros below). */
GLIB_AVAILABLE_IN_2_30
guint g_atomic_int_and (volatile guint *atomic,
                        guint val);
GLIB_AVAILABLE_IN_2_30
guint g_atomic_int_or (volatile guint *atomic,
                       guint val);
GLIB_AVAILABLE_IN_ALL
guint g_atomic_int_xor (volatile guint *atomic,
                        guint val);

GLIB_AVAILABLE_IN_ALL
gpointer g_atomic_pointer_get (const volatile void *atomic);
GLIB_AVAILABLE_IN_ALL
void g_atomic_pointer_set (volatile void *atomic,
                           gpointer newval);
GLIB_AVAILABLE_IN_ALL
gboolean g_atomic_pointer_compare_and_exchange (volatile void *atomic,
                                                gpointer oldval,
                                                gpointer newval);
GLIB_AVAILABLE_IN_2_74
gboolean g_atomic_pointer_compare_and_exchange_full (void *atomic,
                                                     gpointer oldval,
                                                     gpointer newval,
                                                     void *preval);
GLIB_AVAILABLE_IN_2_74
gpointer g_atomic_pointer_exchange (void *atomic,
                                    gpointer newval);
GLIB_AVAILABLE_IN_ALL
gintptr g_atomic_pointer_add (volatile void *atomic,
                              gssize val);
GLIB_AVAILABLE_IN_2_30
guintptr g_atomic_pointer_and (volatile void *atomic,
                               gsize val);
GLIB_AVAILABLE_IN_2_30
guintptr g_atomic_pointer_or (volatile void *atomic,
                              gsize val);
GLIB_AVAILABLE_IN_ALL
guintptr g_atomic_pointer_xor (volatile void *atomic,
                               gsize val);

/* Historical name for g_atomic_int_add(); deprecated in its favour. */
GLIB_DEPRECATED_IN_2_30_FOR(g_atomic_int_add)
gint g_atomic_int_exchange_and_add (volatile gint *atomic,
                                    gint val);

G_END_DECLS

#if defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)

/* We prefer the new C11-style atomic extension of GCC if available */
#if defined(__ATOMIC_SEQ_CST)

/* Sequentially-consistent load of a gint-sized integer.
 * The `(void) (0 ? *(atomic) ^ *(atomic) : 1)` expression is never evaluated
 * at runtime; it exists only so that compilation fails unless *atomic is an
 * integral (XOR-able) type. */
#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    gint gaig_temp; \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    __atomic_load ((gint *)(atomic), &gaig_temp, __ATOMIC_SEQ_CST); \
    (gint) gaig_temp; \
  }))
/* Sequentially-consistent store of a gint-sized integer. */
#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    gint gais_temp = (gint) (newval); \
    (void) (0 ? *(atomic) ^ (newval) : 1); \
    __atomic_store ((gint *)(atomic), &gais_temp, __ATOMIC_SEQ_CST); \
  }))

#if defined(glib_typeof)
/* Type-preserving pointer load: the result has the pointed-to type of
 * `atomic` rather than plain gpointer. */
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    glib_typeof (*(atomic)) gapg_temp_newval; \
    glib_typeof ((atomic)) gapg_temp_atomic = (atomic); \
    __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
    gapg_temp_newval; \
  }))
/* Type-preserving pointer store.  The unevaluated
 * `(void) (0 ? (gpointer) * (atomic) : NULL)` line checks at compile time
 * that *atomic converts to gpointer. */
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    glib_typeof ((atomic)) gaps_temp_atomic = (atomic); \
    glib_typeof (*(atomic)) gaps_temp_newval = (newval); \
    (void) (0 ? (gpointer) * (atomic) : NULL); \
    __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
  }))
#else /* if !(defined(glib_typeof) */
/* Without glib_typeof the pointer is accessed as a plain gpointer. */
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    gpointer gapg_temp_newval; \
    gpointer *gapg_temp_atomic = (gpointer *)(atomic); \
    __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
    gapg_temp_newval; \
  }))
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    gpointer *gaps_temp_atomic = (gpointer *)(atomic); \
    gpointer gaps_temp_newval = (gpointer)(newval); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
  }))
#endif /* if defined(glib_typeof) */

/* Atomically increment *atomic (seq-cst); no result. */
#define g_atomic_int_inc(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    (void) __atomic_fetch_add ((atomic), 1, __ATOMIC_SEQ_CST); \
  }))
/* Atomically decrement *atomic; evaluates to TRUE iff the value reached
 * zero, i.e. the fetched previous value was 1. */
#define g_atomic_int_dec_and_test(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    __atomic_fetch_sub ((atomic), 1, __ATOMIC_SEQ_CST) == 1; \
  }))
#if defined(glib_typeof) && defined(G_CXX_STD_VERSION)
/* See comments below about equivalent g_atomic_pointer_compare_and_exchange()
 * shenanigans for type-safety when compiling in C++ mode. */
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    glib_typeof (*(atomic)) gaicae_oldval = (oldval); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
    __atomic_compare_exchange_n ((atomic), &gaicae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#else /* if !(defined(glib_typeof) && defined(G_CXX_STD_VERSION)) */
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    gint gaicae_oldval = (oldval); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
    __atomic_compare_exchange_n ((atomic), (void *) (&(gaicae_oldval)), (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#endif /* defined(glib_typeof) */
/* Compare-and-exchange that also reports the value observed before the
 * operation: *preval is seeded with oldval, and on failure
 * __atomic_compare_exchange_n() overwrites it with the actual value seen. */
#define g_atomic_int_compare_and_exchange_full(atomic, oldval, newval, preval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    G_STATIC_ASSERT (sizeof *(preval) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) ^ *(preval) : 1); \
    *(preval) = (oldval); \
    __atomic_compare_exchange_n ((atomic), (preval), (newval), FALSE, \
                                 __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) \
                                 ? TRUE : FALSE; \
  }))
/* Atomically replace *atomic with newval; evaluates to the previous value. */
#define g_atomic_int_exchange(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) : 1); \
    (gint) __atomic_exchange_n ((atomic), (newval), __ATOMIC_SEQ_CST); \
  }))
/* Fetch-and-op primitives: each evaluates to the value *atomic held before
 * the operation. */
#define g_atomic_int_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (gint) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_int_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __atomic_fetch_and ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_int_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __atomic_fetch_or ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_int_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __atomic_fetch_xor ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))

#if defined(glib_typeof) && defined(G_CXX_STD_VERSION)
/* This is typesafe because we check we can assign oldval to the type of
 * (*atomic). Unfortunately it can only be done in C++ because gcc/clang warn
 * when atomic is volatile and not oldval, or when atomic is gsize* and oldval
 * is NULL. Note that clang++ force us to be typesafe because it is an error if the 2nd
 * argument of __atomic_compare_exchange_n() has a different type than the
 * first.
 * https://gitlab.gnome.org/GNOME/glib/-/merge_requests/1919
 * https://gitlab.gnome.org/GNOME/glib/-/merge_requests/1715#note_1024120. */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof (static_cast<glib_typeof (*(atomic))>((oldval))) \
                     == sizeof (gpointer)); \
    glib_typeof (*(atomic)) gapcae_oldval = (oldval); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __atomic_compare_exchange_n ((atomic), &gapcae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#else /* if !(defined(glib_typeof) && defined(G_CXX_STD_VERSION) */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer)); \
    gpointer gapcae_oldval = (gpointer)(oldval); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __atomic_compare_exchange_n ((atomic), (void *) (&(gapcae_oldval)), (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#endif /* defined(glib_typeof) */
/* Pointer CAS that also reports the previously-observed value via *preval
 * (seeded with oldval; updated by the builtin on failure). */
#define g_atomic_pointer_compare_and_exchange_full(atomic, oldval, newval, preval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    G_STATIC_ASSERT (sizeof *(preval) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (gpointer) *(preval) : NULL); \
    *(preval) = (oldval); \
    __atomic_compare_exchange_n ((atomic), (preval), (newval), FALSE, \
                                 __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? \
                                 TRUE : FALSE; \
  }))
/* Atomically replace the pointer; evaluates to the previous pointer value. */
#define g_atomic_pointer_exchange(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (gpointer) __atomic_exchange_n ((atomic), (newval), __ATOMIC_SEQ_CST); \
  }))
/* Fetch-and-op on a pointer-sized word; each evaluates to the prior value.
 * The unevaluated `(void) (0 ? (val) ^ (val) : 1)` line checks that val is
 * an integral type. */
#define g_atomic_pointer_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gintptr) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
/* For and/or/xor the target is accessed through a guintptr * so the bitwise
 * operation is performed on an unsigned pointer-sized integer. */
#define g_atomic_pointer_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    guintptr *gapa_atomic = (guintptr *) (atomic); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (guintptr)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (guintptr) __atomic_fetch_and (gapa_atomic, (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_pointer_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    guintptr *gapo_atomic = (guintptr *) (atomic); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (guintptr)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (guintptr) __atomic_fetch_or (gapo_atomic, (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_pointer_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    guintptr *gapx_atomic = (guintptr *) (atomic); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (guintptr)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (guintptr) __atomic_fetch_xor (gapx_atomic, (val), __ATOMIC_SEQ_CST); \
  }))

#else /* defined(__ATOMIC_SEQ_CST) */

/* We want to achieve __ATOMIC_SEQ_CST semantics here. See
 * https://en.cppreference.com/w/c/atomic/memory_order#Constants. For load
 * operations, that means performing an *acquire*:
 * > A load operation with this memory order performs the acquire operation on
 * > the affected memory location: no reads or writes in the current thread can
 * > be reordered before this load. All writes in other threads that release
 * > the same atomic variable are visible in the current thread.
 *
 * “no reads or writes in the current thread can be reordered before this load”
 * is implemented using a compiler barrier (a no-op `__asm__` section) to
 * prevent instruction reordering. Writes in other threads are synchronised
 * using `__sync_synchronize()`. It’s unclear from the GCC documentation whether
 * `__sync_synchronize()` acts as a compiler barrier, hence our explicit use of
 * one.
 *
 * For store operations, `__ATOMIC_SEQ_CST` means performing a *release*:
 * > A store operation with this memory order performs the release operation:
 * > no reads or writes in the current thread can be reordered after this store.
 * > All writes in the current thread are visible in other threads that acquire
 * > the same atomic variable (see Release-Acquire ordering below) and writes
 * > that carry a dependency into the atomic variable become visible in other
 * > threads that consume the same atomic (see Release-Consume ordering below).
 *
 * “no reads or writes in the current thread can be reordered after this store”
 * is implemented using a compiler barrier to prevent instruction reordering.
 * “All writes in the current thread are visible in other threads” is implemented
 * using `__sync_synchronize()`; similarly for “writes that carry a dependency”.
 */
#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    gint gaig_result; \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    gaig_result = (gint) *(atomic); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    gaig_result; \
  }))
#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) : 1); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    *(atomic) = (newval); \
  }))
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    gpointer gapg_result; \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    gapg_result = (gpointer) *(atomic); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    gapg_result; \
  }))
#if defined(glib_typeof)
/* With glib_typeof the stored value is cast back to the pointed-to type
 * (via guintptr) so the plain assignment type-checks. */
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    *(atomic) = (glib_typeof (*(atomic))) (guintptr) (newval); \
  }))
#else /* if !(defined(glib_typeof) */
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    *(atomic) = (gpointer) (guintptr) (newval); \
  }))
#endif /* if defined(glib_typeof) */

/* Legacy __sync_* fallbacks for the gint operations (used when the C11-style
 * __atomic builtins are unavailable). */
#define g_atomic_int_inc(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    (void) __sync_fetch_and_add ((atomic), 1); \
  }))
/* TRUE iff the decrement took *atomic from 1 to 0. */
#define g_atomic_int_dec_and_test(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    __sync_fetch_and_sub ((atomic), 1) == 1; \
  }))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
    __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
  }))
/* CAS reporting the pre-operation value via *preval; success iff that value
 * equalled oldval. */
#define g_atomic_int_compare_and_exchange_full(atomic, oldval, newval, preval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    G_STATIC_ASSERT (sizeof *(preval) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) ^ *(preval) : 1); \
    *(preval) = __sync_val_compare_and_swap ((atomic), (oldval), (newval)); \
    (*(preval) == (oldval)) ? TRUE : FALSE; \
  }))
#if defined(_GLIB_GCC_HAVE_SYNC_SWAP)
/* __sync_swap() is available (detected elsewhere as _GLIB_GCC_HAVE_SYNC_SWAP). */
#define g_atomic_int_exchange(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) : 1); \
    (gint) __sync_swap ((atomic), (newval)); \
  }))
#else /* defined(_GLIB_GCC_HAVE_SYNC_SWAP) */
/* No swap builtin: emulate exchange with a compare-and-swap retry loop. */
#define g_atomic_int_exchange(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    gint oldval; \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) : 1); \
    do \
      { \
        oldval = *atomic; \
      } while (!__sync_bool_compare_and_swap (atomic, oldval, newval)); \
    oldval; \
  }))
#endif /* defined(_GLIB_GCC_HAVE_SYNC_SWAP) */
/* Fetch-and-op: each evaluates to the value held before the operation. */
#define g_atomic_int_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (gint) __sync_fetch_and_add ((atomic), (val)); \
  }))
#define g_atomic_int_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __sync_fetch_and_and ((atomic), (val)); \
  }))
#define g_atomic_int_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __sync_fetch_and_or ((atomic), (val)); \
  }))
#define g_atomic_int_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __sync_fetch_and_xor ((atomic), (val)); \
  }))

/* Legacy __sync_* fallbacks for the pointer operations. */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
  }))
/* Pointer CAS reporting the pre-operation value via *preval; success iff
 * that value equalled oldval. */
#define g_atomic_pointer_compare_and_exchange_full(atomic, oldval, newval, preval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    G_STATIC_ASSERT (sizeof *(preval) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (gpointer) *(preval) : NULL); \
    *(preval) = __sync_val_compare_and_swap ((atomic), (oldval), (newval)); \
    (*(preval) == (oldval)) ? TRUE : FALSE; \
  }))
#if defined(_GLIB_GCC_HAVE_SYNC_SWAP)
#define g_atomic_pointer_exchange(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (gpointer) __sync_swap ((atomic), (newval)); \
  }))
#else
/* No swap builtin: emulate exchange with a compare-and-swap retry loop. */
#define g_atomic_pointer_exchange(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    gpointer oldval; \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    do \
      { \
        oldval = (gpointer) *atomic; \
      } while (!__sync_bool_compare_and_swap (atomic, oldval, newval)); \
    oldval; \
  }))
#endif /* defined(_GLIB_GCC_HAVE_SYNC_SWAP) */
/* Fetch-and-op on a pointer-sized word; each evaluates to the prior value. */
#define g_atomic_pointer_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gintptr) __sync_fetch_and_add ((atomic), (val)); \
  }))
#define g_atomic_pointer_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (guintptr) __sync_fetch_and_and ((atomic), (val)); \
  }))
#define g_atomic_pointer_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (guintptr) __sync_fetch_and_or ((atomic), (val)); \
  }))
#define g_atomic_pointer_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (guintptr) __sync_fetch_and_xor ((atomic), (val)); \
  }))

#endif /* !defined(__ATOMIC_SEQ_CST) */

#else /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */

/* No lock-free compiler intrinsics available: forward every macro to the
 * out-of-line function of the same name declared at the top of this header.
 * (A function-like macro is not re-expanded inside its own replacement list,
 * so the inner name refers to the real function.)  The casts let callers
 * pass volatile-qualified or differently-signed arguments. */
#define g_atomic_int_get(atomic) \
  (g_atomic_int_get ((gint *) (atomic)))
#define g_atomic_int_set(atomic, newval) \
  (g_atomic_int_set ((gint *) (atomic), (gint) (newval)))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_int_compare_and_exchange ((gint *) (atomic), (oldval), (newval)))
#define g_atomic_int_compare_and_exchange_full(atomic, oldval, newval, preval) \
  (g_atomic_int_compare_and_exchange_full ((gint *) (atomic), (oldval), (newval), (gint *) (preval)))
#define g_atomic_int_exchange(atomic, newval) \
  (g_atomic_int_exchange ((gint *) (atomic), (newval)))
#define g_atomic_int_add(atomic, val) \
  (g_atomic_int_add ((gint *) (atomic), (val)))
#define g_atomic_int_and(atomic, val) \
  (g_atomic_int_and ((guint *) (atomic), (val)))
#define g_atomic_int_or(atomic, val) \
  (g_atomic_int_or ((guint *) (atomic), (val)))
#define g_atomic_int_xor(atomic, val) \
  (g_atomic_int_xor ((guint *) (atomic), (val)))
#define g_atomic_int_inc(atomic) \
  (g_atomic_int_inc ((gint *) (atomic)))
#define g_atomic_int_dec_and_test(atomic) \
  (g_atomic_int_dec_and_test ((gint *) (atomic)))

#if defined(glib_typeof)
  /* The (void *) cast in the middle *looks* redundant, because
   * g_atomic_pointer_get returns void * already, but it's to silence
   * -Werror=bad-function-cast when we're doing something like:
   * guintptr a, b; ...; a = g_atomic_pointer_get (&b);
   * which would otherwise be assigning the void * result of
   * g_atomic_pointer_get directly to the pointer-sized but
   * non-pointer-typed result. */
#define g_atomic_pointer_get(atomic) \
  (glib_typeof (*(atomic))) (void *) ((g_atomic_pointer_get) ((void *) atomic))
#else /* !(defined(glib_typeof) */
#define g_atomic_pointer_get(atomic) \
  (g_atomic_pointer_get (atomic))
#endif

#define g_atomic_pointer_set(atomic, newval) \
  (g_atomic_pointer_set ((atomic), (gpointer) (newval)))

#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_pointer_compare_and_exchange ((atomic), (gpointer) (oldval), (gpointer) (newval)))
#define g_atomic_pointer_compare_and_exchange_full(atomic, oldval, newval, prevval) \
  (g_atomic_pointer_compare_and_exchange_full ((atomic), (gpointer) (oldval), (gpointer) (newval), (prevval)))
#define g_atomic_pointer_exchange(atomic, newval) \
  (g_atomic_pointer_exchange ((atomic), (gpointer) (newval)))
#define g_atomic_pointer_add(atomic, val) \
  (g_atomic_pointer_add ((atomic), (gssize) (val)))
#define g_atomic_pointer_and(atomic, val) \
  (g_atomic_pointer_and ((atomic), (gsize) (val)))
#define g_atomic_pointer_or(atomic, val) \
  (g_atomic_pointer_or ((atomic), (gsize) (val)))
#define g_atomic_pointer_xor(atomic, val) \
  (g_atomic_pointer_xor ((atomic), (gsize) (val)))

#endif /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */

#endif /* __G_ATOMIC_H__ */
