/*
 * Copyright © 2011 Ryan Lortie
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 *
 * Author: Ryan Lortie <desrt@desrt.ca>
 */

#ifndef __G_ATOMIC_H__
#define __G_ATOMIC_H__

#if !defined (__GLIB_H_INSIDE__) && !defined (GLIB_COMPILATION)
#error "Only <glib.h> can be included directly."
#endif

#include <glib/gtypes.h>

#if defined(glib_typeof_2_68) && GLIB_VERSION_MIN_REQUIRED >= GLIB_VERSION_2_68
/* for glib_typeof */
#include <type_traits>
#endif

G_BEGIN_DECLS

GLIB_AVAILABLE_IN_ALL
gint g_atomic_int_get (const volatile gint *atomic);
GLIB_AVAILABLE_IN_ALL
void g_atomic_int_set (volatile gint *atomic,
                       gint newval);
GLIB_AVAILABLE_IN_ALL
void g_atomic_int_inc (volatile gint *atomic);
GLIB_AVAILABLE_IN_ALL
gboolean g_atomic_int_dec_and_test (volatile gint *atomic);
GLIB_AVAILABLE_IN_ALL
gboolean g_atomic_int_compare_and_exchange (volatile gint *atomic,
                                            gint oldval,
                                            gint newval);
GLIB_AVAILABLE_IN_ALL
gint g_atomic_int_add (volatile gint *atomic,
                       gint val);
GLIB_AVAILABLE_IN_2_30
guint g_atomic_int_and (volatile guint *atomic,
                        guint val);
GLIB_AVAILABLE_IN_2_30
guint g_atomic_int_or (volatile guint *atomic,
                       guint val);
GLIB_AVAILABLE_IN_ALL
guint g_atomic_int_xor (volatile guint *atomic,
                        guint val);

GLIB_AVAILABLE_IN_ALL
gpointer g_atomic_pointer_get (const volatile void *atomic);
GLIB_AVAILABLE_IN_ALL
void g_atomic_pointer_set (volatile void *atomic,
                           gpointer newval);
GLIB_AVAILABLE_IN_ALL
gboolean g_atomic_pointer_compare_and_exchange (volatile void *atomic,
                                                gpointer oldval,
                                                gpointer newval);
GLIB_AVAILABLE_IN_ALL
gssize g_atomic_pointer_add (volatile void *atomic,
                             gssize val);
GLIB_AVAILABLE_IN_2_30
gsize g_atomic_pointer_and (volatile void *atomic,
                            gsize val);
GLIB_AVAILABLE_IN_2_30
gsize g_atomic_pointer_or (volatile void *atomic,
                           gsize val);
GLIB_AVAILABLE_IN_ALL
gsize g_atomic_pointer_xor (volatile void *atomic,
                            gsize val);

GLIB_DEPRECATED_IN_2_30_FOR(g_atomic_int_add)
gint g_atomic_int_exchange_and_add (volatile gint *atomic,
                                    gint val);

G_END_DECLS
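
/* Example (illustrative sketch, not part of this header): the integer
 * atomics above are commonly used for reference counting. The type
 * `MyObject` and the functions `my_object_ref`/`my_object_unref` are
 * hypothetical names, shown only to demonstrate the API. */
#if 0
typedef struct { gint ref_count; char *data; } MyObject;

static MyObject *
my_object_ref (MyObject *obj)
{
  g_atomic_int_inc (&obj->ref_count);  /* atomic ++ref_count */
  return obj;
}

static void
my_object_unref (MyObject *obj)
{
  /* g_atomic_int_dec_and_test() returns TRUE iff the decrement brought
   * the counter to zero, so exactly one thread performs the free. */
  if (g_atomic_int_dec_and_test (&obj->ref_count))
    {
      g_free (obj->data);
      g_free (obj);
    }
}
#endif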

#if defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)

/* We prefer the new C11-style atomic extension of GCC if available */
#if defined(__ATOMIC_SEQ_CST)

#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    gint gaig_temp; \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    __atomic_load ((gint *)(atomic), &gaig_temp, __ATOMIC_SEQ_CST); \
    (gint) gaig_temp; \
  }))
#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    gint gais_temp = (gint) (newval); \
    (void) (0 ? *(atomic) ^ (newval) : 1); \
    __atomic_store ((gint *)(atomic), &gais_temp, __ATOMIC_SEQ_CST); \
  }))

#if defined(glib_typeof) && (!defined(glib_typeof_2_68) || GLIB_VERSION_MIN_REQUIRED >= GLIB_VERSION_2_68)
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    glib_typeof (*(atomic)) gapg_temp_newval; \
    glib_typeof ((atomic)) gapg_temp_atomic = (atomic); \
    __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
    gapg_temp_newval; \
  }))
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    glib_typeof ((atomic)) gaps_temp_atomic = (atomic); \
    glib_typeof (*(atomic)) gaps_temp_newval = (newval); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
  }))
#else /* if !(defined(glib_typeof) && (!defined(glib_typeof_2_68) || GLIB_VERSION_MIN_REQUIRED >= GLIB_VERSION_2_68)) */
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    gpointer gapg_temp_newval; \
    gpointer *gapg_temp_atomic = (gpointer *)(atomic); \
    __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
    gapg_temp_newval; \
  }))
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    gpointer *gaps_temp_atomic = (gpointer *)(atomic); \
    gpointer gaps_temp_newval = (gpointer)(newval); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
  }))
#endif /* if defined(glib_typeof) && (!defined(glib_typeof_2_68) || GLIB_VERSION_MIN_REQUIRED >= GLIB_VERSION_2_68) */

#define g_atomic_int_inc(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    (void) __atomic_fetch_add ((atomic), 1, __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_int_dec_and_test(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    __atomic_fetch_sub ((atomic), 1, __ATOMIC_SEQ_CST) == 1; \
  }))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    gint gaicae_oldval = (oldval); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
    __atomic_compare_exchange_n ((atomic), &gaicae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#define g_atomic_int_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (gint) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_int_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __atomic_fetch_and ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_int_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __atomic_fetch_or ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_int_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __atomic_fetch_xor ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))

#if defined(glib_typeof) && defined(__cplusplus) && __cplusplus >= 201103L
/* This is typesafe because we check that we can assign oldval to the type of
 * (*atomic). Unfortunately it can only be done in C++, because gcc/clang warn
 * when atomic is volatile and oldval is not, or when atomic is gsize* and
 * oldval is NULL. Note that clang++ forces us to be typesafe, because it is an
 * error if the 2nd argument of __atomic_compare_exchange_n() has a different
 * type than the first.
 * https://gitlab.gnome.org/GNOME/glib/-/merge_requests/1919
 * https://gitlab.gnome.org/GNOME/glib/-/merge_requests/1715#note_1024120. */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer)); \
    glib_typeof (*(atomic)) gapcae_oldval = (oldval); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __atomic_compare_exchange_n ((atomic), &gapcae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#else /* if !(defined(glib_typeof) && defined(__cplusplus) && __cplusplus >= 201103L) */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer)); \
    gpointer gapcae_oldval = (gpointer)(oldval); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __atomic_compare_exchange_n ((atomic), &gapcae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#endif /* if defined(glib_typeof) && defined(__cplusplus) && __cplusplus >= 201103L */
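
/* Example (illustrative sketch, not part of this header): a common use of
 * g_atomic_pointer_compare_and_exchange() is one-shot lazy initialisation,
 * where the first thread to install a non-NULL pointer wins and any loser
 * discards its own copy. `get_singleton`, `make_instance` and
 * `free_instance` are hypothetical names. */
#if 0
static gpointer singleton = NULL;

static gpointer
get_singleton (void)
{
  gpointer cur = g_atomic_pointer_get (&singleton);
  if (cur == NULL)
    {
      gpointer fresh = make_instance ();
      /* Succeeds only if `singleton` is still NULL; otherwise another
       * thread beat us to it, so free our copy and re-read the winner. */
      if (g_atomic_pointer_compare_and_exchange (&singleton, NULL, fresh))
        cur = fresh;
      else
        {
          free_instance (fresh);
          cur = g_atomic_pointer_get (&singleton);
        }
    }
  return cur;
}
#endif
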
#define g_atomic_pointer_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gssize) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_pointer_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    gsize *gapa_atomic = (gsize *) (atomic); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gsize) __atomic_fetch_and (gapa_atomic, (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_pointer_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    gsize *gapo_atomic = (gsize *) (atomic); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gsize) __atomic_fetch_or (gapo_atomic, (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_pointer_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    gsize *gapx_atomic = (gsize *) (atomic); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gsize) __atomic_fetch_xor (gapx_atomic, (val), __ATOMIC_SEQ_CST); \
  }))
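
/* Example (illustrative sketch, not part of this header): because the
 * bitwise pointer operations treat the pointer value as a gsize, they can
 * atomically set or clear low-order tag bits in a suitably aligned pointer.
 * `node` and `mark_node` are hypothetical; bit 0 is assumed to be free
 * because the pointee is at least 2-byte aligned. */
#if 0
static gpointer node;  /* points to data aligned to at least 2 bytes */

static void
mark_node (void)
{
  /* atomically set the "marked" tag in bit 0; the old bits are returned */
  gsize old_bits = g_atomic_pointer_or (&node, (gsize) 1);
  (void) old_bits;

  /* atomically clear the tag again */
  g_atomic_pointer_and (&node, ~(gsize) 1);
}
#endif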

#else /* defined(__ATOMIC_SEQ_CST) */

/* We want to achieve __ATOMIC_SEQ_CST semantics here. See
 * https://en.cppreference.com/w/c/atomic/memory_order#Constants. For load
 * operations, that means performing an *acquire*:
 * > A load operation with this memory order performs the acquire operation on
 * > the affected memory location: no reads or writes in the current thread can
 * > be reordered before this load. All writes in other threads that release
 * > the same atomic variable are visible in the current thread.
 *
 * “no reads or writes in the current thread can be reordered before this load”
 * is implemented using a compiler barrier (a no-op `__asm__` section) to
 * prevent instruction reordering. Writes in other threads are synchronised
 * using `__sync_synchronize()`. It’s unclear from the GCC documentation whether
 * `__sync_synchronize()` acts as a compiler barrier, hence our explicit use of
 * one.
 *
 * For store operations, `__ATOMIC_SEQ_CST` means performing a *release*:
 * > A store operation with this memory order performs the release operation:
 * > no reads or writes in the current thread can be reordered after this store.
 * > All writes in the current thread are visible in other threads that acquire
 * > the same atomic variable (see Release-Acquire ordering below) and writes
 * > that carry a dependency into the atomic variable become visible in other
 * > threads that consume the same atomic (see Release-Consume ordering below).
 *
 * “no reads or writes in the current thread can be reordered after this store”
 * is implemented using a compiler barrier to prevent instruction reordering.
 * “All writes in the current thread are visible in other threads” is implemented
 * using `__sync_synchronize()`; similarly for “writes that carry a dependency”.
 */
#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    gint gaig_result; \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    gaig_result = (gint) *(atomic); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    gaig_result; \
  }))
#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) : 1); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    *(atomic) = (newval); \
  }))
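
/* Example (illustrative sketch, not part of this header): the
 * acquire/release semantics described in the comment above are what make
 * the classic message-passing pattern safe. `shared_data`, `ready`,
 * `producer` and `consumer` are hypothetical names. */
#if 0
static gint shared_data;
static gint ready;

/* Thread A: publish the data, then release-store the flag. */
static void
producer (void)
{
  shared_data = 42;              /* plain write ...                     */
  g_atomic_int_set (&ready, 1);  /* ... made visible by the release store */
}

/* Thread B: acquire-load the flag; once ready == 1, the data read below
 * is guaranteed to observe the producer's write. */
static void
consumer (void)
{
  while (g_atomic_int_get (&ready) == 0)
    ;  /* spin until the flag is set */
  g_assert (shared_data == 42);
}
#endif
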
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    gpointer gapg_result; \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    gapg_result = (gpointer) *(atomic); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    gapg_result; \
  }))
#if defined(glib_typeof) && (!defined(glib_typeof_2_68) || GLIB_VERSION_MIN_REQUIRED >= GLIB_VERSION_2_68)
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    *(atomic) = (glib_typeof (*(atomic))) (gsize) (newval); \
  }))
#else /* if !(defined(glib_typeof) && (!defined(glib_typeof_2_68) || GLIB_VERSION_MIN_REQUIRED >= GLIB_VERSION_2_68)) */
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    *(atomic) = (gpointer) (gsize) (newval); \
  }))
#endif /* if defined(glib_typeof) && (!defined(glib_typeof_2_68) || GLIB_VERSION_MIN_REQUIRED >= GLIB_VERSION_2_68) */

#define g_atomic_int_inc(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    (void) __sync_fetch_and_add ((atomic), 1); \
  }))
#define g_atomic_int_dec_and_test(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    __sync_fetch_and_sub ((atomic), 1) == 1; \
  }))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
    __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
  }))
#define g_atomic_int_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (gint) __sync_fetch_and_add ((atomic), (val)); \
  }))
#define g_atomic_int_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __sync_fetch_and_and ((atomic), (val)); \
  }))
#define g_atomic_int_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __sync_fetch_and_or ((atomic), (val)); \
  }))
#define g_atomic_int_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __sync_fetch_and_xor ((atomic), (val)); \
  }))

#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
  }))
#define g_atomic_pointer_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gssize) __sync_fetch_and_add ((atomic), (val)); \
  }))
#define g_atomic_pointer_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gsize) __sync_fetch_and_and ((atomic), (val)); \
  }))
#define g_atomic_pointer_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gsize) __sync_fetch_and_or ((atomic), (val)); \
  }))
#define g_atomic_pointer_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gsize) __sync_fetch_and_xor ((atomic), (val)); \
  }))

#endif /* !defined(__ATOMIC_SEQ_CST) */

#else /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */

#define g_atomic_int_get(atomic) \
  (g_atomic_int_get ((gint *) (atomic)))
#define g_atomic_int_set(atomic, newval) \
  (g_atomic_int_set ((gint *) (atomic), (gint) (newval)))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_int_compare_and_exchange ((gint *) (atomic), (oldval), (newval)))
#define g_atomic_int_add(atomic, val) \
  (g_atomic_int_add ((gint *) (atomic), (val)))
#define g_atomic_int_and(atomic, val) \
  (g_atomic_int_and ((guint *) (atomic), (val)))
#define g_atomic_int_or(atomic, val) \
  (g_atomic_int_or ((guint *) (atomic), (val)))
#define g_atomic_int_xor(atomic, val) \
  (g_atomic_int_xor ((guint *) (atomic), (val)))
#define g_atomic_int_inc(atomic) \
  (g_atomic_int_inc ((gint *) (atomic)))
#define g_atomic_int_dec_and_test(atomic) \
  (g_atomic_int_dec_and_test ((gint *) (atomic)))

#if defined(glib_typeof) && (!defined(glib_typeof_2_68) || GLIB_VERSION_MIN_REQUIRED >= GLIB_VERSION_2_68)
/* The (void *) cast in the middle *looks* redundant, because
 * g_atomic_pointer_get returns void * already, but it's there to silence
 * -Werror=bad-function-cast when we're doing something like:
 *   guintptr a, b; ...; a = g_atomic_pointer_get (&b);
 * which would otherwise assign the void * result of g_atomic_pointer_get
 * directly to a pointer-sized but non-pointer-typed variable. */
#define g_atomic_pointer_get(atomic) \
  (glib_typeof (*(atomic))) (void *) ((g_atomic_pointer_get) ((void *) atomic))
#else /* !(defined(glib_typeof) && (!defined(glib_typeof_2_68) || GLIB_VERSION_MIN_REQUIRED >= GLIB_VERSION_2_68)) */
#define g_atomic_pointer_get(atomic) \
  (g_atomic_pointer_get (atomic))
#endif

#define g_atomic_pointer_set(atomic, newval) \
  (g_atomic_pointer_set ((atomic), (gpointer) (newval)))

#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_pointer_compare_and_exchange ((atomic), (gpointer) (oldval), (gpointer) (newval)))
#define g_atomic_pointer_add(atomic, val) \
  (g_atomic_pointer_add ((atomic), (gssize) (val)))
#define g_atomic_pointer_and(atomic, val) \
  (g_atomic_pointer_and ((atomic), (gsize) (val)))
#define g_atomic_pointer_or(atomic, val) \
  (g_atomic_pointer_or ((atomic), (gsize) (val)))
#define g_atomic_pointer_xor(atomic, val) \
  (g_atomic_pointer_xor ((atomic), (gsize) (val)))

#endif /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */

#endif /* __G_ATOMIC_H__ */