1 | /* |
2 | * Copyright © 2011 Ryan Lortie |
3 | * |
4 | * This library is free software; you can redistribute it and/or |
5 | * modify it under the terms of the GNU Lesser General Public |
6 | * License as published by the Free Software Foundation; either |
7 | * version 2.1 of the License, or (at your option) any later version. |
8 | * |
9 | * This library is distributed in the hope that it will be useful, but |
10 | * WITHOUT ANY WARRANTY; without even the implied warranty of |
11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
12 | * Lesser General Public License for more details. |
13 | * |
14 | * You should have received a copy of the GNU Lesser General Public |
15 | * License along with this library; if not, see <http://www.gnu.org/licenses/>. |
16 | * |
17 | * Author: Ryan Lortie <desrt@desrt.ca> |
18 | */ |
19 | |
20 | #ifndef __G_ATOMIC_H__ |
21 | #define __G_ATOMIC_H__ |
22 | |
23 | #if !defined (__GLIB_H_INSIDE__) && !defined (GLIB_COMPILATION) |
24 | #error "Only <glib.h> can be included directly." |
25 | #endif |
26 | |
27 | #include <glib/gtypes.h> |
28 | #include <glib/glib-typeof.h> |
29 | |
30 | G_BEGIN_DECLS |
31 | |
32 | GLIB_AVAILABLE_IN_ALL |
33 | gint g_atomic_int_get (const volatile gint *atomic); |
34 | GLIB_AVAILABLE_IN_ALL |
35 | void g_atomic_int_set (volatile gint *atomic, |
36 | gint newval); |
37 | GLIB_AVAILABLE_IN_ALL |
38 | void g_atomic_int_inc (volatile gint *atomic); |
39 | GLIB_AVAILABLE_IN_ALL |
40 | gboolean g_atomic_int_dec_and_test (volatile gint *atomic); |
41 | GLIB_AVAILABLE_IN_ALL |
42 | gboolean g_atomic_int_compare_and_exchange (volatile gint *atomic, |
43 | gint oldval, |
44 | gint newval); |
45 | GLIB_AVAILABLE_IN_ALL |
46 | gint g_atomic_int_add (volatile gint *atomic, |
47 | gint val); |
48 | GLIB_AVAILABLE_IN_2_30 |
49 | guint g_atomic_int_and (volatile guint *atomic, |
50 | guint val); |
51 | GLIB_AVAILABLE_IN_2_30 |
52 | guint g_atomic_int_or (volatile guint *atomic, |
53 | guint val); |
54 | GLIB_AVAILABLE_IN_ALL |
55 | guint g_atomic_int_xor (volatile guint *atomic, |
56 | guint val); |
57 | |
58 | GLIB_AVAILABLE_IN_ALL |
59 | gpointer g_atomic_pointer_get (const volatile void *atomic); |
60 | GLIB_AVAILABLE_IN_ALL |
61 | void g_atomic_pointer_set (volatile void *atomic, |
62 | gpointer newval); |
63 | GLIB_AVAILABLE_IN_ALL |
64 | gboolean g_atomic_pointer_compare_and_exchange (volatile void *atomic, |
65 | gpointer oldval, |
66 | gpointer newval); |
67 | GLIB_AVAILABLE_IN_ALL |
68 | gssize g_atomic_pointer_add (volatile void *atomic, |
69 | gssize val); |
70 | GLIB_AVAILABLE_IN_2_30 |
71 | gsize g_atomic_pointer_and (volatile void *atomic, |
72 | gsize val); |
73 | GLIB_AVAILABLE_IN_2_30 |
74 | gsize g_atomic_pointer_or (volatile void *atomic, |
75 | gsize val); |
76 | GLIB_AVAILABLE_IN_ALL |
77 | gsize g_atomic_pointer_xor (volatile void *atomic, |
78 | gsize val); |
79 | |
80 | GLIB_DEPRECATED_IN_2_30_FOR(g_atomic_int_add) |
81 | gint g_atomic_int_exchange_and_add (volatile gint *atomic, |
82 | gint val); |
83 | |
84 | G_END_DECLS |
85 | |
#if defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)

/* We prefer the new C11-style atomic extension of GCC if available */
#if defined(__ATOMIC_SEQ_CST)

/* Sequentially-consistent load of *atomic.  The `0 ? *(atomic) ^ *(atomic) : 1`
 * expression is dead code: it is never evaluated, and exists only to produce a
 * compile error if *atomic is not an integral (XOR-capable) type. */
#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    gint gaig_temp; \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    __atomic_load ((gint *)(atomic), &gaig_temp, __ATOMIC_SEQ_CST); \
    (gint) gaig_temp; \
  }))
/* Sequentially-consistent store of newval into *atomic.  newval is copied to a
 * local first because __atomic_store() takes the value by address. */
#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    gint gais_temp = (gint) (newval); \
    (void) (0 ? *(atomic) ^ (newval) : 1); \
    __atomic_store ((gint *)(atomic), &gais_temp, __ATOMIC_SEQ_CST); \
  }))
106 | |
#if defined(glib_typeof)
/* Type-preserving variants: glib_typeof keeps the pointee type, so the result
 * of g_atomic_pointer_get() needs no cast at the call site and
 * g_atomic_pointer_set() type-checks newval against *atomic. */
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    glib_typeof (*(atomic)) gapg_temp_newval; \
    glib_typeof ((atomic)) gapg_temp_atomic = (atomic); \
    __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
    gapg_temp_newval; \
  }))
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    glib_typeof ((atomic)) gaps_temp_atomic = (atomic); \
    glib_typeof (*(atomic)) gaps_temp_newval = (newval); \
    (void) (0 ? (gpointer) * (atomic) : NULL); \
    __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
  }))
#else /* if !defined(glib_typeof) */
/* Without glib_typeof, the object is accessed through a plain gpointer*.
 * The dead `0 ? (gpointer) *(atomic) : NULL` expression only verifies at
 * compile time that *atomic is convertible to a pointer. */
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    gpointer gapg_temp_newval; \
    gpointer *gapg_temp_atomic = (gpointer *)(atomic); \
    __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
    gapg_temp_newval; \
  }))
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    gpointer *gaps_temp_atomic = (gpointer *)(atomic); \
    gpointer gaps_temp_newval = (gpointer)(newval); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
  }))
#endif /* if defined(glib_typeof) */
142 | |
/* Atomically increment *atomic by 1; the fetched value is discarded. */
#define g_atomic_int_inc(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    (void) __atomic_fetch_add ((atomic), 1, __ATOMIC_SEQ_CST); \
  }))
/* Atomically decrement *atomic by 1; evaluates to TRUE iff the value
 * reached zero (fetch_sub returns the *previous* value, hence == 1). */
#define g_atomic_int_dec_and_test(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    __atomic_fetch_sub ((atomic), 1, __ATOMIC_SEQ_CST) == 1; \
  }))
#if defined(glib_typeof) && defined(__cplusplus) && __cplusplus >= 201103L
/* See comments below about equivalent g_atomic_pointer_compare_and_exchange()
 * shenanigans for type-safety when compiling in C++ mode. */
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    glib_typeof (*(atomic)) gaicae_oldval = (oldval); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
    __atomic_compare_exchange_n ((atomic), &gaicae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#else /* if !(defined(glib_typeof) && defined(__cplusplus) && __cplusplus >= 201103L) */
/* Strong CAS: swaps *atomic from oldval to newval only if it still equals
 * oldval.  oldval is copied to a local because __atomic_compare_exchange_n()
 * writes the observed value back through its second argument. */
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    gint gaicae_oldval = (oldval); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
    __atomic_compare_exchange_n ((atomic), (void *) (&(gaicae_oldval)), (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#endif /* defined(glib_typeof) */
/* Atomic read-modify-write operations on a gint/guint.  Each evaluates to
 * the value of *atomic *before* the operation was applied. */
#define g_atomic_int_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (gint) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_int_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __atomic_fetch_and ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_int_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __atomic_fetch_or ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_int_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __atomic_fetch_xor ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
198 | |
#if defined(glib_typeof) && defined(__cplusplus) && __cplusplus >= 201103L
/* This is typesafe because we check we can assign oldval to the type of
 * (*atomic). Unfortunately it can only be done in C++ because gcc/clang warn
 * when atomic is volatile and not oldval, or when atomic is gsize* and oldval
 * is NULL. Note that clang++ force us to be typesafe because it is an error if the 2nd
 * argument of __atomic_compare_exchange_n() has a different type than the
 * first.
 * https://gitlab.gnome.org/GNOME/glib/-/merge_requests/1919
 * https://gitlab.gnome.org/GNOME/glib/-/merge_requests/1715#note_1024120. */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer)); \
    glib_typeof (*(atomic)) gapcae_oldval = (oldval); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __atomic_compare_exchange_n ((atomic), &gapcae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#else /* if !(defined(glib_typeof) && defined(__cplusplus) && __cplusplus >= 201103L) */
/* Strong pointer CAS.  oldval is copied to a local gpointer because
 * __atomic_compare_exchange_n() writes the observed value back through
 * its second argument. */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer)); \
    gpointer gapcae_oldval = (gpointer)(oldval); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __atomic_compare_exchange_n ((atomic), (void *) (&(gapcae_oldval)), (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#endif /* defined(glib_typeof) */
/* Atomic read-modify-write on a pointer-sized location, returning the value
 * held *before* the operation.  The bitwise variants go through a gsize*
 * alias so the arithmetic is done on an unsigned integer, not a pointer;
 * the `0 ? (val) ^ (val) : 1` dead expression checks val is integral. */
#define g_atomic_pointer_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gssize) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_pointer_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    gsize *gapa_atomic = (gsize *) (atomic); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gsize) __atomic_fetch_and (gapa_atomic, (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_pointer_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    gsize *gapo_atomic = (gsize *) (atomic); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gsize) __atomic_fetch_or (gapo_atomic, (val), __ATOMIC_SEQ_CST); \
  }))
#define g_atomic_pointer_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    gsize *gapx_atomic = (gsize *) (atomic); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gsize) __atomic_fetch_xor (gapx_atomic, (val), __ATOMIC_SEQ_CST); \
  }))
260 | |
261 | #else /* defined(__ATOMIC_SEQ_CST) */ |
262 | |
263 | /* We want to achieve __ATOMIC_SEQ_CST semantics here. See |
264 | * https://en.cppreference.com/w/c/atomic/memory_order#Constants. For load |
265 | * operations, that means performing an *acquire*: |
266 | * > A load operation with this memory order performs the acquire operation on |
267 | * > the affected memory location: no reads or writes in the current thread can |
268 | * > be reordered before this load. All writes in other threads that release |
269 | * > the same atomic variable are visible in the current thread. |
270 | * |
271 | * “no reads or writes in the current thread can be reordered before this load” |
272 | * is implemented using a compiler barrier (a no-op `__asm__` section) to |
273 | * prevent instruction reordering. Writes in other threads are synchronised |
274 | * using `__sync_synchronize()`. It’s unclear from the GCC documentation whether |
275 | * `__sync_synchronize()` acts as a compiler barrier, hence our explicit use of |
276 | * one. |
277 | * |
278 | * For store operations, `__ATOMIC_SEQ_CST` means performing a *release*: |
279 | * > A store operation with this memory order performs the release operation: |
280 | * > no reads or writes in the current thread can be reordered after this store. |
281 | * > All writes in the current thread are visible in other threads that acquire |
282 | * > the same atomic variable (see Release-Acquire ordering below) and writes |
283 | * > that carry a dependency into the atomic variable become visible in other |
284 | * > threads that consume the same atomic (see Release-Consume ordering below). |
285 | * |
286 | * “no reads or writes in the current thread can be reordered after this store” |
287 | * is implemented using a compiler barrier to prevent instruction reordering. |
288 | * “All writes in the current thread are visible in other threads” is implemented |
289 | * using `__sync_synchronize()`; similarly for “writes that carry a dependency”. |
290 | */ |
/* Legacy __sync path: a plain load followed by a full barrier
 * (__sync_synchronize) plus an empty asm acting as a compiler barrier,
 * approximating a seq-cst acquire load (see the comment above). */
#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    gint gaig_result; \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    gaig_result = (gint) *(atomic); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    gaig_result; \
  }))
/* Seq-cst release store: barrier *before* the plain store. */
#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) : 1); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    *(atomic) = (newval); \
  }))
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({ \
    gpointer gapg_result; \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    gapg_result = (gpointer) *(atomic); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    gapg_result; \
  }))
#if defined(glib_typeof)
/* The (gsize) round-trip lets integral newvals (e.g. flag words stored in a
 * pointer slot) be assigned without a pointer-conversion warning. */
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    *(atomic) = (glib_typeof (*(atomic))) (gsize) (newval); \
  }))
#else /* if !defined(glib_typeof) */
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __sync_synchronize (); \
    __asm__ __volatile__ ("" : : : "memory"); \
    *(atomic) = (gpointer) (gsize) (newval); \
  }))
#endif /* if defined(glib_typeof) */
337 | |
/* __sync_fetch_and_* builtins are full barriers, so no extra fencing is
 * needed here.  Each fetch-op evaluates to the *previous* value of *atomic
 * (dec_and_test compares against 1 for that reason). */
#define g_atomic_int_inc(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    (void) __sync_fetch_and_add ((atomic), 1); \
  }))
#define g_atomic_int_dec_and_test(atomic) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ *(atomic) : 1); \
    __sync_fetch_and_sub ((atomic), 1) == 1; \
  }))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
    __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
  }))
#define g_atomic_int_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (gint) __sync_fetch_and_add ((atomic), (val)); \
  }))
#define g_atomic_int_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __sync_fetch_and_and ((atomic), (val)); \
  }))
#define g_atomic_int_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __sync_fetch_and_or ((atomic), (val)); \
  }))
#define g_atomic_int_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
    (void) (0 ? *(atomic) ^ (val) : 1); \
    (guint) __sync_fetch_and_xor ((atomic), (val)); \
  }))
380 | |
/* Pointer-sized __sync operations; the fetch-ops evaluate to the value held
 * *before* the operation, cast to the documented return type. */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
  }))
#define g_atomic_pointer_add(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gssize) __sync_fetch_and_add ((atomic), (val)); \
  }))
#define g_atomic_pointer_and(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gsize) __sync_fetch_and_and ((atomic), (val)); \
  }))
#define g_atomic_pointer_or(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gsize) __sync_fetch_and_or ((atomic), (val)); \
  }))
#define g_atomic_pointer_xor(atomic, val) \
  (G_GNUC_EXTENSION ({ \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
    (void) (0 ? (gpointer) *(atomic) : NULL); \
    (void) (0 ? (val) ^ (val) : 1); \
    (gsize) __sync_fetch_and_xor ((atomic), (val)); \
  }))
415 | |
416 | #endif /* !defined(__ATOMIC_SEQ_CST) */ |
417 | |
418 | #else /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */ |
419 | |
/* No lock-free support: forward to the out-of-line library functions.
 * A function-like macro is not re-expanded inside its own expansion, so
 * e.g. `g_atomic_int_get (...)` below refers to the real function; the
 * macros exist only to insert the argument casts. */
#define g_atomic_int_get(atomic) \
  (g_atomic_int_get ((gint *) (atomic)))
#define g_atomic_int_set(atomic, newval) \
  (g_atomic_int_set ((gint *) (atomic), (gint) (newval)))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_int_compare_and_exchange ((gint *) (atomic), (oldval), (newval)))
#define g_atomic_int_add(atomic, val) \
  (g_atomic_int_add ((gint *) (atomic), (val)))
#define g_atomic_int_and(atomic, val) \
  (g_atomic_int_and ((guint *) (atomic), (val)))
#define g_atomic_int_or(atomic, val) \
  (g_atomic_int_or ((guint *) (atomic), (val)))
#define g_atomic_int_xor(atomic, val) \
  (g_atomic_int_xor ((guint *) (atomic), (val)))
#define g_atomic_int_inc(atomic) \
  (g_atomic_int_inc ((gint *) (atomic)))
#define g_atomic_int_dec_and_test(atomic) \
  (g_atomic_int_dec_and_test ((gint *) (atomic)))
438 | |
#if defined(glib_typeof)
/* The (void *) cast in the middle *looks* redundant, because
 * g_atomic_pointer_get returns void * already, but it's to silence
 * -Werror=bad-function-cast when we're doing something like:
 * guintptr a, b; ...; a = g_atomic_pointer_get (&b);
 * which would otherwise be assigning the void * result of
 * g_atomic_pointer_get directly to the pointer-sized but
 * non-pointer-typed result. */
#define g_atomic_pointer_get(atomic) \
  (glib_typeof (*(atomic))) (void *) ((g_atomic_pointer_get) ((void *) atomic))
#else /* if !defined(glib_typeof) */
/* Parenthesised (g_atomic_pointer_get) suppresses macro re-expansion and
 * calls the library function directly. */
#define g_atomic_pointer_get(atomic) \
  (g_atomic_pointer_get (atomic))
#endif

#define g_atomic_pointer_set(atomic, newval) \
  (g_atomic_pointer_set ((atomic), (gpointer) (newval)))

/* Remaining pointer operations forward to the out-of-line functions with
 * the documented argument casts applied. */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_pointer_compare_and_exchange ((atomic), (gpointer) (oldval), (gpointer) (newval)))
#define g_atomic_pointer_add(atomic, val) \
  (g_atomic_pointer_add ((atomic), (gssize) (val)))
#define g_atomic_pointer_and(atomic, val) \
  (g_atomic_pointer_and ((atomic), (gsize) (val)))
#define g_atomic_pointer_or(atomic, val) \
  (g_atomic_pointer_or ((atomic), (gsize) (val)))
#define g_atomic_pointer_xor(atomic, val) \
  (g_atomic_pointer_xor ((atomic), (gsize) (val)))
467 | |
468 | #endif /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */ |
469 | |
470 | #endif /* __G_ATOMIC_H__ */ |
471 | |