1 | // -*- C++ -*- |
2 | //===----------------------------------------------------------------------===// |
3 | // |
4 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
5 | // See https://llvm.org/LICENSE.txt for license information. |
6 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
7 | // |
8 | //===----------------------------------------------------------------------===// |
9 | |
10 | #ifndef _LIBCPP_ATOMIC |
11 | #define _LIBCPP_ATOMIC |
12 | |
13 | /* |
14 | atomic synopsis |
15 | |
16 | namespace std |
17 | { |
18 | |
19 | // feature test macro [version.syn] |
20 | |
21 | #define __cpp_lib_atomic_is_always_lock_free |
22 | #define __cpp_lib_atomic_flag_test |
23 | #define __cpp_lib_atomic_lock_free_type_aliases |
24 | #define __cpp_lib_atomic_wait |
25 | |
26 | // order and consistency |
27 | |
28 | enum memory_order: unspecified // enum class in C++20 |
29 | { |
30 | relaxed, |
31 | consume, // load-consume |
32 | acquire, // load-acquire |
33 | release, // store-release |
34 | acq_rel, // store-release load-acquire |
    seq_cst // store-release load-acquire, plus a single total order over all seq_cst operations
36 | }; |
37 | |
38 | inline constexpr auto memory_order_relaxed = memory_order::relaxed; |
39 | inline constexpr auto memory_order_consume = memory_order::consume; |
40 | inline constexpr auto memory_order_acquire = memory_order::acquire; |
41 | inline constexpr auto memory_order_release = memory_order::release; |
42 | inline constexpr auto memory_order_acq_rel = memory_order::acq_rel; |
43 | inline constexpr auto memory_order_seq_cst = memory_order::seq_cst; |
44 | |
45 | template <class T> T kill_dependency(T y) noexcept; |
46 | |
47 | // lock-free property |
48 | |
49 | #define ATOMIC_BOOL_LOCK_FREE unspecified |
50 | #define ATOMIC_CHAR_LOCK_FREE unspecified |
51 | #define ATOMIC_CHAR8_T_LOCK_FREE unspecified // C++20 |
52 | #define ATOMIC_CHAR16_T_LOCK_FREE unspecified |
53 | #define ATOMIC_CHAR32_T_LOCK_FREE unspecified |
54 | #define ATOMIC_WCHAR_T_LOCK_FREE unspecified |
55 | #define ATOMIC_SHORT_LOCK_FREE unspecified |
56 | #define ATOMIC_INT_LOCK_FREE unspecified |
57 | #define ATOMIC_LONG_LOCK_FREE unspecified |
58 | #define ATOMIC_LLONG_LOCK_FREE unspecified |
59 | #define ATOMIC_POINTER_LOCK_FREE unspecified |
60 | |
61 | template <class T> |
62 | struct atomic |
63 | { |
64 | using value_type = T; |
65 | |
66 | static constexpr bool is_always_lock_free; |
67 | bool is_lock_free() const volatile noexcept; |
68 | bool is_lock_free() const noexcept; |
69 | |
70 | atomic() noexcept = default; // until C++20 |
71 | constexpr atomic() noexcept(is_nothrow_default_constructible_v<T>); // since C++20 |
72 | constexpr atomic(T desr) noexcept; |
73 | atomic(const atomic&) = delete; |
74 | atomic& operator=(const atomic&) = delete; |
75 | atomic& operator=(const atomic&) volatile = delete; |
76 | |
77 | T load(memory_order m = memory_order_seq_cst) const volatile noexcept; |
78 | T load(memory_order m = memory_order_seq_cst) const noexcept; |
79 | operator T() const volatile noexcept; |
80 | operator T() const noexcept; |
81 | void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept; |
82 | void store(T desr, memory_order m = memory_order_seq_cst) noexcept; |
83 | T operator=(T) volatile noexcept; |
84 | T operator=(T) noexcept; |
85 | |
86 | T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept; |
87 | T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept; |
88 | bool compare_exchange_weak(T& expc, T desr, |
89 | memory_order s, memory_order f) volatile noexcept; |
90 | bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept; |
91 | bool compare_exchange_strong(T& expc, T desr, |
92 | memory_order s, memory_order f) volatile noexcept; |
93 | bool compare_exchange_strong(T& expc, T desr, |
94 | memory_order s, memory_order f) noexcept; |
95 | bool compare_exchange_weak(T& expc, T desr, |
96 | memory_order m = memory_order_seq_cst) volatile noexcept; |
97 | bool compare_exchange_weak(T& expc, T desr, |
98 | memory_order m = memory_order_seq_cst) noexcept; |
99 | bool compare_exchange_strong(T& expc, T desr, |
100 | memory_order m = memory_order_seq_cst) volatile noexcept; |
101 | bool compare_exchange_strong(T& expc, T desr, |
102 | memory_order m = memory_order_seq_cst) noexcept; |
103 | |
104 | void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept; |
105 | void wait(T, memory_order = memory_order::seq_cst) const noexcept; |
106 | void notify_one() volatile noexcept; |
107 | void notify_one() noexcept; |
108 | void notify_all() volatile noexcept; |
109 | void notify_all() noexcept; |
110 | }; |
111 | |
112 | template <> |
113 | struct atomic<integral> |
114 | { |
115 | using value_type = integral; |
116 | using difference_type = value_type; |
117 | |
118 | static constexpr bool is_always_lock_free; |
119 | bool is_lock_free() const volatile noexcept; |
120 | bool is_lock_free() const noexcept; |
121 | |
122 | atomic() noexcept = default; |
123 | constexpr atomic(integral desr) noexcept; |
124 | atomic(const atomic&) = delete; |
125 | atomic& operator=(const atomic&) = delete; |
126 | atomic& operator=(const atomic&) volatile = delete; |
127 | |
128 | integral load(memory_order m = memory_order_seq_cst) const volatile noexcept; |
129 | integral load(memory_order m = memory_order_seq_cst) const noexcept; |
130 | operator integral() const volatile noexcept; |
131 | operator integral() const noexcept; |
132 | void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept; |
133 | void store(integral desr, memory_order m = memory_order_seq_cst) noexcept; |
134 | integral operator=(integral desr) volatile noexcept; |
135 | integral operator=(integral desr) noexcept; |
136 | |
137 | integral exchange(integral desr, |
138 | memory_order m = memory_order_seq_cst) volatile noexcept; |
139 | integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept; |
140 | bool compare_exchange_weak(integral& expc, integral desr, |
141 | memory_order s, memory_order f) volatile noexcept; |
142 | bool compare_exchange_weak(integral& expc, integral desr, |
143 | memory_order s, memory_order f) noexcept; |
144 | bool compare_exchange_strong(integral& expc, integral desr, |
145 | memory_order s, memory_order f) volatile noexcept; |
146 | bool compare_exchange_strong(integral& expc, integral desr, |
147 | memory_order s, memory_order f) noexcept; |
148 | bool compare_exchange_weak(integral& expc, integral desr, |
149 | memory_order m = memory_order_seq_cst) volatile noexcept; |
150 | bool compare_exchange_weak(integral& expc, integral desr, |
151 | memory_order m = memory_order_seq_cst) noexcept; |
152 | bool compare_exchange_strong(integral& expc, integral desr, |
153 | memory_order m = memory_order_seq_cst) volatile noexcept; |
154 | bool compare_exchange_strong(integral& expc, integral desr, |
155 | memory_order m = memory_order_seq_cst) noexcept; |
156 | |
157 | integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; |
158 | integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept; |
159 | integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; |
160 | integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept; |
161 | integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; |
162 | integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept; |
163 | integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; |
164 | integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept; |
165 | integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; |
166 | integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept; |
167 | |
168 | integral operator++(int) volatile noexcept; |
169 | integral operator++(int) noexcept; |
170 | integral operator--(int) volatile noexcept; |
171 | integral operator--(int) noexcept; |
172 | integral operator++() volatile noexcept; |
173 | integral operator++() noexcept; |
174 | integral operator--() volatile noexcept; |
175 | integral operator--() noexcept; |
176 | integral operator+=(integral op) volatile noexcept; |
177 | integral operator+=(integral op) noexcept; |
178 | integral operator-=(integral op) volatile noexcept; |
179 | integral operator-=(integral op) noexcept; |
180 | integral operator&=(integral op) volatile noexcept; |
181 | integral operator&=(integral op) noexcept; |
182 | integral operator|=(integral op) volatile noexcept; |
183 | integral operator|=(integral op) noexcept; |
184 | integral operator^=(integral op) volatile noexcept; |
185 | integral operator^=(integral op) noexcept; |
186 | |
187 | void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept; |
188 | void wait(integral, memory_order = memory_order::seq_cst) const noexcept; |
189 | void notify_one() volatile noexcept; |
190 | void notify_one() noexcept; |
191 | void notify_all() volatile noexcept; |
192 | void notify_all() noexcept; |
193 | }; |
194 | |
195 | template <class T> |
196 | struct atomic<T*> |
197 | { |
198 | using value_type = T*; |
199 | using difference_type = ptrdiff_t; |
200 | |
201 | static constexpr bool is_always_lock_free; |
202 | bool is_lock_free() const volatile noexcept; |
203 | bool is_lock_free() const noexcept; |
204 | |
205 | atomic() noexcept = default; // until C++20 |
206 | constexpr atomic() noexcept; // since C++20 |
207 | constexpr atomic(T* desr) noexcept; |
208 | atomic(const atomic&) = delete; |
209 | atomic& operator=(const atomic&) = delete; |
210 | atomic& operator=(const atomic&) volatile = delete; |
211 | |
212 | T* load(memory_order m = memory_order_seq_cst) const volatile noexcept; |
213 | T* load(memory_order m = memory_order_seq_cst) const noexcept; |
214 | operator T*() const volatile noexcept; |
215 | operator T*() const noexcept; |
216 | void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept; |
217 | void store(T* desr, memory_order m = memory_order_seq_cst) noexcept; |
218 | T* operator=(T*) volatile noexcept; |
219 | T* operator=(T*) noexcept; |
220 | |
221 | T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept; |
222 | T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept; |
223 | bool compare_exchange_weak(T*& expc, T* desr, |
224 | memory_order s, memory_order f) volatile noexcept; |
225 | bool compare_exchange_weak(T*& expc, T* desr, |
226 | memory_order s, memory_order f) noexcept; |
227 | bool compare_exchange_strong(T*& expc, T* desr, |
228 | memory_order s, memory_order f) volatile noexcept; |
229 | bool compare_exchange_strong(T*& expc, T* desr, |
230 | memory_order s, memory_order f) noexcept; |
231 | bool compare_exchange_weak(T*& expc, T* desr, |
232 | memory_order m = memory_order_seq_cst) volatile noexcept; |
233 | bool compare_exchange_weak(T*& expc, T* desr, |
234 | memory_order m = memory_order_seq_cst) noexcept; |
235 | bool compare_exchange_strong(T*& expc, T* desr, |
236 | memory_order m = memory_order_seq_cst) volatile noexcept; |
237 | bool compare_exchange_strong(T*& expc, T* desr, |
238 | memory_order m = memory_order_seq_cst) noexcept; |
239 | T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept; |
240 | T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept; |
241 | T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept; |
242 | T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept; |
243 | |
244 | T* operator++(int) volatile noexcept; |
245 | T* operator++(int) noexcept; |
246 | T* operator--(int) volatile noexcept; |
247 | T* operator--(int) noexcept; |
248 | T* operator++() volatile noexcept; |
249 | T* operator++() noexcept; |
250 | T* operator--() volatile noexcept; |
251 | T* operator--() noexcept; |
252 | T* operator+=(ptrdiff_t op) volatile noexcept; |
253 | T* operator+=(ptrdiff_t op) noexcept; |
254 | T* operator-=(ptrdiff_t op) volatile noexcept; |
255 | T* operator-=(ptrdiff_t op) noexcept; |
256 | |
257 | void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept; |
258 | void wait(T*, memory_order = memory_order::seq_cst) const noexcept; |
259 | void notify_one() volatile noexcept; |
260 | void notify_one() noexcept; |
261 | void notify_all() volatile noexcept; |
262 | void notify_all() noexcept; |
263 | }; |
264 | |
265 | |
266 | // [atomics.nonmembers], non-member functions |
267 | template<class T> |
268 | bool atomic_is_lock_free(const volatile atomic<T>*) noexcept; |
269 | template<class T> |
270 | bool atomic_is_lock_free(const atomic<T>*) noexcept; |
271 | template<class T> |
272 | void atomic_store(volatile atomic<T>*, atomic<T>::value_type) noexcept; |
273 | template<class T> |
274 | void atomic_store(atomic<T>*, atomic<T>::value_type) noexcept; |
275 | template<class T> |
276 | void atomic_store_explicit(volatile atomic<T>*, atomic<T>::value_type, |
277 | memory_order) noexcept; |
278 | template<class T> |
279 | void atomic_store_explicit(atomic<T>*, atomic<T>::value_type, |
280 | memory_order) noexcept; |
281 | template<class T> |
282 | T atomic_load(const volatile atomic<T>*) noexcept; |
283 | template<class T> |
284 | T atomic_load(const atomic<T>*) noexcept; |
285 | template<class T> |
286 | T atomic_load_explicit(const volatile atomic<T>*, memory_order) noexcept; |
287 | template<class T> |
288 | T atomic_load_explicit(const atomic<T>*, memory_order) noexcept; |
289 | template<class T> |
290 | T atomic_exchange(volatile atomic<T>*, atomic<T>::value_type) noexcept; |
291 | template<class T> |
292 | T atomic_exchange(atomic<T>*, atomic<T>::value_type) noexcept; |
293 | template<class T> |
294 | T atomic_exchange_explicit(volatile atomic<T>*, atomic<T>::value_type, |
295 | memory_order) noexcept; |
296 | template<class T> |
297 | T atomic_exchange_explicit(atomic<T>*, atomic<T>::value_type, |
298 | memory_order) noexcept; |
299 | template<class T> |
300 | bool atomic_compare_exchange_weak(volatile atomic<T>*, atomic<T>::value_type*, |
301 | atomic<T>::value_type) noexcept; |
302 | template<class T> |
303 | bool atomic_compare_exchange_weak(atomic<T>*, atomic<T>::value_type*, |
304 | atomic<T>::value_type) noexcept; |
305 | template<class T> |
306 | bool atomic_compare_exchange_strong(volatile atomic<T>*, atomic<T>::value_type*, |
307 | atomic<T>::value_type) noexcept; |
308 | template<class T> |
309 | bool atomic_compare_exchange_strong(atomic<T>*, atomic<T>::value_type*, |
310 | atomic<T>::value_type) noexcept; |
311 | template<class T> |
312 | bool atomic_compare_exchange_weak_explicit(volatile atomic<T>*, atomic<T>::value_type*, |
313 | atomic<T>::value_type, |
314 | memory_order, memory_order) noexcept; |
315 | template<class T> |
316 | bool atomic_compare_exchange_weak_explicit(atomic<T>*, atomic<T>::value_type*, |
317 | atomic<T>::value_type, |
318 | memory_order, memory_order) noexcept; |
319 | template<class T> |
320 | bool atomic_compare_exchange_strong_explicit(volatile atomic<T>*, atomic<T>::value_type*, |
321 | atomic<T>::value_type, |
322 | memory_order, memory_order) noexcept; |
323 | template<class T> |
324 | bool atomic_compare_exchange_strong_explicit(atomic<T>*, atomic<T>::value_type*, |
325 | atomic<T>::value_type, |
326 | memory_order, memory_order) noexcept; |
327 | |
328 | template<class T> |
329 | T atomic_fetch_add(volatile atomic<T>*, atomic<T>::difference_type) noexcept; |
330 | template<class T> |
331 | T atomic_fetch_add(atomic<T>*, atomic<T>::difference_type) noexcept; |
332 | template<class T> |
333 | T atomic_fetch_add_explicit(volatile atomic<T>*, atomic<T>::difference_type, |
334 | memory_order) noexcept; |
335 | template<class T> |
336 | T atomic_fetch_add_explicit(atomic<T>*, atomic<T>::difference_type, |
337 | memory_order) noexcept; |
338 | template<class T> |
339 | T atomic_fetch_sub(volatile atomic<T>*, atomic<T>::difference_type) noexcept; |
340 | template<class T> |
341 | T atomic_fetch_sub(atomic<T>*, atomic<T>::difference_type) noexcept; |
342 | template<class T> |
343 | T atomic_fetch_sub_explicit(volatile atomic<T>*, atomic<T>::difference_type, |
344 | memory_order) noexcept; |
345 | template<class T> |
346 | T atomic_fetch_sub_explicit(atomic<T>*, atomic<T>::difference_type, |
347 | memory_order) noexcept; |
348 | template<class T> |
349 | T atomic_fetch_and(volatile atomic<T>*, atomic<T>::value_type) noexcept; |
350 | template<class T> |
351 | T atomic_fetch_and(atomic<T>*, atomic<T>::value_type) noexcept; |
352 | template<class T> |
353 | T atomic_fetch_and_explicit(volatile atomic<T>*, atomic<T>::value_type, |
354 | memory_order) noexcept; |
355 | template<class T> |
356 | T atomic_fetch_and_explicit(atomic<T>*, atomic<T>::value_type, |
357 | memory_order) noexcept; |
358 | template<class T> |
359 | T atomic_fetch_or(volatile atomic<T>*, atomic<T>::value_type) noexcept; |
360 | template<class T> |
361 | T atomic_fetch_or(atomic<T>*, atomic<T>::value_type) noexcept; |
362 | template<class T> |
363 | T atomic_fetch_or_explicit(volatile atomic<T>*, atomic<T>::value_type, |
364 | memory_order) noexcept; |
365 | template<class T> |
366 | T atomic_fetch_or_explicit(atomic<T>*, atomic<T>::value_type, |
367 | memory_order) noexcept; |
368 | template<class T> |
369 | T atomic_fetch_xor(volatile atomic<T>*, atomic<T>::value_type) noexcept; |
370 | template<class T> |
371 | T atomic_fetch_xor(atomic<T>*, atomic<T>::value_type) noexcept; |
372 | template<class T> |
373 | T atomic_fetch_xor_explicit(volatile atomic<T>*, atomic<T>::value_type, |
374 | memory_order) noexcept; |
375 | template<class T> |
376 | T atomic_fetch_xor_explicit(atomic<T>*, atomic<T>::value_type, |
377 | memory_order) noexcept; |
378 | |
379 | template<class T> |
380 | void atomic_wait(const volatile atomic<T>*, atomic<T>::value_type); |
381 | template<class T> |
382 | void atomic_wait(const atomic<T>*, atomic<T>::value_type); |
383 | template<class T> |
384 | void atomic_wait_explicit(const volatile atomic<T>*, atomic<T>::value_type, |
385 | memory_order); |
386 | template<class T> |
387 | void atomic_wait_explicit(const atomic<T>*, atomic<T>::value_type, |
388 | memory_order); |
389 | template<class T> |
390 | void atomic_notify_one(volatile atomic<T>*); |
391 | template<class T> |
392 | void atomic_notify_one(atomic<T>*); |
393 | template<class T> |
394 | void atomic_notify_all(volatile atomic<T>*); |
395 | template<class T> |
396 | void atomic_notify_all(atomic<T>*); |
397 | |
398 | // Atomics for standard typedef types |
399 | |
400 | typedef atomic<bool> atomic_bool; |
401 | typedef atomic<char> atomic_char; |
402 | typedef atomic<signed char> atomic_schar; |
403 | typedef atomic<unsigned char> atomic_uchar; |
404 | typedef atomic<short> atomic_short; |
405 | typedef atomic<unsigned short> atomic_ushort; |
406 | typedef atomic<int> atomic_int; |
407 | typedef atomic<unsigned int> atomic_uint; |
408 | typedef atomic<long> atomic_long; |
409 | typedef atomic<unsigned long> atomic_ulong; |
410 | typedef atomic<long long> atomic_llong; |
411 | typedef atomic<unsigned long long> atomic_ullong; |
412 | typedef atomic<char8_t> atomic_char8_t; // C++20 |
413 | typedef atomic<char16_t> atomic_char16_t; |
414 | typedef atomic<char32_t> atomic_char32_t; |
415 | typedef atomic<wchar_t> atomic_wchar_t; |
416 | |
417 | typedef atomic<int_least8_t> atomic_int_least8_t; |
418 | typedef atomic<uint_least8_t> atomic_uint_least8_t; |
419 | typedef atomic<int_least16_t> atomic_int_least16_t; |
420 | typedef atomic<uint_least16_t> atomic_uint_least16_t; |
421 | typedef atomic<int_least32_t> atomic_int_least32_t; |
422 | typedef atomic<uint_least32_t> atomic_uint_least32_t; |
423 | typedef atomic<int_least64_t> atomic_int_least64_t; |
424 | typedef atomic<uint_least64_t> atomic_uint_least64_t; |
425 | |
426 | typedef atomic<int_fast8_t> atomic_int_fast8_t; |
427 | typedef atomic<uint_fast8_t> atomic_uint_fast8_t; |
428 | typedef atomic<int_fast16_t> atomic_int_fast16_t; |
429 | typedef atomic<uint_fast16_t> atomic_uint_fast16_t; |
430 | typedef atomic<int_fast32_t> atomic_int_fast32_t; |
431 | typedef atomic<uint_fast32_t> atomic_uint_fast32_t; |
432 | typedef atomic<int_fast64_t> atomic_int_fast64_t; |
433 | typedef atomic<uint_fast64_t> atomic_uint_fast64_t; |
434 | |
435 | typedef atomic<int8_t> atomic_int8_t; |
436 | typedef atomic<uint8_t> atomic_uint8_t; |
437 | typedef atomic<int16_t> atomic_int16_t; |
438 | typedef atomic<uint16_t> atomic_uint16_t; |
439 | typedef atomic<int32_t> atomic_int32_t; |
440 | typedef atomic<uint32_t> atomic_uint32_t; |
441 | typedef atomic<int64_t> atomic_int64_t; |
442 | typedef atomic<uint64_t> atomic_uint64_t; |
443 | |
444 | typedef atomic<intptr_t> atomic_intptr_t; |
445 | typedef atomic<uintptr_t> atomic_uintptr_t; |
446 | typedef atomic<size_t> atomic_size_t; |
447 | typedef atomic<ptrdiff_t> atomic_ptrdiff_t; |
448 | typedef atomic<intmax_t> atomic_intmax_t; |
449 | typedef atomic<uintmax_t> atomic_uintmax_t; |
450 | |
451 | // flag type and operations |
452 | |
453 | typedef struct atomic_flag |
454 | { |
455 | atomic_flag() noexcept = default; // until C++20 |
456 | constexpr atomic_flag() noexcept; // since C++20 |
457 | atomic_flag(const atomic_flag&) = delete; |
458 | atomic_flag& operator=(const atomic_flag&) = delete; |
459 | atomic_flag& operator=(const atomic_flag&) volatile = delete; |
460 | |
461 | bool test(memory_order m = memory_order_seq_cst) volatile noexcept; |
462 | bool test(memory_order m = memory_order_seq_cst) noexcept; |
463 | bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept; |
464 | bool test_and_set(memory_order m = memory_order_seq_cst) noexcept; |
465 | void clear(memory_order m = memory_order_seq_cst) volatile noexcept; |
466 | void clear(memory_order m = memory_order_seq_cst) noexcept; |
467 | |
468 | void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept; |
469 | void wait(bool, memory_order = memory_order::seq_cst) const noexcept; |
470 | void notify_one() volatile noexcept; |
471 | void notify_one() noexcept; |
472 | void notify_all() volatile noexcept; |
473 | void notify_all() noexcept; |
474 | } atomic_flag; |
475 | |
476 | bool atomic_flag_test(volatile atomic_flag* obj) noexcept; |
477 | bool atomic_flag_test(atomic_flag* obj) noexcept; |
478 | bool atomic_flag_test_explicit(volatile atomic_flag* obj, |
479 | memory_order m) noexcept; |
480 | bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept; |
481 | bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept; |
482 | bool atomic_flag_test_and_set(atomic_flag* obj) noexcept; |
483 | bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj, |
484 | memory_order m) noexcept; |
485 | bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept; |
486 | void atomic_flag_clear(volatile atomic_flag* obj) noexcept; |
487 | void atomic_flag_clear(atomic_flag* obj) noexcept; |
488 | void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept; |
489 | void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept; |
490 | |
void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait_explicit(const volatile atomic_flag* obj, bool old, memory_order m) noexcept;
void atomic_flag_wait_explicit(const atomic_flag* obj, bool old, memory_order m) noexcept;
void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_one(atomic_flag* obj) noexcept;
void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_all(atomic_flag* obj) noexcept;
499 | |
500 | // fences |
501 | |
502 | void atomic_thread_fence(memory_order m) noexcept; |
503 | void atomic_signal_fence(memory_order m) noexcept; |
504 | |
505 | // deprecated |
506 | |
507 | template <class T> |
508 | void atomic_init(volatile atomic<T>* obj, atomic<T>::value_type desr) noexcept; |
509 | |
510 | template <class T> |
511 | void atomic_init(atomic<T>* obj, atomic<T>::value_type desr) noexcept; |
512 | |
513 | #define ATOMIC_VAR_INIT(value) see below |
514 | |
515 | #define ATOMIC_FLAG_INIT see below |
516 | |
517 | } // std |
518 | |
519 | */ |
520 | |
521 | #include <__assert> // all public C++ headers provide the assertion handler |
522 | #include <__availability> |
523 | #include <__chrono/duration.h> |
524 | #include <__config> |
525 | #include <__thread/poll_with_backoff.h> |
526 | #include <__thread/timed_backoff_policy.h> |
527 | #include <cstddef> |
528 | #include <cstdint> |
529 | #include <cstring> |
530 | #include <type_traits> |
531 | #include <version> |
532 | |
533 | #ifndef _LIBCPP_HAS_NO_THREADS |
534 | # include <__threading_support> |
535 | #endif |
536 | |
537 | #ifndef _LIBCPP_REMOVE_TRANSITIVE_INCLUDES |
538 | # include <chrono> |
539 | #endif |
540 | |
541 | #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER) |
542 | # pragma GCC system_header |
543 | #endif |
544 | |
545 | #ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER |
546 | # error <atomic> is not implemented |
547 | #endif |
548 | #ifdef kill_dependency |
549 | # error <atomic> is incompatible with <stdatomic.h> before C++23. Please compile with -std=c++23. |
550 | #endif |
551 | |
// Compile-time diagnostics for memory orders the standard forbids for a given
// operation. Each macro expands to a _LIBCPP_DIAGNOSE_WARNING attribute that
// fires when the order argument is a constant with a disallowed value.

// A store may not use consume/acquire/acq_rel orderings.
#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
    _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
                             __m == memory_order_acquire || \
                             __m == memory_order_acq_rel, \
                             "memory order argument to atomic operation is invalid")

// A load may not use release/acq_rel orderings.
#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
    _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
                             __m == memory_order_acq_rel, \
                             "memory order argument to atomic operation is invalid")

// Compare-exchange: the *failure* order __f acts as a load, so it may not be
// release/acq_rel. (The success order __m is not checked here.)
#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
    _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
                             __f == memory_order_acq_rel, \
                             "memory order argument to atomic operation is invalid")
567 | |
568 | _LIBCPP_BEGIN_NAMESPACE_STD |
569 | |
// Figure out what the underlying type for `memory_order` would be if it were
// declared as an unscoped enum (accounting for -fshort-enums). Use this result
// to pin the underlying type in C++20.
enum __legacy_memory_order {
  __mo_relaxed,
  __mo_consume,
  __mo_acquire,
  __mo_release,
  __mo_acq_rel,
  __mo_seq_cst
};

// The compiler-chosen underlying type of the unscoped enum above; used below
// as the fixed underlying type of std::memory_order so the ABI is identical
// in pre-C++20 and C++20 modes.
typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;
583 | |
#if _LIBCPP_STD_VER > 17

// C++20: memory_order is a scoped enumeration; its underlying type is pinned
// to __memory_order_underlying_t so it matches the pre-C++20 unscoped enum.
enum class memory_order : __memory_order_underlying_t {
  relaxed = __mo_relaxed,
  consume = __mo_consume,
  acquire = __mo_acquire,
  release = __mo_release,
  acq_rel = __mo_acq_rel,
  seq_cst = __mo_seq_cst
};

// The traditional memory_order_* names remain available as constants.
inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

#else

// Pre-C++20: memory_order is an unscoped enumeration, so the enumerators
// themselves provide the memory_order_* names.
typedef enum memory_order {
  memory_order_relaxed = __mo_relaxed,
  memory_order_consume = __mo_consume,
  memory_order_acquire = __mo_acquire,
  memory_order_release = __mo_release,
  memory_order_acq_rel = __mo_acq_rel,
  memory_order_seq_cst = __mo_seq_cst,
} memory_order;

#endif // _LIBCPP_STD_VER > 17
614 | |
615 | template <typename _Tp> _LIBCPP_INLINE_VISIBILITY |
616 | bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) { |
617 | return _VSTD::memcmp(s1: &__lhs, s2: &__rhs, n: sizeof(_Tp)) == 0; |
618 | } |
619 | |
// Sanity check: std::memory_order (scoped or unscoped, per dialect) must have
// exactly the underlying type computed from the legacy enum, or the ABI of
// functions taking memory_order would differ between language modes.
static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
    "unexpected underlying type for std::memory_order" );
622 | |
623 | #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \ |
624 | defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS) |
625 | |
// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
// the default operator= in an object is not volatile, a byte-by-byte copy
// is required.
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
  // Non-volatile destination: the ordinary copy assignment suffices.
  __a_value = __val;
}
634 | template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY |
635 | typename enable_if<is_assignable<_Tp&, _Tv>::value>::type |
636 | __cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) { |
637 | volatile char* __to = reinterpret_cast<volatile char*>(&__a_value); |
638 | volatile char* __end = __to + sizeof(_Tp); |
639 | volatile const char* __from = reinterpret_cast<volatile const char*>(&__val); |
640 | while (__to != __end) |
641 | *__to++ = *__from++; |
642 | } |
643 | |
644 | #endif |
645 | |
646 | #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) |
647 | |
// Storage type used with the GCC __atomic builtins: a plain _Tp value; the
// builtins operate directly on its address.
template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
  __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
  // C++03 has no defaulted special members; value-initialize explicitly.
  __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};
661 | |
662 | _LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) { |
663 | // Avoid switch statement to make this a constexpr. |
664 | return __order == memory_order_relaxed ? __ATOMIC_RELAXED: |
665 | (__order == memory_order_acquire ? __ATOMIC_ACQUIRE: |
666 | (__order == memory_order_release ? __ATOMIC_RELEASE: |
667 | (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST: |
668 | (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL: |
669 | __ATOMIC_CONSUME)))); |
670 | } |
671 | |
672 | _LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) { |
673 | // Avoid switch statement to make this a constexpr. |
674 | return __order == memory_order_relaxed ? __ATOMIC_RELAXED: |
675 | (__order == memory_order_acquire ? __ATOMIC_ACQUIRE: |
676 | (__order == memory_order_release ? __ATOMIC_RELAXED: |
677 | (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST: |
678 | (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE: |
679 | __ATOMIC_CONSUME)))); |
680 | } |
681 | |
// Non-atomic initialization of the contained value; no synchronization is
// performed.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  // Volatile object: must go through the byte-wise volatile-safe assignment.
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}
693 | |
// Fences: thin wrappers over the GCC fence builtins, translating the
// std::memory_order argument to the builtin's __ATOMIC_* encoding.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__to_gcc_order(__order));
}

// Signal fence: compiler-only barrier with respect to signal handlers on the
// same thread.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__to_gcc_order(__order));
}
703 | |
// Atomic store via the generic __atomic_store builtin, which copies the value
// through a pointer and therefore works for any trivially copyable _Tp.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}
719 | |
// Atomic load via the generic __atomic_load builtin; the result is written
// into a local and returned by value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
                      memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
  return __ret;
}
738 | |
// Atomic exchange: stores __value and returns the previous contents, using
// the generic pointer-based __atomic_exchange builtin.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
                          memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}
758 | |
// Strong compare-exchange (the 'false' argument selects the non-weak builtin
// variant, i.e. no spurious failures).  On success __value is stored; on
// failure *__expected is updated with the observed value and the demoted
// failure ordering applies.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}
780 | |
// Weak compare-exchange (the 'true' argument selects the weak builtin
// variant, which is allowed to fail spuriously and is intended for use in
// retry loops).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}
802 | |
// Scale factor applied to fetch_add/fetch_sub deltas: 1 for ordinary types,
// sizeof(_Tp) for _Tp* so that pointer atomics advance by whole objects.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (No ::value member, so instantiation fails to compile.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
815 | |
// Atomic fetch-and-add; returns the previous value.  __delta is scaled by
// __skip_amt<_Tp>::value (sizeof the pointee for pointer types).
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}
831 | |
// Atomic fetch-and-subtract; returns the previous value.  __delta is scaled
// by __skip_amt<_Tp>::value, mirroring __cxx_atomic_fetch_add.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}
847 | |
// Atomic fetch-and-AND; returns the previous value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}
863 | |
// Atomic fetch-and-OR; returns the previous value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                          memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}
879 | |
// Atomic fetch-and-XOR; returns the previous value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                           memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}
895 | |
// Lock-freedom query for an object of size __s; the second argument (0) means
// no particular object address is being asked about.
#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
897 | |
898 | #elif defined(_LIBCPP_HAS_C_ATOMIC_IMP) |
899 | |
// Storage for the atomic value when the C11 _Atomic / __c11_atomic_* backend
// is in use.  The default constructor is defaulted where possible so the
// type stays trivially default-constructible outside C++03.
template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp __value) _NOEXCEPT
    : __a_value(__value) {}
  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
};
913 | |
// Lock-freedom query, forwarded to the Clang C11 builtin.
#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
915 | |
// Fences on top of the Clang C11 builtins; the memory_order enum is passed as
// its underlying integer type.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
}
925 | |
// Non-atomic initialization of the contained value via the C11 builtin.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(&__a->__a_value, __val);
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(&__a->__a_value, __val);
}
936 | |
// Atomic store via the C11 builtin.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}
947 | |
// Atomic load via the C11 builtin.  const is cast away because
// __c11_atomic_load takes a pointer to non-const _Atomic storage.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}
960 | |
// Atomic exchange via the C11 builtin; returns the previous value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
  return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
  return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}
971 | |
// Demote a compare-exchange ordering to a valid failure ordering: a failed
// CAS performs only a load, so release -> relaxed and acq_rel -> acquire;
// everything else is already a legal load ordering and passes through.
_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_release ? memory_order_relaxed:
         (__order == memory_order_acq_rel ? memory_order_acquire:
           __order);
}
978 | |
979 | template<class _Tp> |
980 | _LIBCPP_INLINE_VISIBILITY |
981 | bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT { |
982 | return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(order: __failure))); |
983 | } |
984 | template<class _Tp> |
985 | _LIBCPP_INLINE_VISIBILITY |
986 | bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT { |
987 | return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(order: __failure))); |
988 | } |
989 | |
990 | template<class _Tp> |
991 | _LIBCPP_INLINE_VISIBILITY |
992 | bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT { |
993 | return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(order: __failure))); |
994 | } |
995 | template<class _Tp> |
996 | _LIBCPP_INLINE_VISIBILITY |
997 | bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT { |
998 | return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(order: __failure))); |
999 | } |
1000 | |
// Atomic fetch-and-add via the C11 builtin; returns the previous value.
// Pointer specializations take a ptrdiff_t delta (pointer arithmetic is
// handled by the builtin for _Atomic(_Tp*)).
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
1022 | |
// Atomic fetch-and-subtract via the C11 builtin; returns the previous value.
// Mirrors __cxx_atomic_fetch_add, including the ptrdiff_t pointer overloads.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
1043 | |
// Atomic fetch-and-AND via the C11 builtin; returns the previous value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
1054 | |
// Atomic fetch-and-OR via the C11 builtin; returns the previous value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
1065 | |
// Atomic fetch-and-XOR via the C11 builtin; returns the previous value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
1076 | |
1077 | #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP |
1078 | |
// std::kill_dependency: terminates a memory_order_consume dependency chain by
// returning its argument through a value copy ([atomics.order]).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}
1085 | |
// Define the standard ATOMIC_*_LOCK_FREE macros from whichever set of
// compiler-predefined macros is available (Clang's __CLANG_ATOMIC_* first,
// otherwise GCC's __GCC_ATOMIC_*).
#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
#ifndef _LIBCPP_HAS_NO_CHAR8_T
# define ATOMIC_CHAR8_T_LOCK_FREE   __CLANG_ATOMIC_CHAR8_T_LOCK_FREE
#endif
# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#ifndef _LIBCPP_HAS_NO_CHAR8_T
# define ATOMIC_CHAR8_T_LOCK_FREE   __GCC_ATOMIC_CHAR8_T_LOCK_FREE
#endif
# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
#endif
1115 | |
1116 | #ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS |
1117 | |
// Fallback storage for std::atomic<_Tp> when _Tp is not always lock-free and
// only compiler builtins may be used: every operation is serialized through a
// per-object spinlock (__a_lock) guarding the plain value (__a_value).
// __a_lock is mutable so const (load-only) operations can still take the lock.
template<typename _Tp>
struct __cxx_atomic_lock_impl {

  _LIBCPP_INLINE_VISIBILITY
  __cxx_atomic_lock_impl() _NOEXCEPT
    : __a_value(), __a_lock(0) {}
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
    : __a_value(value), __a_lock(0) {}

  _Tp __a_value;
  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;

  // Spin until the exchange observes the flag as clear; the acquire exchange
  // both sets the flag and provides the lock's acquire ordering.
  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  _LIBCPP_INLINE_VISIBILITY void __lock() const {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  // Release the lock with a release store so prior writes are published.
  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  // Copy the value out under the lock.
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
    __lock();
    _Tp __old;
    __cxx_atomic_assign_volatile(__old, __a_value);
    __unlock();
    return __old;
  }
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
    __lock();
    _Tp __old = __a_value;
    __unlock();
    return __old;
  }
};
1159 | |
// Non-atomic initialization for the lock-based implementation; no lock is
// taken, since initialization is not an atomic operation.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
  __a->__a_value = __val;
}
1170 | |
// Store under the spinlock.  The memory_order argument is ignored: the lock's
// acquire/release pair already provides the strongest required ordering.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
  __a->__lock();
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
  __a->__unlock();
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
  __a->__lock();
  __a->__a_value = __val;
  __a->__unlock();
}
1185 | |
// Load under the spinlock, delegated to __read(); ordering argument ignored
// (the lock supplies it).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}
1196 | |
// Exchange under the spinlock: read the old value, write the new one, return
// the old.  Ordering argument ignored (the lock supplies it).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __value);
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value = __value;
  __a->__unlock();
  return __old;
}
1216 | |
// Compare-exchange under the spinlock.  Comparison is bytewise (memcmp),
// matching the object-representation semantics of the builtin CAS; on failure
// *__expected is refreshed with the current value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  _Tp __temp;
  __a->__lock();
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
  else
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  __a->__unlock();
  return __ret;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  else
    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  __a->__unlock();
  return __ret;
}
1245 | |
// "Weak" compare-exchange for the lock-based implementation.  Since the whole
// operation runs under the lock it never fails spuriously, so it is identical
// to the strong variant.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  _Tp __temp;
  __a->__lock();
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
  else
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  __a->__unlock();
  return __ret;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  else
    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  __a->__unlock();
  return __ret;
}
1274 | |
// Fetch-and-add under the spinlock; returns the previous value.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}
1296 | |
// Pointer fetch-and-add under the spinlock: ordinary pointer arithmetic
// (advances by whole objects); returns the previous pointer.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
                            ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
                            ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}
1318 | |
// Fetch-and-subtract under the spinlock; returns the previous value.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value -= __delta;
  __a->__unlock();
  return __old;
}
1340 | |
// Fetch-and-AND under the spinlock; returns the previous value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value &= __pattern;
  __a->__unlock();
  return __old;
}
1362 | |
// Fetch-and-OR under the spinlock; returns the previous value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value |= __pattern;
  __a->__unlock();
  return __old;
}
1384 | |
// Fetch-and-XOR under the spinlock; returns the previous value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value ^= __pattern;
  __a->__unlock();
  return __old;
}
1406 | |
1407 | #ifdef __cpp_lib_atomic_is_always_lock_free |
1408 | |
// Compile-time lock-freedom query via the compiler builtin; the 0 argument
// means "for any suitably aligned object" (no specific address).
template<typename _Tp> struct __cxx_is_always_lock_free {
    enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };
1411 | |
1412 | #else |
1413 | |
// Fallback when the builtin is unavailable: conservatively false by default,
// with per-type specializations derived from the ATOMIC_*_LOCK_FREE macros
// (a macro value of 2 means "always lock-free").
template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
// Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
#ifndef _LIBCPP_HAS_NO_CHAR8_T
template<> struct __cxx_is_always_lock_free<char8_t> { enum { __value = 2 == ATOMIC_CHAR8_T_LOCK_FREE }; };
#endif
template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
#endif
template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1438 | |
1439 | #endif //__cpp_lib_atomic_is_always_lock_free |
1440 | |
// Pick the storage representation for an atomic _Tp: the real lock-free
// implementation when _Tp is always lock-free, otherwise the spinlock-based
// fallback.  (The matching "#if" for this "#else" opens earlier in the file.)
template <typename _Tp,
          typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
                                                __cxx_atomic_base_impl<_Tp>,
                                                __cxx_atomic_lock_impl<_Tp> >::type>
#else
// Compiler builtins handle every trivially copyable type directly, so no
// lock-based fallback selection is needed.
template <typename _Tp,
          typename _Base = __cxx_atomic_base_impl<_Tp> >
#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
// Storage wrapper used by the atomic<> class hierarchy below; enforces the
// standard's precondition that T be trivially copyable.
struct __cxx_atomic_impl : public _Base {
  static_assert(is_trivially_copyable<_Tp>::value,
    "std::atomic<T> requires that 'T' be a trivially copyable type" );

  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT = default;
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp __value) _NOEXCEPT
    : _Base(__value) {}
};
1457 | |
// Word type used by the out-of-line wait/notify machinery.  Linux and 32-bit
// AIX use a 32-bit word, everything else 64-bit — presumably chosen to match
// the platform wait primitive's operand width (e.g. futex); the actual wait
// implementation lives in the built library, not in this header.
#if defined(__linux__) || (defined(_AIX) && !defined(__64BIT__))
using __cxx_contention_t = int32_t;
#else
using __cxx_contention_t = int64_t;
#endif // __linux__ || (_AIX && !__64BIT__)

// Atomic wrapper over the contention word, used by the declarations below.
using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;
1465 | |
1466 | #if defined(_LIBCPP_HAS_NO_THREADS) |
1467 | # define _LIBCPP_HAS_NO_PLATFORM_WAIT |
1468 | #endif |
1469 | |
1470 | // TODO: |
1471 | // _LIBCPP_HAS_NO_PLATFORM_WAIT is currently a "dead" macro, in the sense that |
1472 | // it is not tied anywhere into the build system or even documented. We should |
1473 | // clean it up because it is technically never defined except when threads are |
1474 | // disabled. We should clean it up in its own changeset in case we break "bad" |
1475 | // users. |
1476 | #ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT |
1477 | |
// Wait/notify entry points defined out of line (exported from the built
// library, per _LIBCPP_EXPORTED_FROM_ABI).  Two overload families: one keyed
// on an arbitrary address (void const volatile*), and one taking a pointer to
// the contention word itself.  __libcpp_atomic_monitor returns a token that a
// subsequent __libcpp_atomic_wait compares against to detect intervening
// notifications (see the backoff functor below).
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);

_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);
1487 | |
// Backoff functor used while polling an atomic-wait predicate.  Escalates by
// elapsed time: below 4us, pure spin (the empty "poll" branch); between 4us
// and 64us, yield the timeslice; beyond 64us, park the thread via the
// library's platform wait.  Before parking it takes a monitor token and
// re-checks the predicate, so a notification arriving between the check and
// the wait is not lost.  Returns true once __test_fn is seen to succeed,
// false to keep polling.
template <class _Atp, class _Fn>
struct __libcpp_atomic_wait_backoff_impl {
    _Atp* __a;
    _Fn __test_fn;
    _LIBCPP_AVAILABILITY_SYNC
    _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
    {
        if(__elapsed > chrono::microseconds(64))
        {
            auto const __monitor = __libcpp_atomic_monitor(__a);
            if(__test_fn())
                return true;
            __libcpp_atomic_wait(__a, __monitor);
        }
        else if(__elapsed > chrono::microseconds(4))
            __libcpp_thread_yield();
        else
            {} // poll
        return false;
    }
};
1509 | |
1510 | template <class _Atp, class _Fn> |
1511 | _LIBCPP_AVAILABILITY_SYNC |
1512 | _LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn) |
1513 | { |
1514 | __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn}; |
1515 | return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn); |
1516 | } |
1517 | |
1518 | #else // _LIBCPP_HAS_NO_PLATFORM_WAIT |
1519 | |
// Fallback when no platform wait primitive is available: notifications are
// no-ops, and waiting degrades to polling the predicate with a generic
// backoff policy.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
// The atomic object itself is unused here; only the predicate is polled.
template <class _Atp, class _Fn>
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
{
#if defined(_LIBCPP_HAS_NO_THREADS)
  // Without threads there is nothing to yield to or sleep for: spin.
  using _Policy = __spinning_backoff_policy;
#else
  // With threads but no platform wait: back off with timed sleeps.
  using _Policy = __libcpp_timed_backoff_policy;
#endif
  return __libcpp_thread_poll_with_backoff(__test_fn, _Policy());
}
1534 | |
1535 | #endif // _LIBCPP_HAS_NO_PLATFORM_WAIT |
1536 | |
// Predicate for value-based atomic waits: "done" when a fresh load with
// __order no longer compares bitwise-equal (__cxx_nonatomic_compare_equal)
// to the captured value __val.
template <class _Atp, class _Tp>
struct __cxx_atomic_wait_test_fn_impl {
    _Atp* __a;
    _Tp __val;
    memory_order __order;
    _LIBCPP_INLINE_VISIBILITY bool operator()() const
    {
        return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
    }
};
1547 | |
1548 | template <class _Atp, class _Tp> |
1549 | _LIBCPP_AVAILABILITY_SYNC |
1550 | _LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order) |
1551 | { |
1552 | __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order}; |
1553 | return __cxx_atomic_wait(__a, __test_fn); |
1554 | } |
1555 | |
1556 | // general atomic<T> |
1557 | |
// Base class providing the operations common to every atomic specialization.
// The bool parameter selects the integral specialization below; bool itself is
// deliberately excluded from it (no arithmetic on atomic<bool>).
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base // false
{
    // mutable: load()/wait() are const member functions, but the underlying
    // __cxx_atomic_* helpers take a pointer to non-const storage.
    mutable __cxx_atomic_impl<_Tp> __a_;

#if defined(__cpp_lib_atomic_is_always_lock_free)
  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
#endif

    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
        {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
    // Non-volatile overload forwards to the volatile one (same answer).
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
    // The _LIBCPP_CHECK_*_MEMORY_ORDER macros diagnose memory orders that are
    // invalid for the operation (e.g. acquire on a store) where supported.
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__cxx_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__cxx_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __cxx_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __cxx_atomic_load(&__a_, __m);}
    // Implicit conversion to _Tp is a seq_cst load.
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, __d, __m);}
    // Compare-exchange: __s is the success order, __f the failure order; on
    // failure the observed value is written back into __e.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    // Single-order forms use __m for both the success and failure orderings.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    // C++20 waiting/notifying API, forwarded to the __cxx_atomic_* machinery
    // above; availability-gated because the wait support lives in the dylib.
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {__cxx_atomic_wait(&__a_, __v, __m);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {__cxx_atomic_wait(&__a_, __v, __m);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
        {__cxx_atomic_notify_one(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
        {__cxx_atomic_notify_one(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
        {__cxx_atomic_notify_all(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
        {__cxx_atomic_notify_all(&__a_);}

#if _LIBCPP_STD_VER > 17
    // C++20: default construction value-initializes (P0883).
    _LIBCPP_INLINE_VISIBILITY constexpr
    __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
#else
    // Pre-C++20: default construction leaves the value uninitialized.
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT = default;
#endif

    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
    __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}

    // Atomics are not copyable.
    __atomic_base(const __atomic_base&) = delete;
};
1662 | |
#if defined(__cpp_lib_atomic_is_always_lock_free)
// Out-of-class definition of the static data member, needed so that
// ODR-uses of is_always_lock_free link before C++17 inline-variable rules.
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif
1667 | |
1668 | // atomic<Integral> |
1669 | |
// Specialization for integral types (bool excluded by the primary template's
// default argument): layers the arithmetic/bitwise fetch operations and the
// corresponding operators on top of the common base.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;

    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17
    __atomic_base() _NOEXCEPT = default;

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // fetch_* return the value held immediately before the modification.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Post-inc/dec return the OLD value; pre-inc/dec and the compound
    // assignments return the NEW value, recomputed from the fetched old one.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
};
1750 | |
1751 | // atomic<T> |
1752 | |
// Primary std::atomic<T> template (non-pointer T): adds the value_type /
// difference_type typedefs and assignment-from-T on top of __atomic_base.
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    typedef _Tp value_type;
    typedef value_type difference_type;

#if _LIBCPP_STD_VER > 17
    _LIBCPP_INLINE_VISIBILITY
    atomic() = default;
#else
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT = default;
#endif

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Assignment performs a seq_cst store and returns the stored value
    // (not *this).
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    // Atomics are not copy-assignable.
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
};
1782 | |
1783 | // atomic<T*> |
1784 | |
// Partial specialization for object pointers: difference_type is ptrdiff_t
// and fetch_add/fetch_sub perform pointer arithmetic in units of _Tp.
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    typedef _Tp* value_type;
    typedef ptrdiff_t difference_type;

    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT = default;

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    // Assignment performs a seq_cst store and returns the stored pointer.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
        // The underlying __atomic fetch builtins accept function pointers;
        // guard against them, since pointer arithmetic on them is ill-formed.
        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed" );
        return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
    }

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
        // The underlying __atomic fetch builtins accept function pointers;
        // guard against them, since pointer arithmetic on them is ill-formed.
        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed" );
        return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
    }

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
        // The underlying __atomic fetch builtins accept function pointers;
        // guard against them, since pointer arithmetic on them is ill-formed.
        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed" );
        return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
    }

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
        // The underlying __atomic fetch builtins accept function pointers;
        // guard against them, since pointer arithmetic on them is ill-formed.
        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed" );
        return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
    }

    // Post-inc/dec return the OLD pointer; pre-inc/dec and the compound
    // assignments return the NEW pointer.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}

    // Atomics are not copy-assignable.
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
};
1862 | |
1863 | // atomic_is_lock_free |
1864 | |
// C-compatible free function: equivalent to __o->is_lock_free().
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}
1880 | |
1881 | // atomic_init |
1882 | |
// Non-atomic initialization of *__o with __d (no synchronization performed).
// Deprecated in C++20: use the atomic's constructor instead.
template <class _Tp>
_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __cxx_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __cxx_atomic_init(&__o->__a_, __d);
}
1898 | |
1899 | // atomic_store |
1900 | |
// Equivalent to __o->store(__d) (seq_cst).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __o->store(__d);
}

// atomic_store_explicit

// Equivalent to __o->store(__d, __m); the CHECK macro diagnoses orders that
// are invalid for a store.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}
1936 | |
1937 | // atomic_load |
1938 | |
// Equivalent to __o->load() (seq_cst).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

// atomic_load_explicit

// Equivalent to __o->load(__m); the CHECK macro diagnoses orders that are
// invalid for a load.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}
1974 | |
1975 | // atomic_exchange |
1976 | |
// Equivalent to __o->exchange(__d) (seq_cst); returns the previous value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// atomic_exchange_explicit

// Equivalent to __o->exchange(__d, __m).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}
2010 | |
2011 | // atomic_compare_exchange_weak |
2012 | |
// Equivalent to __o->compare_exchange_weak(*__e, __d) (seq_cst/seq_cst);
// on failure the observed value is written through __e.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

// Equivalent to __o->compare_exchange_strong(*__e, __d) (seq_cst/seq_cst).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}
2046 | |
2047 | // atomic_compare_exchange_weak_explicit |
2048 | |
// Equivalent to __o->compare_exchange_weak(*__e, __d, __s, __f); __s is the
// success order, __f the failure order (validated by the CHECK macro).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
                                      typename atomic<_Tp>::value_type __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

// Equivalent to __o->compare_exchange_strong(*__e, __d, __s, __f).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
                                        typename atomic<_Tp>::value_type __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
2093 | |
2094 | // atomic_wait |
2095 | |
// Equivalent to __o->wait(__v): blocks until a seq_cst load observes a value
// that differs from __v.
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait(const volatile atomic<_Tp>* __o,
                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
{
    return __o->wait(__v);
}

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait(const atomic<_Tp>* __o,
                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
{
    return __o->wait(__v);
}

// atomic_wait_explicit

// Equivalent to __o->wait(__v, __m); __m is a load order, so the CHECK macro
// rejects release/acq_rel.
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
                          typename atomic<_Tp>::value_type __v,
                          memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->wait(__v, __m);
}

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait_explicit(const atomic<_Tp>* __o,
                          typename atomic<_Tp>::value_type __v,
                          memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->wait(__v, __m);
}
2133 | |
2134 | // atomic_notify_one |
2135 | |
// Equivalent to __o->notify_one(): wakes at least one thread blocked in an
// atomic waiting operation on *__o, if any.
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_one();
}
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_one();
}
2148 | |
// atomic_notify_all
2150 | |
// Equivalent to __o->notify_all(): wakes all threads blocked in an atomic
// waiting operation on *__o.
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_all();
}
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_all();
}
2163 | |
2164 | // atomic_fetch_add |
2165 | |
// Equivalent to __o->fetch_add(__op) (seq_cst); returns the previous value.
// Note the operand type is difference_type (ptrdiff_t for pointer atomics).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit

// Equivalent to __o->fetch_add(__op, __m).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
2197 | |
2198 | // atomic_fetch_sub |
2199 | |
2200 | template <class _Tp> |
2201 | _LIBCPP_INLINE_VISIBILITY |
2202 | _Tp atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT |
2203 | { |
2204 | return __o->fetch_sub(__op); |
2205 | } |
2206 | |
2207 | template <class _Tp> |
2208 | _LIBCPP_INLINE_VISIBILITY |
2209 | _Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT |
2210 | { |
2211 | return __o->fetch_sub(__op); |
2212 | } |
2213 | |
2214 | // atomic_fetch_sub_explicit |
2215 | |
2216 | template <class _Tp> |
2217 | _LIBCPP_INLINE_VISIBILITY |
2218 | _Tp atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT |
2219 | { |
2220 | return __o->fetch_sub(__op, __m); |
2221 | } |
2222 | |
2223 | template <class _Tp> |
2224 | _LIBCPP_INLINE_VISIBILITY |
2225 | _Tp atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT |
2226 | { |
2227 | return __o->fetch_sub(__op, __m); |
2228 | } |
2229 | |
2230 | // atomic_fetch_and |
2231 | |
2232 | template <class _Tp> |
2233 | _LIBCPP_INLINE_VISIBILITY |
2234 | typename enable_if |
2235 | < |
2236 | is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
2237 | _Tp |
2238 | >::type |
2239 | atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT |
2240 | { |
2241 | return __o->fetch_and(__op); |
2242 | } |
2243 | |
2244 | template <class _Tp> |
2245 | _LIBCPP_INLINE_VISIBILITY |
2246 | typename enable_if |
2247 | < |
2248 | is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
2249 | _Tp |
2250 | >::type |
2251 | atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT |
2252 | { |
2253 | return __o->fetch_and(__op); |
2254 | } |
2255 | |
2256 | // atomic_fetch_and_explicit |
2257 | |
2258 | template <class _Tp> |
2259 | _LIBCPP_INLINE_VISIBILITY |
2260 | typename enable_if |
2261 | < |
2262 | is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
2263 | _Tp |
2264 | >::type |
2265 | atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT |
2266 | { |
2267 | return __o->fetch_and(__op, __m); |
2268 | } |
2269 | |
2270 | template <class _Tp> |
2271 | _LIBCPP_INLINE_VISIBILITY |
2272 | typename enable_if |
2273 | < |
2274 | is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
2275 | _Tp |
2276 | >::type |
2277 | atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT |
2278 | { |
2279 | return __o->fetch_and(__op, __m); |
2280 | } |
2281 | |
2282 | // atomic_fetch_or |
2283 | |
2284 | template <class _Tp> |
2285 | _LIBCPP_INLINE_VISIBILITY |
2286 | typename enable_if |
2287 | < |
2288 | is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
2289 | _Tp |
2290 | >::type |
2291 | atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT |
2292 | { |
2293 | return __o->fetch_or(__op); |
2294 | } |
2295 | |
2296 | template <class _Tp> |
2297 | _LIBCPP_INLINE_VISIBILITY |
2298 | typename enable_if |
2299 | < |
2300 | is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
2301 | _Tp |
2302 | >::type |
2303 | atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT |
2304 | { |
2305 | return __o->fetch_or(__op); |
2306 | } |
2307 | |
2308 | // atomic_fetch_or_explicit |
2309 | |
2310 | template <class _Tp> |
2311 | _LIBCPP_INLINE_VISIBILITY |
2312 | typename enable_if |
2313 | < |
2314 | is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
2315 | _Tp |
2316 | >::type |
2317 | atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT |
2318 | { |
2319 | return __o->fetch_or(__op, __m); |
2320 | } |
2321 | |
2322 | template <class _Tp> |
2323 | _LIBCPP_INLINE_VISIBILITY |
2324 | typename enable_if |
2325 | < |
2326 | is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
2327 | _Tp |
2328 | >::type |
2329 | atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT |
2330 | { |
2331 | return __o->fetch_or(__op, __m); |
2332 | } |
2333 | |
2334 | // atomic_fetch_xor |
2335 | |
2336 | template <class _Tp> |
2337 | _LIBCPP_INLINE_VISIBILITY |
2338 | typename enable_if |
2339 | < |
2340 | is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
2341 | _Tp |
2342 | >::type |
2343 | atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT |
2344 | { |
2345 | return __o->fetch_xor(__op); |
2346 | } |
2347 | |
2348 | template <class _Tp> |
2349 | _LIBCPP_INLINE_VISIBILITY |
2350 | typename enable_if |
2351 | < |
2352 | is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
2353 | _Tp |
2354 | >::type |
2355 | atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT |
2356 | { |
2357 | return __o->fetch_xor(__op); |
2358 | } |
2359 | |
2360 | // atomic_fetch_xor_explicit |
2361 | |
2362 | template <class _Tp> |
2363 | _LIBCPP_INLINE_VISIBILITY |
2364 | typename enable_if |
2365 | < |
2366 | is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
2367 | _Tp |
2368 | >::type |
2369 | atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT |
2370 | { |
2371 | return __o->fetch_xor(__op, __m); |
2372 | } |
2373 | |
2374 | template <class _Tp> |
2375 | _LIBCPP_INLINE_VISIBILITY |
2376 | typename enable_if |
2377 | < |
2378 | is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
2379 | _Tp |
2380 | >::type |
2381 | atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT |
2382 | { |
2383 | return __o->fetch_xor(__op, __m); |
2384 | } |
2385 | |
2386 | // flag type and operations |
2387 | |
2388 | typedef struct atomic_flag |
2389 | { |
2390 | __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_; |
2391 | |
2392 | _LIBCPP_INLINE_VISIBILITY |
2393 | bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT |
2394 | {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(a: &__a_, order: __m);} |
2395 | _LIBCPP_INLINE_VISIBILITY |
2396 | bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT |
2397 | {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(a: &__a_, order: __m);} |
2398 | |
2399 | _LIBCPP_INLINE_VISIBILITY |
2400 | bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
2401 | {return __cxx_atomic_exchange(a: &__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), order: __m);} |
2402 | _LIBCPP_INLINE_VISIBILITY |
2403 | bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT |
2404 | {return __cxx_atomic_exchange(a: &__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), order: __m);} |
2405 | _LIBCPP_INLINE_VISIBILITY |
2406 | void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
2407 | {__cxx_atomic_store(a: &__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), order: __m);} |
2408 | _LIBCPP_INLINE_VISIBILITY |
2409 | void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT |
2410 | {__cxx_atomic_store(a: &__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), order: __m);} |
2411 | |
2412 | _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY |
2413 | void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT |
2414 | {__cxx_atomic_wait(a: &__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), order: __m);} |
2415 | _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY |
2416 | void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT |
2417 | {__cxx_atomic_wait(a: &__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), order: __m);} |
2418 | _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY |
2419 | void notify_one() volatile _NOEXCEPT |
2420 | {__cxx_atomic_notify_one(&__a_);} |
2421 | _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY |
2422 | void notify_one() _NOEXCEPT |
2423 | {__cxx_atomic_notify_one(&__a_);} |
2424 | _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY |
2425 | void notify_all() volatile _NOEXCEPT |
2426 | {__cxx_atomic_notify_all(&__a_);} |
2427 | _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY |
2428 | void notify_all() _NOEXCEPT |
2429 | {__cxx_atomic_notify_all(&__a_);} |
2430 | |
2431 | #if _LIBCPP_STD_VER > 17 |
2432 | _LIBCPP_INLINE_VISIBILITY constexpr |
2433 | atomic_flag() _NOEXCEPT : __a_(false) {} |
2434 | #else |
2435 | _LIBCPP_INLINE_VISIBILITY |
2436 | atomic_flag() _NOEXCEPT = default; |
2437 | #endif |
2438 | |
2439 | _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR |
2440 | atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION |
2441 | |
2442 | atomic_flag(const atomic_flag&) = delete; |
2443 | atomic_flag& operator=(const atomic_flag&) = delete; |
2444 | atomic_flag& operator=(const atomic_flag&) volatile = delete; |
2445 | |
2446 | } atomic_flag; |
2447 | |
2448 | |
2449 | inline _LIBCPP_INLINE_VISIBILITY |
2450 | bool |
2451 | atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT |
2452 | { |
2453 | return __o->test(); |
2454 | } |
2455 | |
2456 | inline _LIBCPP_INLINE_VISIBILITY |
2457 | bool |
2458 | atomic_flag_test(const atomic_flag* __o) _NOEXCEPT |
2459 | { |
2460 | return __o->test(); |
2461 | } |
2462 | |
2463 | inline _LIBCPP_INLINE_VISIBILITY |
2464 | bool |
2465 | atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT |
2466 | { |
2467 | return __o->test(__m); |
2468 | } |
2469 | |
2470 | inline _LIBCPP_INLINE_VISIBILITY |
2471 | bool |
2472 | atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT |
2473 | { |
2474 | return __o->test(__m); |
2475 | } |
2476 | |
2477 | inline _LIBCPP_INLINE_VISIBILITY |
2478 | bool |
2479 | atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT |
2480 | { |
2481 | return __o->test_and_set(); |
2482 | } |
2483 | |
2484 | inline _LIBCPP_INLINE_VISIBILITY |
2485 | bool |
2486 | atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT |
2487 | { |
2488 | return __o->test_and_set(); |
2489 | } |
2490 | |
2491 | inline _LIBCPP_INLINE_VISIBILITY |
2492 | bool |
2493 | atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT |
2494 | { |
2495 | return __o->test_and_set(__m); |
2496 | } |
2497 | |
2498 | inline _LIBCPP_INLINE_VISIBILITY |
2499 | bool |
2500 | atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT |
2501 | { |
2502 | return __o->test_and_set(__m); |
2503 | } |
2504 | |
2505 | inline _LIBCPP_INLINE_VISIBILITY |
2506 | void |
2507 | atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT |
2508 | { |
2509 | __o->clear(); |
2510 | } |
2511 | |
2512 | inline _LIBCPP_INLINE_VISIBILITY |
2513 | void |
2514 | atomic_flag_clear(atomic_flag* __o) _NOEXCEPT |
2515 | { |
2516 | __o->clear(); |
2517 | } |
2518 | |
2519 | inline _LIBCPP_INLINE_VISIBILITY |
2520 | void |
2521 | atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT |
2522 | { |
2523 | __o->clear(__m); |
2524 | } |
2525 | |
2526 | inline _LIBCPP_INLINE_VISIBILITY |
2527 | void |
2528 | atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT |
2529 | { |
2530 | __o->clear(__m); |
2531 | } |
2532 | |
2533 | inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC |
2534 | void |
2535 | atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT |
2536 | { |
2537 | __o->wait(__v); |
2538 | } |
2539 | |
2540 | inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC |
2541 | void |
2542 | atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT |
2543 | { |
2544 | __o->wait(__v); |
2545 | } |
2546 | |
2547 | inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC |
2548 | void |
2549 | atomic_flag_wait_explicit(const volatile atomic_flag* __o, |
2550 | bool __v, memory_order __m) _NOEXCEPT |
2551 | { |
2552 | __o->wait(__v, __m); |
2553 | } |
2554 | |
2555 | inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC |
2556 | void |
2557 | atomic_flag_wait_explicit(const atomic_flag* __o, |
2558 | bool __v, memory_order __m) _NOEXCEPT |
2559 | { |
2560 | __o->wait(__v, __m); |
2561 | } |
2562 | |
2563 | inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC |
2564 | void |
2565 | atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT |
2566 | { |
2567 | __o->notify_one(); |
2568 | } |
2569 | |
2570 | inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC |
2571 | void |
2572 | atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT |
2573 | { |
2574 | __o->notify_one(); |
2575 | } |
2576 | |
2577 | inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC |
2578 | void |
2579 | atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT |
2580 | { |
2581 | __o->notify_all(); |
2582 | } |
2583 | |
2584 | inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC |
2585 | void |
2586 | atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT |
2587 | { |
2588 | __o->notify_all(); |
2589 | } |
2590 | |
2591 | // fences |
2592 | |
2593 | inline _LIBCPP_INLINE_VISIBILITY |
2594 | void |
2595 | atomic_thread_fence(memory_order __m) _NOEXCEPT |
2596 | { |
2597 | __cxx_atomic_thread_fence(order: __m); |
2598 | } |
2599 | |
2600 | inline _LIBCPP_INLINE_VISIBILITY |
2601 | void |
2602 | atomic_signal_fence(memory_order __m) _NOEXCEPT |
2603 | { |
2604 | __cxx_atomic_signal_fence(order: __m); |
2605 | } |
2606 | |
// Atomics for standard typedef types

// Named aliases for atomic specializations over the builtin integer and
// character types.
typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
#ifndef _LIBCPP_HAS_NO_CHAR8_T
typedef atomic<char8_t>            atomic_char8_t;
#endif
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
typedef atomic<wchar_t>            atomic_wchar_t;
#endif

// Aliases over the <cstdint> least-width integer types.
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// Aliases over the <cstdint> fastest-width integer types.
typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// Aliases over the <cstdint> exact-width integer types.
typedef atomic< int8_t>  atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

// Aliases over pointer-sized / size-related integer types.
typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;
2663 | |
// atomic_*_lock_free : prefer the contention type most highly, then the largest lock-free type

// _LIBCPP_CONTENTION_LOCK_FREE is true when the platform's contention type
// (__cxx_contention_t, used by the wait/notify machinery) is always
// lock-free; __atomic_always_lock_free is a compile-time builtin query.
#ifdef __cpp_lib_atomic_is_always_lock_free
# define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0)
#else
# define _LIBCPP_CONTENTION_LOCK_FREE false
#endif

// Pick the widest integer type whose ATOMIC_*_LOCK_FREE macro is 2
// (i.e. always lock-free), falling back long long -> int -> short -> char;
// within each case, prefer __cxx_contention_t when it is itself lock-free.
#if ATOMIC_LLONG_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_INT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_SHORT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_CHAR_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type __libcpp_unsigned_lock_free;
#else
// No signed/unsigned lock-free types
#define _LIBCPP_NO_LOCK_FREE_TYPES
#endif

// The standard atomic_signed_lock_free / atomic_unsigned_lock_free aliases
// exist only when some lock-free integer type was found above.
#if !defined(_LIBCPP_NO_LOCK_FREE_TYPES)
typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free;
typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
#endif
2693 | |
// Legacy initialization macros. Both are deprecated in C++20 (value
// initialization of atomic/atomic_flag is well-defined there), hence the
// clang deprecation pragmas below.
#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

#if _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
// #pragma clang deprecated requires clang >= 14.
# if defined(_LIBCPP_CLANG_VER) && _LIBCPP_CLANG_VER >= 1400
#   pragma clang deprecated(ATOMIC_FLAG_INIT)
#   pragma clang deprecated(ATOMIC_VAR_INIT)
# endif
#endif // _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
2703 | |
2704 | _LIBCPP_END_NAMESPACE_STD |
2705 | |
2706 | #endif // _LIBCPP_ATOMIC |
2707 | |