1// -*- C++ -*-
2//===----------------------------------------------------------------------===//
3//
4// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
5// See https://llvm.org/LICENSE.txt for license information.
6// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7//
8//===----------------------------------------------------------------------===//
9
10#ifndef _LIBCPP_ATOMIC
11#define _LIBCPP_ATOMIC
12
13/*
14 atomic synopsis
15
16namespace std
17{
18
19// feature test macro [version.syn]
20
21#define __cpp_lib_atomic_is_always_lock_free
22#define __cpp_lib_atomic_flag_test
23#define __cpp_lib_atomic_lock_free_type_aliases
24#define __cpp_lib_atomic_wait
25
26 // order and consistency
27
28 enum memory_order: unspecified // enum class in C++20
29 {
30 relaxed,
31 consume, // load-consume
32 acquire, // load-acquire
33 release, // store-release
34 acq_rel, // store-release load-acquire
35 seq_cst // store-release load-acquire
36 };
37
38 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
39 inline constexpr auto memory_order_consume = memory_order::consume;
40 inline constexpr auto memory_order_acquire = memory_order::acquire;
41 inline constexpr auto memory_order_release = memory_order::release;
42 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
43 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
44
45template <class T> T kill_dependency(T y) noexcept;
46
47// lock-free property
48
49#define ATOMIC_BOOL_LOCK_FREE unspecified
50#define ATOMIC_CHAR_LOCK_FREE unspecified
51#define ATOMIC_CHAR8_T_LOCK_FREE unspecified // C++20
52#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
53#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
54#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
55#define ATOMIC_SHORT_LOCK_FREE unspecified
56#define ATOMIC_INT_LOCK_FREE unspecified
57#define ATOMIC_LONG_LOCK_FREE unspecified
58#define ATOMIC_LLONG_LOCK_FREE unspecified
59#define ATOMIC_POINTER_LOCK_FREE unspecified
60
61template <class T>
62struct atomic
63{
64 using value_type = T;
65
66 static constexpr bool is_always_lock_free;
67 bool is_lock_free() const volatile noexcept;
68 bool is_lock_free() const noexcept;
69
70 atomic() noexcept = default; // until C++20
71 constexpr atomic() noexcept(is_nothrow_default_constructible_v<T>); // since C++20
72 constexpr atomic(T desr) noexcept;
73 atomic(const atomic&) = delete;
74 atomic& operator=(const atomic&) = delete;
75 atomic& operator=(const atomic&) volatile = delete;
76
77 T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
78 T load(memory_order m = memory_order_seq_cst) const noexcept;
79 operator T() const volatile noexcept;
80 operator T() const noexcept;
81 void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
82 void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
83 T operator=(T) volatile noexcept;
84 T operator=(T) noexcept;
85
86 T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
87 T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
88 bool compare_exchange_weak(T& expc, T desr,
89 memory_order s, memory_order f) volatile noexcept;
90 bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
91 bool compare_exchange_strong(T& expc, T desr,
92 memory_order s, memory_order f) volatile noexcept;
93 bool compare_exchange_strong(T& expc, T desr,
94 memory_order s, memory_order f) noexcept;
95 bool compare_exchange_weak(T& expc, T desr,
96 memory_order m = memory_order_seq_cst) volatile noexcept;
97 bool compare_exchange_weak(T& expc, T desr,
98 memory_order m = memory_order_seq_cst) noexcept;
99 bool compare_exchange_strong(T& expc, T desr,
100 memory_order m = memory_order_seq_cst) volatile noexcept;
101 bool compare_exchange_strong(T& expc, T desr,
102 memory_order m = memory_order_seq_cst) noexcept;
103
104 void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
105 void wait(T, memory_order = memory_order::seq_cst) const noexcept;
106 void notify_one() volatile noexcept;
107 void notify_one() noexcept;
108 void notify_all() volatile noexcept;
109 void notify_all() noexcept;
110};
111
112template <>
113struct atomic<integral>
114{
115 using value_type = integral;
116 using difference_type = value_type;
117
118 static constexpr bool is_always_lock_free;
119 bool is_lock_free() const volatile noexcept;
120 bool is_lock_free() const noexcept;
121
122 atomic() noexcept = default;
123 constexpr atomic(integral desr) noexcept;
124 atomic(const atomic&) = delete;
125 atomic& operator=(const atomic&) = delete;
126 atomic& operator=(const atomic&) volatile = delete;
127
128 integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
129 integral load(memory_order m = memory_order_seq_cst) const noexcept;
130 operator integral() const volatile noexcept;
131 operator integral() const noexcept;
132 void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133 void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134 integral operator=(integral desr) volatile noexcept;
135 integral operator=(integral desr) noexcept;
136
137 integral exchange(integral desr,
138 memory_order m = memory_order_seq_cst) volatile noexcept;
139 integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
140 bool compare_exchange_weak(integral& expc, integral desr,
141 memory_order s, memory_order f) volatile noexcept;
142 bool compare_exchange_weak(integral& expc, integral desr,
143 memory_order s, memory_order f) noexcept;
144 bool compare_exchange_strong(integral& expc, integral desr,
145 memory_order s, memory_order f) volatile noexcept;
146 bool compare_exchange_strong(integral& expc, integral desr,
147 memory_order s, memory_order f) noexcept;
148 bool compare_exchange_weak(integral& expc, integral desr,
149 memory_order m = memory_order_seq_cst) volatile noexcept;
150 bool compare_exchange_weak(integral& expc, integral desr,
151 memory_order m = memory_order_seq_cst) noexcept;
152 bool compare_exchange_strong(integral& expc, integral desr,
153 memory_order m = memory_order_seq_cst) volatile noexcept;
154 bool compare_exchange_strong(integral& expc, integral desr,
155 memory_order m = memory_order_seq_cst) noexcept;
156
157 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
158 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
159 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
161 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
162 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
163 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
164 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
165 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
167
168 integral operator++(int) volatile noexcept;
169 integral operator++(int) noexcept;
170 integral operator--(int) volatile noexcept;
171 integral operator--(int) noexcept;
172 integral operator++() volatile noexcept;
173 integral operator++() noexcept;
174 integral operator--() volatile noexcept;
175 integral operator--() noexcept;
176 integral operator+=(integral op) volatile noexcept;
177 integral operator+=(integral op) noexcept;
178 integral operator-=(integral op) volatile noexcept;
179 integral operator-=(integral op) noexcept;
180 integral operator&=(integral op) volatile noexcept;
181 integral operator&=(integral op) noexcept;
182 integral operator|=(integral op) volatile noexcept;
183 integral operator|=(integral op) noexcept;
184 integral operator^=(integral op) volatile noexcept;
185 integral operator^=(integral op) noexcept;
186
187 void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
188 void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
189 void notify_one() volatile noexcept;
190 void notify_one() noexcept;
191 void notify_all() volatile noexcept;
192 void notify_all() noexcept;
193};
194
195template <class T>
196struct atomic<T*>
197{
198 using value_type = T*;
199 using difference_type = ptrdiff_t;
200
201 static constexpr bool is_always_lock_free;
202 bool is_lock_free() const volatile noexcept;
203 bool is_lock_free() const noexcept;
204
205 atomic() noexcept = default; // until C++20
206 constexpr atomic() noexcept; // since C++20
207 constexpr atomic(T* desr) noexcept;
208 atomic(const atomic&) = delete;
209 atomic& operator=(const atomic&) = delete;
210 atomic& operator=(const atomic&) volatile = delete;
211
212 T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
213 T* load(memory_order m = memory_order_seq_cst) const noexcept;
214 operator T*() const volatile noexcept;
215 operator T*() const noexcept;
216 void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
217 void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
218 T* operator=(T*) volatile noexcept;
219 T* operator=(T*) noexcept;
220
221 T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
222 T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
223 bool compare_exchange_weak(T*& expc, T* desr,
224 memory_order s, memory_order f) volatile noexcept;
225 bool compare_exchange_weak(T*& expc, T* desr,
226 memory_order s, memory_order f) noexcept;
227 bool compare_exchange_strong(T*& expc, T* desr,
228 memory_order s, memory_order f) volatile noexcept;
229 bool compare_exchange_strong(T*& expc, T* desr,
230 memory_order s, memory_order f) noexcept;
231 bool compare_exchange_weak(T*& expc, T* desr,
232 memory_order m = memory_order_seq_cst) volatile noexcept;
233 bool compare_exchange_weak(T*& expc, T* desr,
234 memory_order m = memory_order_seq_cst) noexcept;
235 bool compare_exchange_strong(T*& expc, T* desr,
236 memory_order m = memory_order_seq_cst) volatile noexcept;
237 bool compare_exchange_strong(T*& expc, T* desr,
238 memory_order m = memory_order_seq_cst) noexcept;
239 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
240 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
241 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
242 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
243
244 T* operator++(int) volatile noexcept;
245 T* operator++(int) noexcept;
246 T* operator--(int) volatile noexcept;
247 T* operator--(int) noexcept;
248 T* operator++() volatile noexcept;
249 T* operator++() noexcept;
250 T* operator--() volatile noexcept;
251 T* operator--() noexcept;
252 T* operator+=(ptrdiff_t op) volatile noexcept;
253 T* operator+=(ptrdiff_t op) noexcept;
254 T* operator-=(ptrdiff_t op) volatile noexcept;
255 T* operator-=(ptrdiff_t op) noexcept;
256
257 void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
258 void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
259 void notify_one() volatile noexcept;
260 void notify_one() noexcept;
261 void notify_all() volatile noexcept;
262 void notify_all() noexcept;
263};
264
265
266// [atomics.nonmembers], non-member functions
267template<class T>
268 bool atomic_is_lock_free(const volatile atomic<T>*) noexcept;
269template<class T>
270 bool atomic_is_lock_free(const atomic<T>*) noexcept;
271template<class T>
272 void atomic_store(volatile atomic<T>*, atomic<T>::value_type) noexcept;
273template<class T>
274 void atomic_store(atomic<T>*, atomic<T>::value_type) noexcept;
275template<class T>
276 void atomic_store_explicit(volatile atomic<T>*, atomic<T>::value_type,
277 memory_order) noexcept;
278template<class T>
279 void atomic_store_explicit(atomic<T>*, atomic<T>::value_type,
280 memory_order) noexcept;
281template<class T>
282 T atomic_load(const volatile atomic<T>*) noexcept;
283template<class T>
284 T atomic_load(const atomic<T>*) noexcept;
285template<class T>
286 T atomic_load_explicit(const volatile atomic<T>*, memory_order) noexcept;
287template<class T>
288 T atomic_load_explicit(const atomic<T>*, memory_order) noexcept;
289template<class T>
290 T atomic_exchange(volatile atomic<T>*, atomic<T>::value_type) noexcept;
291template<class T>
292 T atomic_exchange(atomic<T>*, atomic<T>::value_type) noexcept;
293template<class T>
294 T atomic_exchange_explicit(volatile atomic<T>*, atomic<T>::value_type,
295 memory_order) noexcept;
296template<class T>
297 T atomic_exchange_explicit(atomic<T>*, atomic<T>::value_type,
298 memory_order) noexcept;
299template<class T>
300 bool atomic_compare_exchange_weak(volatile atomic<T>*, atomic<T>::value_type*,
301 atomic<T>::value_type) noexcept;
302template<class T>
303 bool atomic_compare_exchange_weak(atomic<T>*, atomic<T>::value_type*,
304 atomic<T>::value_type) noexcept;
305template<class T>
306 bool atomic_compare_exchange_strong(volatile atomic<T>*, atomic<T>::value_type*,
307 atomic<T>::value_type) noexcept;
308template<class T>
309 bool atomic_compare_exchange_strong(atomic<T>*, atomic<T>::value_type*,
310 atomic<T>::value_type) noexcept;
311template<class T>
312 bool atomic_compare_exchange_weak_explicit(volatile atomic<T>*, atomic<T>::value_type*,
313 atomic<T>::value_type,
314 memory_order, memory_order) noexcept;
315template<class T>
316 bool atomic_compare_exchange_weak_explicit(atomic<T>*, atomic<T>::value_type*,
317 atomic<T>::value_type,
318 memory_order, memory_order) noexcept;
319template<class T>
320 bool atomic_compare_exchange_strong_explicit(volatile atomic<T>*, atomic<T>::value_type*,
321 atomic<T>::value_type,
322 memory_order, memory_order) noexcept;
323template<class T>
324 bool atomic_compare_exchange_strong_explicit(atomic<T>*, atomic<T>::value_type*,
325 atomic<T>::value_type,
326 memory_order, memory_order) noexcept;
327
328template<class T>
329 T atomic_fetch_add(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
330template<class T>
331 T atomic_fetch_add(atomic<T>*, atomic<T>::difference_type) noexcept;
332template<class T>
333 T atomic_fetch_add_explicit(volatile atomic<T>*, atomic<T>::difference_type,
334 memory_order) noexcept;
335template<class T>
336 T atomic_fetch_add_explicit(atomic<T>*, atomic<T>::difference_type,
337 memory_order) noexcept;
338template<class T>
339 T atomic_fetch_sub(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
340template<class T>
341 T atomic_fetch_sub(atomic<T>*, atomic<T>::difference_type) noexcept;
342template<class T>
343 T atomic_fetch_sub_explicit(volatile atomic<T>*, atomic<T>::difference_type,
344 memory_order) noexcept;
345template<class T>
346 T atomic_fetch_sub_explicit(atomic<T>*, atomic<T>::difference_type,
347 memory_order) noexcept;
348template<class T>
349 T atomic_fetch_and(volatile atomic<T>*, atomic<T>::value_type) noexcept;
350template<class T>
351 T atomic_fetch_and(atomic<T>*, atomic<T>::value_type) noexcept;
352template<class T>
353 T atomic_fetch_and_explicit(volatile atomic<T>*, atomic<T>::value_type,
354 memory_order) noexcept;
355template<class T>
356 T atomic_fetch_and_explicit(atomic<T>*, atomic<T>::value_type,
357 memory_order) noexcept;
358template<class T>
359 T atomic_fetch_or(volatile atomic<T>*, atomic<T>::value_type) noexcept;
360template<class T>
361 T atomic_fetch_or(atomic<T>*, atomic<T>::value_type) noexcept;
362template<class T>
363 T atomic_fetch_or_explicit(volatile atomic<T>*, atomic<T>::value_type,
364 memory_order) noexcept;
365template<class T>
366 T atomic_fetch_or_explicit(atomic<T>*, atomic<T>::value_type,
367 memory_order) noexcept;
368template<class T>
369 T atomic_fetch_xor(volatile atomic<T>*, atomic<T>::value_type) noexcept;
370template<class T>
371 T atomic_fetch_xor(atomic<T>*, atomic<T>::value_type) noexcept;
372template<class T>
373 T atomic_fetch_xor_explicit(volatile atomic<T>*, atomic<T>::value_type,
374 memory_order) noexcept;
375template<class T>
376 T atomic_fetch_xor_explicit(atomic<T>*, atomic<T>::value_type,
377 memory_order) noexcept;
378
379template<class T>
380 void atomic_wait(const volatile atomic<T>*, atomic<T>::value_type);
381template<class T>
382 void atomic_wait(const atomic<T>*, atomic<T>::value_type);
383template<class T>
384 void atomic_wait_explicit(const volatile atomic<T>*, atomic<T>::value_type,
385 memory_order);
386template<class T>
387 void atomic_wait_explicit(const atomic<T>*, atomic<T>::value_type,
388 memory_order);
389template<class T>
390 void atomic_notify_one(volatile atomic<T>*);
391template<class T>
392 void atomic_notify_one(atomic<T>*);
393template<class T>
394 void atomic_notify_all(volatile atomic<T>*);
395template<class T>
396 void atomic_notify_all(atomic<T>*);
397
398// Atomics for standard typedef types
399
400typedef atomic<bool> atomic_bool;
401typedef atomic<char> atomic_char;
402typedef atomic<signed char> atomic_schar;
403typedef atomic<unsigned char> atomic_uchar;
404typedef atomic<short> atomic_short;
405typedef atomic<unsigned short> atomic_ushort;
406typedef atomic<int> atomic_int;
407typedef atomic<unsigned int> atomic_uint;
408typedef atomic<long> atomic_long;
409typedef atomic<unsigned long> atomic_ulong;
410typedef atomic<long long> atomic_llong;
411typedef atomic<unsigned long long> atomic_ullong;
412typedef atomic<char8_t> atomic_char8_t; // C++20
413typedef atomic<char16_t> atomic_char16_t;
414typedef atomic<char32_t> atomic_char32_t;
415typedef atomic<wchar_t> atomic_wchar_t;
416
417typedef atomic<int_least8_t> atomic_int_least8_t;
418typedef atomic<uint_least8_t> atomic_uint_least8_t;
419typedef atomic<int_least16_t> atomic_int_least16_t;
420typedef atomic<uint_least16_t> atomic_uint_least16_t;
421typedef atomic<int_least32_t> atomic_int_least32_t;
422typedef atomic<uint_least32_t> atomic_uint_least32_t;
423typedef atomic<int_least64_t> atomic_int_least64_t;
424typedef atomic<uint_least64_t> atomic_uint_least64_t;
425
426typedef atomic<int_fast8_t> atomic_int_fast8_t;
427typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
428typedef atomic<int_fast16_t> atomic_int_fast16_t;
429typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
430typedef atomic<int_fast32_t> atomic_int_fast32_t;
431typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
432typedef atomic<int_fast64_t> atomic_int_fast64_t;
433typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
434
435typedef atomic<int8_t> atomic_int8_t;
436typedef atomic<uint8_t> atomic_uint8_t;
437typedef atomic<int16_t> atomic_int16_t;
438typedef atomic<uint16_t> atomic_uint16_t;
439typedef atomic<int32_t> atomic_int32_t;
440typedef atomic<uint32_t> atomic_uint32_t;
441typedef atomic<int64_t> atomic_int64_t;
442typedef atomic<uint64_t> atomic_uint64_t;
443
444typedef atomic<intptr_t> atomic_intptr_t;
445typedef atomic<uintptr_t> atomic_uintptr_t;
446typedef atomic<size_t> atomic_size_t;
447typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
448typedef atomic<intmax_t> atomic_intmax_t;
449typedef atomic<uintmax_t> atomic_uintmax_t;
450
451// flag type and operations
452
453typedef struct atomic_flag
454{
455 atomic_flag() noexcept = default; // until C++20
456 constexpr atomic_flag() noexcept; // since C++20
457 atomic_flag(const atomic_flag&) = delete;
458 atomic_flag& operator=(const atomic_flag&) = delete;
459 atomic_flag& operator=(const atomic_flag&) volatile = delete;
460
461 bool test(memory_order m = memory_order_seq_cst) volatile noexcept;
462 bool test(memory_order m = memory_order_seq_cst) noexcept;
463 bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
464 bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
465 void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
466 void clear(memory_order m = memory_order_seq_cst) noexcept;
467
468 void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
469 void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
470 void notify_one() volatile noexcept;
471 void notify_one() noexcept;
472 void notify_all() volatile noexcept;
473 void notify_all() noexcept;
474} atomic_flag;
475
476bool atomic_flag_test(volatile atomic_flag* obj) noexcept;
477bool atomic_flag_test(atomic_flag* obj) noexcept;
478bool atomic_flag_test_explicit(volatile atomic_flag* obj,
479 memory_order m) noexcept;
480bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept;
481bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
482bool atomic_flag_test_and_set(atomic_flag* obj) noexcept;
483bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
484 memory_order m) noexcept;
485bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
486void atomic_flag_clear(volatile atomic_flag* obj) noexcept;
487void atomic_flag_clear(atomic_flag* obj) noexcept;
488void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
489void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
490
void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait_explicit(const volatile atomic_flag* obj, bool old, memory_order m) noexcept;
void atomic_flag_wait_explicit(const atomic_flag* obj, bool old, memory_order m) noexcept;
void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_one(atomic_flag* obj) noexcept;
void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_all(atomic_flag* obj) noexcept;
499
500// fences
501
502void atomic_thread_fence(memory_order m) noexcept;
503void atomic_signal_fence(memory_order m) noexcept;
504
505// deprecated
506
507template <class T>
508 void atomic_init(volatile atomic<T>* obj, atomic<T>::value_type desr) noexcept;
509
510template <class T>
511 void atomic_init(atomic<T>* obj, atomic<T>::value_type desr) noexcept;
512
513#define ATOMIC_VAR_INIT(value) see below
514
515#define ATOMIC_FLAG_INIT see below
516
517} // std
518
519*/
520
521#include <__assert> // all public C++ headers provide the assertion handler
522#include <__availability>
523#include <__chrono/duration.h>
524#include <__config>
525#include <__thread/poll_with_backoff.h>
526#include <__thread/timed_backoff_policy.h>
527#include <cstddef>
528#include <cstdint>
529#include <cstring>
530#include <type_traits>
531#include <version>
532
533#ifndef _LIBCPP_HAS_NO_THREADS
534# include <__threading_support>
535#endif
536
537#ifndef _LIBCPP_REMOVE_TRANSITIVE_INCLUDES
538# include <chrono>
539#endif
540
541#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
542# pragma GCC system_header
543#endif
544
545#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
546# error <atomic> is not implemented
547#endif
548#ifdef kill_dependency
549# error <atomic> is incompatible with <stdatomic.h> before C++23. Please compile with -std=c++23.
550#endif
551
// Compile-time diagnostic helpers: _LIBCPP_DIAGNOSE_WARNING emits a warning
// when a (compile-time-known) memory_order argument is one the standard
// disallows for that class of operation.

// Stores may not use consume, acquire, or acq_rel ordering.
#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
    _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
                             __m == memory_order_acquire || \
                             __m == memory_order_acq_rel, \
                             "memory order argument to atomic operation is invalid")

// Loads may not use release or acq_rel ordering.
#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
    _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
                             __m == memory_order_acq_rel, \
                             "memory order argument to atomic operation is invalid")

// For compare-exchange: the failure ordering __f may not be release or
// acq_rel. (The success ordering __m is not checked here.)
#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
    _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
                             __f == memory_order_acq_rel, \
                             "memory order argument to atomic operation is invalid")
567
568_LIBCPP_BEGIN_NAMESPACE_STD
569
570// Figure out what the underlying type for `memory_order` would be if it were
571// declared as an unscoped enum (accounting for -fshort-enums). Use this result
572// to pin the underlying type in C++20.
// Mirror of the six memory_order enumerators as an unscoped enum, used only
// to let the compiler compute the underlying type such an enum would get
// (this respects flags like -fshort-enums).
enum __legacy_memory_order {
  __mo_relaxed,
  __mo_consume,
  __mo_acquire,
  __mo_release,
  __mo_acq_rel,
  __mo_seq_cst
};

// Underlying type computed from the mirror enum above; used below to pin the
// underlying type of the C++20 scoped memory_order so the ABI matches the
// pre-C++20 unscoped enum.
typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;
583
#if _LIBCPP_STD_VER > 17

// C++20: memory_order is a scoped enumeration. The underlying type is pinned
// to __memory_order_underlying_t so that it is layout-compatible with the
// pre-C++20 unscoped enum below.
enum class memory_order : __memory_order_underlying_t {
  relaxed = __mo_relaxed,
  consume = __mo_consume,
  acquire = __mo_acquire,
  release = __mo_release,
  acq_rel = __mo_acq_rel,
  seq_cst = __mo_seq_cst
};

// The traditional unscoped names remain available in C++20 as constants.
inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

#else

// Pre-C++20: memory_order is a plain (unscoped) enumeration; the enumerator
// names double as the memory_order_* constants.
typedef enum memory_order {
  memory_order_relaxed = __mo_relaxed,
  memory_order_consume = __mo_consume,
  memory_order_acquire = __mo_acquire,
  memory_order_release = __mo_release,
  memory_order_acq_rel = __mo_acq_rel,
  memory_order_seq_cst = __mo_seq_cst,
} memory_order;

#endif // _LIBCPP_STD_VER > 17
614
615template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
616bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
617 return _VSTD::memcmp(s1: &__lhs, s2: &__rhs, n: sizeof(_Tp)) == 0;
618}
619
// Sanity check: memory_order (scoped or unscoped, depending on dialect) must
// have exactly the underlying type computed from __legacy_memory_order,
// otherwise the ABI of functions taking memory_order would silently differ.
static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
  "unexpected underlying type for std::memory_order");
622
623#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
624 defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
625
// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
// the default operator= in an object is not volatile, a byte-by-byte copy
// is required.
// Non-volatile overload: plain assignment is well-formed here, so use it;
// enable_if selects this overload only when the assignment compiles.
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
  __a_value = __val;
}
634template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
635typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
636__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
637 volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
638 volatile char* __end = __to + sizeof(_Tp);
639 volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
640 while (__to != __end)
641 *__to++ = *__from++;
642}
643
644#endif
645
646#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
647
// Storage wrapper for the GCC-builtin atomic backend: the __atomic_* builtins
// operate directly on a plain _Tp object, so this is just the value plus
// constructors.
template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    // C++03 has no defaulted functions; value-initialize explicitly instead.
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value; // the object the __atomic_* builtins act on
};
661
// Map std::memory_order to the compiler's __ATOMIC_* constants.
// Any value not matched explicitly (i.e. memory_order_consume) falls through
// to __ATOMIC_CONSUME.
_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
         (__order == memory_order_release ? __ATOMIC_RELEASE:
         (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
         (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
           __ATOMIC_CONSUME))));
}
671
// Map std::memory_order to an __ATOMIC_* constant usable as the *failure*
// ordering of a compare-exchange. A failure ordering may not have release
// semantics, so release is demoted to relaxed and acq_rel to acquire.
_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
         (__order == memory_order_release ? __ATOMIC_RELAXED:
         (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
         (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
           __ATOMIC_CONSUME))));
}
681
// Non-atomic initialization of the storage (volatile overload): goes through
// the byte-wise volatile copy helper since plain assignment may be ill-formed
// on a volatile object.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}

// Non-atomic initialization (non-volatile overload): plain assignment.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}
693
// std::atomic_thread_fence backend: forwards to the compiler builtin.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__to_gcc_order(__order));
}

// std::atomic_signal_fence backend: compiler-only fence (no hardware fence),
// orders observations by a signal handler on the same thread.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__to_gcc_order(__order));
}
703
// Atomic store via the generic __atomic_store builtin; the value is passed by
// address because the generic builtin works on arbitrary trivially copyable
// types (volatile overload).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}
719
720template <typename _Tp>
721_LIBCPP_INLINE_VISIBILITY
722_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
723 memory_order __order) {
724 _Tp __ret;
725 __atomic_load(&__a->__a_value, &__ret,
726 __to_gcc_order(__order));
727 return __ret;
728}
729
730template <typename _Tp>
731_LIBCPP_INLINE_VISIBILITY
732_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
733 _Tp __ret;
734 __atomic_load(&__a->__a_value, &__ret,
735 __to_gcc_order(__order));
736 return __ret;
737}
738
739template <typename _Tp>
740_LIBCPP_INLINE_VISIBILITY
741_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
742 _Tp __value, memory_order __order) {
743 _Tp __ret;
744 __atomic_exchange(&__a->__a_value, &__value, &__ret,
745 __to_gcc_order(__order));
746 return __ret;
747}
748
749template <typename _Tp>
750_LIBCPP_INLINE_VISIBILITY
751_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
752 memory_order __order) {
753 _Tp __ret;
754 __atomic_exchange(&__a->__a_value, &__value, &__ret,
755 __to_gcc_order(__order));
756 return __ret;
757}
758
// Strong compare-exchange via __atomic_compare_exchange (volatile overload).
// The `false` argument selects the strong variant of the builtin (no spurious
// failure); the failure order is demoted through __to_gcc_failure_order.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}
780
// Weak compare-exchange via __atomic_compare_exchange (volatile overload).
// The `true` argument selects the weak variant of the builtin, which is
// allowed to fail spuriously.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

// Non-volatile overload of the above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}
802
// __skip_amt<_Tp>::value is the multiplier applied to fetch_add/fetch_sub
// deltas below: 1 for arithmetic types, sizeof(_Tp) for _Tp* so pointer
// atomics advance by whole objects, as the standard requires.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (These specializations have no ::value, so instantiation fails to compile.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
815
// Atomic fetch-add (GCC builtin backend); returns the value held before the
// addition.  __skip_amt scales the delta for pointer element size.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}
831
// Atomic fetch-subtract (GCC builtin backend); returns the prior value.
// __skip_amt scales the delta for pointer element size.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}
847
// Atomic fetch-AND (GCC builtin backend); returns the prior value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}
863
// Atomic fetch-OR (GCC builtin backend); returns the prior value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}

// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                          memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}
879
// Atomic fetch-XOR (GCC builtin backend); returns the prior value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                           memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}
895
896#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
897
898#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
899
// Storage for the C11 backend of std::atomic: the value lives in an
// _Atomic(_Tp) and is manipulated via the __c11_atomic_* Clang builtins.
template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    // C++03 has no '= default'; value-initialize explicitly instead.
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp __value) _NOEXCEPT
    : __a_value(__value) {}
  // _Atomic is a C11 extension in C++ mode; suppress the extension warning.
  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
};
913
914#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
915
// Inter-thread memory fence with the given ordering (C11 backend).
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
}
920
// Compiler-only fence: orders memory operations with respect to a signal
// handler on the same thread; emits no hardware fence (C11 backend).
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
}
925
// Non-atomic initialization of the atomic object (C11 backend); callable
// only before the object is accessed concurrently.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(&__a->__a_value, __val);
}
// Non-volatile overload; identical semantics to the volatile form above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(&__a->__a_value, __val);
}
936
// Atomic store with the given ordering (C11 backend).
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload; identical semantics to the volatile form above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}
947
// Atomic load with the given ordering (C11 backend).  The const_cast is
// needed because __c11_atomic_load takes a non-const pointer even though a
// load does not modify the object.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload; identical semantics to the volatile form above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}
960
// Atomic exchange (C11 backend): store __value and return the prior value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
  return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload; identical semantics to the volatile form above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
  return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}
971
972_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
973 // Avoid switch statement to make this a constexpr.
974 return __order == memory_order_release ? memory_order_relaxed:
975 (__order == memory_order_acq_rel ? memory_order_acquire:
976 __order);
977}
978
979template<class _Tp>
980_LIBCPP_INLINE_VISIBILITY
981bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
982 return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(order: __failure)));
983}
984template<class _Tp>
985_LIBCPP_INLINE_VISIBILITY
986bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
987 return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(order: __failure)));
988}
989
990template<class _Tp>
991_LIBCPP_INLINE_VISIBILITY
992bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
993 return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(order: __failure)));
994}
995template<class _Tp>
996_LIBCPP_INLINE_VISIBILITY
997bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
998 return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(order: __failure)));
999}
1000
// Atomic fetch-add for arithmetic types (C11 backend); returns prior value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload; identical semantics to the volatile form above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
1011
// Pointer specialization: delta is a ptrdiff_t measured in elements; the
// __c11_atomic_fetch_add builtin performs the element-size scaling itself.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload; identical semantics to the volatile form above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
1022
// Atomic fetch-subtract for arithmetic types (C11 backend); returns prior value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload; identical semantics to the volatile form above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
// Pointer specialization: delta is a ptrdiff_t measured in elements; the
// builtin performs the element-size scaling.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload; identical semantics to the volatile form above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
1043
// Atomic fetch-AND (C11 backend); returns the prior value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload; identical semantics to the volatile form above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
1054
// Atomic fetch-OR (C11 backend); returns the prior value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload; identical semantics to the volatile form above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
1065
// Atomic fetch-XOR (C11 backend); returns the prior value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
// Non-volatile overload; identical semantics to the volatile form above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
1076
1077#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
1078
// [atomics.order] std::kill_dependency: terminates a dependency chain
// started by a memory_order_consume load; behaviorally just returns its
// argument.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}
1085
1086#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
1087# define ATOMIC_BOOL_LOCK_FREE __CLANG_ATOMIC_BOOL_LOCK_FREE
1088# define ATOMIC_CHAR_LOCK_FREE __CLANG_ATOMIC_CHAR_LOCK_FREE
1089#ifndef _LIBCPP_HAS_NO_CHAR8_T
1090# define ATOMIC_CHAR8_T_LOCK_FREE __CLANG_ATOMIC_CHAR8_T_LOCK_FREE
1091#endif
1092# define ATOMIC_CHAR16_T_LOCK_FREE __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
1093# define ATOMIC_CHAR32_T_LOCK_FREE __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
1094# define ATOMIC_WCHAR_T_LOCK_FREE __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
1095# define ATOMIC_SHORT_LOCK_FREE __CLANG_ATOMIC_SHORT_LOCK_FREE
1096# define ATOMIC_INT_LOCK_FREE __CLANG_ATOMIC_INT_LOCK_FREE
1097# define ATOMIC_LONG_LOCK_FREE __CLANG_ATOMIC_LONG_LOCK_FREE
1098# define ATOMIC_LLONG_LOCK_FREE __CLANG_ATOMIC_LLONG_LOCK_FREE
1099# define ATOMIC_POINTER_LOCK_FREE __CLANG_ATOMIC_POINTER_LOCK_FREE
1100#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
1101# define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
1102# define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
1103#ifndef _LIBCPP_HAS_NO_CHAR8_T
1104# define ATOMIC_CHAR8_T_LOCK_FREE __GCC_ATOMIC_CHAR8_T_LOCK_FREE
1105#endif
1106# define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1107# define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1108# define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1109# define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
1110# define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
1111# define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
1112# define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
1113# define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
1114#endif
1115
1116#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1117
// Fallback storage for types that are not always lock-free when only the
// builtin-based implementation is available: a plain value guarded by a
// per-object spinlock (__a_lock).  The lock is mutable so that const loads
// can still acquire it.
template<typename _Tp>
struct __cxx_atomic_lock_impl {

  _LIBCPP_INLINE_VISIBILITY
  __cxx_atomic_lock_impl() _NOEXCEPT
    : __a_value(), __a_lock(0) {}
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
    : __a_value(value), __a_lock(0) {}

  _Tp __a_value;
  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;

  // Spin until the flag transitions false -> true; acquire pairs with the
  // release in __unlock().
  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  _LIBCPP_INLINE_VISIBILITY void __lock() const {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  // Read the value under the lock.  The volatile overload copies via
  // __cxx_atomic_assign_volatile because direct assignment from a volatile
  // object is not usable for arbitrary _Tp.
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
    __lock();
    _Tp __old;
    __cxx_atomic_assign_volatile(__old, __a_value);
    __unlock();
    return __old;
  }
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
    __lock();
    _Tp __old = __a_value;
    __unlock();
    return __old;
  }
};
1159
// Non-atomic initialization (lock-based backend): no lock is taken, so this
// must only be used before the object is shared between threads.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}
// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}
1170
// Atomic store (lock-based backend): assign under the spinlock.  The
// requested memory_order is ignored; the lock itself provides ordering.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
  __a->__lock();
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
  __a->__unlock();
}
// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
  __a->__lock();
  __a->__a_value = __val;
  __a->__unlock();
}
1185
// Atomic load (lock-based backend): delegates to __read(), which copies the
// value under the spinlock.  The memory_order argument is ignored.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}
// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}
1196
// Atomic exchange (lock-based backend): swap in __value under the spinlock
// and return the previous value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __value);
  __a->__unlock();
  return __old;
}
// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value = __value;
  __a->__unlock();
  return __old;
}
1216
// Strong compare-exchange (lock-based backend).  Comparison uses memcmp on
// the object representation (as the standard specifies for atomics), so
// padding bytes participate in the comparison.  On failure the current
// value is copied back into *__expected.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  _Tp __temp;
  __a->__lock();
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
  else
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  __a->__unlock();
  return __ret;
}
// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  else
    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  __a->__unlock();
  return __ret;
}
1245
// Weak compare-exchange (lock-based backend).  Under the lock there is no
// spurious failure, so this is the same algorithm as the strong form.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  _Tp __temp;
  __a->__lock();
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
  else
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  __a->__unlock();
  return __ret;
}
// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  else
    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  __a->__unlock();
  return __ret;
}
1274
// Atomic fetch-add (lock-based backend): read-modify-write under the
// spinlock; returns the prior value.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
  __a->__unlock();
  return __old;
}
// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}
1296
// Pointer fetch-add (lock-based backend): advances by __delta elements.
// NOTE(review): _Td appears in the template header but not in the function
// parameters, so it cannot be deduced from a call; it looks like these
// overloads are only usable with explicit template arguments — confirm
// against upstream whether the generic overload handles pointer callers.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
                            ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
  __a->__unlock();
  return __old;
}
// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
                            ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}
1318
// Atomic fetch-subtract (lock-based backend): read-modify-write under the
// spinlock; returns the prior value.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
  __a->__unlock();
  return __old;
}
// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value -= __delta;
  __a->__unlock();
  return __old;
}
1340
// Atomic fetch-AND (lock-based backend); returns the prior value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
  __a->__unlock();
  return __old;
}
// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value &= __pattern;
  __a->__unlock();
  return __old;
}
1362
// Atomic fetch-OR (lock-based backend); returns the prior value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
  __a->__unlock();
  return __old;
}
// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value |= __pattern;
  __a->__unlock();
  return __old;
}
1384
// Atomic fetch-XOR (lock-based backend); returns the prior value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
  __a->__unlock();
  return __old;
}
// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value ^= __pattern;
  __a->__unlock();
  return __old;
}
1406
// __cxx_is_always_lock_free<_Tp>::__value selects between the builtin-based
// and spinlock-based storage for _Tp (see __cxx_atomic_impl below).
#ifdef __cpp_lib_atomic_is_always_lock_free

// Preferred path: ask the compiler directly (null pointer means "any
// suitably aligned object of this size").
template<typename _Tp> struct __cxx_is_always_lock_free {
    enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };

#else

// Fallback: derive the answer from the C ATOMIC_*_LOCK_FREE macros, which
// are 2 exactly when the type is always lock-free.
template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
// Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
#ifndef _LIBCPP_HAS_NO_CHAR8_T
template<> struct __cxx_is_always_lock_free<char8_t> { enum { __value = 2 == ATOMIC_CHAR8_T_LOCK_FREE }; };
#endif
template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
#endif
template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };

#endif //__cpp_lib_atomic_is_always_lock_free
1440
// Final storage type used by std::atomic<_Tp>: when only builtins are
// allowed, lock-free types use __cxx_atomic_base_impl and everything else
// falls back to the spinlock-based __cxx_atomic_lock_impl; otherwise the
// base implementation is used unconditionally.
template <typename _Tp,
          typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
                                                __cxx_atomic_base_impl<_Tp>,
                                                __cxx_atomic_lock_impl<_Tp> >::type>
#else
template <typename _Tp,
          typename _Base = __cxx_atomic_base_impl<_Tp> >
#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
struct __cxx_atomic_impl : public _Base {
  // [atomics.types.generic]: T must be trivially copyable.
  static_assert(is_trivially_copyable<_Tp>::value,
    "std::atomic<T> requires that 'T' be a trivially copyable type");

  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT = default;
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp __value) _NOEXCEPT
    : _Base(__value) {}
};
1457
1458#if defined(__linux__) || (defined(_AIX) && !defined(__64BIT__))
1459 using __cxx_contention_t = int32_t;
1460#else
1461 using __cxx_contention_t = int64_t;
1462#endif // __linux__ || (_AIX && !__64BIT__)
1463
1464using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;
1465
1466#if defined(_LIBCPP_HAS_NO_THREADS)
1467# define _LIBCPP_HAS_NO_PLATFORM_WAIT
1468#endif
1469
1470// TODO:
1471// _LIBCPP_HAS_NO_PLATFORM_WAIT is currently a "dead" macro, in the sense that
1472// it is not tied anywhere into the build system or even documented. We should
1473// clean it up because it is technically never defined except when threads are
1474// disabled. We should clean it up in its own changeset in case we break "bad"
1475// users.
1476#ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT
1477
1478_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
1479_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
1480_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
1481_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);
1482
1483_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
1484_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
1485_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
1486_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);
1487
// Backoff policy used by __cxx_atomic_wait: spin-poll briefly, then yield,
// and only after ~64us fall back to a blocking platform wait.  Returning
// true stops the poll loop (the condition became true).
template <class _Atp, class _Fn>
struct __libcpp_atomic_wait_backoff_impl {
  _Atp* __a;
  _Fn __test_fn;
  _LIBCPP_AVAILABILITY_SYNC
  _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
  {
    if(__elapsed > chrono::microseconds(64))
    {
      // Capture the monitor value first, then re-test, so a change between
      // the test and the wait cannot be missed (wait returns immediately if
      // the monitor no longer matches).
      auto const __monitor = __libcpp_atomic_monitor(__a);
      if(__test_fn())
        return true;
      __libcpp_atomic_wait(__a, __monitor);
    }
    else if(__elapsed > chrono::microseconds(4))
      __libcpp_thread_yield();
    else
      {} // poll
    return false;
  }
};
1509
// Wait until __test_fn() returns true, polling with the backoff policy
// above (which eventually blocks on the platform wait primitive).
template <class _Atp, class _Fn>
_LIBCPP_AVAILABILITY_SYNC
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
{
  __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
  return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
}
1517
1518#else // _LIBCPP_HAS_NO_PLATFORM_WAIT
1519
// Without a platform wait primitive, waiters poll, so notify is a no-op.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
// Polling fallback for __cxx_atomic_wait when no platform wait exists:
// spin (single-threaded builds) or spin-then-sleep (threaded builds).
template <class _Atp, class _Fn>
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
{
#if defined(_LIBCPP_HAS_NO_THREADS)
  using _Policy = __spinning_backoff_policy;
#else
  using _Policy = __libcpp_timed_backoff_policy;
#endif
  return __libcpp_thread_poll_with_backoff(__test_fn, _Policy());
}
1534
1535#endif // _LIBCPP_HAS_NO_PLATFORM_WAIT
1536
// Predicate for the value form of atomic wait: true once the atomic's
// current value no longer compares equal (bytewise, via
// __cxx_nonatomic_compare_equal) to the value the waiter saw.
template <class _Atp, class _Tp>
struct __cxx_atomic_wait_test_fn_impl {
  _Atp* __a;
  _Tp __val;
  memory_order __order;
  _LIBCPP_INLINE_VISIBILITY bool operator()() const
  {
    return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
  }
};
1547
// Value form of atomic wait (backs std::atomic::wait): block until the
// stored value differs from __val, loading with ordering __order.
template <class _Atp, class _Tp>
_LIBCPP_AVAILABILITY_SYNC
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
{
  __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
  return __cxx_atomic_wait(__a, __test_fn);
}
1555
1556// general atomic<T>
1557
// Base of atomic<T> for all types: provides load/store/exchange/compare-
// exchange and the C++20 wait/notify members, but no arithmetic.  The bool
// template parameter selects the integral specialization below, which adds
// fetch_add and friends; bool is deliberately excluded from that path.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base // false
{
    // mutable: const-qualified members (load, wait) still access the storage.
    mutable __cxx_atomic_impl<_Tp> __a_;

#if defined(__cpp_lib_atomic_is_always_lock_free)
  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
#endif

    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
        {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        // Delegate to the volatile overload so the answer is computed once.
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
    // The _LIBCPP_CHECK_*_MEMORY_ORDER macros diagnose statically-invalid
    // orderings (e.g. memory_order_acquire on a store) when enabled.
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__cxx_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__cxx_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __cxx_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __cxx_atomic_load(&__a_, __m);}
    // Implicit conversion performs a seq_cst load.
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, __d, __m);}
    // Two-order compare-exchange: __s on success, __f on failure.  On failure
    // __e is updated with the observed value (standard CAS contract).
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
        _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
        _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
        _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
        _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    // Single-order overloads: __m is used for both success and failure.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    // C++20 wait/notify; implemented on top of the __cxx_atomic_wait /
    // __cxx_atomic_notify_* machinery earlier in this file.
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {__cxx_atomic_wait(&__a_, __v, __m);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {__cxx_atomic_wait(&__a_, __v, __m);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
        {__cxx_atomic_notify_one(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
        {__cxx_atomic_notify_one(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
        {__cxx_atomic_notify_all(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
        {__cxx_atomic_notify_all(&__a_);}

#if _LIBCPP_STD_VER > 17
    // C++20: default construction value-initializes the contained value.
    _LIBCPP_INLINE_VISIBILITY constexpr
    __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
#else
    // Pre-C++20: default construction leaves the value uninitialized.
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT = default;
#endif

    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
    __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}

    __atomic_base(const __atomic_base&) = delete;
};
1662
#if defined(__cpp_lib_atomic_is_always_lock_free)
// Out-of-line definition of the static data member, required for ODR-use
// before C++17 (static constexpr data members are not implicitly inline).
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif
1667
1668// atomic<Integral>
1669
// Specialization for non-bool integral types: layers the arithmetic/bitwise
// fetch_* operations and the operator sugar on top of the generic base.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;

    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17
    __atomic_base() _NOEXCEPT = default;

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Each fetch_<op> applies <op> atomically and returns the PREVIOUS value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Post-inc/dec return the OLD value; pre-inc/dec and the compound
    // assignments return the NEW value, reconstructed from the fetch result.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
};
1750
1751// atomic<T>
1752
// Primary atomic<T> template.  Arithmetic members are only present when the
// __atomic_base integral specialization is selected (non-bool integrals).
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    typedef _Tp value_type;
    typedef value_type difference_type;

#if _LIBCPP_STD_VER > 17
    _LIBCPP_INLINE_VISIBILITY
    atomic() = default;
#else
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT = default;
#endif

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Assignment performs a seq_cst store and returns the stored value
    // (by value, not a reference), as the standard requires.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    // Atomics are neither copyable nor copy-assignable.
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
};
1782
1783// atomic<T*>
1784
// Partial specialization for pointers: adds pointer arithmetic in units of
// ptrdiff_t on top of the generic base.
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    typedef _Tp* value_type;
    typedef ptrdiff_t difference_type;

    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT = default;

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
        // __atomic_fetch_add accepts function pointers, guard against them.
        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
        return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
    }

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
        // __atomic_fetch_add accepts function pointers, guard against them.
        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
        return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
    }

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
        // __atomic_fetch_sub accepts function pointers, guard against them.
        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
        return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
    }

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
        // __atomic_fetch_sub accepts function pointers, guard against them.
        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
        return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
    }

    // Operators mirror the integral specialization: post-forms return the
    // old pointer, pre-forms and compound assignment return the new one.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}

    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
};
1862
// atomic_is_lock_free
//
// The non-member functions below are the C-compatible API; each is a thin
// forwarder to the corresponding member function.

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

// atomic_init (deprecated in C++20; initialization is non-atomic)

template <class _Tp>
_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __cxx_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __cxx_atomic_init(&__o->__a_, __d);
}

// atomic_store

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __o->store(__d);
}

// atomic_store_explicit
// (_LIBCPP_CHECK_STORE_MEMORY_ORDER statically rejects load-only orders.)

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

// atomic_load

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

// atomic_load_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}
1974
// atomic_exchange

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// atomic_exchange_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

// atomic_compare_exchange_weak
// On failure, *__e is overwritten with the observed value (standard CAS
// contract); "weak" may fail spuriously.

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

// atomic_compare_exchange_weak_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
                                      typename atomic<_Tp>::value_type __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
                                        typename atomic<_Tp>::value_type __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
2093
// atomic_wait

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait(const volatile atomic<_Tp>* __o,
                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
{
    return __o->wait(__v);
}

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait(const atomic<_Tp>* __o,
                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
{
    return __o->wait(__v);
}

// atomic_wait_explicit

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
                          typename atomic<_Tp>::value_type __v,
                          memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->wait(__v, __m);
}

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait_explicit(const atomic<_Tp>* __o,
                          typename atomic<_Tp>::value_type __v,
                          memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->wait(__v, __m);
}

// atomic_notify_one

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_one();
}
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_one();
}

// atomic_notify_all

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_all();
}
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_all();
}
2163
// atomic_fetch_add
// (difference_type is ptrdiff_t for the pointer specialization, value_type
// otherwise; each function returns the value held before the operation.)

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

// atomic_fetch_sub

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
2229
// atomic_fetch_and
// The bitwise non-member operations are constrained via enable_if to
// non-bool integral types, matching which atomic<T> instantiations actually
// provide the corresponding members.

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

// atomic_fetch_or

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

// atomic_fetch_xor

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}
2385
2386// flag type and operations
2387
2388typedef struct atomic_flag
2389{
2390 __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;
2391
2392 _LIBCPP_INLINE_VISIBILITY
2393 bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2394 {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(a: &__a_, order: __m);}
2395 _LIBCPP_INLINE_VISIBILITY
2396 bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2397 {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(a: &__a_, order: __m);}
2398
2399 _LIBCPP_INLINE_VISIBILITY
2400 bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2401 {return __cxx_atomic_exchange(a: &__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), order: __m);}
2402 _LIBCPP_INLINE_VISIBILITY
2403 bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2404 {return __cxx_atomic_exchange(a: &__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), order: __m);}
2405 _LIBCPP_INLINE_VISIBILITY
2406 void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2407 {__cxx_atomic_store(a: &__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), order: __m);}
2408 _LIBCPP_INLINE_VISIBILITY
2409 void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2410 {__cxx_atomic_store(a: &__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), order: __m);}
2411
2412 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2413 void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2414 {__cxx_atomic_wait(a: &__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), order: __m);}
2415 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2416 void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2417 {__cxx_atomic_wait(a: &__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), order: __m);}
2418 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2419 void notify_one() volatile _NOEXCEPT
2420 {__cxx_atomic_notify_one(&__a_);}
2421 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2422 void notify_one() _NOEXCEPT
2423 {__cxx_atomic_notify_one(&__a_);}
2424 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2425 void notify_all() volatile _NOEXCEPT
2426 {__cxx_atomic_notify_all(&__a_);}
2427 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2428 void notify_all() _NOEXCEPT
2429 {__cxx_atomic_notify_all(&__a_);}
2430
2431#if _LIBCPP_STD_VER > 17
2432 _LIBCPP_INLINE_VISIBILITY constexpr
2433 atomic_flag() _NOEXCEPT : __a_(false) {}
2434#else
2435 _LIBCPP_INLINE_VISIBILITY
2436 atomic_flag() _NOEXCEPT = default;
2437#endif
2438
2439 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
2440 atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION
2441
2442 atomic_flag(const atomic_flag&) = delete;
2443 atomic_flag& operator=(const atomic_flag&) = delete;
2444 atomic_flag& operator=(const atomic_flag&) volatile = delete;
2445
2446} atomic_flag;
2447
2448
// Non-member atomic_flag API: each function forwards to the corresponding
// atomic_flag member.

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
{
    return __o->test();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

// wait/notify forwarders carry _LIBCPP_AVAILABILITY_SYNC because the
// underlying support may not exist on older deployment targets.

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
{
    __o->wait(__v);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
{
    __o->wait(__v);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait_explicit(const volatile atomic_flag* __o,
                          bool __v, memory_order __m) _NOEXCEPT
{
    __o->wait(__v, __m);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait_explicit(const atomic_flag* __o,
                          bool __v, memory_order __m) _NOEXCEPT
{
    __o->wait(__v, __m);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->notify_one();
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
{
    __o->notify_one();
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->notify_all();
}
2583
2584inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2585void
2586atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
2587{
2588 __o->notify_all();
2589}
2590
2591// fences
2592
2593inline _LIBCPP_INLINE_VISIBILITY
2594void
2595atomic_thread_fence(memory_order __m) _NOEXCEPT
2596{
2597 __cxx_atomic_thread_fence(order: __m);
2598}
2599
2600inline _LIBCPP_INLINE_VISIBILITY
2601void
2602atomic_signal_fence(memory_order __m) _NOEXCEPT
2603{
2604 __cxx_atomic_signal_fence(order: __m);
2605}
2606
2607// Atomics for standard typedef types
2608
// [atomics.alias] Required aliases for atomic specializations of the
// fundamental and <cstdint> types. `typedef` (not `using`) keeps these
// declarations valid in the library's pre-C++11 modes.
typedef atomic<bool> atomic_bool;
typedef atomic<char> atomic_char;
typedef atomic<signed char> atomic_schar;
typedef atomic<unsigned char> atomic_uchar;
typedef atomic<short> atomic_short;
typedef atomic<unsigned short> atomic_ushort;
typedef atomic<int> atomic_int;
typedef atomic<unsigned int> atomic_uint;
typedef atomic<long> atomic_long;
typedef atomic<unsigned long> atomic_ulong;
typedef atomic<long long> atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
// char8_t exists only when the build is not configured without it (C++20 feature).
#ifndef _LIBCPP_HAS_NO_CHAR8_T
typedef atomic<char8_t> atomic_char8_t;
#endif
typedef atomic<char16_t> atomic_char16_t;
typedef atomic<char32_t> atomic_char32_t;
// Omitted on configurations built without wide-character support.
#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
typedef atomic<wchar_t> atomic_wchar_t;
#endif

// Aliases for the <cstdint> least-width integer types.
typedef atomic<int_least8_t> atomic_int_least8_t;
typedef atomic<uint_least8_t> atomic_uint_least8_t;
typedef atomic<int_least16_t> atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t> atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t> atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// Aliases for the <cstdint> fastest-width integer types.
typedef atomic<int_fast8_t> atomic_int_fast8_t;
typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
typedef atomic<int_fast16_t> atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t> atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t> atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// Aliases for the <cstdint> exact-width integer types.
typedef atomic< int8_t> atomic_int8_t;
typedef atomic<uint8_t> atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

// Aliases for pointer-sized and maximum-width integer types.
typedef atomic<intptr_t> atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t> atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t> atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;
2663
// atomic_*_lock_free : prefer the contention type most highly, then the largest lock-free type

// _LIBCPP_CONTENTION_LOCK_FREE is true when the platform's contention type
// (__cxx_contention_t, used by the wait/notify machinery) is known at compile
// time to always be lock-free; __atomic_always_lock_free is a compiler builtin.
#ifdef __cpp_lib_atomic_is_always_lock_free
# define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0)
#else
# define _LIBCPP_CONTENTION_LOCK_FREE false
#endif

// Pick the widest integer type whose ATOMIC_*_LOCK_FREE macro is 2 (always
// lock-free), falling back from long long down to char. Within each branch,
// prefer __cxx_contention_t when it is lock-free so waits on these aliases
// can operate on the contention word directly.
#if ATOMIC_LLONG_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_INT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_SHORT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_CHAR_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type __libcpp_unsigned_lock_free;
#else
 // No signed/unsigned lock-free types
#define _LIBCPP_NO_LOCK_FREE_TYPES
#endif

// The C++20 atomic_{signed,unsigned}_lock_free aliases exist only when some
// always-lock-free integer type was found above.
#if !defined(_LIBCPP_NO_LOCK_FREE_TYPES)
typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free;
typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
#endif
2693
// Legacy initialization macros. Since C++20, atomic_flag and atomic<T> are
// value-initialized by their default constructors, making these unnecessary;
// both macros are deprecated in C++20.
#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

// Emit a deprecation warning on use in C++20 and later. The `#pragma clang
// deprecated` directive requires Clang 14 or newer.
#if _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
# if defined(_LIBCPP_CLANG_VER) && _LIBCPP_CLANG_VER >= 1400
# pragma clang deprecated(ATOMIC_FLAG_INIT)
# pragma clang deprecated(ATOMIC_VAR_INIT)
# endif
#endif // _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
2703
2704_LIBCPP_END_NAMESPACE_STD
2705
2706#endif // _LIBCPP_ATOMIC
2707

// source code of flutter_engine/third_party/libcxx/include/atomic