// shared_ptr atomic access -*- C++ -*-

// Copyright (C) 2014-2021 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/shared_ptr_atomic.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{memory}
 */

#ifndef _SHARED_PTR_ATOMIC_H
#define _SHARED_PTR_ATOMIC_H 1

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup pointer_abstractions
   * @{
   */
  /// @relates shared_ptr @{

  /// @cond undocumented

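  // Scoped lock used to serialize the shared_ptr atomic access functions
  // below.  The __GTHREADS constructors and destructor are defined out of
  // line in the library; as a sketch of the intent (an implementation
  // detail, not guaranteed by this header), they lock one or two mutexes
  // chosen from a small internal pool by hashing the pointer addresses,
  // and _M_key1/_M_key2 record which pool entries to unlock on destruction.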
  struct _Sp_locker
  {
    _Sp_locker(const _Sp_locker&) = delete;
    _Sp_locker& operator=(const _Sp_locker&) = delete;

#ifdef __GTHREADS
    explicit
    _Sp_locker(const void*) noexcept;
    _Sp_locker(const void*, const void*) noexcept;
    ~_Sp_locker();

  private:
    unsigned char _M_key1;
    unsigned char _M_key2;
#else
    explicit _Sp_locker(const void*, const void* = nullptr) { }
#endif
  };

  /// @endcond

  /**
   *  @brief  Report whether shared_ptr atomic operations are lock-free.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @return True if atomic access to @c *__p is lock-free, false otherwise.
   *  @{
   */
  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>* __p)
    {
#ifdef __GTHREADS
      return __gthread_active_p() == 0;
#else
      return true;
#endif
    }

  template<typename _Tp>
    inline bool
    atomic_is_lock_free(const shared_ptr<_Tp>* __p)
    { return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }

  /// @}
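  // Usage sketch (not part of this header): the free-function API mirrors
  // atomic_is_lock_free for ordinary atomics.  This implementation reports
  // lock-free only when the program is effectively single-threaded, since
  // the functions below otherwise serialize access through an internal mutex.
  //
  //   std::shared_ptr<int> sp = std::make_shared<int>(1);
  //   bool lock_free = std::atomic_is_lock_free(&sp);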

  /**
   *  @brief  Atomic load for shared_ptr objects.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @return @c *__p
   *
   *  The memory order shall not be @c memory_order_release or
   *  @c memory_order_acq_rel.
   *  @{
   */
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load_explicit(const shared_ptr<_Tp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load(const shared_ptr<_Tp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load_explicit(const __shared_ptr<_Tp, _Lp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load(const __shared_ptr<_Tp, _Lp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
  /// @}
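  // Usage sketch (not part of this header; names are illustrative): reading
  // a shared_ptr that another thread may replace concurrently.  A plain copy
  // of the shared_ptr object would be a data race if a writer assigns to it
  // at the same time; atomic_load serializes the access.
  //
  //   std::shared_ptr<const Config> global_config;  // replaced by an updater
  //
  //   void worker()
  //   {
  //     auto snapshot = std::atomic_load(&global_config);
  //     if (snapshot)
  //       use_config(*snapshot);
  //   }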

  /**
   *  @brief  Atomic store for shared_ptr objects.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @param  __r The value to store.
   *
   *  The memory order shall not be @c memory_order_acquire or
   *  @c memory_order_acq_rel.
   *  @{
   */
  template<typename _Tp>
    inline void
    atomic_store_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
                          memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp>
    inline void
    atomic_store(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
                          __shared_ptr<_Tp, _Lp> __r,
                          memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
  /// @}
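  // Usage sketch (names as in the atomic_load example above): publishing a
  // new value.  Paired with atomic_load this gives a simple read-copy-update
  // pattern for rarely-changing shared state.
  //
  //   void update_config(std::shared_ptr<const Config> fresh)
  //   { std::atomic_store(&global_config, std::move(fresh)); }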

  /**
   *  @brief  Atomic exchange for shared_ptr objects.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @param  __r New value to store in @c *__p.
   *  @return The original value of @c *__p
   *  @{
   */
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
                             memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
                                           memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
                             __shared_ptr<_Tp, _Lp> __r,
                             memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
                                           memory_order_seq_cst);
    }
  /// @}
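  // Usage sketch (names are illustrative): atomically take whatever is
  // currently stored, leaving an empty shared_ptr behind.
  //
  //   std::shared_ptr<Task> pending_task;
  //
  //   std::shared_ptr<Task> take_pending()
  //   { return std::atomic_exchange(&pending_task, std::shared_ptr<Task>{}); }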

  /**
   *  @brief  Atomic compare-and-swap for shared_ptr objects.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @param  __v A non-null pointer to a shared_ptr object.
   *  @param  __w The value to store in @c *__p if it was equivalent to @c *__v.
   *  @return True if @c *__p was equivalent to @c *__v, false otherwise.
   *
   *  The memory order for failure shall not be @c memory_order_release or
   *  @c memory_order_acq_rel, or stronger than the memory order for success.
   *  @{
   */
  template<typename _Tp>
    bool
    atomic_compare_exchange_strong_explicit(shared_ptr<_Tp>* __p,
                                            shared_ptr<_Tp>* __v,
                                            shared_ptr<_Tp> __w,
                                            memory_order,
                                            memory_order)
    {
      shared_ptr<_Tp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<shared_ptr<_Tp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
        {
          __x = std::move(*__p);
          *__p = std::move(__w);
          return true;
        }
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_strong(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
                                   shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak_explicit(shared_ptr<_Tp>* __p,
                                          shared_ptr<_Tp>* __v,
                                          shared_ptr<_Tp> __w,
                                          memory_order __success,
                                          memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), __success, __failure);
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
                                 shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    bool
    atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
                                            __shared_ptr<_Tp, _Lp>* __v,
                                            __shared_ptr<_Tp, _Lp> __w,
                                            memory_order,
                                            memory_order)
    {
      __shared_ptr<_Tp, _Lp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<__shared_ptr<_Tp, _Lp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
        {
          __x = std::move(*__p);
          *__p = std::move(__w);
          return true;
        }
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
                                   __shared_ptr<_Tp, _Lp>* __v,
                                   __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
                                          __shared_ptr<_Tp, _Lp>* __v,
                                          __shared_ptr<_Tp, _Lp> __w,
                                          memory_order __success,
                                          memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), __success, __failure);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
                                 __shared_ptr<_Tp, _Lp>* __v,
                                 __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  /// @}
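  // Usage sketch (names and the Node type are illustrative): a typical
  // compare-and-swap loop pushing onto an immutable list whose head is a
  // shared_ptr.  On failure the expected value is refreshed with the current
  // value, as with std::atomic<T>::compare_exchange_strong.
  //
  //   std::shared_ptr<Node> list_head;
  //
  //   void push(int value)
  //   {
  //     auto expected = std::atomic_load(&list_head);
  //     std::shared_ptr<Node> desired;
  //     do
  //       desired = std::make_shared<Node>(value, expected);
  //     while (!std::atomic_compare_exchange_weak(&list_head, &expected,
  //                                               desired));
  //   }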

  /// @} relates shared_ptr
  /// @} group pointer_abstractions

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // _SHARED_PTR_ATOMIC_H
