// shared_ptr atomic access -*- C++ -*-

// Copyright (C) 2014-2024 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/shared_ptr_atomic.h
 * This is an internal header file, included by other library headers.
 * Do not attempt to use it directly. @headername{memory}
 */

#ifndef _SHARED_PTR_ATOMIC_H
#define _SHARED_PTR_ATOMIC_H 1

#include <bits/atomic_base.h>
#include <bits/shared_ptr.h>

// Annotations for the custom locking in atomic<shared_ptr<T>>.
#if defined _GLIBCXX_TSAN && __has_include(<sanitizer/tsan_interface.h>)
#include <sanitizer/tsan_interface.h>
#define _GLIBCXX_TSAN_MUTEX_DESTROY(X) \
  __tsan_mutex_destroy(X, __tsan_mutex_not_static)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X) \
  __tsan_mutex_pre_lock(X, __tsan_mutex_not_static|__tsan_mutex_try_lock)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X) __tsan_mutex_post_lock(X, \
    __tsan_mutex_not_static|__tsan_mutex_try_lock_failed, 0)
#define _GLIBCXX_TSAN_MUTEX_LOCKED(X) \
  __tsan_mutex_post_lock(X, __tsan_mutex_not_static, 0)
#define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X) __tsan_mutex_pre_unlock(X, 0)
#define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X) __tsan_mutex_post_unlock(X, 0)
#define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X) __tsan_mutex_pre_signal(X, 0)
#define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X) __tsan_mutex_post_signal(X, 0)
#else
#define _GLIBCXX_TSAN_MUTEX_DESTROY(X)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X)
#define _GLIBCXX_TSAN_MUTEX_LOCKED(X)
#define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X)
#define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X)
#define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X)
#define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X)
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup pointer_abstractions
   * @relates shared_ptr
   * @{
   */

  /// @cond undocumented

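  // A scoped-locking helper used by the deprecated free functions below.
  // With __GTHREADS the constructors acquire one or two locks from a small
  // global table (defined in the library, not in this header), chosen from
  // the address(es) passed in; _M_key1/_M_key2 record which entries to
  // release in the destructor. Without threads it is a no-op.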
  struct _Sp_locker
  {
    _Sp_locker(const _Sp_locker&) = delete;
    _Sp_locker& operator=(const _Sp_locker&) = delete;

#ifdef __GTHREADS
    explicit
    _Sp_locker(const void*) noexcept;
    _Sp_locker(const void*, const void*) noexcept;
    ~_Sp_locker();

  private:
    unsigned char _M_key1;
    unsigned char _M_key2;
#else
    explicit _Sp_locker(const void*, const void* = nullptr) { }
#endif
  };

  /// @endcond

  /**
   * @brief Report whether shared_ptr atomic operations are lock-free.
   * @param __p A non-null pointer to a shared_ptr object.
   * @return True if atomic access to @c *__p is lock-free, false otherwise.
   * @{
   */
  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>*)
    {
#ifdef __GTHREADS
      return __gthread_active_p() == 0;
#else
      return true;
#endif
    }

  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_is_lock_free(const shared_ptr<_Tp>* __p)
    { return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }

  /// @}
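
  // Illustrative sketch (not part of the header's API): these free
  // functions take the address of an ordinary shared_ptr, so typical
  // use looks like
  //
  //   std::shared_ptr<int> ptr = std::make_shared<int>(1);
  //   bool lf = std::atomic_is_lock_free(&ptr);
  //
  // With __GTHREADS this is only true while the process is single-threaded
  // (__gthread_active_p() == 0); otherwise the operations below take locks.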

  /**
   * @brief Atomic load for shared_ptr objects.
   * @param __p A non-null pointer to a shared_ptr object.
   * @return @c *__p
   *
   * The memory order shall not be `memory_order_release` or
   * `memory_order_acq_rel`.
   * @{
   */
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline shared_ptr<_Tp>
    atomic_load_explicit(const shared_ptr<_Tp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline shared_ptr<_Tp>
    atomic_load(const shared_ptr<_Tp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline __shared_ptr<_Tp, _Lp>
    atomic_load_explicit(const __shared_ptr<_Tp, _Lp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline __shared_ptr<_Tp, _Lp>
    atomic_load(const __shared_ptr<_Tp, _Lp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
  /// @}
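
  // Illustrative sketch (`config` is a hypothetical global, not part of
  // this header): a reader takes a snapshot that remains valid even if
  // another thread replaces the value immediately afterwards, because the
  // returned copy owns its own reference.
  //
  //   std::shared_ptr<const Config> config;  // written via atomic_store
  //   std::shared_ptr<const Config> snapshot = std::atomic_load(&config);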

  /**
   * @brief Atomic store for shared_ptr objects.
   * @param __p A non-null pointer to a shared_ptr object.
   * @param __r The value to store.
   *
   * The memory order shall not be `memory_order_acquire` or
   * `memory_order_acq_rel`.
   * @{
   */
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline void
    atomic_store_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
                          memory_order)
    {
      _Sp_locker __lock{__p};
      // Swap rather than assign, so the object previously owned by *__p
      // is not destroyed while the lock is held.
      __p->swap(__r);
    }

  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline void
    atomic_store(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline void
    atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
                          __shared_ptr<_Tp, _Lp> __r,
                          memory_order)
    {
      _Sp_locker __lock{__p};
      // Swap rather than assign, so the object previously owned by *__p
      // is not destroyed while the lock is held.
      __p->swap(__r);
    }

  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline void
    atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
  /// @}
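
  // Illustrative sketch (same hypothetical `config` as above): a writer
  // publishes a new value; the previous value is released only after the
  // internal lock is dropped, when the by-value parameter __r dies.
  //
  //   std::atomic_store(&config, std::make_shared<const Config>(...));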

  /**
   * @brief Atomic exchange for shared_ptr objects.
   * @param __p A non-null pointer to a shared_ptr object.
   * @param __r New value to store in `*__p`.
   * @return The original value of `*__p`
   * @{
   */
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline shared_ptr<_Tp>
    atomic_exchange_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
                             memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline shared_ptr<_Tp>
    atomic_exchange(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
                                           memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
                             __shared_ptr<_Tp, _Lp> __r,
                             memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
                                           memory_order_seq_cst);
    }
  /// @}
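
  // Illustrative sketch: swap in a new value and retire the old one
  // (hypothetical `config` again).
  //
  //   std::shared_ptr<const Config> old
  //     = std::atomic_exchange(&config, std::make_shared<const Config>());
  //   // `old` owns the previous value; inspect or discard it here.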

  /**
   * @brief Atomic compare-and-swap for shared_ptr objects.
   * @param __p A non-null pointer to a shared_ptr object.
   * @param __v A non-null pointer to a shared_ptr object.
   * @param __w The value to store in `*__p` on success.
   * @return True if `*__p` was equivalent to `*__v`, false otherwise.
   *
   * The memory order for failure shall not be `memory_order_release` or
   * `memory_order_acq_rel`.
   * @{
   */
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    bool
    atomic_compare_exchange_strong_explicit(shared_ptr<_Tp>* __p,
                                            shared_ptr<_Tp>* __v,
                                            shared_ptr<_Tp> __w,
                                            memory_order,
                                            memory_order)
    {
      shared_ptr<_Tp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<shared_ptr<_Tp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
        {
          __x = std::move(*__p);
          *__p = std::move(__w);
          return true;
        }
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_compare_exchange_strong(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
                                   shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_compare_exchange_weak_explicit(shared_ptr<_Tp>* __p,
                                          shared_ptr<_Tp>* __v,
                                          shared_ptr<_Tp> __w,
                                          memory_order __success,
                                          memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), __success, __failure);
    }

  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_compare_exchange_weak(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
                                 shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    bool
    atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
                                            __shared_ptr<_Tp, _Lp>* __v,
                                            __shared_ptr<_Tp, _Lp> __w,
                                            memory_order,
                                            memory_order)
    {
      __shared_ptr<_Tp, _Lp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<__shared_ptr<_Tp, _Lp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
        {
          __x = std::move(*__p);
          *__p = std::move(__w);
          return true;
        }
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
                                   __shared_ptr<_Tp, _Lp>* __v,
                                   __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
                                          __shared_ptr<_Tp, _Lp>* __v,
                                          __shared_ptr<_Tp, _Lp> __w,
                                          memory_order __success,
                                          memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), __success, __failure);
    }

  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
                                 __shared_ptr<_Tp, _Lp>* __v,
                                 __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  /// @}
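
  // Illustrative sketch: a read-modify-write loop built from these
  // primitives (hypothetical `counter` variable). On failure `expected`
  // is updated to the current value, so each retry recomputes from fresh
  // data. Note that equivalence means the same stored pointer *and* the
  // same ownership, as tested with owner_less above.
  //
  //   std::shared_ptr<int> counter = std::make_shared<int>(0);
  //   std::shared_ptr<int> expected = std::atomic_load(&counter);
  //   std::shared_ptr<int> next;
  //   do
  //     next = std::make_shared<int>(*expected + 1);
  //   while (!std::atomic_compare_exchange_weak(&counter, &expected, next));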

  /// @} group pointer_abstractions

#ifdef __glibcxx_atomic_shared_ptr // C++ >= 20 && HOSTED
  template<typename _Tp>
    struct atomic;

  /**
   * @addtogroup pointer_abstractions
   * @relates shared_ptr
   * @{
   */

  template<typename _Tp>
    class _Sp_atomic
    {
      using value_type = _Tp;

      friend struct atomic<_Tp>;

      // An atomic version of __shared_count<> and __weak_count<>.
      // Stores a _Sp_counted_base<>* but uses the LSB as a lock.
      struct _Atomic_count
      {
        // Either __shared_count<> or __weak_count<>
        using __count_type = decltype(_Tp::_M_refcount);

        // _Sp_counted_base<>*
        using pointer = decltype(__count_type::_M_pi);

        // Ensure we can use the LSB as the lock bit.
        static_assert(alignof(remove_pointer_t<pointer>) > 1);

        constexpr _Atomic_count() noexcept = default;

        explicit
        _Atomic_count(__count_type&& __c) noexcept
        : _M_val(reinterpret_cast<uintptr_t>(__c._M_pi))
        {
          __c._M_pi = nullptr;
        }

        ~_Atomic_count()
        {
          auto __val = _M_val.load(memory_order_relaxed);
          _GLIBCXX_TSAN_MUTEX_DESTROY(&_M_val);
          __glibcxx_assert(!(__val & _S_lock_bit));
          if (auto __pi = reinterpret_cast<pointer>(__val))
            {
              if constexpr (__is_shared_ptr<_Tp>)
                __pi->_M_release();
              else
                __pi->_M_weak_release();
            }
        }

        _Atomic_count(const _Atomic_count&) = delete;
        _Atomic_count& operator=(const _Atomic_count&) = delete;

        // Precondition: Caller does not hold lock!
        // Returns the raw pointer value without the lock bit set.
        pointer
        lock(memory_order __o) const noexcept
        {
          // To acquire the lock we flip the LSB from 0 to 1.

          auto __current = _M_val.load(memory_order_relaxed);
          while (__current & _S_lock_bit)
            {
#if __glibcxx_atomic_wait
              __detail::__thread_relax();
#endif
              __current = _M_val.load(memory_order_relaxed);
            }

          _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);

          while (!_M_val.compare_exchange_strong(__current,
                                                 __current | _S_lock_bit,
                                                 __o,
                                                 memory_order_relaxed))
            {
              _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(&_M_val);
#if __glibcxx_atomic_wait
              __detail::__thread_relax();
#endif
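              // On failure the CAS stored the value it actually saw into
              // __current; clear the lock bit so the next attempt again
              // tries to flip the LSB from 0 to 1.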
              __current = __current & ~_S_lock_bit;
              _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
            }
          _GLIBCXX_TSAN_MUTEX_LOCKED(&_M_val);
          return reinterpret_cast<pointer>(__current);
        }

        // Precondition: caller holds lock!
        void
        unlock(memory_order __o) const noexcept
        {
          _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
          _M_val.fetch_sub(1, __o);
          _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
        }

        // Swaps the values of *this and __c, and unlocks *this.
        // Precondition: caller holds lock!
        void
        _M_swap_unlock(__count_type& __c, memory_order __o) noexcept
        {
          if (__o != memory_order_seq_cst)
            __o = memory_order_release;
          auto __x = reinterpret_cast<uintptr_t>(__c._M_pi);
          _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
          __x = _M_val.exchange(__x, __o);
          _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
          __c._M_pi = reinterpret_cast<pointer>(__x & ~_S_lock_bit);
        }

#if __glibcxx_atomic_wait
        // Precondition: caller holds lock!
        void
        _M_wait_unlock(memory_order __o) const noexcept
        {
          _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
          auto __v = _M_val.fetch_sub(1, memory_order_relaxed);
          _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
          _M_val.wait(__v & ~_S_lock_bit, __o);
        }

        void
        notify_one() noexcept
        {
          _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
          _M_val.notify_one();
          _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
        }

        void
        notify_all() noexcept
        {
          _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
          _M_val.notify_all();
          _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
        }
#endif

      private:
        mutable __atomic_base<uintptr_t> _M_val{0};
        static constexpr uintptr_t _S_lock_bit{1};
      };

      typename _Tp::element_type* _M_ptr = nullptr;
      _Atomic_count _M_refcount;

      static typename _Atomic_count::pointer
      _S_add_ref(typename _Atomic_count::pointer __p)
      {
        if (__p)
          {
            if constexpr (__is_shared_ptr<_Tp>)
              __p->_M_add_ref_copy();
            else
              __p->_M_weak_add_ref();
          }
        return __p;
      }

      constexpr _Sp_atomic() noexcept = default;

      explicit
      _Sp_atomic(value_type __r) noexcept
      : _M_ptr(__r._M_ptr), _M_refcount(std::move(__r._M_refcount))
      { }

      ~_Sp_atomic() = default;

      _Sp_atomic(const _Sp_atomic&) = delete;
      void operator=(const _Sp_atomic&) = delete;

      value_type
      load(memory_order __o) const noexcept
      {
        __glibcxx_assert(__o != memory_order_release
                         && __o != memory_order_acq_rel);
        // Ensure that the correct value of _M_ptr is visible after locking,
        // by upgrading relaxed or consume to acquire.
        if (__o != memory_order_seq_cst)
          __o = memory_order_acquire;

        value_type __ret;
        auto __pi = _M_refcount.lock(__o);
        __ret._M_ptr = _M_ptr;
        __ret._M_refcount._M_pi = _S_add_ref(__pi);
        _M_refcount.unlock(memory_order_relaxed);
        return __ret;
      }

      void
      swap(value_type& __r, memory_order __o) noexcept
      {
        _M_refcount.lock(memory_order_acquire);
        std::swap(_M_ptr, __r._M_ptr);
        _M_refcount._M_swap_unlock(__r._M_refcount, __o);
      }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
                              memory_order __o, memory_order __o2) noexcept
      {
        bool __result = true;
        auto __pi = _M_refcount.lock(memory_order_acquire);
        if (_M_ptr == __expected._M_ptr
            && __pi == __expected._M_refcount._M_pi)
          {
            _M_ptr = __desired._M_ptr;
            _M_refcount._M_swap_unlock(__desired._M_refcount, __o);
          }
        else
          {
            _Tp __sink = std::move(__expected);
            __expected._M_ptr = _M_ptr;
            __expected._M_refcount._M_pi = _S_add_ref(__pi);
            _M_refcount.unlock(__o2);
            __result = false;
          }
        return __result;
      }

#if __glibcxx_atomic_wait
      void
      wait(value_type __old, memory_order __o) const noexcept
      {
        auto __pi = _M_refcount.lock(memory_order_acquire);
        if (_M_ptr == __old._M_ptr && __pi == __old._M_refcount._M_pi)
          _M_refcount._M_wait_unlock(__o);
        else
          _M_refcount.unlock(memory_order_relaxed);
      }

      void
      notify_one() noexcept
      {
        _M_refcount.notify_one();
      }

      void
      notify_all() noexcept
      {
        _M_refcount.notify_all();
      }
#endif
    };

  template<typename _Tp>
    struct atomic<shared_ptr<_Tp>>
    {
    public:
      using value_type = shared_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 3661. constinit atomic<shared_ptr<T>> a(nullptr); should work
      constexpr atomic(nullptr_t) noexcept : atomic() { }

      atomic(shared_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;

      shared_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator shared_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }

      void
      store(shared_ptr<_Tp> __desired,
            memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(shared_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 3893. LWG 3661 broke atomic<shared_ptr<T>> a; a = nullptr;
      void
      operator=(nullptr_t) noexcept
      { store(nullptr); }

      shared_ptr<_Tp>
      exchange(shared_ptr<_Tp> __desired,
               memory_order __o = memory_order_seq_cst) noexcept
      {
        _M_impl.swap(__desired, __o);
        return __desired;
      }

      bool
      compare_exchange_strong(shared_ptr<_Tp>& __expected,
                              shared_ptr<_Tp> __desired,
                              memory_order __o, memory_order __o2) noexcept
      {
        return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
      }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
                              memory_order __o = memory_order_seq_cst) noexcept
      {
        memory_order __o2;
        switch (__o)
        {
        case memory_order_acq_rel:
          __o2 = memory_order_acquire;
          break;
        case memory_order_release:
          __o2 = memory_order_relaxed;
          break;
        default:
          __o2 = __o;
        }
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o, memory_order __o2) noexcept
      {
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__expected, std::move(__desired), __o);
      }

#if __glibcxx_atomic_wait
      void
      wait(value_type __old,
           memory_order __o = memory_order_seq_cst) const noexcept
      {
        _M_impl.wait(std::move(__old), __o);
      }

      void
      notify_one() noexcept
      {
        _M_impl.notify_one();
      }

      void
      notify_all() noexcept
      {
        _M_impl.notify_all();
      }
#endif

    private:
      _Sp_atomic<shared_ptr<_Tp>> _M_impl;
    };
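
  // Illustrative sketch (hypothetical names): the C++20 replacement for
  // the deprecated free functions above. All access goes through the
  // atomic object, so unguarded reads of the shared_ptr are impossible
  // by construction.
  //
  //   std::atomic<std::shared_ptr<std::string>> last_msg;
  //   last_msg.store(std::make_shared<std::string>("hello"));
  //   std::shared_ptr<std::string> seen = last_msg.load();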

  template<typename _Tp>
    struct atomic<weak_ptr<_Tp>>
    {
    public:
      using value_type = weak_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      atomic(weak_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;

      weak_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator weak_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }

      void
      store(weak_ptr<_Tp> __desired,
            memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(weak_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }

      weak_ptr<_Tp>
      exchange(weak_ptr<_Tp> __desired,
               memory_order __o = memory_order_seq_cst) noexcept
      {
        _M_impl.swap(__desired, __o);
        return __desired;
      }

      bool
      compare_exchange_strong(weak_ptr<_Tp>& __expected,
                              weak_ptr<_Tp> __desired,
                              memory_order __o, memory_order __o2) noexcept
      {
        return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
      }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
                              memory_order __o = memory_order_seq_cst) noexcept
      {
        memory_order __o2;
        switch (__o)
        {
        case memory_order_acq_rel:
          __o2 = memory_order_acquire;
          break;
        case memory_order_release:
          __o2 = memory_order_relaxed;
          break;
        default:
          __o2 = __o;
        }
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o, memory_order __o2) noexcept
      {
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__expected, std::move(__desired), __o);
      }

#if __glibcxx_atomic_wait
      void
      wait(value_type __old,
           memory_order __o = memory_order_seq_cst) const noexcept
      {
        _M_impl.wait(std::move(__old), __o);
      }

      void
      notify_one() noexcept
      {
        _M_impl.notify_one();
      }

      void
      notify_all() noexcept
      {
        _M_impl.notify_all();
      }
#endif

    private:
      _Sp_atomic<weak_ptr<_Tp>> _M_impl;
    };
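
  // Illustrative sketch (hypothetical names): an atomic weak_ptr works
  // well as a cache slot that must not keep its object alive.
  //
  //   std::atomic<std::weak_ptr<Widget>> cached;
  //   if (std::shared_ptr<Widget> w = cached.load().lock())
  //     use(*w);                      // cached object still alive
  //   else
  //     cached.store(make_widget()); // expired; publish a fresh one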
  /// @} group pointer_abstractions
#endif // C++20

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // _SHARED_PTR_ATOMIC_H