#ifndef _SHARED_PTR_ATOMIC_H
#define _SHARED_PTR_ATOMIC_H 1

#include <bits/shared_ptr.h>
// ThreadSanitizer annotations for the spin-lock behaviour of the low
// bit of _M_val (see _Atomic_count below, which applies these macros
// to &_M_val).  Under TSan the __tsan_mutex_* hooks describe the
// try-lock / lock / unlock / signal transitions so TSan can model the
// bit as a mutex; in all other builds the macros expand to nothing.
//
// NOTE(review): the extracted text had lost the #else and #endif of
// this conditional, leaving the empty fallback definitions colliding
// with the TSan ones and the #if unterminated; restored here.
#if defined _GLIBCXX_TSAN && __has_include(<sanitizer/tsan_interface.h>)
#include <sanitizer/tsan_interface.h>
#define _GLIBCXX_TSAN_MUTEX_DESTROY(X) \
  __tsan_mutex_destroy(X, __tsan_mutex_not_static)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X) \
  __tsan_mutex_pre_lock(X, __tsan_mutex_not_static|__tsan_mutex_try_lock)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X) __tsan_mutex_post_lock(X, \
    __tsan_mutex_not_static|__tsan_mutex_try_lock_failed, 0)
#define _GLIBCXX_TSAN_MUTEX_LOCKED(X) \
  __tsan_mutex_post_lock(X, __tsan_mutex_not_static, 0)
#define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X) __tsan_mutex_pre_unlock(X, 0)
#define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X) __tsan_mutex_post_unlock(X, 0)
#define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X) __tsan_mutex_pre_signal(X, 0)
#define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X) __tsan_mutex_post_signal(X, 0)
#else
// Not TSan: annotations compile away.
#define _GLIBCXX_TSAN_MUTEX_DESTROY(X)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X)
#define _GLIBCXX_TSAN_MUTEX_LOCKED(X)
#define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X)
#define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X)
#define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X)
#define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X)
#endif
62namespace std _GLIBCXX_VISIBILITY(default)
64_GLIBCXX_BEGIN_NAMESPACE_VERSION
// _Sp_locker: scoped-lock helper used by the deprecated atomic_*
// free functions on shared_ptr.
// NOTE(review): fragmentary extraction -- the class head, the
// preprocessor conditionals that select between the two constructor
// sets, and the destructor are missing from this view, and the
// leading "76"/"77"/... tokens are original-file line numbers fused
// into the code text.  Do not edit code here without the full header.
// Non-copyable: it is an RAII guard.
76 _Sp_locker(
const _Sp_locker&) =
delete;
77 _Sp_locker& operator=(
const _Sp_locker&) =
delete;
// Constructors taking the address(es) of one or two shared_ptr
// objects -- presumably they acquire the lock(s) guarding those
// objects; TODO confirm against the full header.
81 _Sp_locker(
const void*)
noexcept;
82 _Sp_locker(
const void*,
const void*)
noexcept;
// Keys recording which lock(s) were taken -- presumably indices into
// a small global lock table keyed by pointer; TODO confirm.
86 unsigned char _M_key1;
87 unsigned char _M_key2;
// No-op constructor -- presumably the single-threaded configuration
// where no locking is needed; the #ifdef selecting it was lost in
// extraction.  TODO confirm.
89 explicit _Sp_locker(
const void*,
const void* =
nullptr) { }
101 template<
typename _Tp, _Lock_policy _Lp>
102 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
107 return __gthread_active_p() == 0;
113 template<
typename _Tp>
114 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
130 template<
typename _Tp>
131 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
132 inline shared_ptr<_Tp>
139 template<
typename _Tp>
140 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
141 inline shared_ptr<_Tp>
143 {
return std::atomic_load_explicit(__p, memory_order_seq_cst); }
145 template<
typename _Tp, _Lock_policy _Lp>
146 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
147 inline __shared_ptr<_Tp, _Lp>
154 template<
typename _Tp, _Lock_policy _Lp>
155 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
156 inline __shared_ptr<_Tp, _Lp>
158 {
return std::atomic_load_explicit(__p, memory_order_seq_cst); }
170 template<
typename _Tp>
171 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
180 template<
typename _Tp>
181 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
184 { std::atomic_store_explicit(__p,
std::move(__r), memory_order_seq_cst); }
186 template<
typename _Tp, _Lock_policy _Lp>
187 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
189 atomic_store_explicit(__shared_ptr<_Tp,
_Lp>* __p,
190 __shared_ptr<_Tp,
_Lp> __r,
197 template<
typename _Tp, _Lock_policy _Lp>
198 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
200 atomic_store(__shared_ptr<_Tp,
_Lp>* __p, __shared_ptr<_Tp,
_Lp> __r)
201 { std::atomic_store_explicit(__p,
std::move(__r), memory_order_seq_cst); }
211 template<
typename _Tp>
212 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
213 inline shared_ptr<_Tp>
222 template<
typename _Tp>
223 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
224 inline shared_ptr<_Tp>
227 return std::atomic_exchange_explicit(__p,
std::move(__r),
228 memory_order_seq_cst);
231 template<
typename _Tp, _Lock_policy _Lp>
232 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
233 inline __shared_ptr<_Tp, _Lp>
234 atomic_exchange_explicit(__shared_ptr<_Tp,
_Lp>* __p,
235 __shared_ptr<_Tp,
_Lp> __r,
243 template<
typename _Tp, _Lock_policy _Lp>
244 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
245 inline __shared_ptr<_Tp, _Lp>
246 atomic_exchange(__shared_ptr<_Tp,
_Lp>* __p, __shared_ptr<_Tp,
_Lp> __r)
248 return std::atomic_exchange_explicit(__p,
std::move(__r),
249 memory_order_seq_cst);
264 template<
typename _Tp>
265 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
267 atomic_compare_exchange_strong_explicit(
shared_ptr<_Tp>* __p,
276 if (*__p == *
__v && !__less(*__p, *
__v) && !__less(*
__v, *__p))
287 template<
typename _Tp>
288 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
293 return std::atomic_compare_exchange_strong_explicit(__p,
__v,
294 std::move(
__w), memory_order_seq_cst, memory_order_seq_cst);
297 template<
typename _Tp>
298 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
306 return std::atomic_compare_exchange_strong_explicit(__p,
__v,
310 template<
typename _Tp>
311 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
316 return std::atomic_compare_exchange_weak_explicit(__p,
__v,
317 std::move(
__w), memory_order_seq_cst, memory_order_seq_cst);
320 template<
typename _Tp, _Lock_policy _Lp>
321 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
323 atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp,
_Lp>* __p,
324 __shared_ptr<_Tp,
_Lp>*
__v,
325 __shared_ptr<_Tp,
_Lp>
__w,
332 if (*__p == *
__v && !__less(*__p, *
__v) && !__less(*
__v, *__p))
343 template<
typename _Tp, _Lock_policy _Lp>
344 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
346 atomic_compare_exchange_strong(__shared_ptr<_Tp,
_Lp>* __p,
347 __shared_ptr<_Tp,
_Lp>*
__v,
348 __shared_ptr<_Tp,
_Lp>
__w)
350 return std::atomic_compare_exchange_strong_explicit(__p,
__v,
351 std::move(
__w), memory_order_seq_cst, memory_order_seq_cst);
354 template<
typename _Tp, _Lock_policy _Lp>
355 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
357 atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp,
_Lp>* __p,
358 __shared_ptr<_Tp,
_Lp>*
__v,
359 __shared_ptr<_Tp,
_Lp>
__w,
363 return std::atomic_compare_exchange_strong_explicit(__p,
__v,
367 template<
typename _Tp, _Lock_policy _Lp>
368 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
370 atomic_compare_exchange_weak(__shared_ptr<_Tp,
_Lp>* __p,
371 __shared_ptr<_Tp,
_Lp>*
__v,
372 __shared_ptr<_Tp,
_Lp>
__w)
374 return std::atomic_compare_exchange_weak_explicit(__p,
__v,
375 std::move(
__w), memory_order_seq_cst, memory_order_seq_cst);
381#ifdef __glibcxx_atomic_shared_ptr
382 template<
typename _Tp>
391 template<
typename _Tp>
394 using value_type = _Tp;
396 friend struct atomic<_Tp>;
403 using __count_type =
decltype(_Tp::_M_refcount);
406 using pointer =
decltype(__count_type::_M_pi);
409 static_assert(
alignof(remove_pointer_t<pointer>) > 1);
411 constexpr _Atomic_count() noexcept = default;
414 _Atomic_count(__count_type&& __c) noexcept
415 : _M_val(reinterpret_cast<uintptr_t>(__c._M_pi))
422 auto __val = _M_val.load(memory_order_relaxed);
423 _GLIBCXX_TSAN_MUTEX_DESTROY(&_M_val);
424 __glibcxx_assert(!(__val & _S_lock_bit));
425 if (
auto __pi =
reinterpret_cast<pointer
>(__val))
427 if constexpr (__is_shared_ptr<_Tp>)
430 __pi->_M_weak_release();
434 _Atomic_count(
const _Atomic_count&) =
delete;
435 _Atomic_count& operator=(
const _Atomic_count&) =
delete;
444 auto __current = _M_val.load(memory_order_relaxed);
445 while (__current & _S_lock_bit)
447#if __glibcxx_atomic_wait
448 __detail::__thread_relax();
450 __current = _M_val.load(memory_order_relaxed);
453 _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
455 while (!_M_val.compare_exchange_strong(__current,
456 __current | _S_lock_bit,
458 memory_order_relaxed))
460 _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(&_M_val);
461#if __glibcxx_atomic_wait
462 __detail::__thread_relax();
464 __current = __current & ~_S_lock_bit;
465 _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
467 _GLIBCXX_TSAN_MUTEX_LOCKED(&_M_val);
468 return reinterpret_cast<pointer
>(__current);
475 _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
476 _M_val.fetch_sub(1, __o);
477 _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
483 _M_swap_unlock(__count_type& __c,
memory_order __o)
noexcept
485 if (__o != memory_order_seq_cst)
486 __o = memory_order_release;
487 auto __x =
reinterpret_cast<uintptr_t
>(__c._M_pi);
488 _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
489 __x = _M_val.exchange(__x, __o);
490 _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
491 __c._M_pi =
reinterpret_cast<pointer
>(__x & ~_S_lock_bit);
494#if __glibcxx_atomic_wait
499 _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
500 auto __v = _M_val.fetch_sub(1, memory_order_relaxed);
501 _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
502 _M_val.wait(__v & ~_S_lock_bit, __o);
506 notify_one() noexcept
508 _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
510 _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
514 notify_all() noexcept
516 _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
518 _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
523 mutable __atomic_base<uintptr_t> _M_val{0};
524 static constexpr uintptr_t _S_lock_bit{1};
527 typename _Tp::element_type* _M_ptr =
nullptr;
528 _Atomic_count _M_refcount;
530 static typename _Atomic_count::pointer
531 _S_add_ref(
typename _Atomic_count::pointer __p)
535 if constexpr (__is_shared_ptr<_Tp>)
536 __p->_M_add_ref_copy();
538 __p->_M_weak_add_ref();
543 constexpr _Sp_atomic() noexcept = default;
546 _Sp_atomic(value_type __r) noexcept
547 : _M_ptr(__r._M_ptr), _M_refcount(
std::
move(__r._M_refcount))
550 ~_Sp_atomic() =
default;
552 _Sp_atomic(
const _Sp_atomic&) =
delete;
553 void operator=(
const _Sp_atomic&) =
delete;
558 __glibcxx_assert(__o != memory_order_release
559 && __o != memory_order_acq_rel);
562 if (__o != memory_order_seq_cst)
563 __o = memory_order_acquire;
566 auto __pi = _M_refcount.lock(__o);
567 __ret._M_ptr = _M_ptr;
568 __ret._M_refcount._M_pi = _S_add_ref(__pi);
569 _M_refcount.unlock(memory_order_relaxed);
576 _M_refcount.lock(memory_order_acquire);
577 std::swap(_M_ptr, __r._M_ptr);
578 _M_refcount._M_swap_unlock(__r._M_refcount, __o);
582 compare_exchange_strong(value_type& __expected, value_type __desired,
585 bool __result =
true;
586 auto __pi = _M_refcount.lock(memory_order_acquire);
587 if (_M_ptr == __expected._M_ptr
588 && __pi == __expected._M_refcount._M_pi)
590 _M_ptr = __desired._M_ptr;
591 _M_refcount._M_swap_unlock(__desired._M_refcount, __o);
596 __expected._M_ptr = _M_ptr;
597 __expected._M_refcount._M_pi = _S_add_ref(__pi);
598 _M_refcount.unlock(__o2);
604#if __glibcxx_atomic_wait
608 auto __pi = _M_refcount.lock(memory_order_acquire);
609 if (_M_ptr == __old._M_ptr && __pi == __old._M_refcount._M_pi)
610 _M_refcount._M_wait_unlock(__o);
612 _M_refcount.unlock(memory_order_relaxed);
616 notify_one() noexcept
618 _M_refcount.notify_one();
622 notify_all() noexcept
624 _M_refcount.notify_all();
629 template<
typename _Tp>
630 struct atomic<shared_ptr<_Tp>>
633 using value_type = shared_ptr<_Tp>;
635 static constexpr bool is_always_lock_free =
false;
638 is_lock_free() const noexcept
641 constexpr atomic() noexcept = default;
645 constexpr atomic(nullptr_t) noexcept : atomic() { }
647 atomic(shared_ptr<_Tp> __r) noexcept
651 atomic(
const atomic&) =
delete;
652 void operator=(
const atomic&) =
delete;
655 load(
memory_order __o = memory_order_seq_cst)
const noexcept
656 {
return _M_impl.load(__o); }
658 operator shared_ptr<_Tp>() const noexcept
659 {
return _M_impl.load(memory_order_seq_cst); }
662 store(shared_ptr<_Tp> __desired,
664 { _M_impl.swap(__desired, __o); }
667 operator=(shared_ptr<_Tp> __desired)
noexcept
668 { _M_impl.swap(__desired, memory_order_seq_cst); }
673 operator=(nullptr_t)
noexcept
677 exchange(shared_ptr<_Tp> __desired,
680 _M_impl.swap(__desired, __o);
685 compare_exchange_strong(shared_ptr<_Tp>& __expected,
686 shared_ptr<_Tp> __desired,
689 return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
693 compare_exchange_strong(value_type& __expected, value_type __desired,
699 case memory_order_acq_rel:
700 __o2 = memory_order_acquire;
702 case memory_order_release:
703 __o2 = memory_order_relaxed;
708 return compare_exchange_strong(__expected,
std::move(__desired),
713 compare_exchange_weak(value_type& __expected, value_type __desired,
716 return compare_exchange_strong(__expected,
std::move(__desired),
721 compare_exchange_weak(value_type& __expected, value_type __desired,
724 return compare_exchange_strong(__expected,
std::move(__desired), __o);
727#if __glibcxx_atomic_wait
729 wait(value_type __old,
736 notify_one() noexcept
738 _M_impl.notify_one();
742 notify_all() noexcept
744 _M_impl.notify_all();
749 _Sp_atomic<shared_ptr<_Tp>> _M_impl;
752 template<
typename _Tp>
753 struct atomic<weak_ptr<_Tp>>
756 using value_type = weak_ptr<_Tp>;
758 static constexpr bool is_always_lock_free =
false;
761 is_lock_free() const noexcept
764 constexpr atomic() noexcept = default;
766 atomic(weak_ptr<_Tp> __r) noexcept
770 atomic(
const atomic&) =
delete;
771 void operator=(
const atomic&) =
delete;
774 load(
memory_order __o = memory_order_seq_cst)
const noexcept
775 {
return _M_impl.load(__o); }
777 operator weak_ptr<_Tp>() const noexcept
778 {
return _M_impl.load(memory_order_seq_cst); }
781 store(weak_ptr<_Tp> __desired,
783 { _M_impl.swap(__desired, __o); }
786 operator=(weak_ptr<_Tp> __desired)
noexcept
787 { _M_impl.swap(__desired, memory_order_seq_cst); }
790 exchange(weak_ptr<_Tp> __desired,
793 _M_impl.swap(__desired, __o);
798 compare_exchange_strong(weak_ptr<_Tp>& __expected,
799 weak_ptr<_Tp> __desired,
802 return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
806 compare_exchange_strong(value_type& __expected, value_type __desired,
812 case memory_order_acq_rel:
813 __o2 = memory_order_acquire;
815 case memory_order_release:
816 __o2 = memory_order_relaxed;
821 return compare_exchange_strong(__expected,
std::move(__desired),
826 compare_exchange_weak(value_type& __expected, value_type __desired,
829 return compare_exchange_strong(__expected,
std::move(__desired),
834 compare_exchange_weak(value_type& __expected, value_type __desired,
837 return compare_exchange_strong(__expected,
std::move(__desired), __o);
840#if __glibcxx_atomic_wait
842 wait(value_type __old,
849 notify_one() noexcept
851 _M_impl.notify_one();
855 notify_all() noexcept
857 _M_impl.notify_all();
862 _Sp_atomic<weak_ptr<_Tp>> _M_impl;
867_GLIBCXX_END_NAMESPACE_VERSION
// [extraction residue: Doxygen hover-tooltip summaries appended after
//  the code; preserved here as comments, not part of the header text]
// constexpr std::remove_reference<_Tp>::type&& move(_Tp&& __t) noexcept
//   -- Convert a value to an rvalue.
// memory_order -- Enumeration for memory_order.
// void lock(_L1& __l1, _L2& __l2, _L3&... __l3) -- Generic lock.
// std -- toplevel namespace for ISO C++ entities.
// shared_ptr -- A smart pointer with reference-counted copy semantics.