#ifndef _SHARED_PTR_ATOMIC_H
#define _SHARED_PTR_ATOMIC_H 1

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  // Serializes access to one or two shared_ptr objects by locking mutexes
  // chosen from a small pool inside the library; _M_key1/_M_key2 record
  // which pool entries are held.
  struct _Sp_locker
  {
    _Sp_locker(const _Sp_locker&) = delete;
    _Sp_locker& operator=(const _Sp_locker&) = delete;

#ifdef __GTHREADS
    explicit
    _Sp_locker(const void*) noexcept;
    _Sp_locker(const void*, const void*) noexcept;
    ~_Sp_locker();

  private:
    unsigned char _M_key1;
    unsigned char _M_key2;
#else
    explicit _Sp_locker(const void*, const void* = nullptr) { }
#endif
  };
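
  /* The keys above index into a fixed pool of mutexes kept in the compiled
   * library; the constructors pick an entry by hashing the shared_ptr
   * object's address.  A minimal sketch of that hash-a-pointer-to-a-lock-pool
   * technique, assuming a hypothetical pool of 16 mutexes (the real pool and
   * hash live in the library, not in this header):
   *
   * @code
   * #include <cstdint>
   * #include <mutex>
   *
   * static std::mutex __pool[16];
   *
   * // Hash an object's address to one mutex in the pool.
   * static std::mutex&
   * __lock_for(const void* __p)
   * {
   *   auto __h = reinterpret_cast<std::uintptr_t>(__p);
   *   return __pool[(__h >> 4) % 16];
   * }
   * @endcode
   */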
  /// Report whether shared_ptr atomic operations are lock-free.
  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>* __p)
    {
#ifdef __GTHREADS
      return __gthread_active_p() == 0;
#else
      return true;
#endif
    }
  template<typename _Tp>
    inline bool
    atomic_is_lock_free(const shared_ptr<_Tp>* __p)
    { return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }
  /// Atomic load for shared_ptr objects.
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load_explicit(const shared_ptr<_Tp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load(const shared_ptr<_Tp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load_explicit(const __shared_ptr<_Tp, _Lp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }
  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load(const __shared_ptr<_Tp, _Lp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
  /// Atomic store for shared_ptr objects.
  template<typename _Tp>
    inline void
    atomic_store_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
                          memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p is not destroyed while locked
    }
  template<typename _Tp>
    inline void
    atomic_store(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
                          __shared_ptr<_Tp, _Lp> __r,
                          memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p is not destroyed while locked
    }
  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
  /// Atomic exchange for shared_ptr objects.
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
                             memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
                                           memory_order_seq_cst);
    }
  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
                             __shared_ptr<_Tp, _Lp> __r,
                             memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }
  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
                                           memory_order_seq_cst);
    }
  /// Atomic compare-and-swap for shared_ptr objects.
  template<typename _Tp>
    bool
    atomic_compare_exchange_strong_explicit(shared_ptr<_Tp>* __p,
                                            shared_ptr<_Tp>* __v,
                                            shared_ptr<_Tp> __w,
                                            memory_order, memory_order)
    {
      shared_ptr<_Tp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<shared_ptr<_Tp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
        {
          __x = std::move(*__p);
          *__p = std::move(__w);
          return true;
        }
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }
  template<typename _Tp>
    inline bool
    atomic_compare_exchange_strong(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
                                   shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak_explicit(shared_ptr<_Tp>* __p,
                                          shared_ptr<_Tp>* __v,
                                          shared_ptr<_Tp> __w,
                                          memory_order __success,
                                          memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), __success, __failure);
    }
  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
                                 shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  template<typename _Tp, _Lock_policy _Lp>
    bool
    atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
                                            __shared_ptr<_Tp, _Lp>* __v,
                                            __shared_ptr<_Tp, _Lp> __w,
                                            memory_order, memory_order)
    {
      __shared_ptr<_Tp, _Lp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<__shared_ptr<_Tp, _Lp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
        {
          __x = std::move(*__p);
          *__p = std::move(__w);
          return true;
        }
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }
  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
                                   __shared_ptr<_Tp, _Lp>* __v,
                                   __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
                                          __shared_ptr<_Tp, _Lp>* __v,
                                          __shared_ptr<_Tp, _Lp> __w,
                                          memory_order __success,
                                          memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), __success, __failure);
    }
  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
                                 __shared_ptr<_Tp, _Lp>* __v,
                                 __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
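
  /* A usage sketch for the free functions above: the pre-C++20 way to share
   * one shared_ptr variable between threads.  The variable __global and its
   * int payload are illustrative.
   *
   * @code
   * std::shared_ptr<int> __global = std::make_shared<int>(0);
   *
   * // Reader: take a consistent snapshot of the current value.
   * std::shared_ptr<int> __snap = std::atomic_load(&__global);
   *
   * // Writer: publish a replacement value.
   * std::atomic_store(&__global, std::make_shared<int>(42));
   *
   * // Read-modify-write loop built on compare-exchange; on failure __old
   * // is refreshed with the current value and the update is rebuilt.
   * std::shared_ptr<int> __old = std::atomic_load(&__global);
   * std::shared_ptr<int> __next;
   * do
   *   __next = std::make_shared<int>(*__old + 1);
   * while (!std::atomic_compare_exchange_weak(&__global, &__old, __next));
   * @endcode
   */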
#if __cplusplus >= 202002L
# define __cpp_lib_atomic_shared_ptr 201711L

  template<typename _Tp>
    class atomic;

  template<typename _Up>
    static constexpr bool __is_shared_ptr = false;

  template<typename _Up>
    static constexpr bool __is_shared_ptr<shared_ptr<_Up>> = true;
  template<typename _Tp>
    class _Sp_atomic
    {
      using value_type = _Tp;

      friend class atomic<_Tp>;

      // An atomic version of __shared_count<> and __weak_count<>.
      // Stores a _Sp_counted_base* but uses the LSB as a lock.
      struct _Atomic_count
      {
        // Either __shared_count<> or __weak_count<>.
        using __count_type = decltype(_Tp::_M_refcount);

        // _Sp_counted_base<>*
        using pointer = decltype(__count_type::_M_pi);
        // The counted object's alignment must leave the LSB free for the
        // lock bit.
        static_assert(alignof(remove_pointer_t<pointer>) > 1);

        _Atomic_count() : _M_val(0) { }

        explicit
        _Atomic_count(__count_type&& __c) noexcept
        : _M_val(reinterpret_cast<uintptr_t>(__c._M_pi))
        { __c._M_pi = nullptr; }
        ~_Atomic_count()
        {
          auto __val = _M_val.load(memory_order_relaxed);
          __glibcxx_assert(!(__val & _S_lock_bit));
          if (auto __pi = reinterpret_cast<pointer>(__val))
            {
              if constexpr (__is_shared_ptr<_Tp>)
                __pi->_M_release();
              else
                __pi->_M_weak_release();
            }
        }
        _Atomic_count(const _Atomic_count&) = delete;
        _Atomic_count& operator=(const _Atomic_count&) = delete;
        // Precondition: the caller does not already hold the lock.
        // Returns the raw pointer value without the lock bit set.
        pointer
        lock(memory_order __o) const noexcept
        {
          // To acquire the lock we flip the LSB from 0 to 1.
          auto __current = _M_val.load(memory_order_relaxed);
          while (__current & _S_lock_bit)
            {
              // Busy-wait until the lock bit clears.
              __detail::__thread_relax();
              __current = _M_val.load(memory_order_relaxed);
            }

          while (!_M_val.compare_exchange_strong(__current,
                                                 __current | _S_lock_bit,
                                                 __o,
                                                 memory_order_relaxed))
            {
              __detail::__thread_relax();
              __current = __current & ~_S_lock_bit;
            }
          return reinterpret_cast<pointer>(__current);
        }
        // Precondition: the caller holds the lock.
        void
        unlock(memory_order __o) const noexcept
        { _M_val.fetch_sub(1, __o); }
        // Swaps the values of *this and __c, and unlocks *this.
        // Precondition: the caller holds the lock.
        void
        _M_swap_unlock(__count_type& __c, memory_order __o) noexcept
        {
          if (__o != memory_order_seq_cst)
            __o = memory_order_release;
          auto __x = reinterpret_cast<uintptr_t>(__c._M_pi);
          __x = _M_val.exchange(__x, __o); // also clears the lock bit
          __c._M_pi = reinterpret_cast<pointer>(__x & ~_S_lock_bit);
        }
#if __cpp_lib_atomic_wait
        // Precondition: the caller holds the lock.
        void
        _M_wait_unlock(memory_order __o) const noexcept
        {
          auto __v = _M_val.fetch_sub(1, memory_order_relaxed);
          _M_val.wait(__v & ~_S_lock_bit, __o);
        }

        void
        notify_one() noexcept
        { _M_val.notify_one(); }

        void
        notify_all() noexcept
        { _M_val.notify_all(); }
#endif
      private:
        mutable __atomic_base<uintptr_t> _M_val{0};
        static constexpr uintptr_t _S_lock_bit{1};
      };

      typename _Tp::element_type* _M_ptr = nullptr;
      _Atomic_count _M_refcount;
      static typename _Atomic_count::pointer
      _S_add_ref(typename _Atomic_count::pointer __p)
      {
        if (__p)
          {
            if constexpr (__is_shared_ptr<_Tp>)
              __p->_M_add_ref_copy();
            else
              __p->_M_weak_add_ref();
          }
        return __p;
      }
      constexpr _Sp_atomic() noexcept = default;

      _Sp_atomic(value_type __r) noexcept
      : _M_ptr(__r._M_ptr), _M_refcount(std::move(__r._M_refcount))
      { }

      ~_Sp_atomic() = default;

      _Sp_atomic(const _Sp_atomic&) = delete;
      void operator=(const _Sp_atomic&) = delete;
      value_type
      load(memory_order __o) const noexcept
      {
        __glibcxx_assert(__o != memory_order_release
                           && __o != memory_order_acq_rel);
        // Ensure that the correct value of _M_ptr is visible after locking
        // by upgrading relaxed and consume to acquire.
        if (__o != memory_order_seq_cst)
          __o = memory_order_acquire;

        value_type __ret;
        auto __pi = _M_refcount.lock(__o);
        __ret._M_ptr = _M_ptr;
        __ret._M_refcount._M_pi = _S_add_ref(__pi);
        _M_refcount.unlock(memory_order_relaxed);
        return __ret;
      }
      void
      swap(value_type& __r, memory_order __o) noexcept
      {
        _M_refcount.lock(memory_order_acquire);
        std::swap(_M_ptr, __r._M_ptr);
        _M_refcount._M_swap_unlock(__r._M_refcount, __o);
      }
      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
                              memory_order __o, memory_order __o2) noexcept
      {
        bool __result = true;
        auto __pi = _M_refcount.lock(memory_order_acquire);
        if (_M_ptr == __expected._M_ptr
              && __pi == __expected._M_refcount._M_pi)
          {
            _M_ptr = __desired._M_ptr;
            _M_refcount._M_swap_unlock(__desired._M_refcount, __o);
          }
        else
          {
            // The CAS failed: update __expected with the current value.
            __expected._M_ptr = _M_ptr;
            __expected._M_refcount._M_pi = _S_add_ref(__pi);
            _M_refcount.unlock(__o2);
            __result = false;
          }
        return __result;
      }
#if __cpp_lib_atomic_wait
      void
      wait(value_type __old, memory_order __o) const noexcept
      {
        auto __pi = _M_refcount.lock(memory_order_acquire);
        if (_M_ptr == __old._M_ptr && __pi == __old._M_refcount._M_pi)
          _M_refcount._M_wait_unlock(__o);
        else
          _M_refcount.unlock(memory_order_relaxed);
      }

      void
      notify_one() noexcept
      { _M_refcount.notify_one(); }

      void
      notify_all() noexcept
      { _M_refcount.notify_all(); }
#endif
    };
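
  /* _Sp_atomic works because _Atomic_count packs a _Sp_counted_base* and a
   * spin lock into a single uintptr_t: the static_assert above guarantees
   * the pointee's alignment leaves the LSB zero, so that bit can serve as
   * the lock.  A stripped-down sketch of the same lock-in-the-LSB technique
   * on a plain atomic word (names are illustrative):
   *
   * @code
   * #include <atomic>
   * #include <cstdint>
   *
   * std::atomic<std::uintptr_t> __word{0};  // pointer value | lock bit
   *
   * std::uintptr_t
   * __lock_word()
   * {
   *   auto __cur = __word.load(std::memory_order_relaxed) & ~std::uintptr_t(1);
   *   // The CAS expects the unlocked value; on failure __cur holds the
   *   // current (possibly locked) word, so clear the bit and retry.
   *   while (!__word.compare_exchange_strong(__cur, __cur | 1,
   *                                          std::memory_order_acquire,
   *                                          std::memory_order_relaxed))
   *     __cur &= ~std::uintptr_t(1);
   *   return __cur;                          // the pointer bits, unlocked
   * }
   *
   * void
   * __unlock_word()
   * { __word.fetch_sub(1, std::memory_order_release); }
   * @endcode
   */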
  template<typename _Tp>
    class atomic<shared_ptr<_Tp>>
    {
    public:
      using value_type = shared_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;
      atomic(shared_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;
      shared_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator shared_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }
      void
      store(shared_ptr<_Tp> __desired,
            memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(shared_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }
      shared_ptr<_Tp>
      exchange(shared_ptr<_Tp> __desired,
               memory_order __o = memory_order_seq_cst) noexcept
      {
        _M_impl.swap(__desired, __o);
        return __desired;
      }
      bool
      compare_exchange_strong(shared_ptr<_Tp>& __expected,
                              shared_ptr<_Tp> __desired,
                              memory_order __o, memory_order __o2) noexcept
      {
        return _M_impl.compare_exchange_strong(__expected, __desired,
                                               __o, __o2);
      }
      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
                              memory_order __o = memory_order_seq_cst) noexcept
      {
        // Map the success order to the corresponding failure order.
        memory_order __o2;
        switch (__o)
        {
        case memory_order_acq_rel:
          __o2 = memory_order_acquire;
          break;
        case memory_order_release:
          __o2 = memory_order_relaxed;
          break;
        default:
          __o2 = __o;
        }
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }
      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o, memory_order __o2) noexcept
      {
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }
      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__expected, std::move(__desired), __o); }
#if __cpp_lib_atomic_wait
      void
      wait(value_type __old,
           memory_order __o = memory_order_seq_cst) const noexcept
      { _M_impl.wait(std::move(__old), __o); }

      void
      notify_one() noexcept
      { _M_impl.notify_one(); }

      void
      notify_all() noexcept
      { _M_impl.notify_all(); }
#endif
    private:
      _Sp_atomic<shared_ptr<_Tp>> _M_impl;
    };
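
  /* A usage sketch for the specialization above, assuming a hypothetical
   * Config type: in C++20, atomic<shared_ptr<T>> replaces the free-function
   * API, and the wrapped pointer cannot be bypassed with a plain non-atomic
   * access.
   *
   * @code
   * struct Config { int verbosity = 0; };
   *
   * std::atomic<std::shared_ptr<Config>> __cfg{std::make_shared<Config>()};
   *
   * std::shared_ptr<Config> __cur = __cfg.load();   // snapshot
   * __cfg.store(std::make_shared<Config>());        // wholesale replace
   *
   * // Copy-on-write update loop: rebuild from the refreshed value on
   * // each failed compare-exchange.
   * auto __expected = __cfg.load();
   * auto __next = std::make_shared<Config>(*__expected);
   * __next->verbosity = 2;
   * while (!__cfg.compare_exchange_weak(__expected, __next))
   *   {
   *     __next = std::make_shared<Config>(*__expected);
   *     __next->verbosity = 2;
   *   }
   * @endcode
   */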
  template<typename _Tp>
    class atomic<weak_ptr<_Tp>>
    {
    public:
      using value_type = weak_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;
      atomic(weak_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;
      weak_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator weak_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }
      void
      store(weak_ptr<_Tp> __desired,
            memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(weak_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }
      weak_ptr<_Tp>
      exchange(weak_ptr<_Tp> __desired,
               memory_order __o = memory_order_seq_cst) noexcept
      {
        _M_impl.swap(__desired, __o);
        return __desired;
      }
      bool
      compare_exchange_strong(weak_ptr<_Tp>& __expected,
                              weak_ptr<_Tp> __desired,
                              memory_order __o, memory_order __o2) noexcept
      {
        return _M_impl.compare_exchange_strong(__expected, __desired,
                                               __o, __o2);
      }
      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
                              memory_order __o = memory_order_seq_cst) noexcept
      {
        // Map the success order to the corresponding failure order.
        memory_order __o2;
        switch (__o)
        {
        case memory_order_acq_rel:
          __o2 = memory_order_acquire;
          break;
        case memory_order_release:
          __o2 = memory_order_relaxed;
          break;
        default:
          __o2 = __o;
        }
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }
      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o, memory_order __o2) noexcept
      {
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }
      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__expected, std::move(__desired), __o); }
#if __cpp_lib_atomic_wait
      void
      wait(value_type __old,
           memory_order __o = memory_order_seq_cst) const noexcept
      { _M_impl.wait(std::move(__old), __o); }

      void
      notify_one() noexcept
      { _M_impl.notify_one(); }

      void
      notify_all() noexcept
      { _M_impl.notify_all(); }
#endif
    private:
      _Sp_atomic<weak_ptr<_Tp>> _M_impl;
    };
#endif // C++20
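
  /* A usage sketch for atomic<weak_ptr<T>>, assuming a hypothetical Widget
   * type: a shared cache slot that never extends its entry's lifetime,
   * because the slot itself holds only a weak reference.
   *
   * @code
   * struct Widget { };
   *
   * std::atomic<std::weak_ptr<Widget>> __slot;
   *
   * std::shared_ptr<Widget>
   * __get_widget()
   * {
   *   if (auto __sp = __slot.load().lock())  // entry still alive?
   *     return __sp;
   *   auto __sp = std::make_shared<Widget>();
   *   __slot.store(__sp);                    // publish; slot stays weak
   *   return __sp;
   * }
   * @endcode
   */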
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _SHARED_PTR_ATOMIC_H