3 // Copyright (C) 2003-2020 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
/** @file include/mutex
 *  This is a Standard C++ Library header.
 */
#ifndef _GLIBCXX_MUTEX
#define _GLIBCXX_MUTEX 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#endif

#include <tuple>
#include <chrono>
#include <exception>
#include <type_traits>
#include <system_error>
#include <bits/std_mutex.h>
#include <bits/unique_lock.h>
#if ! _GTHREAD_USE_MUTEX_TIMEDLOCK
# include <condition_variable>
# include <thread>
#endif

#ifndef _GLIBCXX_HAVE_TLS
# include <bits/std_function.h>
#endif
53 namespace std _GLIBCXX_VISIBILITY(default)
55 _GLIBCXX_BEGIN_NAMESPACE_VERSION
62 #ifdef _GLIBCXX_HAS_GTHREADS
64 // Common base class for std::recursive_mutex and std::recursive_timed_mutex
65 class __recursive_mutex_base
68 typedef __gthread_recursive_mutex_t __native_type;
70 __recursive_mutex_base(const __recursive_mutex_base&) = delete;
71 __recursive_mutex_base& operator=(const __recursive_mutex_base&) = delete;
73 #ifdef __GTHREAD_RECURSIVE_MUTEX_INIT
74 __native_type _M_mutex = __GTHREAD_RECURSIVE_MUTEX_INIT;
76 __recursive_mutex_base() = default;
78 __native_type _M_mutex;
80 __recursive_mutex_base()
82 // XXX EAGAIN, ENOMEM, EPERM, EBUSY(may), EINVAL(may)
83 __GTHREAD_RECURSIVE_MUTEX_INIT_FUNCTION(&_M_mutex);
86 ~__recursive_mutex_base()
87 { __gthread_recursive_mutex_destroy(&_M_mutex); }
91 /// The standard recursive mutex type.
92 class recursive_mutex : private __recursive_mutex_base
95 typedef __native_type* native_handle_type;
97 recursive_mutex() = default;
98 ~recursive_mutex() = default;
100 recursive_mutex(const recursive_mutex&) = delete;
101 recursive_mutex& operator=(const recursive_mutex&) = delete;
106 int __e = __gthread_recursive_mutex_lock(&_M_mutex);
108 // EINVAL, EAGAIN, EBUSY, EINVAL, EDEADLK(may)
110 __throw_system_error(__e);
116 // XXX EINVAL, EAGAIN, EBUSY
117 return !__gthread_recursive_mutex_trylock(&_M_mutex);
123 // XXX EINVAL, EAGAIN, EBUSY
124 __gthread_recursive_mutex_unlock(&_M_mutex);
128 native_handle() noexcept
129 { return &_M_mutex; }
132 #if _GTHREAD_USE_MUTEX_TIMEDLOCK
133 template<typename _Derived>
134 class __timed_mutex_impl
137 template<typename _Rep, typename _Period>
139 _M_try_lock_for(const chrono::duration<_Rep, _Period>& __rtime)
141 #if _GLIBCXX_USE_PTHREAD_MUTEX_CLOCKLOCK
142 using __clock = chrono::steady_clock;
144 using __clock = chrono::system_clock;
147 auto __rt = chrono::duration_cast<__clock::duration>(__rtime);
148 if (ratio_greater<__clock::period, _Period>())
150 return _M_try_lock_until(__clock::now() + __rt);
153 template<typename _Duration>
155 _M_try_lock_until(const chrono::time_point<chrono::system_clock,
158 auto __s = chrono::time_point_cast<chrono::seconds>(__atime);
159 auto __ns = chrono::duration_cast<chrono::nanoseconds>(__atime - __s);
161 __gthread_time_t __ts = {
162 static_cast<std::time_t>(__s.time_since_epoch().count()),
163 static_cast<long>(__ns.count())
166 return static_cast<_Derived*>(this)->_M_timedlock(__ts);
169 #ifdef _GLIBCXX_USE_PTHREAD_MUTEX_CLOCKLOCK
170 template<typename _Duration>
172 _M_try_lock_until(const chrono::time_point<chrono::steady_clock,
175 auto __s = chrono::time_point_cast<chrono::seconds>(__atime);
176 auto __ns = chrono::duration_cast<chrono::nanoseconds>(__atime - __s);
178 __gthread_time_t __ts = {
179 static_cast<std::time_t>(__s.time_since_epoch().count()),
180 static_cast<long>(__ns.count())
183 return static_cast<_Derived*>(this)->_M_clocklock(CLOCK_MONOTONIC,
188 template<typename _Clock, typename _Duration>
190 _M_try_lock_until(const chrono::time_point<_Clock, _Duration>& __atime)
192 // The user-supplied clock may not tick at the same rate as
193 // steady_clock, so we must loop in order to guarantee that
194 // the timeout has expired before returning false.
195 auto __now = _Clock::now();
197 auto __rtime = __atime - __now;
198 if (_M_try_lock_for(__rtime))
200 __now = _Clock::now();
201 } while (__atime > __now);
206 /// The standard timed mutex type.
208 : private __mutex_base, public __timed_mutex_impl<timed_mutex>
211 typedef __native_type* native_handle_type;
213 timed_mutex() = default;
214 ~timed_mutex() = default;
216 timed_mutex(const timed_mutex&) = delete;
217 timed_mutex& operator=(const timed_mutex&) = delete;
222 int __e = __gthread_mutex_lock(&_M_mutex);
224 // EINVAL, EAGAIN, EBUSY, EINVAL, EDEADLK(may)
226 __throw_system_error(__e);
232 // XXX EINVAL, EAGAIN, EBUSY
233 return !__gthread_mutex_trylock(&_M_mutex);
236 template <class _Rep, class _Period>
238 try_lock_for(const chrono::duration<_Rep, _Period>& __rtime)
239 { return _M_try_lock_for(__rtime); }
241 template <class _Clock, class _Duration>
243 try_lock_until(const chrono::time_point<_Clock, _Duration>& __atime)
244 { return _M_try_lock_until(__atime); }
249 // XXX EINVAL, EAGAIN, EBUSY
250 __gthread_mutex_unlock(&_M_mutex);
254 native_handle() noexcept
255 { return &_M_mutex; }
258 friend class __timed_mutex_impl<timed_mutex>;
261 _M_timedlock(const __gthread_time_t& __ts)
262 { return !__gthread_mutex_timedlock(&_M_mutex, &__ts); }
264 #if _GLIBCXX_USE_PTHREAD_MUTEX_CLOCKLOCK
266 _M_clocklock(clockid_t clockid, const __gthread_time_t& __ts)
267 { return !pthread_mutex_clocklock(&_M_mutex, clockid, &__ts); }
271 /// recursive_timed_mutex
272 class recursive_timed_mutex
273 : private __recursive_mutex_base,
274 public __timed_mutex_impl<recursive_timed_mutex>
277 typedef __native_type* native_handle_type;
279 recursive_timed_mutex() = default;
280 ~recursive_timed_mutex() = default;
282 recursive_timed_mutex(const recursive_timed_mutex&) = delete;
283 recursive_timed_mutex& operator=(const recursive_timed_mutex&) = delete;
288 int __e = __gthread_recursive_mutex_lock(&_M_mutex);
290 // EINVAL, EAGAIN, EBUSY, EINVAL, EDEADLK(may)
292 __throw_system_error(__e);
298 // XXX EINVAL, EAGAIN, EBUSY
299 return !__gthread_recursive_mutex_trylock(&_M_mutex);
302 template <class _Rep, class _Period>
304 try_lock_for(const chrono::duration<_Rep, _Period>& __rtime)
305 { return _M_try_lock_for(__rtime); }
307 template <class _Clock, class _Duration>
309 try_lock_until(const chrono::time_point<_Clock, _Duration>& __atime)
310 { return _M_try_lock_until(__atime); }
315 // XXX EINVAL, EAGAIN, EBUSY
316 __gthread_recursive_mutex_unlock(&_M_mutex);
320 native_handle() noexcept
321 { return &_M_mutex; }
324 friend class __timed_mutex_impl<recursive_timed_mutex>;
327 _M_timedlock(const __gthread_time_t& __ts)
328 { return !__gthread_recursive_mutex_timedlock(&_M_mutex, &__ts); }
330 #ifdef _GLIBCXX_USE_PTHREAD_MUTEX_CLOCKLOCK
332 _M_clocklock(clockid_t clockid, const __gthread_time_t& __ts)
333 { return !pthread_mutex_clocklock(&_M_mutex, clockid, &__ts); }
337 #else // !_GTHREAD_USE_MUTEX_TIMEDLOCK
343 condition_variable _M_cv;
344 bool _M_locked = false;
348 timed_mutex() = default;
349 ~timed_mutex() { __glibcxx_assert( !_M_locked ); }
351 timed_mutex(const timed_mutex&) = delete;
352 timed_mutex& operator=(const timed_mutex&) = delete;
357 unique_lock<mutex> __lk(_M_mut);
358 _M_cv.wait(__lk, [&]{ return !_M_locked; });
365 lock_guard<mutex> __lk(_M_mut);
372 template<typename _Rep, typename _Period>
374 try_lock_for(const chrono::duration<_Rep, _Period>& __rtime)
376 unique_lock<mutex> __lk(_M_mut);
377 if (!_M_cv.wait_for(__lk, __rtime, [&]{ return !_M_locked; }))
383 template<typename _Clock, typename _Duration>
385 try_lock_until(const chrono::time_point<_Clock, _Duration>& __atime)
387 unique_lock<mutex> __lk(_M_mut);
388 if (!_M_cv.wait_until(__lk, __atime, [&]{ return !_M_locked; }))
397 lock_guard<mutex> __lk(_M_mut);
398 __glibcxx_assert( _M_locked );
404 /// recursive_timed_mutex
405 class recursive_timed_mutex
408 condition_variable _M_cv;
410 unsigned _M_count = 0;
412 // Predicate type that tests whether the current thread can lock a mutex.
415 // Returns true if the mutex is unlocked or is locked by _M_caller.
417 operator()() const noexcept
418 { return _M_mx->_M_count == 0 || _M_mx->_M_owner == _M_caller; }
420 const recursive_timed_mutex* _M_mx;
421 thread::id _M_caller;
426 recursive_timed_mutex() = default;
427 ~recursive_timed_mutex() { __glibcxx_assert( _M_count == 0 ); }
429 recursive_timed_mutex(const recursive_timed_mutex&) = delete;
430 recursive_timed_mutex& operator=(const recursive_timed_mutex&) = delete;
435 auto __id = this_thread::get_id();
436 _Can_lock __can_lock{this, __id};
437 unique_lock<mutex> __lk(_M_mut);
438 _M_cv.wait(__lk, __can_lock);
440 __throw_system_error(EAGAIN); // [thread.timedmutex.recursive]/3
448 auto __id = this_thread::get_id();
449 _Can_lock __can_lock{this, __id};
450 lock_guard<mutex> __lk(_M_mut);
460 template<typename _Rep, typename _Period>
462 try_lock_for(const chrono::duration<_Rep, _Period>& __rtime)
464 auto __id = this_thread::get_id();
465 _Can_lock __can_lock{this, __id};
466 unique_lock<mutex> __lk(_M_mut);
467 if (!_M_cv.wait_for(__lk, __rtime, __can_lock))
476 template<typename _Clock, typename _Duration>
478 try_lock_until(const chrono::time_point<_Clock, _Duration>& __atime)
480 auto __id = this_thread::get_id();
481 _Can_lock __can_lock{this, __id};
482 unique_lock<mutex> __lk(_M_mut);
483 if (!_M_cv.wait_until(__lk, __atime, __can_lock))
495 lock_guard<mutex> __lk(_M_mut);
496 __glibcxx_assert( _M_owner == this_thread::get_id() );
497 __glibcxx_assert( _M_count > 0 );
507 #endif // _GLIBCXX_HAS_GTHREADS
509 /// @cond undocumented
510 template<typename _Lock>
511 inline unique_lock<_Lock>
512 __try_to_lock(_Lock& __l)
513 { return unique_lock<_Lock>{__l, try_to_lock}; }
515 template<int _Idx, bool _Continue = true>
516 struct __try_lock_impl
518 template<typename... _Lock>
520 __do_try_lock(tuple<_Lock&...>& __locks, int& __idx)
523 auto __lock = std::__try_to_lock(std::get<_Idx>(__locks));
524 if (__lock.owns_lock())
526 constexpr bool __cont = _Idx + 2 < sizeof...(_Lock);
527 using __try_locker = __try_lock_impl<_Idx + 1, __cont>;
528 __try_locker::__do_try_lock(__locks, __idx);
536 struct __try_lock_impl<_Idx, false>
538 template<typename... _Lock>
540 __do_try_lock(tuple<_Lock&...>& __locks, int& __idx)
543 auto __lock = std::__try_to_lock(std::get<_Idx>(__locks));
544 if (__lock.owns_lock())
553 /** @brief Generic try_lock.
554 * @param __l1 Meets Lockable requirements (try_lock() may throw).
555 * @param __l2 Meets Lockable requirements (try_lock() may throw).
556 * @param __l3 Meets Lockable requirements (try_lock() may throw).
557 * @return Returns -1 if all try_lock() calls return true. Otherwise returns
558 * a 0-based index corresponding to the argument that returned false.
559 * @post Either all arguments are locked, or none will be.
561 * Sequentially calls try_lock() on each argument.
563 template<typename _Lock1, typename _Lock2, typename... _Lock3>
565 try_lock(_Lock1& __l1, _Lock2& __l2, _Lock3&... __l3)
568 auto __locks = std::tie(__l1, __l2, __l3...);
569 __try_lock_impl<0>::__do_try_lock(__locks, __idx);
573 /** @brief Generic lock.
574 * @param __l1 Meets Lockable requirements (try_lock() may throw).
575 * @param __l2 Meets Lockable requirements (try_lock() may throw).
576 * @param __l3 Meets Lockable requirements (try_lock() may throw).
577 * @throw An exception thrown by an argument's lock() or try_lock() member.
578 * @post All arguments are locked.
580 * All arguments are locked via a sequence of calls to lock(), try_lock()
581 * and unlock(). If the call exits via an exception any locks that were
582 * obtained will be released.
584 template<typename _L1, typename _L2, typename... _L3>
586 lock(_L1& __l1, _L2& __l2, _L3&... __l3)
590 using __try_locker = __try_lock_impl<0, sizeof...(_L3) != 0>;
591 unique_lock<_L1> __first(__l1);
593 auto __locks = std::tie(__l2, __l3...);
594 __try_locker::__do_try_lock(__locks, __idx);
603 #if __cplusplus >= 201703L
604 #define __cpp_lib_scoped_lock 201703
605 /** @brief A scoped lock type for multiple lockable objects.
607 * A scoped_lock controls mutex ownership within a scope, releasing
608 * ownership in the destructor.
610 template<typename... _MutexTypes>
614 explicit scoped_lock(_MutexTypes&... __m) : _M_devices(std::tie(__m...))
615 { std::lock(__m...); }
617 explicit scoped_lock(adopt_lock_t, _MutexTypes&... __m) noexcept
618 : _M_devices(std::tie(__m...))
619 { } // calling thread owns mutex
622 { std::apply([](auto&... __m) { (__m.unlock(), ...); }, _M_devices); }
624 scoped_lock(const scoped_lock&) = delete;
625 scoped_lock& operator=(const scoped_lock&) = delete;
628 tuple<_MutexTypes&...> _M_devices;
635 explicit scoped_lock() = default;
636 explicit scoped_lock(adopt_lock_t) noexcept { }
637 ~scoped_lock() = default;
639 scoped_lock(const scoped_lock&) = delete;
640 scoped_lock& operator=(const scoped_lock&) = delete;
643 template<typename _Mutex>
644 class scoped_lock<_Mutex>
647 using mutex_type = _Mutex;
649 explicit scoped_lock(mutex_type& __m) : _M_device(__m)
650 { _M_device.lock(); }
652 explicit scoped_lock(adopt_lock_t, mutex_type& __m) noexcept
654 { } // calling thread owns mutex
657 { _M_device.unlock(); }
659 scoped_lock(const scoped_lock&) = delete;
660 scoped_lock& operator=(const scoped_lock&) = delete;
663 mutex_type& _M_device;
667 #ifdef _GLIBCXX_HAS_GTHREADS
668 /// Flag type used by std::call_once
672 typedef __gthread_once_t __native_type;
673 __native_type _M_once = __GTHREAD_ONCE_INIT;
677 constexpr once_flag() noexcept = default;
679 /// Deleted copy constructor
680 once_flag(const once_flag&) = delete;
681 /// Deleted assignment operator
682 once_flag& operator=(const once_flag&) = delete;
684 template<typename _Callable, typename... _Args>
686 call_once(once_flag& __once, _Callable&& __f, _Args&&... __args);
689 /// @cond undocumented
690 #ifdef _GLIBCXX_HAVE_TLS
691 extern __thread void* __once_callable;
692 extern __thread void (*__once_call)();
694 extern function<void()> __once_functor;
697 __set_once_functor_lock_ptr(unique_lock<mutex>*);
703 extern "C" void __once_proxy(void);
706 /// Invoke a callable and synchronize with other calls using the same flag
707 template<typename _Callable, typename... _Args>
709 call_once(once_flag& __once, _Callable&& __f, _Args&&... __args)
711 // _GLIBCXX_RESOLVE_LIB_DEFECTS
712 // 2442. call_once() shouldn't DECAY_COPY()
713 auto __callable = [&] {
714 std::__invoke(std::forward<_Callable>(__f),
715 std::forward<_Args>(__args)...);
717 #ifdef _GLIBCXX_HAVE_TLS
718 __once_callable = std::__addressof(__callable);
719 __once_call = []{ (*(decltype(__callable)*)__once_callable)(); };
721 unique_lock<mutex> __functor_lock(__get_once_mutex());
722 __once_functor = __callable;
723 __set_once_functor_lock_ptr(&__functor_lock);
726 int __e = __gthread_once(&__once._M_once, &__once_proxy);
728 #ifndef _GLIBCXX_HAVE_TLS
730 __set_once_functor_lock_ptr(0);
733 #ifdef __clang_analyzer__
734 // PR libstdc++/82481
735 __once_callable = nullptr;
736 __once_call = nullptr;
740 __throw_system_error(__e);
742 #endif // _GLIBCXX_HAS_GTHREADS
745 _GLIBCXX_END_NAMESPACE_VERSION
750 #endif // _GLIBCXX_MUTEX