3 // Copyright (C) 2008-2020 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
25 /** @file include/atomic
26 * This is a Standard C++ Library header.
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
35 #pragma GCC system_header
37 #if __cplusplus < 201103L
38 # include <bits/c++0x_warning.h>
41 #include <bits/atomic_base.h>
43 namespace std _GLIBCXX_VISIBILITY(default)
45 _GLIBCXX_BEGIN_NAMESPACE_VERSION
52 #if __cplusplus >= 201703L
53 # define __cpp_lib_atomic_is_always_lock_free 201603
56 template<typename _Tp>
60 // NB: No operators or fetch-operations for this type.
64 using value_type = bool;
67 __atomic_base<bool> _M_base;
70 atomic() noexcept = default;
71 ~atomic() noexcept = default;
72 atomic(const atomic&) = delete;
73 atomic& operator=(const atomic&) = delete;
74 atomic& operator=(const atomic&) volatile = delete;
76 constexpr atomic(bool __i) noexcept : _M_base(__i) { }
79 operator=(bool __i) noexcept
80 { return _M_base.operator=(__i); }
83 operator=(bool __i) volatile noexcept
84 { return _M_base.operator=(__i); }
86 operator bool() const noexcept
87 { return _M_base.load(); }
89 operator bool() const volatile noexcept
90 { return _M_base.load(); }
93 is_lock_free() const noexcept { return _M_base.is_lock_free(); }
96 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
98 #if __cplusplus >= 201703L
99 static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
103 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
104 { _M_base.store(__i, __m); }
107 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
108 { _M_base.store(__i, __m); }
111 load(memory_order __m = memory_order_seq_cst) const noexcept
112 { return _M_base.load(__m); }
115 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
116 { return _M_base.load(__m); }
119 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
120 { return _M_base.exchange(__i, __m); }
124 memory_order __m = memory_order_seq_cst) volatile noexcept
125 { return _M_base.exchange(__i, __m); }
128 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
129 memory_order __m2) noexcept
130 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
133 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
134 memory_order __m2) volatile noexcept
135 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
138 compare_exchange_weak(bool& __i1, bool __i2,
139 memory_order __m = memory_order_seq_cst) noexcept
140 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
143 compare_exchange_weak(bool& __i1, bool __i2,
144 memory_order __m = memory_order_seq_cst) volatile noexcept
145 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
148 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
149 memory_order __m2) noexcept
150 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
153 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
154 memory_order __m2) volatile noexcept
155 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
158 compare_exchange_strong(bool& __i1, bool __i2,
159 memory_order __m = memory_order_seq_cst) noexcept
160 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
163 compare_exchange_strong(bool& __i1, bool __i2,
164 memory_order __m = memory_order_seq_cst) volatile noexcept
165 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
168 #if __cplusplus <= 201703L
169 # define _GLIBCXX20_INIT(I)
171 # define _GLIBCXX20_INIT(I) = I
175 * @brief Generic atomic type, primary class template.
177 * @tparam _Tp Type to be made atomic, must be trivially copyable.
179 template<typename _Tp>
182 using value_type = _Tp;
185 // Align 1/2/4/8/16-byte types to at least their size.
186 static constexpr int _S_min_alignment
187 = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
190 static constexpr int _S_alignment
191 = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
193 alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());
195 static_assert(__is_trivially_copyable(_Tp),
196 "std::atomic requires a trivially copyable type");
198 static_assert(sizeof(_Tp) > 0,
199 "Incomplete or zero-sized types are not supported");
203 ~atomic() noexcept = default;
204 atomic(const atomic&) = delete;
205 atomic& operator=(const atomic&) = delete;
206 atomic& operator=(const atomic&) volatile = delete;
208 constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
210 operator _Tp() const noexcept
213 operator _Tp() const volatile noexcept
217 operator=(_Tp __i) noexcept
218 { store(__i); return __i; }
221 operator=(_Tp __i) volatile noexcept
222 { store(__i); return __i; }
225 is_lock_free() const noexcept
227 // Produce a fake, minimally aligned pointer.
228 return __atomic_is_lock_free(sizeof(_M_i),
229 reinterpret_cast<void *>(-_S_alignment));
233 is_lock_free() const volatile noexcept
235 // Produce a fake, minimally aligned pointer.
236 return __atomic_is_lock_free(sizeof(_M_i),
237 reinterpret_cast<void *>(-_S_alignment));
240 #if __cplusplus >= 201703L
241 static constexpr bool is_always_lock_free
242 = __atomic_always_lock_free(sizeof(_M_i), 0);
246 store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
247 { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
250 store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
251 { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
254 load(memory_order __m = memory_order_seq_cst) const noexcept
256 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
257 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
258 __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
263 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
265 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
266 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
267 __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
272 exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
274 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
275 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
276 __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
283 memory_order __m = memory_order_seq_cst) volatile noexcept
285 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
286 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
287 __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
293 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
294 memory_order __f) noexcept
296 return __atomic_compare_exchange(std::__addressof(_M_i),
297 std::__addressof(__e),
298 std::__addressof(__i),
299 true, int(__s), int(__f));
303 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
304 memory_order __f) volatile noexcept
306 return __atomic_compare_exchange(std::__addressof(_M_i),
307 std::__addressof(__e),
308 std::__addressof(__i),
309 true, int(__s), int(__f));
313 compare_exchange_weak(_Tp& __e, _Tp __i,
314 memory_order __m = memory_order_seq_cst) noexcept
315 { return compare_exchange_weak(__e, __i, __m,
316 __cmpexch_failure_order(__m)); }
319 compare_exchange_weak(_Tp& __e, _Tp __i,
320 memory_order __m = memory_order_seq_cst) volatile noexcept
321 { return compare_exchange_weak(__e, __i, __m,
322 __cmpexch_failure_order(__m)); }
325 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
326 memory_order __f) noexcept
328 return __atomic_compare_exchange(std::__addressof(_M_i),
329 std::__addressof(__e),
330 std::__addressof(__i),
331 false, int(__s), int(__f));
335 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
336 memory_order __f) volatile noexcept
338 return __atomic_compare_exchange(std::__addressof(_M_i),
339 std::__addressof(__e),
340 std::__addressof(__i),
341 false, int(__s), int(__f));
345 compare_exchange_strong(_Tp& __e, _Tp __i,
346 memory_order __m = memory_order_seq_cst) noexcept
347 { return compare_exchange_strong(__e, __i, __m,
348 __cmpexch_failure_order(__m)); }
351 compare_exchange_strong(_Tp& __e, _Tp __i,
352 memory_order __m = memory_order_seq_cst) volatile noexcept
353 { return compare_exchange_strong(__e, __i, __m,
354 __cmpexch_failure_order(__m)); }
356 #undef _GLIBCXX20_INIT
358 /// Partial specialization for pointer types.
359 template<typename _Tp>
362 using value_type = _Tp*;
363 using difference_type = ptrdiff_t;
365 typedef _Tp* __pointer_type;
366 typedef __atomic_base<_Tp*> __base_type;
369 atomic() noexcept = default;
370 ~atomic() noexcept = default;
371 atomic(const atomic&) = delete;
372 atomic& operator=(const atomic&) = delete;
373 atomic& operator=(const atomic&) volatile = delete;
375 constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
377 operator __pointer_type() const noexcept
378 { return __pointer_type(_M_b); }
380 operator __pointer_type() const volatile noexcept
381 { return __pointer_type(_M_b); }
384 operator=(__pointer_type __p) noexcept
385 { return _M_b.operator=(__p); }
388 operator=(__pointer_type __p) volatile noexcept
389 { return _M_b.operator=(__p); }
392 operator++(int) noexcept
394 #if __cplusplus >= 201703L
395 static_assert( is_object<_Tp>::value, "pointer to object type" );
401 operator++(int) volatile noexcept
403 #if __cplusplus >= 201703L
404 static_assert( is_object<_Tp>::value, "pointer to object type" );
410 operator--(int) noexcept
412 #if __cplusplus >= 201703L
413 static_assert( is_object<_Tp>::value, "pointer to object type" );
419 operator--(int) volatile noexcept
421 #if __cplusplus >= 201703L
422 static_assert( is_object<_Tp>::value, "pointer to object type" );
428 operator++() noexcept
430 #if __cplusplus >= 201703L
431 static_assert( is_object<_Tp>::value, "pointer to object type" );
437 operator++() volatile noexcept
439 #if __cplusplus >= 201703L
440 static_assert( is_object<_Tp>::value, "pointer to object type" );
446 operator--() noexcept
448 #if __cplusplus >= 201703L
449 static_assert( is_object<_Tp>::value, "pointer to object type" );
455 operator--() volatile noexcept
457 #if __cplusplus >= 201703L
458 static_assert( is_object<_Tp>::value, "pointer to object type" );
464 operator+=(ptrdiff_t __d) noexcept
466 #if __cplusplus >= 201703L
467 static_assert( is_object<_Tp>::value, "pointer to object type" );
469 return _M_b.operator+=(__d);
473 operator+=(ptrdiff_t __d) volatile noexcept
475 #if __cplusplus >= 201703L
476 static_assert( is_object<_Tp>::value, "pointer to object type" );
478 return _M_b.operator+=(__d);
482 operator-=(ptrdiff_t __d) noexcept
484 #if __cplusplus >= 201703L
485 static_assert( is_object<_Tp>::value, "pointer to object type" );
487 return _M_b.operator-=(__d);
491 operator-=(ptrdiff_t __d) volatile noexcept
493 #if __cplusplus >= 201703L
494 static_assert( is_object<_Tp>::value, "pointer to object type" );
496 return _M_b.operator-=(__d);
500 is_lock_free() const noexcept
501 { return _M_b.is_lock_free(); }
504 is_lock_free() const volatile noexcept
505 { return _M_b.is_lock_free(); }
507 #if __cplusplus >= 201703L
508 static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
512 store(__pointer_type __p,
513 memory_order __m = memory_order_seq_cst) noexcept
514 { return _M_b.store(__p, __m); }
517 store(__pointer_type __p,
518 memory_order __m = memory_order_seq_cst) volatile noexcept
519 { return _M_b.store(__p, __m); }
522 load(memory_order __m = memory_order_seq_cst) const noexcept
523 { return _M_b.load(__m); }
526 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
527 { return _M_b.load(__m); }
530 exchange(__pointer_type __p,
531 memory_order __m = memory_order_seq_cst) noexcept
532 { return _M_b.exchange(__p, __m); }
535 exchange(__pointer_type __p,
536 memory_order __m = memory_order_seq_cst) volatile noexcept
537 { return _M_b.exchange(__p, __m); }
540 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
541 memory_order __m1, memory_order __m2) noexcept
542 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
545 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
547 memory_order __m2) volatile noexcept
548 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
551 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
552 memory_order __m = memory_order_seq_cst) noexcept
554 return compare_exchange_weak(__p1, __p2, __m,
555 __cmpexch_failure_order(__m));
559 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
560 memory_order __m = memory_order_seq_cst) volatile noexcept
562 return compare_exchange_weak(__p1, __p2, __m,
563 __cmpexch_failure_order(__m));
567 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
568 memory_order __m1, memory_order __m2) noexcept
569 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
572 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
574 memory_order __m2) volatile noexcept
575 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
578 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
579 memory_order __m = memory_order_seq_cst) noexcept
581 return _M_b.compare_exchange_strong(__p1, __p2, __m,
582 __cmpexch_failure_order(__m));
586 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
587 memory_order __m = memory_order_seq_cst) volatile noexcept
589 return _M_b.compare_exchange_strong(__p1, __p2, __m,
590 __cmpexch_failure_order(__m));
594 fetch_add(ptrdiff_t __d,
595 memory_order __m = memory_order_seq_cst) noexcept
597 #if __cplusplus >= 201703L
598 static_assert( is_object<_Tp>::value, "pointer to object type" );
600 return _M_b.fetch_add(__d, __m);
604 fetch_add(ptrdiff_t __d,
605 memory_order __m = memory_order_seq_cst) volatile noexcept
607 #if __cplusplus >= 201703L
608 static_assert( is_object<_Tp>::value, "pointer to object type" );
610 return _M_b.fetch_add(__d, __m);
614 fetch_sub(ptrdiff_t __d,
615 memory_order __m = memory_order_seq_cst) noexcept
617 #if __cplusplus >= 201703L
618 static_assert( is_object<_Tp>::value, "pointer to object type" );
620 return _M_b.fetch_sub(__d, __m);
624 fetch_sub(ptrdiff_t __d,
625 memory_order __m = memory_order_seq_cst) volatile noexcept
627 #if __cplusplus >= 201703L
628 static_assert( is_object<_Tp>::value, "pointer to object type" );
630 return _M_b.fetch_sub(__d, __m);
635 /// Explicit specialization for char.
637 struct atomic<char> : __atomic_base<char>
639 typedef char __integral_type;
640 typedef __atomic_base<char> __base_type;
642 atomic() noexcept = default;
643 ~atomic() noexcept = default;
644 atomic(const atomic&) = delete;
645 atomic& operator=(const atomic&) = delete;
646 atomic& operator=(const atomic&) volatile = delete;
648 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
650 using __base_type::operator __integral_type;
651 using __base_type::operator=;
653 #if __cplusplus >= 201703L
654 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
658 /// Explicit specialization for signed char.
660 struct atomic<signed char> : __atomic_base<signed char>
662 typedef signed char __integral_type;
663 typedef __atomic_base<signed char> __base_type;
665 atomic() noexcept= default;
666 ~atomic() noexcept = default;
667 atomic(const atomic&) = delete;
668 atomic& operator=(const atomic&) = delete;
669 atomic& operator=(const atomic&) volatile = delete;
671 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
673 using __base_type::operator __integral_type;
674 using __base_type::operator=;
676 #if __cplusplus >= 201703L
677 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
681 /// Explicit specialization for unsigned char.
683 struct atomic<unsigned char> : __atomic_base<unsigned char>
685 typedef unsigned char __integral_type;
686 typedef __atomic_base<unsigned char> __base_type;
688 atomic() noexcept= default;
689 ~atomic() noexcept = default;
690 atomic(const atomic&) = delete;
691 atomic& operator=(const atomic&) = delete;
692 atomic& operator=(const atomic&) volatile = delete;
694 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
696 using __base_type::operator __integral_type;
697 using __base_type::operator=;
699 #if __cplusplus >= 201703L
700 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
704 /// Explicit specialization for short.
706 struct atomic<short> : __atomic_base<short>
708 typedef short __integral_type;
709 typedef __atomic_base<short> __base_type;
711 atomic() noexcept = default;
712 ~atomic() noexcept = default;
713 atomic(const atomic&) = delete;
714 atomic& operator=(const atomic&) = delete;
715 atomic& operator=(const atomic&) volatile = delete;
717 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
719 using __base_type::operator __integral_type;
720 using __base_type::operator=;
722 #if __cplusplus >= 201703L
723 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
727 /// Explicit specialization for unsigned short.
729 struct atomic<unsigned short> : __atomic_base<unsigned short>
731 typedef unsigned short __integral_type;
732 typedef __atomic_base<unsigned short> __base_type;
734 atomic() noexcept = default;
735 ~atomic() noexcept = default;
736 atomic(const atomic&) = delete;
737 atomic& operator=(const atomic&) = delete;
738 atomic& operator=(const atomic&) volatile = delete;
740 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
742 using __base_type::operator __integral_type;
743 using __base_type::operator=;
745 #if __cplusplus >= 201703L
746 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
750 /// Explicit specialization for int.
752 struct atomic<int> : __atomic_base<int>
754 typedef int __integral_type;
755 typedef __atomic_base<int> __base_type;
757 atomic() noexcept = default;
758 ~atomic() noexcept = default;
759 atomic(const atomic&) = delete;
760 atomic& operator=(const atomic&) = delete;
761 atomic& operator=(const atomic&) volatile = delete;
763 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
765 using __base_type::operator __integral_type;
766 using __base_type::operator=;
768 #if __cplusplus >= 201703L
769 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
773 /// Explicit specialization for unsigned int.
775 struct atomic<unsigned int> : __atomic_base<unsigned int>
777 typedef unsigned int __integral_type;
778 typedef __atomic_base<unsigned int> __base_type;
780 atomic() noexcept = default;
781 ~atomic() noexcept = default;
782 atomic(const atomic&) = delete;
783 atomic& operator=(const atomic&) = delete;
784 atomic& operator=(const atomic&) volatile = delete;
786 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
788 using __base_type::operator __integral_type;
789 using __base_type::operator=;
791 #if __cplusplus >= 201703L
792 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
796 /// Explicit specialization for long.
798 struct atomic<long> : __atomic_base<long>
800 typedef long __integral_type;
801 typedef __atomic_base<long> __base_type;
803 atomic() noexcept = default;
804 ~atomic() noexcept = default;
805 atomic(const atomic&) = delete;
806 atomic& operator=(const atomic&) = delete;
807 atomic& operator=(const atomic&) volatile = delete;
809 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
811 using __base_type::operator __integral_type;
812 using __base_type::operator=;
814 #if __cplusplus >= 201703L
815 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
819 /// Explicit specialization for unsigned long.
821 struct atomic<unsigned long> : __atomic_base<unsigned long>
823 typedef unsigned long __integral_type;
824 typedef __atomic_base<unsigned long> __base_type;
826 atomic() noexcept = default;
827 ~atomic() noexcept = default;
828 atomic(const atomic&) = delete;
829 atomic& operator=(const atomic&) = delete;
830 atomic& operator=(const atomic&) volatile = delete;
832 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
834 using __base_type::operator __integral_type;
835 using __base_type::operator=;
837 #if __cplusplus >= 201703L
838 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
842 /// Explicit specialization for long long.
844 struct atomic<long long> : __atomic_base<long long>
846 typedef long long __integral_type;
847 typedef __atomic_base<long long> __base_type;
849 atomic() noexcept = default;
850 ~atomic() noexcept = default;
851 atomic(const atomic&) = delete;
852 atomic& operator=(const atomic&) = delete;
853 atomic& operator=(const atomic&) volatile = delete;
855 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
857 using __base_type::operator __integral_type;
858 using __base_type::operator=;
860 #if __cplusplus >= 201703L
861 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
865 /// Explicit specialization for unsigned long long.
867 struct atomic<unsigned long long> : __atomic_base<unsigned long long>
869 typedef unsigned long long __integral_type;
870 typedef __atomic_base<unsigned long long> __base_type;
872 atomic() noexcept = default;
873 ~atomic() noexcept = default;
874 atomic(const atomic&) = delete;
875 atomic& operator=(const atomic&) = delete;
876 atomic& operator=(const atomic&) volatile = delete;
878 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
880 using __base_type::operator __integral_type;
881 using __base_type::operator=;
883 #if __cplusplus >= 201703L
884 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
888 /// Explicit specialization for wchar_t.
890 struct atomic<wchar_t> : __atomic_base<wchar_t>
892 typedef wchar_t __integral_type;
893 typedef __atomic_base<wchar_t> __base_type;
895 atomic() noexcept = default;
896 ~atomic() noexcept = default;
897 atomic(const atomic&) = delete;
898 atomic& operator=(const atomic&) = delete;
899 atomic& operator=(const atomic&) volatile = delete;
901 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
903 using __base_type::operator __integral_type;
904 using __base_type::operator=;
906 #if __cplusplus >= 201703L
907 static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
  struct atomic<char8_t> : __atomic_base<char8_t>
  {
    typedef char8_t __integral_type;
    typedef __atomic_base<char8_t> __base_type;

    atomic() noexcept = default;
    ~atomic() noexcept = default;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

    // Conversion, assignment and fetch-operations come from the base class.
    using __base_type::operator __integral_type;
    using __base_type::operator=;

#if __cplusplus > 201402L
    static constexpr bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
  };
#endif // _GLIBCXX_USE_CHAR8_T
936 /// Explicit specialization for char16_t.
938 struct atomic<char16_t> : __atomic_base<char16_t>
940 typedef char16_t __integral_type;
941 typedef __atomic_base<char16_t> __base_type;
943 atomic() noexcept = default;
944 ~atomic() noexcept = default;
945 atomic(const atomic&) = delete;
946 atomic& operator=(const atomic&) = delete;
947 atomic& operator=(const atomic&) volatile = delete;
949 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
951 using __base_type::operator __integral_type;
952 using __base_type::operator=;
954 #if __cplusplus >= 201703L
955 static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
959 /// Explicit specialization for char32_t.
961 struct atomic<char32_t> : __atomic_base<char32_t>
963 typedef char32_t __integral_type;
964 typedef __atomic_base<char32_t> __base_type;
966 atomic() noexcept = default;
967 ~atomic() noexcept = default;
968 atomic(const atomic&) = delete;
969 atomic& operator=(const atomic&) = delete;
970 atomic& operator=(const atomic&) volatile = delete;
972 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
974 using __base_type::operator __integral_type;
975 using __base_type::operator=;
977 #if __cplusplus >= 201703L
978 static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
984 typedef atomic<bool> atomic_bool;
987 typedef atomic<char> atomic_char;
990 typedef atomic<signed char> atomic_schar;
993 typedef atomic<unsigned char> atomic_uchar;
996 typedef atomic<short> atomic_short;
999 typedef atomic<unsigned short> atomic_ushort;
1002 typedef atomic<int> atomic_int;
1005 typedef atomic<unsigned int> atomic_uint;
1008 typedef atomic<long> atomic_long;
1011 typedef atomic<unsigned long> atomic_ulong;
1014 typedef atomic<long long> atomic_llong;
1017 typedef atomic<unsigned long long> atomic_ullong;
1020 typedef atomic<wchar_t> atomic_wchar_t;
1022 #ifdef _GLIBCXX_USE_CHAR8_T
1024 typedef atomic<char8_t> atomic_char8_t;
1028 typedef atomic<char16_t> atomic_char16_t;
1031 typedef atomic<char32_t> atomic_char32_t;
1033 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
1034 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1035 // 2441. Exact-width atomic typedefs should be provided
1038 typedef atomic<int8_t> atomic_int8_t;
1041 typedef atomic<uint8_t> atomic_uint8_t;
1044 typedef atomic<int16_t> atomic_int16_t;
1047 typedef atomic<uint16_t> atomic_uint16_t;
1050 typedef atomic<int32_t> atomic_int32_t;
1053 typedef atomic<uint32_t> atomic_uint32_t;
1056 typedef atomic<int64_t> atomic_int64_t;
1059 typedef atomic<uint64_t> atomic_uint64_t;
1062 /// atomic_int_least8_t
1063 typedef atomic<int_least8_t> atomic_int_least8_t;
1065 /// atomic_uint_least8_t
1066 typedef atomic<uint_least8_t> atomic_uint_least8_t;
1068 /// atomic_int_least16_t
1069 typedef atomic<int_least16_t> atomic_int_least16_t;
1071 /// atomic_uint_least16_t
1072 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1074 /// atomic_int_least32_t
1075 typedef atomic<int_least32_t> atomic_int_least32_t;
1077 /// atomic_uint_least32_t
1078 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1080 /// atomic_int_least64_t
1081 typedef atomic<int_least64_t> atomic_int_least64_t;
1083 /// atomic_uint_least64_t
1084 typedef atomic<uint_least64_t> atomic_uint_least64_t;
1087 /// atomic_int_fast8_t
1088 typedef atomic<int_fast8_t> atomic_int_fast8_t;
1090 /// atomic_uint_fast8_t
1091 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1093 /// atomic_int_fast16_t
1094 typedef atomic<int_fast16_t> atomic_int_fast16_t;
1096 /// atomic_uint_fast16_t
1097 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1099 /// atomic_int_fast32_t
1100 typedef atomic<int_fast32_t> atomic_int_fast32_t;
1102 /// atomic_uint_fast32_t
1103 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1105 /// atomic_int_fast64_t
1106 typedef atomic<int_fast64_t> atomic_int_fast64_t;
1108 /// atomic_uint_fast64_t
1109 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
1114 typedef atomic<intptr_t> atomic_intptr_t;
1116 /// atomic_uintptr_t
1117 typedef atomic<uintptr_t> atomic_uintptr_t;
1120 typedef atomic<size_t> atomic_size_t;
1122 /// atomic_ptrdiff_t
1123 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1125 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
1127 typedef atomic<intmax_t> atomic_intmax_t;
1129 /// atomic_uintmax_t
1130 typedef atomic<uintmax_t> atomic_uintmax_t;
1133 // Function definitions, atomic_flag operations.
1135 atomic_flag_test_and_set_explicit(atomic_flag* __a,
1136 memory_order __m) noexcept
1137 { return __a->test_and_set(__m); }
1140 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1141 memory_order __m) noexcept
1142 { return __a->test_and_set(__m); }
1145 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1146 { __a->clear(__m); }
1149 atomic_flag_clear_explicit(volatile atomic_flag* __a,
1150 memory_order __m) noexcept
1151 { __a->clear(__m); }
1154 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1155 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1158 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1159 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1162 atomic_flag_clear(atomic_flag* __a) noexcept
1163 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1166 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1167 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1170 template<typename _Tp>
1171 using __atomic_val_t = typename atomic<_Tp>::value_type;
1172 template<typename _Tp>
1173 using __atomic_diff_t = typename atomic<_Tp>::difference_type;
1175 // [atomics.nonmembers] Non-member functions.
1176 // Function templates generally applicable to atomic types.
1177 template<typename _ITp>
1179 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1180 { return __a->is_lock_free(); }
1182 template<typename _ITp>
1184 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1185 { return __a->is_lock_free(); }
1187 template<typename _ITp>
1189 atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1190 { __a->store(__i, memory_order_relaxed); }
1192 template<typename _ITp>
1194 atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1195 { __a->store(__i, memory_order_relaxed); }
1197 template<typename _ITp>
1199 atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1200 memory_order __m) noexcept
1201 { __a->store(__i, __m); }
1203 template<typename _ITp>
1205 atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1206 memory_order __m) noexcept
1207 { __a->store(__i, __m); }
1209 template<typename _ITp>
1211 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1212 { return __a->load(__m); }
1214 template<typename _ITp>
1216 atomic_load_explicit(const volatile atomic<_ITp>* __a,
1217 memory_order __m) noexcept
1218 { return __a->load(__m); }
1220 template<typename _ITp>
1222 atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1223 memory_order __m) noexcept
1224 { return __a->exchange(__i, __m); }
1226 template<typename _ITp>
1228 atomic_exchange_explicit(volatile atomic<_ITp>* __a,
1229 __atomic_val_t<_ITp> __i,
1230 memory_order __m) noexcept
1231 { return __a->exchange(__i, __m); }
1233 template<typename _ITp>
1235 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1236 __atomic_val_t<_ITp>* __i1,
1237 __atomic_val_t<_ITp> __i2,
1239 memory_order __m2) noexcept
1240 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1242 template<typename _ITp>
1244 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1245 __atomic_val_t<_ITp>* __i1,
1246 __atomic_val_t<_ITp> __i2,
1248 memory_order __m2) noexcept
1249 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1251 template<typename _ITp>
1253 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1254 __atomic_val_t<_ITp>* __i1,
1255 __atomic_val_t<_ITp> __i2,
1257 memory_order __m2) noexcept
1258 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1260 template<typename _ITp>
1262 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1263 __atomic_val_t<_ITp>* __i1,
1264 __atomic_val_t<_ITp> __i2,
1266 memory_order __m2) noexcept
1267 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1270 template<typename _ITp>
1272 atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1273 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1275 template<typename _ITp>
1277 atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1278 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1280 template<typename _ITp>
1282 atomic_load(const atomic<_ITp>* __a) noexcept
1283 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1285 template<typename _ITp>
1287 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1288 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1290 template<typename _ITp>
1292 atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1293 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1295 template<typename _ITp>
1297 atomic_exchange(volatile atomic<_ITp>* __a,
1298 __atomic_val_t<_ITp> __i) noexcept
1299 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1301 template<typename _ITp>
1303 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1304 __atomic_val_t<_ITp>* __i1,
1305 __atomic_val_t<_ITp> __i2) noexcept
1307 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1308 memory_order_seq_cst,
1309 memory_order_seq_cst);
1312 template<typename _ITp>
1314 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1315 __atomic_val_t<_ITp>* __i1,
1316 __atomic_val_t<_ITp> __i2) noexcept
1318 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1319 memory_order_seq_cst,
1320 memory_order_seq_cst);
1323 template<typename _ITp>
1325 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1326 __atomic_val_t<_ITp>* __i1,
1327 __atomic_val_t<_ITp> __i2) noexcept
1329 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1330 memory_order_seq_cst,
1331 memory_order_seq_cst);
1334 template<typename _ITp>
1336 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1337 __atomic_val_t<_ITp>* __i1,
1338 __atomic_val_t<_ITp> __i2) noexcept
1340 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1341 memory_order_seq_cst,
1342 memory_order_seq_cst);
  // Function templates for atomic_integral and atomic_pointer operations only.
  // Some operations (and, or, xor) are only available for atomic integrals,
  // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
1349 template<typename _ITp>
1351 atomic_fetch_add_explicit(atomic<_ITp>* __a,
1352 __atomic_diff_t<_ITp> __i,
1353 memory_order __m) noexcept
1354 { return __a->fetch_add(__i, __m); }
1356 template<typename _ITp>
1358 atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
1359 __atomic_diff_t<_ITp> __i,
1360 memory_order __m) noexcept
1361 { return __a->fetch_add(__i, __m); }
1363 template<typename _ITp>
1365 atomic_fetch_sub_explicit(atomic<_ITp>* __a,
1366 __atomic_diff_t<_ITp> __i,
1367 memory_order __m) noexcept
1368 { return __a->fetch_sub(__i, __m); }
1370 template<typename _ITp>
1372 atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
1373 __atomic_diff_t<_ITp> __i,
1374 memory_order __m) noexcept
1375 { return __a->fetch_sub(__i, __m); }
1377 template<typename _ITp>
1379 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
1380 __atomic_val_t<_ITp> __i,
1381 memory_order __m) noexcept
1382 { return __a->fetch_and(__i, __m); }
1384 template<typename _ITp>
1386 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
1387 __atomic_val_t<_ITp> __i,
1388 memory_order __m) noexcept
1389 { return __a->fetch_and(__i, __m); }
1391 template<typename _ITp>
1393 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
1394 __atomic_val_t<_ITp> __i,
1395 memory_order __m) noexcept
1396 { return __a->fetch_or(__i, __m); }
1398 template<typename _ITp>
1400 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
1401 __atomic_val_t<_ITp> __i,
1402 memory_order __m) noexcept
1403 { return __a->fetch_or(__i, __m); }
1405 template<typename _ITp>
1407 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
1408 __atomic_val_t<_ITp> __i,
1409 memory_order __m) noexcept
1410 { return __a->fetch_xor(__i, __m); }
1412 template<typename _ITp>
1414 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
1415 __atomic_val_t<_ITp> __i,
1416 memory_order __m) noexcept
1417 { return __a->fetch_xor(__i, __m); }
1419 template<typename _ITp>
1421 atomic_fetch_add(atomic<_ITp>* __a,
1422 __atomic_diff_t<_ITp> __i) noexcept
1423 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1425 template<typename _ITp>
1427 atomic_fetch_add(volatile atomic<_ITp>* __a,
1428 __atomic_diff_t<_ITp> __i) noexcept
1429 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1431 template<typename _ITp>
1433 atomic_fetch_sub(atomic<_ITp>* __a,
1434 __atomic_diff_t<_ITp> __i) noexcept
1435 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1437 template<typename _ITp>
1439 atomic_fetch_sub(volatile atomic<_ITp>* __a,
1440 __atomic_diff_t<_ITp> __i) noexcept
1441 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1443 template<typename _ITp>
1445 atomic_fetch_and(__atomic_base<_ITp>* __a,
1446 __atomic_val_t<_ITp> __i) noexcept
1447 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1449 template<typename _ITp>
1451 atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
1452 __atomic_val_t<_ITp> __i) noexcept
1453 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1455 template<typename _ITp>
1457 atomic_fetch_or(__atomic_base<_ITp>* __a,
1458 __atomic_val_t<_ITp> __i) noexcept
1459 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1461 template<typename _ITp>
1463 atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
1464 __atomic_val_t<_ITp> __i) noexcept
1465 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1467 template<typename _ITp>
1469 atomic_fetch_xor(__atomic_base<_ITp>* __a,
1470 __atomic_val_t<_ITp> __i) noexcept
1471 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1473 template<typename _ITp>
1475 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
1476 __atomic_val_t<_ITp> __i) noexcept
1477 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1479 #if __cplusplus > 201703L
1481 struct atomic<float> : __atomic_float<float>
1483 atomic() noexcept = default;
1486 atomic(float __fp) noexcept : __atomic_float<float>(__fp)
1489 atomic& operator=(const atomic&) volatile = delete;
1490 atomic& operator=(const atomic&) = delete;
1492 using __atomic_float<float>::operator=;
1496 struct atomic<double> : __atomic_float<double>
1498 atomic() noexcept = default;
1501 atomic(double __fp) noexcept : __atomic_float<double>(__fp)
1504 atomic& operator=(const atomic&) volatile = delete;
1505 atomic& operator=(const atomic&) = delete;
1507 using __atomic_float<double>::operator=;
1511 struct atomic<long double> : __atomic_float<long double>
1513 atomic() noexcept = default;
1516 atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
1519 atomic& operator=(const atomic&) volatile = delete;
1520 atomic& operator=(const atomic&) = delete;
1522 using __atomic_float<long double>::operator=;
1525 #define __cpp_lib_atomic_ref 201806L
1527 /// Class template to provide atomic operations on a non-atomic variable.
1528 template<typename _Tp>
1529 struct atomic_ref : __atomic_ref<_Tp>
1532 atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
1535 atomic_ref& operator=(const atomic_ref&) = delete;
1537 atomic_ref(const atomic_ref&) = default;
1539 using __atomic_ref<_Tp>::operator=;
1546 _GLIBCXX_END_NAMESPACE_VERSION
1551 #endif // _GLIBCXX_ATOMIC