3 // Copyright (C) 2008-2019 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
25 /** @file include/atomic
26 * This is a Standard C++ Library header.
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
35 #pragma GCC system_header
37 #if __cplusplus < 201103L
38 # include <bits/c++0x_warning.h>
41 #include <bits/atomic_base.h>
42 #include <bits/move.h>
44 namespace std _GLIBCXX_VISIBILITY(default)
46 _GLIBCXX_BEGIN_NAMESPACE_VERSION
53 #if __cplusplus >= 201703L
54 # define __cpp_lib_atomic_is_always_lock_free 201603
57 template<typename _Tp>
61 // NB: No operators or fetch-operations for this type.
65 using value_type = bool;
68 __atomic_base<bool> _M_base;
71 atomic() noexcept = default;
72 ~atomic() noexcept = default;
73 atomic(const atomic&) = delete;
74 atomic& operator=(const atomic&) = delete;
75 atomic& operator=(const atomic&) volatile = delete;
77 constexpr atomic(bool __i) noexcept : _M_base(__i) { }
80 operator=(bool __i) noexcept
81 { return _M_base.operator=(__i); }
84 operator=(bool __i) volatile noexcept
85 { return _M_base.operator=(__i); }
87 operator bool() const noexcept
88 { return _M_base.load(); }
90 operator bool() const volatile noexcept
91 { return _M_base.load(); }
94 is_lock_free() const noexcept { return _M_base.is_lock_free(); }
97 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
99 #if __cplusplus >= 201703L
100 static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
104 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
105 { _M_base.store(__i, __m); }
108 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
109 { _M_base.store(__i, __m); }
112 load(memory_order __m = memory_order_seq_cst) const noexcept
113 { return _M_base.load(__m); }
116 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
117 { return _M_base.load(__m); }
120 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
121 { return _M_base.exchange(__i, __m); }
125 memory_order __m = memory_order_seq_cst) volatile noexcept
126 { return _M_base.exchange(__i, __m); }
129 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
130 memory_order __m2) noexcept
131 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
134 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
135 memory_order __m2) volatile noexcept
136 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
139 compare_exchange_weak(bool& __i1, bool __i2,
140 memory_order __m = memory_order_seq_cst) noexcept
141 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
144 compare_exchange_weak(bool& __i1, bool __i2,
145 memory_order __m = memory_order_seq_cst) volatile noexcept
146 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
149 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
150 memory_order __m2) noexcept
151 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
154 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
155 memory_order __m2) volatile noexcept
156 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
159 compare_exchange_strong(bool& __i1, bool __i2,
160 memory_order __m = memory_order_seq_cst) noexcept
161 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
164 compare_exchange_strong(bool& __i1, bool __i2,
165 memory_order __m = memory_order_seq_cst) volatile noexcept
166 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
171 * @brief Generic atomic type, primary class template.
173 * @tparam _Tp Type to be made atomic, must be trivially copyable.
175 template<typename _Tp>
178 using value_type = _Tp;
181 // Align 1/2/4/8/16-byte types to at least their size.
182 static constexpr int _S_min_alignment
183 = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
186 static constexpr int _S_alignment
187 = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
189 alignas(_S_alignment) _Tp _M_i;
191 static_assert(__is_trivially_copyable(_Tp),
192 "std::atomic requires a trivially copyable type");
194 static_assert(sizeof(_Tp) > 0,
195 "Incomplete or zero-sized types are not supported");
198 atomic() noexcept = default;
199 ~atomic() noexcept = default;
200 atomic(const atomic&) = delete;
201 atomic& operator=(const atomic&) = delete;
202 atomic& operator=(const atomic&) volatile = delete;
204 constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
206 operator _Tp() const noexcept
209 operator _Tp() const volatile noexcept
213 operator=(_Tp __i) noexcept
214 { store(__i); return __i; }
217 operator=(_Tp __i) volatile noexcept
218 { store(__i); return __i; }
221 is_lock_free() const noexcept
223 // Produce a fake, minimally aligned pointer.
224 return __atomic_is_lock_free(sizeof(_M_i),
225 reinterpret_cast<void *>(-_S_alignment));
229 is_lock_free() const volatile noexcept
231 // Produce a fake, minimally aligned pointer.
232 return __atomic_is_lock_free(sizeof(_M_i),
233 reinterpret_cast<void *>(-_S_alignment));
236 #if __cplusplus >= 201703L
237 static constexpr bool is_always_lock_free
238 = __atomic_always_lock_free(sizeof(_M_i), 0);
242 store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
243 { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), __m); }
246 store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
247 { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), __m); }
250 load(memory_order __m = memory_order_seq_cst) const noexcept
252 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
253 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
254 __atomic_load(std::__addressof(_M_i), __ptr, __m);
259 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
261 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
262 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
263 __atomic_load(std::__addressof(_M_i), __ptr, __m);
268 exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
270 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
271 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
272 __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
279 memory_order __m = memory_order_seq_cst) volatile noexcept
281 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
282 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
283 __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
289 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
290 memory_order __f) noexcept
292 return __atomic_compare_exchange(std::__addressof(_M_i),
293 std::__addressof(__e),
294 std::__addressof(__i),
299 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
300 memory_order __f) volatile noexcept
302 return __atomic_compare_exchange(std::__addressof(_M_i),
303 std::__addressof(__e),
304 std::__addressof(__i),
309 compare_exchange_weak(_Tp& __e, _Tp __i,
310 memory_order __m = memory_order_seq_cst) noexcept
311 { return compare_exchange_weak(__e, __i, __m,
312 __cmpexch_failure_order(__m)); }
315 compare_exchange_weak(_Tp& __e, _Tp __i,
316 memory_order __m = memory_order_seq_cst) volatile noexcept
317 { return compare_exchange_weak(__e, __i, __m,
318 __cmpexch_failure_order(__m)); }
321 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
322 memory_order __f) noexcept
324 return __atomic_compare_exchange(std::__addressof(_M_i),
325 std::__addressof(__e),
326 std::__addressof(__i),
331 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
332 memory_order __f) volatile noexcept
334 return __atomic_compare_exchange(std::__addressof(_M_i),
335 std::__addressof(__e),
336 std::__addressof(__i),
341 compare_exchange_strong(_Tp& __e, _Tp __i,
342 memory_order __m = memory_order_seq_cst) noexcept
343 { return compare_exchange_strong(__e, __i, __m,
344 __cmpexch_failure_order(__m)); }
347 compare_exchange_strong(_Tp& __e, _Tp __i,
348 memory_order __m = memory_order_seq_cst) volatile noexcept
349 { return compare_exchange_strong(__e, __i, __m,
350 __cmpexch_failure_order(__m)); }
354 /// Partial specialization for pointer types.
355 template<typename _Tp>
358 using value_type = _Tp*;
359 using difference_type = ptrdiff_t;
361 typedef _Tp* __pointer_type;
362 typedef __atomic_base<_Tp*> __base_type;
365 atomic() noexcept = default;
366 ~atomic() noexcept = default;
367 atomic(const atomic&) = delete;
368 atomic& operator=(const atomic&) = delete;
369 atomic& operator=(const atomic&) volatile = delete;
371 constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
373 operator __pointer_type() const noexcept
374 { return __pointer_type(_M_b); }
376 operator __pointer_type() const volatile noexcept
377 { return __pointer_type(_M_b); }
380 operator=(__pointer_type __p) noexcept
381 { return _M_b.operator=(__p); }
384 operator=(__pointer_type __p) volatile noexcept
385 { return _M_b.operator=(__p); }
388 operator++(int) noexcept
390 #if __cplusplus >= 201703L
391 static_assert( is_object<_Tp>::value, "pointer to object type" );
397 operator++(int) volatile noexcept
399 #if __cplusplus >= 201703L
400 static_assert( is_object<_Tp>::value, "pointer to object type" );
406 operator--(int) noexcept
408 #if __cplusplus >= 201703L
409 static_assert( is_object<_Tp>::value, "pointer to object type" );
415 operator--(int) volatile noexcept
417 #if __cplusplus >= 201703L
418 static_assert( is_object<_Tp>::value, "pointer to object type" );
424 operator++() noexcept
426 #if __cplusplus >= 201703L
427 static_assert( is_object<_Tp>::value, "pointer to object type" );
433 operator++() volatile noexcept
435 #if __cplusplus >= 201703L
436 static_assert( is_object<_Tp>::value, "pointer to object type" );
442 operator--() noexcept
444 #if __cplusplus >= 201703L
445 static_assert( is_object<_Tp>::value, "pointer to object type" );
451 operator--() volatile noexcept
453 #if __cplusplus >= 201703L
454 static_assert( is_object<_Tp>::value, "pointer to object type" );
460 operator+=(ptrdiff_t __d) noexcept
462 #if __cplusplus >= 201703L
463 static_assert( is_object<_Tp>::value, "pointer to object type" );
465 return _M_b.operator+=(__d);
469 operator+=(ptrdiff_t __d) volatile noexcept
471 #if __cplusplus >= 201703L
472 static_assert( is_object<_Tp>::value, "pointer to object type" );
474 return _M_b.operator+=(__d);
478 operator-=(ptrdiff_t __d) noexcept
480 #if __cplusplus >= 201703L
481 static_assert( is_object<_Tp>::value, "pointer to object type" );
483 return _M_b.operator-=(__d);
487 operator-=(ptrdiff_t __d) volatile noexcept
489 #if __cplusplus >= 201703L
490 static_assert( is_object<_Tp>::value, "pointer to object type" );
492 return _M_b.operator-=(__d);
496 is_lock_free() const noexcept
497 { return _M_b.is_lock_free(); }
500 is_lock_free() const volatile noexcept
501 { return _M_b.is_lock_free(); }
503 #if __cplusplus >= 201703L
504 static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
508 store(__pointer_type __p,
509 memory_order __m = memory_order_seq_cst) noexcept
510 { return _M_b.store(__p, __m); }
513 store(__pointer_type __p,
514 memory_order __m = memory_order_seq_cst) volatile noexcept
515 { return _M_b.store(__p, __m); }
518 load(memory_order __m = memory_order_seq_cst) const noexcept
519 { return _M_b.load(__m); }
522 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
523 { return _M_b.load(__m); }
526 exchange(__pointer_type __p,
527 memory_order __m = memory_order_seq_cst) noexcept
528 { return _M_b.exchange(__p, __m); }
531 exchange(__pointer_type __p,
532 memory_order __m = memory_order_seq_cst) volatile noexcept
533 { return _M_b.exchange(__p, __m); }
536 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
537 memory_order __m1, memory_order __m2) noexcept
538 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
541 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
543 memory_order __m2) volatile noexcept
544 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
547 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
548 memory_order __m = memory_order_seq_cst) noexcept
550 return compare_exchange_weak(__p1, __p2, __m,
551 __cmpexch_failure_order(__m));
555 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
556 memory_order __m = memory_order_seq_cst) volatile noexcept
558 return compare_exchange_weak(__p1, __p2, __m,
559 __cmpexch_failure_order(__m));
563 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
564 memory_order __m1, memory_order __m2) noexcept
565 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
568 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
570 memory_order __m2) volatile noexcept
571 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
574 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
575 memory_order __m = memory_order_seq_cst) noexcept
577 return _M_b.compare_exchange_strong(__p1, __p2, __m,
578 __cmpexch_failure_order(__m));
582 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
583 memory_order __m = memory_order_seq_cst) volatile noexcept
585 return _M_b.compare_exchange_strong(__p1, __p2, __m,
586 __cmpexch_failure_order(__m));
590 fetch_add(ptrdiff_t __d,
591 memory_order __m = memory_order_seq_cst) noexcept
593 #if __cplusplus >= 201703L
594 static_assert( is_object<_Tp>::value, "pointer to object type" );
596 return _M_b.fetch_add(__d, __m);
600 fetch_add(ptrdiff_t __d,
601 memory_order __m = memory_order_seq_cst) volatile noexcept
603 #if __cplusplus >= 201703L
604 static_assert( is_object<_Tp>::value, "pointer to object type" );
606 return _M_b.fetch_add(__d, __m);
610 fetch_sub(ptrdiff_t __d,
611 memory_order __m = memory_order_seq_cst) noexcept
613 #if __cplusplus >= 201703L
614 static_assert( is_object<_Tp>::value, "pointer to object type" );
616 return _M_b.fetch_sub(__d, __m);
620 fetch_sub(ptrdiff_t __d,
621 memory_order __m = memory_order_seq_cst) volatile noexcept
623 #if __cplusplus >= 201703L
624 static_assert( is_object<_Tp>::value, "pointer to object type" );
626 return _M_b.fetch_sub(__d, __m);
631 /// Explicit specialization for char.
633 struct atomic<char> : __atomic_base<char>
635 typedef char __integral_type;
636 typedef __atomic_base<char> __base_type;
638 atomic() noexcept = default;
639 ~atomic() noexcept = default;
640 atomic(const atomic&) = delete;
641 atomic& operator=(const atomic&) = delete;
642 atomic& operator=(const atomic&) volatile = delete;
644 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
646 using __base_type::operator __integral_type;
647 using __base_type::operator=;
649 #if __cplusplus >= 201703L
650 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
654 /// Explicit specialization for signed char.
656 struct atomic<signed char> : __atomic_base<signed char>
658 typedef signed char __integral_type;
659 typedef __atomic_base<signed char> __base_type;
661 atomic() noexcept= default;
662 ~atomic() noexcept = default;
663 atomic(const atomic&) = delete;
664 atomic& operator=(const atomic&) = delete;
665 atomic& operator=(const atomic&) volatile = delete;
667 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
669 using __base_type::operator __integral_type;
670 using __base_type::operator=;
672 #if __cplusplus >= 201703L
673 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
677 /// Explicit specialization for unsigned char.
679 struct atomic<unsigned char> : __atomic_base<unsigned char>
681 typedef unsigned char __integral_type;
682 typedef __atomic_base<unsigned char> __base_type;
684 atomic() noexcept= default;
685 ~atomic() noexcept = default;
686 atomic(const atomic&) = delete;
687 atomic& operator=(const atomic&) = delete;
688 atomic& operator=(const atomic&) volatile = delete;
690 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
692 using __base_type::operator __integral_type;
693 using __base_type::operator=;
695 #if __cplusplus >= 201703L
696 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
700 /// Explicit specialization for short.
702 struct atomic<short> : __atomic_base<short>
704 typedef short __integral_type;
705 typedef __atomic_base<short> __base_type;
707 atomic() noexcept = default;
708 ~atomic() noexcept = default;
709 atomic(const atomic&) = delete;
710 atomic& operator=(const atomic&) = delete;
711 atomic& operator=(const atomic&) volatile = delete;
713 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
715 using __base_type::operator __integral_type;
716 using __base_type::operator=;
718 #if __cplusplus >= 201703L
719 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
723 /// Explicit specialization for unsigned short.
725 struct atomic<unsigned short> : __atomic_base<unsigned short>
727 typedef unsigned short __integral_type;
728 typedef __atomic_base<unsigned short> __base_type;
730 atomic() noexcept = default;
731 ~atomic() noexcept = default;
732 atomic(const atomic&) = delete;
733 atomic& operator=(const atomic&) = delete;
734 atomic& operator=(const atomic&) volatile = delete;
736 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
738 using __base_type::operator __integral_type;
739 using __base_type::operator=;
741 #if __cplusplus >= 201703L
742 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
746 /// Explicit specialization for int.
748 struct atomic<int> : __atomic_base<int>
750 typedef int __integral_type;
751 typedef __atomic_base<int> __base_type;
753 atomic() noexcept = default;
754 ~atomic() noexcept = default;
755 atomic(const atomic&) = delete;
756 atomic& operator=(const atomic&) = delete;
757 atomic& operator=(const atomic&) volatile = delete;
759 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
761 using __base_type::operator __integral_type;
762 using __base_type::operator=;
764 #if __cplusplus >= 201703L
765 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
769 /// Explicit specialization for unsigned int.
771 struct atomic<unsigned int> : __atomic_base<unsigned int>
773 typedef unsigned int __integral_type;
774 typedef __atomic_base<unsigned int> __base_type;
776 atomic() noexcept = default;
777 ~atomic() noexcept = default;
778 atomic(const atomic&) = delete;
779 atomic& operator=(const atomic&) = delete;
780 atomic& operator=(const atomic&) volatile = delete;
782 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
784 using __base_type::operator __integral_type;
785 using __base_type::operator=;
787 #if __cplusplus >= 201703L
788 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
792 /// Explicit specialization for long.
794 struct atomic<long> : __atomic_base<long>
796 typedef long __integral_type;
797 typedef __atomic_base<long> __base_type;
799 atomic() noexcept = default;
800 ~atomic() noexcept = default;
801 atomic(const atomic&) = delete;
802 atomic& operator=(const atomic&) = delete;
803 atomic& operator=(const atomic&) volatile = delete;
805 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
807 using __base_type::operator __integral_type;
808 using __base_type::operator=;
810 #if __cplusplus >= 201703L
811 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
815 /// Explicit specialization for unsigned long.
817 struct atomic<unsigned long> : __atomic_base<unsigned long>
819 typedef unsigned long __integral_type;
820 typedef __atomic_base<unsigned long> __base_type;
822 atomic() noexcept = default;
823 ~atomic() noexcept = default;
824 atomic(const atomic&) = delete;
825 atomic& operator=(const atomic&) = delete;
826 atomic& operator=(const atomic&) volatile = delete;
828 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
830 using __base_type::operator __integral_type;
831 using __base_type::operator=;
833 #if __cplusplus >= 201703L
834 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
838 /// Explicit specialization for long long.
840 struct atomic<long long> : __atomic_base<long long>
842 typedef long long __integral_type;
843 typedef __atomic_base<long long> __base_type;
845 atomic() noexcept = default;
846 ~atomic() noexcept = default;
847 atomic(const atomic&) = delete;
848 atomic& operator=(const atomic&) = delete;
849 atomic& operator=(const atomic&) volatile = delete;
851 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
853 using __base_type::operator __integral_type;
854 using __base_type::operator=;
856 #if __cplusplus >= 201703L
857 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
861 /// Explicit specialization for unsigned long long.
863 struct atomic<unsigned long long> : __atomic_base<unsigned long long>
865 typedef unsigned long long __integral_type;
866 typedef __atomic_base<unsigned long long> __base_type;
868 atomic() noexcept = default;
869 ~atomic() noexcept = default;
870 atomic(const atomic&) = delete;
871 atomic& operator=(const atomic&) = delete;
872 atomic& operator=(const atomic&) volatile = delete;
874 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
876 using __base_type::operator __integral_type;
877 using __base_type::operator=;
879 #if __cplusplus >= 201703L
880 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
884 /// Explicit specialization for wchar_t.
886 struct atomic<wchar_t> : __atomic_base<wchar_t>
888 typedef wchar_t __integral_type;
889 typedef __atomic_base<wchar_t> __base_type;
891 atomic() noexcept = default;
892 ~atomic() noexcept = default;
893 atomic(const atomic&) = delete;
894 atomic& operator=(const atomic&) = delete;
895 atomic& operator=(const atomic&) volatile = delete;
897 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
899 using __base_type::operator __integral_type;
900 using __base_type::operator=;
902 #if __cplusplus >= 201703L
903 static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
907 /// Explicit specialization for char16_t.
909 struct atomic<char16_t> : __atomic_base<char16_t>
911 typedef char16_t __integral_type;
912 typedef __atomic_base<char16_t> __base_type;
914 atomic() noexcept = default;
915 ~atomic() noexcept = default;
916 atomic(const atomic&) = delete;
917 atomic& operator=(const atomic&) = delete;
918 atomic& operator=(const atomic&) volatile = delete;
920 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
922 using __base_type::operator __integral_type;
923 using __base_type::operator=;
925 #if __cplusplus >= 201703L
926 static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
930 /// Explicit specialization for char32_t.
932 struct atomic<char32_t> : __atomic_base<char32_t>
934 typedef char32_t __integral_type;
935 typedef __atomic_base<char32_t> __base_type;
937 atomic() noexcept = default;
938 ~atomic() noexcept = default;
939 atomic(const atomic&) = delete;
940 atomic& operator=(const atomic&) = delete;
941 atomic& operator=(const atomic&) volatile = delete;
943 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
945 using __base_type::operator __integral_type;
946 using __base_type::operator=;
948 #if __cplusplus >= 201703L
949 static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
955 typedef atomic<bool> atomic_bool;
958 typedef atomic<char> atomic_char;
961 typedef atomic<signed char> atomic_schar;
964 typedef atomic<unsigned char> atomic_uchar;
967 typedef atomic<short> atomic_short;
970 typedef atomic<unsigned short> atomic_ushort;
973 typedef atomic<int> atomic_int;
976 typedef atomic<unsigned int> atomic_uint;
979 typedef atomic<long> atomic_long;
982 typedef atomic<unsigned long> atomic_ulong;
985 typedef atomic<long long> atomic_llong;
988 typedef atomic<unsigned long long> atomic_ullong;
991 typedef atomic<wchar_t> atomic_wchar_t;
994 typedef atomic<char16_t> atomic_char16_t;
997 typedef atomic<char32_t> atomic_char32_t;
999 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
1000 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1001 // 2441. Exact-width atomic typedefs should be provided
1004 typedef atomic<int8_t> atomic_int8_t;
1007 typedef atomic<uint8_t> atomic_uint8_t;
1010 typedef atomic<int16_t> atomic_int16_t;
1013 typedef atomic<uint16_t> atomic_uint16_t;
1016 typedef atomic<int32_t> atomic_int32_t;
1019 typedef atomic<uint32_t> atomic_uint32_t;
1022 typedef atomic<int64_t> atomic_int64_t;
1025 typedef atomic<uint64_t> atomic_uint64_t;
1028 /// atomic_int_least8_t
1029 typedef atomic<int_least8_t> atomic_int_least8_t;
1031 /// atomic_uint_least8_t
1032 typedef atomic<uint_least8_t> atomic_uint_least8_t;
1034 /// atomic_int_least16_t
1035 typedef atomic<int_least16_t> atomic_int_least16_t;
1037 /// atomic_uint_least16_t
1038 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1040 /// atomic_int_least32_t
1041 typedef atomic<int_least32_t> atomic_int_least32_t;
1043 /// atomic_uint_least32_t
1044 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1046 /// atomic_int_least64_t
1047 typedef atomic<int_least64_t> atomic_int_least64_t;
1049 /// atomic_uint_least64_t
1050 typedef atomic<uint_least64_t> atomic_uint_least64_t;
1053 /// atomic_int_fast8_t
1054 typedef atomic<int_fast8_t> atomic_int_fast8_t;
1056 /// atomic_uint_fast8_t
1057 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1059 /// atomic_int_fast16_t
1060 typedef atomic<int_fast16_t> atomic_int_fast16_t;
1062 /// atomic_uint_fast16_t
1063 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1065 /// atomic_int_fast32_t
1066 typedef atomic<int_fast32_t> atomic_int_fast32_t;
1068 /// atomic_uint_fast32_t
1069 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1071 /// atomic_int_fast64_t
1072 typedef atomic<int_fast64_t> atomic_int_fast64_t;
1074 /// atomic_uint_fast64_t
1075 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
1080 typedef atomic<intptr_t> atomic_intptr_t;
1082 /// atomic_uintptr_t
1083 typedef atomic<uintptr_t> atomic_uintptr_t;
1086 typedef atomic<size_t> atomic_size_t;
1088 /// atomic_ptrdiff_t
1089 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1091 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
1093 typedef atomic<intmax_t> atomic_intmax_t;
1095 /// atomic_uintmax_t
1096 typedef atomic<uintmax_t> atomic_uintmax_t;
1099 // Function definitions, atomic_flag operations.
1101 atomic_flag_test_and_set_explicit(atomic_flag* __a,
1102 memory_order __m) noexcept
1103 { return __a->test_and_set(__m); }
1106 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1107 memory_order __m) noexcept
1108 { return __a->test_and_set(__m); }
1111 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1112 { __a->clear(__m); }
1115 atomic_flag_clear_explicit(volatile atomic_flag* __a,
1116 memory_order __m) noexcept
1117 { __a->clear(__m); }
1120 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1121 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1124 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1125 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1128 atomic_flag_clear(atomic_flag* __a) noexcept
1129 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1132 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1133 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1136 template<typename _Tp>
1137 using __atomic_val_t = typename atomic<_Tp>::value_type;
1138 template<typename _Tp>
1139 using __atomic_diff_t = typename atomic<_Tp>::difference_type;
1141 // [atomics.nonmembers] Non-member functions.
1142 // Function templates generally applicable to atomic types.
1143 template<typename _ITp>
1145 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1146 { return __a->is_lock_free(); }
1148 template<typename _ITp>
1150 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1151 { return __a->is_lock_free(); }
1153 template<typename _ITp>
1155 atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1156 { __a->store(__i, memory_order_relaxed); }
1158 template<typename _ITp>
1160 atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1161 { __a->store(__i, memory_order_relaxed); }
1163 template<typename _ITp>
1165 atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1166 memory_order __m) noexcept
1167 { __a->store(__i, __m); }
1169 template<typename _ITp>
1171 atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1172 memory_order __m) noexcept
1173 { __a->store(__i, __m); }
1175 template<typename _ITp>
1177 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1178 { return __a->load(__m); }
1180 template<typename _ITp>
1182 atomic_load_explicit(const volatile atomic<_ITp>* __a,
1183 memory_order __m) noexcept
1184 { return __a->load(__m); }
1186 template<typename _ITp>
1188 atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1189 memory_order __m) noexcept
1190 { return __a->exchange(__i, __m); }
1192 template<typename _ITp>
1194 atomic_exchange_explicit(volatile atomic<_ITp>* __a,
1195 __atomic_val_t<_ITp> __i,
1196 memory_order __m) noexcept
1197 { return __a->exchange(__i, __m); }
1199 template<typename _ITp>
1201 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1202 __atomic_val_t<_ITp>* __i1,
1203 __atomic_val_t<_ITp> __i2,
1205 memory_order __m2) noexcept
1206 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1208 template<typename _ITp>
1210 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1211 __atomic_val_t<_ITp>* __i1,
1212 __atomic_val_t<_ITp> __i2,
1214 memory_order __m2) noexcept
1215 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1217 template<typename _ITp>
1219 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1220 __atomic_val_t<_ITp>* __i1,
1221 __atomic_val_t<_ITp> __i2,
1223 memory_order __m2) noexcept
1224 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1226 template<typename _ITp>
1228 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1229 __atomic_val_t<_ITp>* __i1,
1230 __atomic_val_t<_ITp> __i2,
1232 memory_order __m2) noexcept
1233 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1236 template<typename _ITp>
1238 atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1239 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1241 template<typename _ITp>
1243 atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1244 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1246 template<typename _ITp>
1248 atomic_load(const atomic<_ITp>* __a) noexcept
1249 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1251 template<typename _ITp>
1253 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1254 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1256 template<typename _ITp>
1258 atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1259 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1261 template<typename _ITp>
1263 atomic_exchange(volatile atomic<_ITp>* __a,
1264 __atomic_val_t<_ITp> __i) noexcept
1265 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1267 template<typename _ITp>
1269 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1270 __atomic_val_t<_ITp>* __i1,
1271 __atomic_val_t<_ITp> __i2) noexcept
1273 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1274 memory_order_seq_cst,
1275 memory_order_seq_cst);
1278 template<typename _ITp>
1280 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1281 __atomic_val_t<_ITp>* __i1,
1282 __atomic_val_t<_ITp> __i2) noexcept
1284 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1285 memory_order_seq_cst,
1286 memory_order_seq_cst);
1289 template<typename _ITp>
1291 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1292 __atomic_val_t<_ITp>* __i1,
1293 __atomic_val_t<_ITp> __i2) noexcept
1295 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1296 memory_order_seq_cst,
1297 memory_order_seq_cst);
1300 template<typename _ITp>
1302 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1303 __atomic_val_t<_ITp>* __i1,
1304 __atomic_val_t<_ITp> __i2) noexcept
1306 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1307 memory_order_seq_cst,
1308 memory_order_seq_cst);
// Function templates for atomic_integral and atomic_pointer operations only.
// Some operations (and, or, xor) are only available for atomic integrals,
// which is implemented by taking a parameter of type __atomic_base<_ITp>*.
1315 template<typename _ITp>
1317 atomic_fetch_add_explicit(atomic<_ITp>* __a,
1318 __atomic_diff_t<_ITp> __i,
1319 memory_order __m) noexcept
1320 { return __a->fetch_add(__i, __m); }
1322 template<typename _ITp>
1324 atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
1325 __atomic_diff_t<_ITp> __i,
1326 memory_order __m) noexcept
1327 { return __a->fetch_add(__i, __m); }
1329 template<typename _ITp>
1331 atomic_fetch_sub_explicit(atomic<_ITp>* __a,
1332 __atomic_diff_t<_ITp> __i,
1333 memory_order __m) noexcept
1334 { return __a->fetch_sub(__i, __m); }
1336 template<typename _ITp>
1338 atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
1339 __atomic_diff_t<_ITp> __i,
1340 memory_order __m) noexcept
1341 { return __a->fetch_sub(__i, __m); }
1343 template<typename _ITp>
1345 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
1346 __atomic_val_t<_ITp> __i,
1347 memory_order __m) noexcept
1348 { return __a->fetch_and(__i, __m); }
1350 template<typename _ITp>
1352 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
1353 __atomic_val_t<_ITp> __i,
1354 memory_order __m) noexcept
1355 { return __a->fetch_and(__i, __m); }
1357 template<typename _ITp>
1359 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
1360 __atomic_val_t<_ITp> __i,
1361 memory_order __m) noexcept
1362 { return __a->fetch_or(__i, __m); }
1364 template<typename _ITp>
1366 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
1367 __atomic_val_t<_ITp> __i,
1368 memory_order __m) noexcept
1369 { return __a->fetch_or(__i, __m); }
1371 template<typename _ITp>
1373 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
1374 __atomic_val_t<_ITp> __i,
1375 memory_order __m) noexcept
1376 { return __a->fetch_xor(__i, __m); }
1378 template<typename _ITp>
1380 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
1381 __atomic_val_t<_ITp> __i,
1382 memory_order __m) noexcept
1383 { return __a->fetch_xor(__i, __m); }
1385 template<typename _ITp>
1387 atomic_fetch_add(atomic<_ITp>* __a,
1388 __atomic_diff_t<_ITp> __i) noexcept
1389 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1391 template<typename _ITp>
1393 atomic_fetch_add(volatile atomic<_ITp>* __a,
1394 __atomic_diff_t<_ITp> __i) noexcept
1395 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1397 template<typename _ITp>
1399 atomic_fetch_sub(atomic<_ITp>* __a,
1400 __atomic_diff_t<_ITp> __i) noexcept
1401 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1403 template<typename _ITp>
1405 atomic_fetch_sub(volatile atomic<_ITp>* __a,
1406 __atomic_diff_t<_ITp> __i) noexcept
1407 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1409 template<typename _ITp>
1411 atomic_fetch_and(__atomic_base<_ITp>* __a,
1412 __atomic_val_t<_ITp> __i) noexcept
1413 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1415 template<typename _ITp>
1417 atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
1418 __atomic_val_t<_ITp> __i) noexcept
1419 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1421 template<typename _ITp>
1423 atomic_fetch_or(__atomic_base<_ITp>* __a,
1424 __atomic_val_t<_ITp> __i) noexcept
1425 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1427 template<typename _ITp>
1429 atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
1430 __atomic_val_t<_ITp> __i) noexcept
1431 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1433 template<typename _ITp>
1435 atomic_fetch_xor(__atomic_base<_ITp>* __a,
1436 __atomic_val_t<_ITp> __i) noexcept
1437 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1439 template<typename _ITp>
1441 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
1442 __atomic_val_t<_ITp> __i) noexcept
1443 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1447 _GLIBCXX_END_NAMESPACE_VERSION
1452 #endif // _GLIBCXX_ATOMIC