// -*- C++ -*- header.

// Copyright (C) 2008-2021 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */

#if __cplusplus >= 201703L
# define __cpp_lib_atomic_is_always_lock_free 201603
#endif

  template<typename _Tp>
    struct atomic;

  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  template<>
    struct atomic<bool>
    {
      using value_type = bool;

    private:
      __atomic_base<bool> _M_base;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(bool __i) noexcept : _M_base(__i) { }

      bool
      operator=(bool __i) noexcept
      { return _M_base.operator=(__i); }

      bool
      operator=(bool __i) volatile noexcept
      { return _M_base.operator=(__i); }

      operator bool() const noexcept
      { return _M_base.load(); }

      operator bool() const volatile noexcept
      { return _M_base.load(); }

      bool
      is_lock_free() const noexcept { return _M_base.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { _M_base.store(__i, __m); }

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { _M_base.store(__i, __m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_base.load(__m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_base.load(__m); }

      bool
      exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      exchange(bool __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

#if __cpp_lib_atomic_wait
      void
      wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_base.wait(__old, __m); }

      // TODO add const volatile overload

      void
      notify_one() const noexcept
      { _M_base.notify_one(); }

      void
      notify_all() const noexcept
      { _M_base.notify_all(); }
#endif // __cpp_lib_atomic_wait
    };
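
  // Illustrative sketch (not part of the header): typical use of
  // atomic<bool> as a one-shot flag.  There are no fetch-operations for
  // this type, so exchange() or a compare-exchange loop is used instead.
  //
  //   std::atomic<bool> ready{false};
  //   // producer:           ready.store(true, std::memory_order_release);
  //   // consumer spin-wait: while (!ready.load(std::memory_order_acquire)) { }
  //   // claim-once:         if (!ready.exchange(true)) { /* first caller */ }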

#if __cplusplus <= 201703L
# define _GLIBCXX20_INIT(I)
#else
# define _GLIBCXX20_INIT(I) = I
#endif

  /**
   * @brief Generic atomic type, primary class template.
   *
   * @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
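      // (sizeof(_Tp) & (sizeof(_Tp) - 1)) is non-zero unless sizeof(_Tp)
      // is a power of two, so only power-of-two sizes up to 16 bytes get
      // the boosted alignment.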
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());

      static_assert(__is_trivially_copyable(_Tp),
                    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
                    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif

    public:
      atomic() = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
                                     reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
                                     reinterpret_cast<void *>(-_S_alignment));
      }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m));
      }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m));
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

#if __cpp_lib_atomic_wait
      void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      {
        std::__atomic_wait(&_M_i, __old,
                           [__m, this, __old]
                           {
                             const auto __v = this->load(__m);
                             // TODO make this ignore padding bits when we
                             // can do that
                             return __builtin_memcmp(&__old, &__v,
                                                     sizeof(_Tp)) != 0;
                           });
      }

      // TODO add const volatile overload

      void
      notify_one() const noexcept
      { std::__atomic_notify(&_M_i, false); }

      void
      notify_all() const noexcept
      { std::__atomic_notify(&_M_i, true); }
#endif // __cpp_lib_atomic_wait

    };
#undef _GLIBCXX20_INIT
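
  // Illustrative sketch (not part of the header): the primary template
  // works for any trivially copyable type, such as a small aggregate;
  // whether it is lock-free depends on the size and alignment computed
  // above.  The canonical compare-exchange retry loop looks like:
  //
  //   struct Point { int x, y; };            // trivially copyable, 8 bytes
  //   std::atomic<Point> p{Point{1, 2}};
  //   Point expected = p.load();
  //   while (!p.compare_exchange_weak(expected,
  //                                   Point{expected.x + 1, expected.y}))
  //     { /* on failure, expected was refreshed with the current value */ }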

  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp*                 __pointer_type;
      typedef __atomic_base<_Tp*>  __base_type;
      __base_type                  _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

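      // NB: the weak overloads below forward to the strong form, which is
      // conforming (a strong compare-exchange never fails spuriously), as
      // __atomic_base<_Tp*> provides no weak variant.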
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

#if __cpp_lib_atomic_wait
      void
      wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) noexcept
      { _M_b.wait(__old, __m); }

      // TODO add const volatile overload

      void
      notify_one() const noexcept
      { _M_b.notify_one(); }

      void
      notify_all() const noexcept
      { _M_b.notify_all(); }
#endif // __cpp_lib_atomic_wait

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }
    };
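
  // Illustrative sketch (not part of the header): atomic pointer
  // arithmetic steps by sizeof(_Tp), just as for ordinary pointers, and
  // fetch_add returns the previous value.
  //
  //   int data[4] = {0, 1, 2, 3};
  //   std::atomic<int*> p{data};
  //   int* prev = p.fetch_add(2);  // prev == data, p now points at data + 2
  //   ++p;                         // p now points at data + 3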

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char __integral_type;
      typedef __atomic_base<char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char __integral_type;
      typedef __atomic_base<signed char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char __integral_type;
      typedef __atomic_base<unsigned char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short __integral_type;
      typedef __atomic_base<short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short __integral_type;
      typedef __atomic_base<unsigned short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int __integral_type;
      typedef __atomic_base<int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };
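
  // Illustrative sketch (not part of the header): the integral
  // specializations inherit the arithmetic and bitwise fetch-operations
  // and operators from __atomic_base, so the full operator set works.
  //
  //   std::atomic<int> n{0};
  //   ++n;                          // atomic increment, n == 1
  //   n += 5;                       // atomic add, n == 6
  //   int old = n.fetch_or(0x10);   // old == 6, n == 0x16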

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int __integral_type;
      typedef __atomic_base<unsigned int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long __integral_type;
      typedef __atomic_base<long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long __integral_type;
      typedef __atomic_base<unsigned long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long __integral_type;
      typedef __atomic_base<long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long __integral_type;
      typedef __atomic_base<unsigned long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t __integral_type;
      typedef __atomic_base<wchar_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };

#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      typedef char8_t __integral_type;
      typedef __atomic_base<char8_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t __integral_type;
      typedef __atomic_base<char16_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      typedef char32_t __integral_type;
      typedef __atomic_base<char32_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR32_T_LOCK_FREE == 2;
#endif
    };


  /// atomic_bool
  typedef atomic<bool>               atomic_bool;

  /// atomic_char
  typedef atomic<char>               atomic_char;

  /// atomic_schar
  typedef atomic<signed char>        atomic_schar;

  /// atomic_uchar
  typedef atomic<unsigned char>      atomic_uchar;

  /// atomic_short
  typedef atomic<short>              atomic_short;

  /// atomic_ushort
  typedef atomic<unsigned short>     atomic_ushort;

  /// atomic_int
  typedef atomic<int>                atomic_int;

  /// atomic_uint
  typedef atomic<unsigned int>       atomic_uint;

  /// atomic_long
  typedef atomic<long>               atomic_long;

  /// atomic_ulong
  typedef atomic<unsigned long>      atomic_ulong;

  /// atomic_llong
  typedef atomic<long long>          atomic_llong;

  /// atomic_ullong
  typedef atomic<unsigned long long> atomic_ullong;

  /// atomic_wchar_t
  typedef atomic<wchar_t>            atomic_wchar_t;

#ifdef _GLIBCXX_USE_CHAR8_T
  /// atomic_char8_t
  typedef atomic<char8_t>            atomic_char8_t;
#endif

  /// atomic_char16_t
  typedef atomic<char16_t>           atomic_char16_t;

  /// atomic_char32_t
  typedef atomic<char32_t>           atomic_char32_t;

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 2441. Exact-width atomic typedefs should be provided

  /// atomic_int8_t
  typedef atomic<int8_t>             atomic_int8_t;

  /// atomic_uint8_t
  typedef atomic<uint8_t>            atomic_uint8_t;

  /// atomic_int16_t
  typedef atomic<int16_t>            atomic_int16_t;

  /// atomic_uint16_t
  typedef atomic<uint16_t>           atomic_uint16_t;

  /// atomic_int32_t
  typedef atomic<int32_t>            atomic_int32_t;

  /// atomic_uint32_t
  typedef atomic<uint32_t>           atomic_uint32_t;

  /// atomic_int64_t
  typedef atomic<int64_t>            atomic_int64_t;

  /// atomic_uint64_t
  typedef atomic<uint64_t>           atomic_uint64_t;


  /// atomic_int_least8_t
  typedef atomic<int_least8_t>       atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef atomic<uint_least8_t>      atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef atomic<int_least16_t>      atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef atomic<uint_least16_t>     atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef atomic<int_least32_t>      atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef atomic<uint_least32_t>     atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef atomic<int_least64_t>      atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef atomic<uint_least64_t>     atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef atomic<int_fast8_t>        atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef atomic<uint_fast8_t>       atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef atomic<int_fast16_t>       atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef atomic<uint_fast16_t>      atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef atomic<int_fast32_t>       atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef atomic<uint_fast32_t>      atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef atomic<int_fast64_t>       atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef atomic<uint_fast64_t>      atomic_uint_fast64_t;
#endif


  /// atomic_intptr_t
  typedef atomic<intptr_t>           atomic_intptr_t;

  /// atomic_uintptr_t
  typedef atomic<uintptr_t>          atomic_uintptr_t;

  /// atomic_size_t
  typedef atomic<size_t>             atomic_size_t;

  /// atomic_ptrdiff_t
  typedef atomic<ptrdiff_t>          atomic_ptrdiff_t;

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  /// atomic_intmax_t
  typedef atomic<intmax_t>           atomic_intmax_t;

  /// atomic_uintmax_t
  typedef atomic<uintmax_t>          atomic_uintmax_t;
#endif

  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
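
  // Illustrative sketch (not part of the header): atomic_flag as a
  // minimal spinlock, built from the free functions defined above.
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //   while (std::atomic_flag_test_and_set_explicit(&lock,
  //                                                 std::memory_order_acquire))
  //     { /* spin until the flag was previously clear */ }
  //   /* ... critical section ... */
  //   std::atomic_flag_clear_explicit(&lock, std::memory_order_release);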

  template<typename _Tp>
    using __atomic_val_t = typename atomic<_Tp>::value_type;
  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;

  // [atomics.nonmembers] Non-member functions.
  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }


  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
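
  // Illustrative sketch (not part of the header): the C-compatible
  // non-member compare-exchange takes the expected value by pointer
  // rather than by reference.
  //
  //   std::atomic<int> a{5};
  //   int expected = 5;
  //   bool ok = std::atomic_compare_exchange_strong(&a, &expected, 7);
  //   // ok == true and a == 7; on failure, expected would hold a's value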

#if __cpp_lib_atomic_wait
  template<typename _Tp>
    inline void
    atomic_wait(const atomic<_Tp>* __a,
                typename std::atomic<_Tp>::value_type __old) noexcept
    { __a->wait(__old); }

  template<typename _Tp>
    inline void
    atomic_wait_explicit(const atomic<_Tp>* __a,
                         typename std::atomic<_Tp>::value_type __old,
                         std::memory_order __m) noexcept
    { __a->wait(__old, __m); }

  template<typename _Tp>
    inline void
    atomic_notify_one(atomic<_Tp>* __a) noexcept
    { __a->notify_one(); }

  template<typename _Tp>
    inline void
    atomic_notify_all(atomic<_Tp>* __a) noexcept
    { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
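
  // Illustrative sketch (not part of the header), assuming C++20
  // wait/notify support (__cpp_lib_atomic_wait): one thread blocks until
  // another changes the value and notifies.
  //
  //   std::atomic<int> state{0};
  //   // waiter:   std::atomic_wait(&state, 0);  // returns once state != 0
  //   // notifier: state.store(1);
  //   //           std::atomic_notify_one(&state);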

  // Function templates for atomic_integral and atomic_pointer operations only.
  // Some operations (and, or, xor) are only available for atomic integrals,
  // which is implemented by taking a parameter of type __atomic_base<_ITp>*.

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

#if __cplusplus > 201703L
#define __cpp_lib_atomic_float 201711L
  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<float>::operator=;
    };

  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<double>::operator=;
    };

  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<long double>::operator=;
    };
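
  // Illustrative sketch (not part of the header): in C++20 the
  // floating-point specializations also support fetch_add/fetch_sub
  // (inherited from __atomic_float), though no bitwise operations.
  //
  //   std::atomic<double> sum{0.0};
  //   sum.fetch_add(1.5);
  //   sum += 2.5;             // sum.load() == 4.0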

#define __cpp_lib_atomic_ref 201806L

  /// Class template to provide atomic operations on a non-atomic variable.
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      atomic_ref& operator=(const atomic_ref&) = delete;

      atomic_ref(const atomic_ref&) = default;

      using __atomic_ref<_Tp>::operator=;
    };
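
  // Illustrative sketch (not part of the header): atomic_ref applies
  // atomic operations to an ordinary object for the lifetime of the
  // reference.  The referenced object must be suitably aligned and must
  // not be accessed non-atomically while any atomic_ref to it exists.
  //
  //   int counter = 0;
  //   std::atomic_ref<int> ref{counter};
  //   ref.fetch_add(1);       // counter is updated atomically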

#endif // C++2a

  /// @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // C++11

#endif // _GLIBCXX_ATOMIC