#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((__always_inline__))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };
  // Map the success order of a compare_exchange to the strongest order
  // permitted on its failure path: acq_rel degrades to acquire, and
  // release degrades to relaxed.
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
                        | (__m & __memory_order_modifier_mask));
  }
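  // Illustrative check (not part of the original header): the failure-order
  // mapping strips any release component while preserving modifier bits.
  //
  //   static_assert(__cmpexch_failure_order2(memory_order_acq_rel)
  //                 == memory_order_acquire, "");
  //   static_assert(__cmpexch_failure_order2(memory_order_release)
  //                 == memory_order_relaxed, "");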
  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
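  // Illustrative usage (not part of the original header): a release fence
  // before a relaxed store pairs with an acquire fence after a relaxed load,
  // so the consumer observes the producer's plain write.
  //
  //   // Thread 1:
  //   data = 42;                                          // plain write
  //   std::atomic_thread_fence(std::memory_order_release);
  //   flag.store(true, std::memory_order_relaxed);
  //
  //   // Thread 2:
  //   while (!flag.load(std::memory_order_relaxed)) { }
  //   std::atomic_thread_fence(std::memory_order_acquire);
  //   assert(data == 42);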
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;
#define ATOMIC_VAR_INIT(_VI) { _VI }

  /// Generic atomic type, primary class template.
  template<typename _Tp>
    struct atomic;
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>;
  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif
  _GLIBCXX_BEGIN_EXTERN_C

  /// Base type for atomic_flag.
  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C
#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }
    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }
    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
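  // Illustrative usage (not part of the original header): atomic_flag as a
  // minimal spinlock. test_and_set(acquire) spins until the flag was clear;
  // clear(release) publishes the critical section's writes to the next owner.
  //
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   while (__lock.test_and_set(std::memory_order_acquire))
  //     { }                                // spin until we observe "clear"
  //   /* ... critical section ... */
  //   __lock.clear(std::memory_order_release);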
  /// Base class for atomic integrals.
  template<typename _ITp>
    struct __atomic_base
    {
      using value_type = _ITp;
      using difference_type = value_type;

    private:
      typedef _ITp __int_type;

      static constexpr int _S_alignment =
        sizeof(_ITp) > alignof(_ITp) ? sizeof(_ITp) : alignof(_ITp);

      alignas(_S_alignment) __int_type _M_i;
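      // Illustrative note (not part of the original header): on a 32-bit
      // target a 64-bit integer may have alignof == 4 but sizeof == 8;
      // taking the larger of the two over-aligns _M_i so a single 8-byte
      // atomic instruction can be used where the target provides one.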
    public:
      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }
      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
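      // Illustrative usage (not part of the original header): every compound
      // assignment above is a single seq_cst read-modify-write.
      //
      //   std::atomic<int> __c(0);
      //   __c += 5;           // __atomic_add_fetch, yields 5
      //   int __v = __c++;    // post-increment via fetch_add: __v == 5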
      bool
      is_lock_free() const noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
                                     reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
                                     reinterpret_cast<void *>(-_S_alignment));
      }
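      // Illustrative note (not part of the original header): -_S_alignment,
      // reinterpreted as an address, is a multiple of _S_alignment, so the
      // builtin answers for any object with that alignment rather than for
      // one concrete address.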
      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
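      // Illustrative usage (not part of the original header): a store-release
      // paired with a load-acquire publishes data without a seq_cst fence.
      //
      //   // Producer:
      //   payload = 42;                                   // plain write
      //   ready.store(true, std::memory_order_release);
      //
      //   // Consumer:
      //   while (!ready.load(std::memory_order_acquire)) { }
      //   assert(payload == 42);                          // guaranteed visible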
      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst)
      volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }
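      // Illustrative usage (not part of the original header): the weak form
      // may fail spuriously, so it belongs in a retry loop; on failure the
      // expected value is refreshed with the currently stored one.
      //
      //   std::atomic<int> __a(1);
      //   int __expected = __a.load(std::memory_order_relaxed);
      //   while (!__a.compare_exchange_weak(__expected, __expected * 2,
      //                                     std::memory_order_acq_rel,
      //                                     std::memory_order_relaxed))
      //     { }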
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst)
      volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
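  // Illustrative usage (not part of the original header): fetch_or returns
  // the value held before the operation, which tells the caller whether a
  // bit was already set.
  //
  //   std::atomic<unsigned> __flags(0);
  //   unsigned __old = __flags.fetch_or(0x4, std::memory_order_acq_rel);
  //   bool __first_setter = !(__old & 0x4);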
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }
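      // Illustrative note (not part of the original header): the __atomic_*
      // builtins treat pointer operands as byte addresses, so element counts
      // must be scaled by sizeof(_PTp); _M_type_size(1) is one element, i.e.
      // sizeof(_PTp) bytes.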
    public:
      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }
      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }
      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
                                     reinterpret_cast<void *>(-__alignof(_M_p)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
                                     reinterpret_cast<void *>(-__alignof(_M_p)));
      }
      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
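  // Illustrative usage (not part of the original header): fetch_add on an
  // atomic pointer hands out disjoint slots of a shared buffer.
  //
  //   char __buf[1024];
  //   std::atomic<char*> __cursor(__buf);
  //   char* __slot = __cursor.fetch_add(64, std::memory_order_relaxed);
  //   // each caller gets its own 64-byte region (bounds checking omitted)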
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _GLIBCXX_ATOMIC_BASE_H