libstdc++
// -*- C++ -*- header.

// Copyright (C) 2008, 2009, 2010
// Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_2.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_2_H
#define _GLIBCXX_ATOMIC_2_H 1

#pragma GCC system_header

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

// 2 == __atomic2 == Always lock-free
// Assumed:
// _GLIBCXX_ATOMIC_BUILTINS_1
// _GLIBCXX_ATOMIC_BUILTINS_2
// _GLIBCXX_ATOMIC_BUILTINS_4
// _GLIBCXX_ATOMIC_BUILTINS_8
namespace __atomic2
{
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() = default;
    ~atomic_flag() = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    atomic_flag(bool __i): __atomic_flag_base({ __i }) { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst)
    {
      // Redundant synchronize if built-in for lock is a full barrier.
      if (__m != memory_order_acquire && __m != memory_order_acq_rel)
        __sync_synchronize();
      return __sync_lock_test_and_set(&_M_i, 1);
    }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile
    {
      // Redundant synchronize if built-in for lock is a full barrier.
      if (__m != memory_order_acquire && __m != memory_order_acq_rel)
        __sync_synchronize();
      return __sync_lock_test_and_set(&_M_i, 1);
    }

    void
    clear(memory_order __m = memory_order_seq_cst)
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __sync_lock_release(&_M_i);
      if (__m != memory_order_acquire && __m != memory_order_acq_rel)
        __sync_synchronize();
    }

    void
    clear(memory_order __m = memory_order_seq_cst) volatile
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __sync_lock_release(&_M_i);
      if (__m != memory_order_acquire && __m != memory_order_acq_rel)
        __sync_synchronize();
    }
  };
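
For illustration only (not part of this header): the test_and_set/clear pair
above is the usual building block for a spin lock. A minimal sketch against
the public std::atomic_flag interface; the names lock_flag, spin_lock and
spin_unlock are illustrative, not library names.

#include <atomic>

std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;   // starts clear

void
spin_lock()
{
  // test_and_set() returns the previous value, so the loop spins
  // until it observes the flag clear and atomically sets it.
  while (lock_flag.test_and_set(std::memory_order_acquire))
    { /* busy-wait */ }
}

void
spin_unlock()
{ lock_flag.clear(std::memory_order_release); }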

  /// atomic_address
  struct atomic_address
  {
  private:
    void* _M_i;

  public:
    atomic_address() = default;
    ~atomic_address() = default;
    atomic_address(const atomic_address&) = delete;
    atomic_address& operator=(const atomic_address&) = delete;
    atomic_address& operator=(const atomic_address&) volatile = delete;

    constexpr atomic_address(void* __v): _M_i (__v) { }

    bool
    is_lock_free() const { return true; }

    bool
    is_lock_free() const volatile { return true; }

    void
    store(void* __v, memory_order __m = memory_order_seq_cst)
    {
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);
      __glibcxx_assert(__m != memory_order_consume);

      if (__m == memory_order_relaxed)
        _M_i = __v;
      else
        {
          // write_mem_barrier();
          _M_i = __v;
          if (__m == memory_order_seq_cst)
            __sync_synchronize();
        }
    }

    void
    store(void* __v, memory_order __m = memory_order_seq_cst) volatile
    {
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);
      __glibcxx_assert(__m != memory_order_consume);

      if (__m == memory_order_relaxed)
        _M_i = __v;
      else
        {
          // write_mem_barrier();
          _M_i = __v;
          if (__m == memory_order_seq_cst)
            __sync_synchronize();
        }
    }

    void*
    load(memory_order __m = memory_order_seq_cst) const
    {
      __glibcxx_assert(__m != memory_order_release);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __sync_synchronize();
      void* __ret = _M_i;
      __sync_synchronize();
      return __ret;
    }

    void*
    load(memory_order __m = memory_order_seq_cst) const volatile
    {
      __glibcxx_assert(__m != memory_order_release);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __sync_synchronize();
      void* __ret = _M_i;
      __sync_synchronize();
      return __ret;
    }

    void*
    exchange(void* __v, memory_order __m = memory_order_seq_cst)
    {
      // XXX built-in assumes memory_order_acquire.
      return __sync_lock_test_and_set(&_M_i, __v);
    }

    void*
    exchange(void* __v, memory_order __m = memory_order_seq_cst) volatile
    {
      // XXX built-in assumes memory_order_acquire.
      return __sync_lock_test_and_set(&_M_i, __v);
    }

    bool
    compare_exchange_weak(void*& __v1, void* __v2, memory_order __m1,
                          memory_order __m2)
    { return compare_exchange_strong(__v1, __v2, __m1, __m2); }

    bool
    compare_exchange_weak(void*& __v1, void* __v2, memory_order __m1,
                          memory_order __m2) volatile
    { return compare_exchange_strong(__v1, __v2, __m1, __m2); }

    bool
    compare_exchange_weak(void*& __v1, void* __v2,
                          memory_order __m = memory_order_seq_cst)
    {
      return compare_exchange_weak(__v1, __v2, __m,
                                   __calculate_memory_order(__m));
    }

    bool
    compare_exchange_weak(void*& __v1, void* __v2,
                          memory_order __m = memory_order_seq_cst) volatile
    {
      return compare_exchange_weak(__v1, __v2, __m,
                                   __calculate_memory_order(__m));
    }

    bool
    compare_exchange_weak(const void*& __v1, const void* __v2,
                          memory_order __m1, memory_order __m2)
    { return compare_exchange_strong(__v1, __v2, __m1, __m2); }

    bool
    compare_exchange_weak(const void*& __v1, const void* __v2,
                          memory_order __m1, memory_order __m2) volatile
    { return compare_exchange_strong(__v1, __v2, __m1, __m2); }

    bool
    compare_exchange_weak(const void*& __v1, const void* __v2,
                          memory_order __m = memory_order_seq_cst)
    {
      return compare_exchange_weak(__v1, __v2, __m,
                                   __calculate_memory_order(__m));
    }

    bool
    compare_exchange_weak(const void*& __v1, const void* __v2,
                          memory_order __m = memory_order_seq_cst) volatile
    {
      return compare_exchange_weak(__v1, __v2, __m,
                                   __calculate_memory_order(__m));
    }

    bool
    compare_exchange_strong(void*& __v1, void* __v2, memory_order __m1,
                            memory_order __m2)
    {
      __glibcxx_assert(__m2 != memory_order_release);
      __glibcxx_assert(__m2 != memory_order_acq_rel);
      __glibcxx_assert(__m2 <= __m1);

      void* __v1o = __v1;
      void* __v1n = __sync_val_compare_and_swap(&_M_i, __v1o, __v2);

      // Assume extra stores (of same value) allowed in true case.
      __v1 = __v1n;
      return __v1o == __v1n;
    }

    bool
    compare_exchange_strong(void*& __v1, void* __v2, memory_order __m1,
                            memory_order __m2) volatile
    {
      __glibcxx_assert(__m2 != memory_order_release);
      __glibcxx_assert(__m2 != memory_order_acq_rel);
      __glibcxx_assert(__m2 <= __m1);

      void* __v1o = __v1;
      void* __v1n = __sync_val_compare_and_swap(&_M_i, __v1o, __v2);

      // Assume extra stores (of same value) allowed in true case.
      __v1 = __v1n;
      return __v1o == __v1n;
    }

    bool
    compare_exchange_strong(void*& __v1, void* __v2,
                            memory_order __m = memory_order_seq_cst)
    {
      return compare_exchange_strong(__v1, __v2, __m,
                                     __calculate_memory_order(__m));
    }

    bool
    compare_exchange_strong(void*& __v1, void* __v2,
                            memory_order __m = memory_order_seq_cst) volatile
    {
      return compare_exchange_strong(__v1, __v2, __m,
                                     __calculate_memory_order(__m));
    }

    bool
    compare_exchange_strong(const void*& __v1, const void* __v2,
                            memory_order __m1, memory_order __m2)
    {
      __glibcxx_assert(__m2 != memory_order_release);
      __glibcxx_assert(__m2 != memory_order_acq_rel);
      __glibcxx_assert(__m2 <= __m1);

      const void* __v1o = __v1;
      const void* __v1n = __sync_val_compare_and_swap(&_M_i, __v1o, __v2);

      // Assume extra stores (of same value) allowed in true case.
      __v1 = __v1n;
      return __v1o == __v1n;
    }

    bool
    compare_exchange_strong(const void*& __v1, const void* __v2,
                            memory_order __m1, memory_order __m2) volatile
    {
      __glibcxx_assert(__m2 != memory_order_release);
      __glibcxx_assert(__m2 != memory_order_acq_rel);
      __glibcxx_assert(__m2 <= __m1);

      const void* __v1o = __v1;
      const void* __v1n = __sync_val_compare_and_swap(&_M_i, __v1o, __v2);

      // Assume extra stores (of same value) allowed in true case.
      __v1 = __v1n;
      return __v1o == __v1n;
    }

    bool
    compare_exchange_strong(const void*& __v1, const void* __v2,
                            memory_order __m = memory_order_seq_cst)
    {
      return compare_exchange_strong(__v1, __v2, __m,
                                     __calculate_memory_order(__m));
    }

    bool
    compare_exchange_strong(const void*& __v1, const void* __v2,
                            memory_order __m = memory_order_seq_cst) volatile
    {
      return compare_exchange_strong(__v1, __v2, __m,
                                     __calculate_memory_order(__m));
    }

    void*
    fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
    { return __sync_fetch_and_add(&_M_i, __d); }

    void*
    fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) volatile
    { return __sync_fetch_and_add(&_M_i, __d); }

    void*
    fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
    { return __sync_fetch_and_sub(&_M_i, __d); }

    void*
    fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) volatile
    { return __sync_fetch_and_sub(&_M_i, __d); }

    operator void*() const
    { return load(); }

    operator void*() const volatile
    { return load(); }

    void*
#if 0
    // XXX as specified but won't compile as store takes void*,
    // invalid conversion from const void* to void*
    // CD1 had this signature
    operator=(const void* __v)
#else
    operator=(void* __v)
#endif
    {
      store(__v);
      return __v;
    }

    void*
#if 0
    // XXX as specified but won't compile as store takes void*,
    // invalid conversion from const void* to void*
    // CD1 had this signature, but store and this could both be const void*?
    operator=(const void* __v) volatile
#else
    operator=(void* __v) volatile
#endif
    {
      store(__v);
      return __v;
    }

    void*
    operator+=(ptrdiff_t __d)
    { return __sync_add_and_fetch(&_M_i, __d); }

    void*
    operator+=(ptrdiff_t __d) volatile
    { return __sync_add_and_fetch(&_M_i, __d); }

    void*
    operator-=(ptrdiff_t __d)
    { return __sync_sub_and_fetch(&_M_i, __d); }

    void*
    operator-=(ptrdiff_t __d) volatile
    { return __sync_sub_and_fetch(&_M_i, __d); }
  };
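
For illustration only (not part of this header): the compare_exchange members
above follow the C++0x convention that a failed exchange writes the value
actually observed back into the "expected" argument, which is what makes the
usual retry loop work without an extra reload. A hedged sketch of that idiom
using the public std::atomic<T*> interface rather than the internal
atomic_address; node, head and push are illustrative names only.

#include <atomic>

struct node { int data; node* next; };

std::atomic<node*> head{nullptr};

void
push(node* n)
{
  node* expected = head.load(std::memory_order_relaxed);
  do
    n->next = expected;                 // re-link against the latest head
  while (!head.compare_exchange_weak(expected, n,
                                     std::memory_order_release,
                                     std::memory_order_relaxed));
  // On failure 'expected' already holds the value head actually
  // contained, so the loop simply retries with it.
}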

  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() = default;
      ~__atomic_base() = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i): _M_i (__i) { }

      operator __int_type() const
      { return load(); }

      operator __int_type() const volatile
      { return load(); }

      __int_type
      operator=(__int_type __i)
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int)
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile
      { return fetch_add(1); }

      __int_type
      operator--(int)
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile
      { return fetch_sub(1); }

      __int_type
      operator++()
      { return __sync_add_and_fetch(&_M_i, 1); }

      __int_type
      operator++() volatile
      { return __sync_add_and_fetch(&_M_i, 1); }

      __int_type
      operator--()
      { return __sync_sub_and_fetch(&_M_i, 1); }

      __int_type
      operator--() volatile
      { return __sync_sub_and_fetch(&_M_i, 1); }

      __int_type
      operator+=(__int_type __i)
      { return __sync_add_and_fetch(&_M_i, __i); }

      __int_type
      operator+=(__int_type __i) volatile
      { return __sync_add_and_fetch(&_M_i, __i); }

      __int_type
      operator-=(__int_type __i)
      { return __sync_sub_and_fetch(&_M_i, __i); }

      __int_type
      operator-=(__int_type __i) volatile
      { return __sync_sub_and_fetch(&_M_i, __i); }

      __int_type
      operator&=(__int_type __i)
      { return __sync_and_and_fetch(&_M_i, __i); }

      __int_type
      operator&=(__int_type __i) volatile
      { return __sync_and_and_fetch(&_M_i, __i); }

      __int_type
      operator|=(__int_type __i)
      { return __sync_or_and_fetch(&_M_i, __i); }

      __int_type
      operator|=(__int_type __i) volatile
      { return __sync_or_and_fetch(&_M_i, __i); }

      __int_type
      operator^=(__int_type __i)
      { return __sync_xor_and_fetch(&_M_i, __i); }

      __int_type
      operator^=(__int_type __i) volatile
      { return __sync_xor_and_fetch(&_M_i, __i); }

      bool
      is_lock_free() const
      { return true; }

      bool
      is_lock_free() const volatile
      { return true; }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst)
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        if (__m == memory_order_relaxed)
          _M_i = __i;
        else
          {
            // write_mem_barrier();
            _M_i = __i;
            if (__m == memory_order_seq_cst)
              __sync_synchronize();
          }
      }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        if (__m == memory_order_relaxed)
          _M_i = __i;
        else
          {
            // write_mem_barrier();
            _M_i = __i;
            if (__m == memory_order_seq_cst)
              __sync_synchronize();
          }
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        __sync_synchronize();
        __int_type __ret = _M_i;
        __sync_synchronize();
        return __ret;
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        __sync_synchronize();
        __int_type __ret = _M_i;
        __sync_synchronize();
        return __ret;
      }

      __int_type
      exchange(__int_type __i, memory_order __m = memory_order_seq_cst)
      {
        // XXX built-in assumes memory_order_acquire.
        return __sync_lock_test_and_set(&_M_i, __i);
      }

      __int_type
      exchange(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      {
        // XXX built-in assumes memory_order_acquire.
        return __sync_lock_test_and_set(&_M_i, __i);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2)
      { return compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) volatile
      { return compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst)
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2)
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        __int_type __i1o = __i1;
        __int_type __i1n = __sync_val_compare_and_swap(&_M_i, __i1o, __i2);

        // Assume extra stores (of same value) allowed in true case.
        __i1 = __i1n;
        return __i1o == __i1n;
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) volatile
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        __int_type __i1o = __i1;
        __int_type __i1n = __sync_val_compare_and_swap(&_M_i, __i1o, __i2);

        // Assume extra stores (of same value) allowed in true case.
        __i1 = __i1n;
        return __i1o == __i1n;
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst)
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      __int_type
      fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return __sync_fetch_and_add(&_M_i, __i); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_add(&_M_i, __i); }

      __int_type
      fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return __sync_fetch_and_sub(&_M_i, __i); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_sub(&_M_i, __i); }

      __int_type
      fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return __sync_fetch_and_and(&_M_i, __i); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_and(&_M_i, __i); }

      __int_type
      fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return __sync_fetch_and_or(&_M_i, __i); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_or(&_M_i, __i); }

      __int_type
      fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return __sync_fetch_and_xor(&_M_i, __i); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_xor(&_M_i, __i); }
    };
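
For illustration only (not part of this header): __atomic_base is the engine
behind std::atomic<int> and the other integral atomics, so its interface is
normally exercised through them. A hedged sketch of the compare_exchange retry
loop the class is designed for; counter and increment_below are illustrative
names, not library names.

#include <atomic>

std::atomic<int> counter{0};

// Increment 'counter' only while it is below 'limit'; returns false
// once the limit has been reached.
bool
increment_below(int limit)
{
  int cur = counter.load(std::memory_order_relaxed);
  while (cur < limit)
    {
      // On failure compare_exchange_weak stores the observed value
      // into 'cur', mirroring the __sync_val_compare_and_swap-based
      // implementation above, so no separate reload is needed.
      if (counter.compare_exchange_weak(cur, cur + 1,
                                        std::memory_order_acq_rel,
                                        std::memory_order_relaxed))
        return true;
    }
  return false;
}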

} // namespace __atomic2

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif
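
For illustration only: the fetch_add/fetch_sub members defined above return the
value held before the operation, which is what makes the classic intrusive
reference count work. A hedged sketch using the public std::atomic interface;
refcounted, add_ref and release_ref are illustrative names.

#include <atomic>

struct refcounted
{
  std::atomic<long> refs{1};   // the creator holds the first reference
};

void
add_ref(refcounted* p)
{ p->refs.fetch_add(1, std::memory_order_relaxed); }

// Returns true when the caller released the last reference and the
// object may be destroyed; fetch_sub yields the pre-decrement value,
// so exactly one thread observes 1 here.
bool
release_ref(refcounted* p)
{ return p->refs.fetch_sub(1, std::memory_order_acq_rel) == 1; }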