libstdc++
atomic_base.h
// -*- C++ -*- header.

// Copyright (C) 2008-2013 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdbool.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };
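
  // A usage sketch (illustrative, not part of this header): the HLE
  // modifiers are combined with a base ordering via the operator| below
  // when targeting hardware lock elision, e.g.
  //
  //   std::atomic<int> __lock(0);
  //   int __exp = 0;
  //   __lock.compare_exchange_strong(__exp, 1,
  //       std::memory_order_acquire | std::__memory_order_hle_acquire);
  //   // ...critical section...
  //   __lock.store(0,
  //       std::memory_order_release | std::__memory_order_hle_release);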

  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m | int(__mod));
  }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m & int(__mod));
  }

  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | (__m & __memory_order_modifier_mask));
  }
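
  // For example, __cmpexch_failure_order(memory_order_acq_rel) yields
  // memory_order_acquire, __cmpexch_failure_order(memory_order_release)
  // yields memory_order_relaxed, and any HLE modifier bits in __m are
  // carried through unchanged.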

  inline void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  inline void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
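
  // A usage sketch (illustrative, not part of this header): a release
  // fence paired with an acquire fence publishes plain data through a
  // relaxed atomic flag.
  //
  //   int __payload;                 // plain, non-atomic data
  //   std::atomic<bool> __ready(false);
  //
  //   // Thread 1:
  //   __payload = 42;
  //   std::atomic_thread_fence(std::memory_order_release);
  //   __ready.store(true, std::memory_order_relaxed);
  //
  //   // Thread 2:
  //   while (!__ready.load(std::memory_order_relaxed)) { }
  //   std::atomic_thread_fence(std::memory_order_acquire);
  //   // __payload is now guaranteed to read 42.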

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
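
  // An illustrative use (not part of this header): breaking the
  // dependency chain of a memory_order_consume load.
  //
  //   std::atomic<int*> __p;
  //   int* __q = __p.load(std::memory_order_consume);
  //   int __i = *__q;                        // ordered after the load
  //   int __j = *std::kill_dependency(__q);  // dependency deliberately cut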


  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

  /// atomic_char
  typedef __atomic_base<char>               atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char>        atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char>      atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short>              atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short>     atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int>                atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int>       atomic_uint;

  /// atomic_long
  typedef __atomic_base<long>               atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long>      atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long>          atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long> atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t>            atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t>           atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t>           atomic_char32_t;


  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>       atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>      atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>      atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t>     atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>      atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t>     atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>      atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t>     atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>        atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>       atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>       atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>      atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>       atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>      atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>       atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>      atomic_uint_fast64_t;


  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>           atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>          atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>             atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>           atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>          atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>          atomic_ptrdiff_t;


#define ATOMIC_VAR_INIT(_VI) { _VI }
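
  // A usage sketch (illustrative, not part of this header): static
  // initialization of an atomic object.
  //
  //   std::atomic_int __counter = ATOMIC_VAR_INIT(0);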

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif

  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
   */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Converting constructor, enabling initialization from ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
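
  // A usage sketch (illustrative, not part of this header): atomic_flag as
  // a minimal spin lock.
  //
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void
  //   lock()
  //   { while (__lock.test_and_set(std::memory_order_acquire)) { } }
  //
  //   void
  //   unlock()
  //   { __lock.clear(std::memory_order_release); }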


  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
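
  // A usage sketch (illustrative, not part of this header): an integral
  // atomic as a relaxed statistics counter, read with the default
  // memory_order_seq_cst.
  //
  //   std::atomic_uint __hits(0);
  //   __hits.fetch_add(1, std::memory_order_relaxed);  // per-event increment
  //   unsigned __total = __hits.load();                // final read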


  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(_M_type_size(1), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(_M_type_size(1), nullptr); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;

        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
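
  // A usage sketch (illustrative, not part of this header): arithmetic on
  // the pointer specialization is scaled by _M_type_size, so fetch_add(2)
  // advances by two elements, not two bytes.
  //
  //   int __buf[4] = { 0, 1, 2, 3 };
  //   std::__atomic_base<int*> __cur(__buf);
  //   int* __old = __cur.fetch_add(2);  // __old == __buf; now holds __buf + 2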

  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif