libstdc++
atomic_base.h
// -*- C++ -*- header.

// Copyright (C) 2008-2014 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdbool.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((always_inline))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };

  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m | int(__mod));
  }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m & int(__mod));
  }
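
  // Illustrative note (not part of the original header): the modifier
  // bits above can be OR-ed onto a standard order through the operator|
  // overload, e.g. to request hardware lock elision on targets that
  // support it.  A hedged usage sketch, with hypothetical user names:
  //
  //   std::atomic<int> __lk(0);
  //   // elided acquire of a simple lock word
  //   __lk.exchange(1, std::memory_order_acquire
  //                    | std::__memory_order_hle_acquire);
  //   // ... critical section ...
  //   __lk.store(0, std::memory_order_release
  //                 | std::__memory_order_hle_release);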

  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | (__m & __memory_order_modifier_mask));
  }
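
  // Illustrative note (not part of the original header): the mapping
  // these helpers implement, written as static_asserts user code could
  // make; they feed the single-order compare_exchange_* overloads below.
  //
  //   static_assert(std::__cmpexch_failure_order(std::memory_order_acq_rel)
  //                 == std::memory_order_acquire, "");
  //   static_assert(std::__cmpexch_failure_order(std::memory_order_release)
  //                 == std::memory_order_relaxed, "");
  //   static_assert(std::__cmpexch_failure_order(std::memory_order_seq_cst)
  //                 == std::memory_order_seq_cst, "");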

  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
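
  // Usage sketch (illustrative comment, hypothetical names): pairing a
  // release fence with an acquire fence publishes __payload through a
  // relaxed flag.
  //
  //   std::atomic<bool> __ready(false);
  //   int __payload;
  //
  //   // producer
  //   __payload = 42;
  //   std::atomic_thread_fence(std::memory_order_release);
  //   __ready.store(true, std::memory_order_relaxed);
  //
  //   // consumer
  //   while (!__ready.load(std::memory_order_relaxed))
  //     { }
  //   std::atomic_thread_fence(std::memory_order_acquire);
  //   // reads of __payload now see 42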

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
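
  // Usage sketch (illustrative comment, hypothetical names):
  // kill_dependency terminates a memory_order_consume dependency
  // chain, so the indexed read below carries no dependency ordering.
  //
  //   std::atomic<int*> __ptr;
  //   int* __p = __ptr.load(std::memory_order_consume);
  //   int __r = std::kill_dependency(__p[0]);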


  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

  /// atomic_char
  typedef __atomic_base<char>                   atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char>            atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char>          atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short>                  atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short>         atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int>                    atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int>           atomic_uint;

  /// atomic_long
  typedef __atomic_base<long>                   atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long>          atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long>              atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long>     atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t>                atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t>               atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t>               atomic_char32_t;


  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>           atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>          atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>          atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t>         atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>          atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t>         atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>          atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t>         atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>            atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>           atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>           atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>          atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>           atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>          atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>           atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>          atomic_uint_fast64_t;


  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>               atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>              atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>                 atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>               atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>              atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>              atomic_ptrdiff_t;


#define ATOMIC_VAR_INIT(_VI) { _VI }
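
  // Usage sketch (illustrative comment): ATOMIC_VAR_INIT gives
  // C-compatible constant initialization of an atomic object.
  //
  //   std::atomic_int __counter = ATOMIC_VAR_INIT(0);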

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif

  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
   */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
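
  // Usage sketch (illustrative comment, hypothetical names): the
  // classic spin lock built on atomic_flag.
  //
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void __enter()
  //   {
  //     while (__lock.test_and_set(std::memory_order_acquire))
  //       { }   // spin until the holder clears the flag
  //   }
  //
  //   void __leave()
  //   { __lock.clear(std::memory_order_release); }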


  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp      __int_type;

      __int_type        _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }


      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
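
  // Usage sketch (illustrative comment, hypothetical names): a relaxed
  // event counter plus a compare_exchange_weak retry loop; on failure
  // the expected value __cur is reloaded, so the loop simply retries.
  //
  //   std::atomic_ulong __hits(0);
  //
  //   void __record()
  //   { __hits.fetch_add(1, std::memory_order_relaxed); }
  //
  //   void __record_capped(unsigned long __max)
  //   {
  //     unsigned long __cur = __hits.load(std::memory_order_relaxed);
  //     while (__cur < __max
  //            && !__hits.compare_exchange_weak(__cur, __cur + 1))
  //       { }
  //   }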


  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp*     __pointer_type;

      __pointer_type    _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;

        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }


      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
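
  // Usage sketch (illustrative comment, hypothetical names): pointer
  // fetch_add advances by whole elements, scaled by _M_type_size above.
  //
  //   int __buf[8];
  //   std::__atomic_base<int*> __cursor(__buf);
  //
  //   int* __prev = __cursor.fetch_add(2);   // returns __buf
  //   // __cursor now points at __buf + 2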

  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif