#ifndef BOOST_SMART_PTR_DETAIL_SP_COUNTED_BASE_STD_ATOMIC_HPP_INCLUDED
#define BOOST_SMART_PTR_DETAIL_SP_COUNTED_BASE_STD_ATOMIC_HPP_INCLUDED

// MS compatible compilers support #pragma once

#if defined(_MSC_VER) && (_MSC_VER >= 1020)
# pragma once
#endif

// detail/sp_counted_base_std_atomic.hpp - C++11 std::atomic
//
// Copyright (c) 2007, 2013 Peter Dimov
//
// Distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt

#include <boost/detail/sp_typeinfo.hpp>
#include <atomic>
#include <cstdint>

namespace boost
{

namespace detail
{

// Relaxed ordering suffices for an increment: a new reference is always
// created from an existing one, so no synchronization is needed.
inline void atomic_increment( std::atomic_int_least32_t * pw )
{
    pw->fetch_add( 1, std::memory_order_relaxed );
}

// Returns the value before the decrement. Acquire/release ordering ensures
// that prior writes to the managed object are visible to the thread that
// performs the final decrement and calls dispose()/destroy().
inline std::int_least32_t atomic_decrement( std::atomic_int_least32_t * pw )
{
    return pw->fetch_sub( 1, std::memory_order_acq_rel );
}

// Increments *pw only if it is nonzero; returns the previous value.
inline std::int_least32_t atomic_conditional_increment( std::atomic_int_least32_t * pw )
{
    // long r = *pw;
    // if( r != 0 ) ++*pw;
    // return r;

    std::int_least32_t r = pw->load( std::memory_order_relaxed );

    for( ;; )
    {
        if( r == 0 )
        {
            return r;
        }

        if( pw->compare_exchange_weak( r, r + 1, std::memory_order_relaxed, std::memory_order_relaxed ) )
        {
            return r;
        }
    }
}

class sp_counted_base
{
private:

    sp_counted_base( sp_counted_base const & );
    sp_counted_base & operator= ( sp_counted_base const & );

    std::atomic_int_least32_t use_count_;  // #shared
    std::atomic_int_least32_t weak_count_; // #weak + (#shared != 0)

public:

    sp_counted_base(): use_count_( 1 ), weak_count_( 1 )
    {
    }

    virtual ~sp_counted_base() // nothrow
    {
    }

    // dispose() is called when use_count_ drops to zero, to release
    // the resources managed by *this.

    virtual void dispose() = 0; // nothrow

    // destroy() is called when weak_count_ drops to zero.

    virtual void destroy() // nothrow
    {
        delete this;
    }

    virtual void * get_deleter( sp_typeinfo const & ti ) = 0;
    virtual void * get_local_deleter( sp_typeinfo const & ti ) = 0;
    virtual void * get_untyped_deleter() = 0;

    void add_ref_copy()
    {
        atomic_increment( &use_count_ );
    }

    // Used by weak_ptr::lock(): succeeds only while the object is still alive.
    bool add_ref_lock() // true on success
    {
        return atomic_conditional_increment( &use_count_ ) != 0;
    }

    void release() // nothrow
    {
        if( atomic_decrement( &use_count_ ) == 1 )
        {
            dispose();
            weak_release();
        }
    }

    void weak_add_ref() // nothrow
    {
        atomic_increment( &weak_count_ );
    }

    void weak_release() // nothrow
    {
        if( atomic_decrement( &weak_count_ ) == 1 )
        {
            destroy();
        }
    }

    long use_count() const // nothrow
    {
        return use_count_.load( std::memory_order_acquire );
    }
};

} // namespace detail

} // namespace boost

#endif  // #ifndef BOOST_SMART_PTR_DETAIL_SP_COUNTED_BASE_STD_ATOMIC_HPP_INCLUDED