// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_MEMORY_REF_COUNTED_H_
#define BASE_MEMORY_REF_COUNTED_H_

#include <stddef.h>
#include <stdint.h>

#include <iosfwd>
#include <type_traits>
#include <utility>

#include "base/atomic_ref_count.h"
#include "base/base_export.h"
#include "base/compiler_specific.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/sequence_checker.h"
#include "base/threading/thread_collision_warner.h"
#include "build/build_config.h"

template <class T>
class scoped_refptr;

namespace base {

template <typename T>
scoped_refptr<T> AdoptRef(T* t);

namespace subtle {

enum AdoptRefTag { kAdoptRefTag };
enum StartRefCountFromZeroTag { kStartRefCountFromZeroTag };
enum StartRefCountFromOneTag { kStartRefCountFromOneTag };

class BASE_EXPORT RefCountedBase {
 public:
  bool HasOneRef() const { return ref_count_ == 1; }

 protected:
  explicit RefCountedBase(StartRefCountFromZeroTag) {
#if DCHECK_IS_ON()
    sequence_checker_.DetachFromSequence();
#endif
  }

  explicit RefCountedBase(StartRefCountFromOneTag) : ref_count_(1) {
#if DCHECK_IS_ON()
    needs_adopt_ref_ = true;
    sequence_checker_.DetachFromSequence();
#endif
  }

  ~RefCountedBase() {
#if DCHECK_IS_ON()
    DCHECK(in_dtor_) << "RefCounted object deleted without calling Release()";
#endif
  }

  void AddRef() const {
    // TODO(maruel): Add back once it doesn't assert 500 times/sec.
    // Current thread books the critical section "AddRelease"
    // without releasing it.
    // DFAKE_SCOPED_LOCK_THREAD_LOCKED(add_release_);
#if DCHECK_IS_ON()
    DCHECK(!in_dtor_);
    DCHECK(!needs_adopt_ref_)
        << "This RefCounted object is created with non-zero reference count."
        << " The first reference to such an object has to be made by AdoptRef"
        << " or MakeRefCounted.";
    if (ref_count_ >= 1) {
      DCHECK(CalledOnValidSequence());
    }
#endif

    AddRefImpl();
  }

  // Returns true if the object should self-delete.
  bool Release() const {
    --ref_count_;

    // TODO(maruel): Add back once it doesn't assert 500 times/sec.
    // Current thread books the critical section "AddRelease"
    // without releasing it.
    // DFAKE_SCOPED_LOCK_THREAD_LOCKED(add_release_);

#if DCHECK_IS_ON()
    DCHECK(!in_dtor_);
    if (ref_count_ == 0)
      in_dtor_ = true;

    if (ref_count_ >= 1)
      DCHECK(CalledOnValidSequence());
    if (ref_count_ == 1)
      sequence_checker_.DetachFromSequence();
#endif

    return ref_count_ == 0;
  }

  // Returns true if it is safe to read or write the object, from a thread
  // safety standpoint. Should be DCHECK'd from the methods of RefCounted
  // classes if there is a danger of objects being shared across threads.
  //
  // This produces fewer false positives than adding a separate SequenceChecker
  // into the subclass, because it automatically detaches from the sequence
  // when the reference count is 1 (and never fails if there is only one
  // reference).
  //
  // This means that, unlike a separate SequenceChecker, it will permit a
  // singly referenced object to be passed between threads (not holding a
  // reference on the sending thread), but will trap if the sending thread
  // holds onto a reference, or if the object is accessed from multiple
  // threads simultaneously.
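  //
  // A minimal usage sketch (MyFoo and OnDataReceived are illustrative names,
  // not part of this header): a RefCounted subclass whose instances may be
  // shared across threads can guard its methods like so:
  //
  //   void MyFoo::OnDataReceived() {
  //     DCHECK(IsOnValidSequence());
  //     ...
  //   }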
  bool IsOnValidSequence() const {
#if DCHECK_IS_ON()
    return ref_count_ <= 1 || CalledOnValidSequence();
#else
    return true;
#endif
  }

 private:
  template <typename U>
  friend scoped_refptr<U> base::AdoptRef(U*);

  void Adopted() const {
#if DCHECK_IS_ON()
    DCHECK(needs_adopt_ref_);
    needs_adopt_ref_ = false;
#endif
  }

#if defined(ARCH_CPU_64_BIT)
  void AddRefImpl() const;
#else
  void AddRefImpl() const { ++ref_count_; }
#endif

#if DCHECK_IS_ON()
  bool CalledOnValidSequence() const;
#endif

  mutable uint32_t ref_count_ = 0;

#if DCHECK_IS_ON()
  mutable bool needs_adopt_ref_ = false;
  mutable bool in_dtor_ = false;
  mutable SequenceChecker sequence_checker_;
#endif

  DFAKE_MUTEX(add_release_);

  DISALLOW_COPY_AND_ASSIGN(RefCountedBase);
};

class BASE_EXPORT RefCountedThreadSafeBase {
 public:
  bool HasOneRef() const;

 protected:
  explicit RefCountedThreadSafeBase(StartRefCountFromZeroTag) {}
  explicit RefCountedThreadSafeBase(StartRefCountFromOneTag) : ref_count_(1) {
#if DCHECK_IS_ON()
    needs_adopt_ref_ = true;
#endif
  }

#if DCHECK_IS_ON()
  ~RefCountedThreadSafeBase();
#else
  ~RefCountedThreadSafeBase() = default;
#endif

// Release and AddRef are suitable for inlining on X86 because they generate
// very small code sequences. On other platforms (ARM), inlining causes a size
// regression and is probably not worth it.
#if defined(ARCH_CPU_X86_FAMILY)
  // Returns true if the object should self-delete.
  bool Release() const { return ReleaseImpl(); }
  void AddRef() const { AddRefImpl(); }
#else
  // Returns true if the object should self-delete.
  bool Release() const;
  void AddRef() const;
#endif

 private:
  template <typename U>
  friend scoped_refptr<U> base::AdoptRef(U*);

  void Adopted() const {
#if DCHECK_IS_ON()
    DCHECK(needs_adopt_ref_);
    needs_adopt_ref_ = false;
#endif
  }

  ALWAYS_INLINE void AddRefImpl() const {
#if DCHECK_IS_ON()
    DCHECK(!in_dtor_);
    DCHECK(!needs_adopt_ref_)
        << "This RefCounted object is created with non-zero reference count."
        << " The first reference to such an object has to be made by AdoptRef"
        << " or MakeRefCounted.";
#endif
    ref_count_.Increment();
  }

  ALWAYS_INLINE bool ReleaseImpl() const {
#if DCHECK_IS_ON()
    DCHECK(!in_dtor_);
    DCHECK(!ref_count_.IsZero());
#endif
    if (!ref_count_.Decrement()) {
#if DCHECK_IS_ON()
      in_dtor_ = true;
#endif
      return true;
    }
    return false;
  }

  mutable AtomicRefCount ref_count_{0};
#if DCHECK_IS_ON()
  mutable bool needs_adopt_ref_ = false;
  mutable bool in_dtor_ = false;
#endif

  DISALLOW_COPY_AND_ASSIGN(RefCountedThreadSafeBase);
};

}  // namespace subtle

// ScopedAllowCrossThreadRefCountAccess disables the check documented on
// RefCounted below for rare pre-existing use cases where thread-safety was
// guaranteed through other means (e.g. explicit sequencing of calls across
// execution sequences when bouncing between threads in order). New callers
// should refrain from using this: callsites handling thread-safety through
// locks should use RefCountedThreadSafe, since the overhead of its atomics is
// negligible compared to locks anyway, and callsites doing explicit sequencing
// should properly std::move() the ref to avoid hitting this check.
// TODO(tzik): Cleanup existing use cases and remove
// ScopedAllowCrossThreadRefCountAccess.
class BASE_EXPORT ScopedAllowCrossThreadRefCountAccess final {
 public:
#if DCHECK_IS_ON()
  ScopedAllowCrossThreadRefCountAccess();
  ~ScopedAllowCrossThreadRefCountAccess();
#else
  ScopedAllowCrossThreadRefCountAccess() {}
  ~ScopedAllowCrossThreadRefCountAccess() {}
#endif
};
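
// A minimal usage sketch (not from this header; |legacy_ptr| and the callsite
// are illustrative): the scoper is stack-allocated around the one ref count
// manipulation whose cross-thread safety is guaranteed by other means, so the
// check is relaxed only for the scoper's lifetime.
//
//   {
//     base::ScopedAllowCrossThreadRefCountAccess allow_cross_thread_access;
//     legacy_ptr = nullptr;  // Ref count touched off its usual sequence.
//   }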

//
// A base class for reference counted classes. Otherwise, known as a cheap
// knock-off of WebKit's RefCounted<T> class. To use this, just extend your
// class from it like so:
//
//   class MyFoo : public base::RefCounted<MyFoo> {
//    ...
//    private:
//     friend class base::RefCounted<MyFoo>;
//     ~MyFoo();
//   };
//
// You should always make your destructor non-public, to avoid any code
// deleting the object accidentally while there are references to it.
//
//
// The ref count manipulation of RefCounted is NOT thread safe and has DCHECKs
// to trap unsafe cross thread usage. A subclass instance of RefCounted can be
// passed to another execution sequence only when its ref count is 1. If the
// ref count is more than 1, the RefCounted class verifies the ref updates are
// made on the same execution sequence as the previous ones. The subclass can
// also manually call IsOnValidSequence to trap other non-thread-safe accesses;
// see the documentation for that method.
//
//
// The reference count starts from zero by default, and we intend to migrate
// to a start-from-one ref count. Put REQUIRE_ADOPTION_FOR_REFCOUNTED_TYPE()
// in the ref counted class to opt in.
//
// If an object has a start-from-one ref count, the first scoped_refptr needs
// to be created by base::AdoptRef() or base::MakeRefCounted(). We can use
// base::MakeRefCounted() to create both types of ref counted objects.
//
// The motivations to use start-from-one ref count are:
//  - Start-from-one ref count doesn't need the ref count increment for the
//    first reference.
//  - It can detect an invalid object acquisition for a being-deleted object
//    that has zero ref count. That tends to happen with custom deleters that
//    delay the deletion.
//    TODO(tzik): Implement invalid acquisition detection.
//  - Behavior parity to Blink's WTF::RefCounted, whose count starts from one.
//    And start-from-one ref count is a step to merge WTF::RefCounted into
//    base::RefCounted.
//
#define REQUIRE_ADOPTION_FOR_REFCOUNTED_TYPE()             \
  static constexpr ::base::subtle::StartRefCountFromOneTag \
      kRefCountPreference = ::base::subtle::kStartRefCountFromOneTag

template <class T>
class RefCounted;

template <typename T>
struct DefaultRefCountedTraits {
  static void Destruct(const T* x) {
    RefCounted<T, DefaultRefCountedTraits>::DeleteInternal(x);
  }
};

template <class T, typename Traits = DefaultRefCountedTraits<T>>
class RefCounted : public subtle::RefCountedBase {
 public:
  static constexpr subtle::StartRefCountFromZeroTag kRefCountPreference =
      subtle::kStartRefCountFromZeroTag;

  RefCounted() : subtle::RefCountedBase(T::kRefCountPreference) {}

  void AddRef() const { subtle::RefCountedBase::AddRef(); }

  void Release() const {
    if (subtle::RefCountedBase::Release()) {
      // Prune the code paths which the static analyzer may take to simulate
      // object destruction. Use-after-free errors aren't possible given the
      // lifetime guarantees of the refcounting system.
      ANALYZER_SKIP_THIS_PATH();

      Traits::Destruct(static_cast<const T*>(this));
    }
  }

 protected:
  ~RefCounted() = default;

 private:
  friend struct DefaultRefCountedTraits<T>;
  template <typename U>
  static void DeleteInternal(const U* x) {
    delete x;
  }

  DISALLOW_COPY_AND_ASSIGN(RefCounted);
};

// Forward declaration.
template <class T, typename Traits>
class RefCountedThreadSafe;

// Default traits for RefCountedThreadSafe<T>. Deletes the object when its ref
// count reaches 0. Overload to delete it on a different thread etc.
template <typename T>
struct DefaultRefCountedThreadSafeTraits {
  static void Destruct(const T* x) {
    // Delete through RefCountedThreadSafe to make child classes only need to
    // be friend with RefCountedThreadSafe instead of this struct, which is an
    // implementation detail.
    RefCountedThreadSafe<T, DefaultRefCountedThreadSafeTraits>::DeleteInternal(
        x);
  }
};
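
// A hedged sketch of a custom traits overload (MyFoo, DeleteOnIOThreadTraits
// and GetIOThreadTaskRunner() are illustrative names, not part of this
// header): Destruct() can route deletion to a specific sequence instead of
// deleting on whichever thread drops the last reference.
//
//   struct DeleteOnIOThreadTraits {
//     static void Destruct(const MyFoo* x) {
//       GetIOThreadTaskRunner()->DeleteSoon(FROM_HERE, x);
//     }
//   };
//
//   class MyFoo
//       : public base::RefCountedThreadSafe<MyFoo, DeleteOnIOThreadTraits> {
//     ...
//   };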

//
// A thread-safe variant of RefCounted<T>
//
//   class MyFoo : public base::RefCountedThreadSafe<MyFoo> {
//    ...
//   };
//
// If you're using the default trait, then you should add compile time
// asserts that no one else is deleting your object. i.e.
//    private:
//     friend class base::RefCountedThreadSafe<MyFoo>;
//     ~MyFoo();
//
// We can use REQUIRE_ADOPTION_FOR_REFCOUNTED_TYPE() with RefCountedThreadSafe
// too. See the comment above the RefCounted definition for details.
template <class T, typename Traits = DefaultRefCountedThreadSafeTraits<T>>
class RefCountedThreadSafe : public subtle::RefCountedThreadSafeBase {
 public:
  static constexpr subtle::StartRefCountFromZeroTag kRefCountPreference =
      subtle::kStartRefCountFromZeroTag;

  explicit RefCountedThreadSafe()
      : subtle::RefCountedThreadSafeBase(T::kRefCountPreference) {}

  void AddRef() const { subtle::RefCountedThreadSafeBase::AddRef(); }

  void Release() const {
    if (subtle::RefCountedThreadSafeBase::Release()) {
      ANALYZER_SKIP_THIS_PATH();
      Traits::Destruct(static_cast<const T*>(this));
    }
  }

 protected:
  ~RefCountedThreadSafe() = default;

 private:
  friend struct DefaultRefCountedThreadSafeTraits<T>;
  template <typename U>
  static void DeleteInternal(const U* x) {
    delete x;
  }

  DISALLOW_COPY_AND_ASSIGN(RefCountedThreadSafe);
};

//
// A thread-safe wrapper for some piece of data so we can place other
// things in scoped_refptrs<>.
//
template <typename T>
class RefCountedData
    : public base::RefCountedThreadSafe<base::RefCountedData<T>> {
 public:
  RefCountedData() : data() {}
  RefCountedData(const T& in_value) : data(in_value) {}
  RefCountedData(T&& in_value) : data(std::move(in_value)) {}

  T data;

 private:
  friend class base::RefCountedThreadSafe<base::RefCountedData<T>>;
  ~RefCountedData() = default;
};

// Creates a scoped_refptr from a raw pointer without incrementing the
// reference count. Use this only for a newly created object whose reference
// count starts from 1 instead of 0.
template <typename T>
scoped_refptr<T> AdoptRef(T* obj) {
  using Tag = typename std::decay<decltype(T::kRefCountPreference)>::type;
  static_assert(std::is_same<subtle::StartRefCountFromOneTag, Tag>::value,
                "Use AdoptRef only if the reference count starts from one.");

  DCHECK(obj);
  DCHECK(obj->HasOneRef());
  obj->Adopted();
  return scoped_refptr<T>(obj, subtle::kAdoptRefTag);
}

namespace subtle {

template <typename T>
scoped_refptr<T> AdoptRefIfNeeded(T* obj, StartRefCountFromZeroTag) {
  return scoped_refptr<T>(obj);
}

template <typename T>
scoped_refptr<T> AdoptRefIfNeeded(T* obj, StartRefCountFromOneTag) {
  return AdoptRef(obj);
}

}  // namespace subtle

// Constructs an instance of T, which is a ref counted type, and wraps the
// object into a scoped_refptr<T>.
template <typename T, typename... Args>
scoped_refptr<T> MakeRefCounted(Args&&... args) {
  T* obj = new T(std::forward<Args>(args)...);
  return subtle::AdoptRefIfNeeded(obj, T::kRefCountPreference);
}

// Takes an instance of T, which is a ref counted type, and wraps the object
// into a scoped_refptr<T>.
template <typename T>
scoped_refptr<T> WrapRefCounted(T* t) {
  return scoped_refptr<T>(t);
}

}  // namespace base
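
// A hedged usage sketch (MyFoo is an illustrative class, not part of this
// header): with the default start-from-zero ref count, the first reference can
// be taken by WrapRefCounted() or by assigning the raw pointer to a
// scoped_refptr, while a class that opts into the start-from-one ref count
// must hand out its first reference via MakeRefCounted() or AdoptRef().
//
//   class MyFoo : public base::RefCountedThreadSafe<MyFoo> {
//    public:
//     REQUIRE_ADOPTION_FOR_REFCOUNTED_TYPE();
//     ...
//   };
//
//   scoped_refptr<MyFoo> foo = base::MakeRefCounted<MyFoo>();
//   scoped_refptr<MyFoo> bar = base::AdoptRef(new MyFoo());  // Equivalent.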

//
// A smart pointer class for reference counted objects. Use this class instead
// of calling AddRef and Release manually on a reference counted object to
// avoid common memory leaks caused by forgetting to Release an object
// reference. Sample usage:
//
//   class MyFoo : public RefCounted<MyFoo> {
//    ...
//    private:
//     friend class RefCounted<MyFoo>;  // Allow destruction by RefCounted<>.
//     ~MyFoo();                        // Destructor must be private/protected.
//   };
//
//   void some_function() {
//     scoped_refptr<MyFoo> foo = new MyFoo();
//     foo->Method(param);
//     // |foo| is released when this function returns
//   }
//
//   void some_other_function() {
//     scoped_refptr<MyFoo> foo = new MyFoo();
//     ...
//     foo = nullptr;  // explicitly releases |foo|
//     ...
//     if (foo)
//       foo->Method(param);
//   }
//
// The above examples show how scoped_refptr<T> acts like a pointer to T.
// Given two scoped_refptr<T> classes, it is also possible to exchange
// references between the two objects, like so:
//
//   {
//     scoped_refptr<MyFoo> a = new MyFoo();
//     scoped_refptr<MyFoo> b;
//
//     b.swap(a);
//     // now, |b| references the MyFoo object, and |a| references nullptr.
//   }
//
// To make both |a| and |b| in the above example reference the same MyFoo
// object, simply use the assignment operator:
//
//   {
//     scoped_refptr<MyFoo> a = new MyFoo();
//     scoped_refptr<MyFoo> b;
//
//     b = a;
//     // now, |a| and |b| each own a reference to the same MyFoo object.
//   }
//
template <class T>
class scoped_refptr {
 public:
  typedef T element_type;

  scoped_refptr() {}

  scoped_refptr(T* p) : ptr_(p) {
    if (ptr_)
      AddRef(ptr_);
  }

  // Copy constructor.
  scoped_refptr(const scoped_refptr& r) : ptr_(r.ptr_) {
    if (ptr_)
      AddRef(ptr_);
  }

  // Copy conversion constructor.
  template <typename U,
            typename = typename std::enable_if<
                std::is_convertible<U*, T*>::value>::type>
  scoped_refptr(const scoped_refptr<U>& r) : ptr_(r.get()) {
    if (ptr_)
      AddRef(ptr_);
  }

  // Move constructor. This is required in addition to the conversion
  // constructor below in order for clang to warn about pessimizing moves.
  scoped_refptr(scoped_refptr&& r) : ptr_(r.get()) { r.ptr_ = nullptr; }

  // Move conversion constructor.
  template <typename U,
            typename = typename std::enable_if<
                std::is_convertible<U*, T*>::value>::type>
  scoped_refptr(scoped_refptr<U>&& r) : ptr_(r.get()) {
    r.ptr_ = nullptr;
  }

  ~scoped_refptr() {
    if (ptr_)
      Release(ptr_);
  }

  T* get() const { return ptr_; }

  T& operator*() const {
    DCHECK(ptr_);
    return *ptr_;
  }

  T* operator->() const {
    DCHECK(ptr_);
    return ptr_;
  }

  scoped_refptr& operator=(T* p) {
    // AddRef first so that self assignment works.
    if (p)
      AddRef(p);
    T* old_ptr = ptr_;
    ptr_ = p;
    if (old_ptr)
      Release(old_ptr);
    return *this;
  }

  scoped_refptr& operator=(const scoped_refptr& r) { return *this = r.ptr_; }

  template <typename U>
  scoped_refptr& operator=(const scoped_refptr<U>& r) {
    return *this = r.get();
  }

  scoped_refptr& operator=(scoped_refptr&& r) {
    scoped_refptr tmp(std::move(r));
    tmp.swap(*this);
    return *this;
  }

  template <typename U>
  scoped_refptr& operator=(scoped_refptr<U>&& r) {
    // We swap with a temporary variable to guarantee that |ptr_| is released
    // immediately. A naive implementation which swaps |this| and |r| would
    // unintentionally extend the lifetime of |ptr_| to at least the lifetime
    // of |r|.
    scoped_refptr tmp(std::move(r));
    tmp.swap(*this);
    return *this;
  }

  void swap(scoped_refptr& r) {
    T* tmp = ptr_;
    ptr_ = r.ptr_;
    r.ptr_ = tmp;
  }

  explicit operator bool() const { return ptr_ != nullptr; }

  template <typename U>
  bool operator==(const scoped_refptr<U>& rhs) const {
    return ptr_ == rhs.get();
  }

  template <typename U>
  bool operator!=(const scoped_refptr<U>& rhs) const {
    return !operator==(rhs);
  }

  template <typename U>
  bool operator<(const scoped_refptr<U>& rhs) const {
    return ptr_ < rhs.get();
  }

 protected:
  T* ptr_ = nullptr;

 private:
  template <typename U>
  friend scoped_refptr<U> base::AdoptRef(U*);
  scoped_refptr(T* p, base::subtle::AdoptRefTag) : ptr_(p) {}

  // Friend required for move constructors that set r.ptr_ to null.
  template <typename U>
  friend class scoped_refptr;

  // Non-inline helpers to allow:
  //     class Opaque;
  //     extern template class scoped_refptr<Opaque>;
  // Otherwise the compiler will complain that Opaque is an incomplete type.
  static void AddRef(T* ptr);
  static void Release(T* ptr);
};

// static
template <typename T>
void scoped_refptr<T>::AddRef(T* ptr) {
  ptr->AddRef();
}

// static
template <typename T>
void scoped_refptr<T>::Release(T* ptr) {
  ptr->Release();
}

template <typename T, typename U>
bool operator==(const scoped_refptr<T>& lhs, const U* rhs) {
  return lhs.get() == rhs;
}

template <typename T, typename U>
bool operator==(const T* lhs, const scoped_refptr<U>& rhs) {
  return lhs == rhs.get();
}

template <typename T>
bool operator==(const scoped_refptr<T>& lhs, std::nullptr_t null) {
  return !static_cast<bool>(lhs);
}

template <typename T>
bool operator==(std::nullptr_t null, const scoped_refptr<T>& rhs) {
  return !static_cast<bool>(rhs);
}

template <typename T, typename U>
bool operator!=(const scoped_refptr<T>& lhs, const U* rhs) {
  return !operator==(lhs, rhs);
}

template <typename T, typename U>
bool operator!=(const T* lhs, const scoped_refptr<U>& rhs) {
  return !operator==(lhs, rhs);
}

template <typename T>
bool operator!=(const scoped_refptr<T>& lhs, std::nullptr_t null) {
  return !operator==(lhs, null);
}

template <typename T>
bool operator!=(std::nullptr_t null, const scoped_refptr<T>& rhs) {
  return !operator==(null, rhs);
}

template <typename T>
std::ostream& operator<<(std::ostream& out, const scoped_refptr<T>& p) {
  return out << p.get();
}

#endif  // BASE_MEMORY_REF_COUNTED_H_