/*
 * Copyright 2012 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkWeakRefCnt_DEFINED
#define SkWeakRefCnt_DEFINED

#include "SkRefCnt.h"
#include <atomic>

/** \class SkWeakRefCnt

    SkWeakRefCnt is the base class for objects that may be shared by multiple
    objects. When an existing strong owner wants to share a reference, it calls
    ref(). When a strong owner wants to release its reference, it calls
    unref(). When the shared object's strong reference count goes to zero as
    the result of an unref() call, its (virtual) weak_dispose method is called.
    It is an error for the destructor to be called explicitly (or via the
    object going out of scope on the stack or calling delete) if
    getRefCnt() > 1.

    In addition to strong ownership, an owner may instead obtain a weak
    reference by calling weak_ref(). A call to weak_ref() must be balanced by a
    call to weak_unref(). To obtain a strong reference from a weak reference,
    call try_ref(). If try_ref() returns true, the owner's pointer is now also
    a strong reference on which unref() must be called. Note that this does not
    affect the original weak reference; weak_unref() must still be called. When
    the weak reference count goes to zero, the object is deleted. While the
    weak reference count is positive and the strong reference count is zero,
    the object still exists, but it is in the disposed state. It is up to the
    object to define what this means.

    Note that a strong reference implicitly implies a weak reference. As a
    result, it is allowable for the owner of a strong ref to call try_ref().
    This will have the same effect as calling ref(), but may be more expensive.
    Example:

        SkWeakRefCnt* myRef = strongRef;
        myRef->weak_ref();
        ... // strongRef->unref() may or may not be called
        if (myRef->try_ref()) {
            ... // use myRef
            myRef->unref();
        } else {
            // myRef is in the disposed state
        }
        myRef->weak_unref();
*/
class SK_API SkWeakRefCnt : public SkRefCnt {
public:
    /** Default construct, initializing the reference counts to 1.
        The strong references collectively hold one weak reference. When the
        strong reference count goes to zero, the collectively held weak
        reference is released.
    */
    SkWeakRefCnt() : SkRefCnt(), fWeakCnt(1) {}

    /** Destruct, asserting that the weak reference count is 1.
    */
    ~SkWeakRefCnt() override {
#ifdef SK_DEBUG
        SkASSERT(getWeakCnt() == 1);
        fWeakCnt.store(0, std::memory_order_relaxed);
#endif
    }

#ifdef SK_DEBUG
    /** Return the weak reference count. */
    int32_t getWeakCnt() const {
        return fWeakCnt.load(std::memory_order_relaxed);
    }
#endif

private:
    /** If fRefCnt is 0, returns 0.
     *  Otherwise increments fRefCnt, acquires, and returns the old value.
     */
    int32_t atomic_conditional_acquire_strong_ref() const {
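        // Descriptive note: prime 'prev' with a relaxed load, then retry the
        // weak CAS until it either succeeds or observes a zero strong count,
        // so a disposed object is never incremented back to life.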
        int32_t prev = fRefCnt.load(std::memory_order_relaxed);
        do {
            if (0 == prev) {
                break;
            }
        } while(!fRefCnt.compare_exchange_weak(prev, prev+1, std::memory_order_acquire,
                                                             std::memory_order_relaxed));
        return prev;
    }

public:
    /** Creates a strong reference from a weak reference, if possible. The
        caller must already be an owner. If try_ref() returns true the owner
        is in possession of an additional strong reference. Both the original
        reference and new reference must be properly unreferenced. If try_ref()
        returns false, no strong reference could be created and the owner's
        reference is in the same state as before the call.
    */
    bool SK_WARN_UNUSED_RESULT try_ref() const {
        if (atomic_conditional_acquire_strong_ref() != 0) {
            // Acquire barrier (L/SL), if not provided above.
            // Prevents subsequent code from happening before the increment.
            return true;
        }
        return false;
    }

    /** Increment the weak reference count. Must be balanced by a call to
        weak_unref().
    */
    void weak_ref() const {
        SkASSERT(getRefCnt() > 0);
        SkASSERT(getWeakCnt() > 0);
        // No barrier required.
        (void)fWeakCnt.fetch_add(+1, std::memory_order_relaxed);
    }

    /** Decrement the weak reference count. If the weak reference count is 1
        before the decrement, then call delete on the object. Note that if this
        is the case, then the object needs to have been allocated via new, and
        not on the stack.
    */
    void weak_unref() const {
        SkASSERT(getWeakCnt() > 0);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == fWeakCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // Like try_ref(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
#ifdef SK_DEBUG
            // so our destructor won't complain
            fWeakCnt.store(1, std::memory_order_relaxed);
#endif
            this->INHERITED::internal_dispose();
        }
    }
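
    /* Illustrative count trace, derived from the code in this header, for one
       strong owner plus one weak owner:

           construction   -> fRefCnt == 1, fWeakCnt == 1
           weak_ref()     -> fRefCnt == 1, fWeakCnt == 2
           unref()        -> fRefCnt == 0; internal_dispose() runs weak_dispose()
                             and then weak_unref(), leaving fWeakCnt == 1
           weak_unref()   -> fWeakCnt == 0; the object is deleted
    */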

    /** Returns true if there are no strong references to the object. When this
        is the case all future calls to try_ref() will return false.
    */
    bool weak_expired() const {
        return fRefCnt.load(std::memory_order_relaxed) == 0;
    }

protected:
    /** Called when the strong reference count goes to zero. This allows the
        object to free any resources it may be holding. Weak references may
        still exist and their level of allowed access to the object is defined
        by the object's class. (An illustrative subclass sketch follows this
        class definition.)
    */
    virtual void weak_dispose() const {
    }

private:
    /** Called when the strong reference count goes to zero. Calls weak_dispose
        on the object and releases the implicit weak reference held
        collectively by the strong references.
    */
    void internal_dispose() const override {
        weak_dispose();
        weak_unref();
    }

    /* Invariant: fWeakCnt = #weak + (fRefCnt > 0 ? 1 : 0) */
    mutable std::atomic<int32_t> fWeakCnt;

    typedef SkRefCnt INHERITED;
};
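
/*  Illustrative sketch (not part of this header; ExampleResource, fPayload and
    useIfAlive are hypothetical names): a subclass releases an expensive
    payload when the last strong reference goes away, while weak owners can
    still observe the disposed object until they call weak_unref().

        class ExampleResource : public SkWeakRefCnt {
        protected:
            // Called once, when the strong count reaches zero; weak
            // references may still exist afterwards.
            void weak_dispose() const override {
                delete[] fPayload;          // free the heavy resource
                fPayload = nullptr;         // mark the disposed state
            }
        private:
            mutable uint8_t* fPayload = nullptr;
        };

        // A weak owner attempts to promote its pointer to a strong reference:
        void useIfAlive(ExampleResource* weakPtr) {
            if (weakPtr->try_ref()) {       // success: we also hold a strong ref now
                // ... safe to use the object and its payload here ...
                weakPtr->unref();           // balance the try_ref()
            }
            // The weak reference itself is unaffected; whoever called
            // weak_ref() still owes a weak_unref().
        }
*/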

#endif