1
2 /*
3 * Copyright 2006 The Android Open Source Project
4 *
5 * Use of this source code is governed by a BSD-style license that can be
6 * found in the LICENSE file.
7 */
8
9
10 #ifndef SkRefCnt_DEFINED
11 #define SkRefCnt_DEFINED
12
13 #include "SkThread.h"
14 #include "SkInstCnt.h"
15 #include "SkTemplates.h"
16
17 /** \class SkRefCnt
18
19 SkRefCnt is the base class for objects that may be shared by multiple
20 objects. When an existing owner wants to share a reference, it calls ref().
21 When an owner wants to release its reference, it calls unref(). When the
22 shared object's reference count goes to zero as the result of an unref()
23 call, its (virtual) destructor is called. It is an error for the
24 destructor to be called explicitly (or via the object going out of scope on
25 the stack or calling delete) if getRefCnt() > 1.
26 */
class SK_API SkRefCnt : SkNoncopyable {
public:
    SK_DECLARE_INST_COUNT_ROOT(SkRefCnt)

    /** Default construct, initializing the reference count to 1.
    */
    SkRefCnt() : fRefCnt(1) {}

    /** Destruct, asserting that the reference count is 1.
    */
    virtual ~SkRefCnt() {
#ifdef SK_DEBUG
        SkASSERT(fRefCnt == 1);
        fRefCnt = 0;    // illegal value, to catch us if we reuse after delete
#endif
    }

    /** Return the reference count.
        NOTE(review): this is a plain, unfenced read of a counter that other
        threads may be mutating via ref()/unref(), so the value can be stale
        as soon as it is returned; treat it as debug/assertion information.
    */
    int32_t getRefCnt() const { return fRefCnt; }

    /** Increment the reference count. Must be balanced by a call to unref().
    */
    void ref() const {
        SkASSERT(fRefCnt > 0);
        sk_atomic_inc(&fRefCnt);  // No barrier required.
    }

    /** Decrement the reference count. If the reference count is 1 before the
        decrement, then delete the object. Note that if this is the case, then
        the object needs to have been allocated via new, and not on the stack.
    */
    void unref() const {
        SkASSERT(fRefCnt > 0);
        // Release barrier (SL/S), if not provided below.
        if (sk_atomic_dec(&fRefCnt) == 1) {
            // Acquire barrier (L/SL), if not provided above.
            // Prevents code in dispose from happening before the decrement.
            // (The called function's "aquire" spelling is the historical API name.)
            sk_membar_aquire__after_atomic_dec();
            internal_dispose();
        }
    }

    /** Assert that the object is still alive (reference count is positive). */
    void validate() const {
        SkASSERT(fRefCnt > 0);
    }

    /**
     *  Alias for ref(), for compatibility with scoped_refptr.
     */
    void AddRef() { this->ref(); }

    /**
     *  Alias for unref(), for compatibility with scoped_refptr.
     */
    void Release() { this->unref(); }

protected:
    /**
     *  Allow subclasses to call this if they've overridden internal_dispose
     *  so they can reset fRefCnt before the destructor is called. Should only
     *  be called right before calling through to inherited internal_dispose()
     *  or before calling the destructor.
     */
    void internal_dispose_restore_refcnt_to_1() const {
#ifdef SK_DEBUG
        SkASSERT(0 == fRefCnt);
        fRefCnt = 1;
#endif
    }

private:
    /**
     *  Called when the ref count goes to 0. In debug builds this first
     *  restores fRefCnt to 1 so the destructor's SkASSERT(fRefCnt == 1)
     *  passes, then deletes the object.
     */
    virtual void internal_dispose() const {
        this->internal_dispose_restore_refcnt_to_1();
        SkDELETE(this);
    }

    friend class SkWeakRefCnt;
    friend class GrTexture;     // to allow GrTexture's internal_dispose to
                                // call SkRefCnt's & directly set fRefCnt (to 1)

    // mutable so that ref()/unref() remain callable on const references,
    // which is the conventional contract for shared immutable objects.
    mutable int32_t fRefCnt;

    typedef SkNoncopyable INHERITED;
};
115
116 ///////////////////////////////////////////////////////////////////////////////
117
/** Helper macro to safely assign one SkRefCnt[TS]* to another, checking for
    null on each side of the assignment, and ensuring that ref() is called
    before unref(), in case the two pointers point to the same object.

    The arguments are parenthesized at every use site so that lvalue
    expressions such as *pp work: without the parentheses, *pp->unref()
    would parse as *(pp->unref()).
*/
#define SkRefCnt_SafeAssign(dst, src)   \
    do {                                \
        if (src) (src)->ref();          \
        if (dst) (dst)->unref();        \
        (dst) = (src);                  \
    } while (0)
128
129
/** Call obj->ref() and return obj. The obj must not be NULL.
*/
template <typename T> static inline T* SkRef(T* ptr) {
    SkASSERT(ptr);
    ptr->ref();
    return ptr;
}
137
/** Check if the argument is non-null, and if so, call obj->ref() and return obj.
*/
template <typename T> static inline T* SkSafeRef(T* ptr) {
    if (NULL != ptr) {
        ptr->ref();
    }
    return ptr;
}
146
/** Check if the argument is non-null, and if so, call obj->unref().
*/
template <typename T> static inline void SkSafeUnref(T* ptr) {
    if (NULL != ptr) {
        ptr->unref();
    }
}
154
155 ///////////////////////////////////////////////////////////////////////////////
156
157 /**
158 * Utility class that simply unref's its argument in the destructor.
159 */
160 template <typename T> class SkAutoTUnref : SkNoncopyable {
161 public:
fObj(obj)162 explicit SkAutoTUnref(T* obj = NULL) : fObj(obj) {}
~SkAutoTUnref()163 ~SkAutoTUnref() { SkSafeUnref(fObj); }
164
get()165 T* get() const { return fObj; }
166
reset(T * obj)167 void reset(T* obj) {
168 SkSafeUnref(fObj);
169 fObj = obj;
170 }
171
swap(SkAutoTUnref * other)172 void swap(SkAutoTUnref* other) {
173 T* tmp = fObj;
174 fObj = other->fObj;
175 other->fObj = tmp;
176 }
177
178 /**
179 * Return the hosted object (which may be null), transferring ownership.
180 * The reference count is not modified, and the internal ptr is set to NULL
181 * so unref() will not be called in our destructor. A subsequent call to
182 * detach() will do nothing and return null.
183 */
detach()184 T* detach() {
185 T* obj = fObj;
186 fObj = NULL;
187 return obj;
188 }
189
190 /**
191 * BlockRef<B> is a type which inherits from B, cannot be created,
192 * and makes ref and unref private.
193 */
194 template<typename B> class BlockRef : public B {
195 private:
196 BlockRef();
197 void ref() const;
198 void unref() const;
199 };
200
201 /** If T is const, the type returned from operator-> will also be const. */
202 typedef typename SkTConstType<BlockRef<T>, SkTIsConst<T>::value>::type BlockRefType;
203
204 /**
205 * SkAutoTUnref assumes ownership of the ref. As a result, it is an error
206 * for the user to ref or unref through SkAutoTUnref. Therefore
207 * SkAutoTUnref::operator-> returns BlockRef<T>*. This prevents use of
208 * skAutoTUnrefInstance->ref() and skAutoTUnrefInstance->unref().
209 */
210 BlockRefType *operator->() const {
211 return static_cast<BlockRefType*>(fObj);
212 }
213 operator T*() { return fObj; }
214
215 private:
216 T* fObj;
217 };
218
/** Non-template convenience wrapper: an SkAutoTUnref fixed to SkRefCnt.
    NOTE(review): the single-argument constructor is non-explicit, allowing
    implicit conversion from SkRefCnt*; callers may depend on that, so it is
    deliberately left implicit here — confirm before tightening.
*/
class SkAutoUnref : public SkAutoTUnref<SkRefCnt> {
public:
    SkAutoUnref(SkRefCnt* obj) : SkAutoTUnref<SkRefCnt>(obj) {}
};
223
224 class SkAutoRef : SkNoncopyable {
225 public:
SkAutoRef(SkRefCnt * obj)226 SkAutoRef(SkRefCnt* obj) : fObj(obj) { SkSafeRef(obj); }
~SkAutoRef()227 ~SkAutoRef() { SkSafeUnref(fObj); }
228 private:
229 SkRefCnt* fObj;
230 };
231
/** Wrapper class for SkRefCnt pointers. This manages ref/unref of a pointer to
    a SkRefCnt (or subclass) object.
*/
template <typename T> class SkRefPtr {
public:
    SkRefPtr() : fPtr(NULL) {}
    // SkSafeRef refs (when non-NULL) and returns its argument, so sharing
    // can happen directly in the initializer lists.
    SkRefPtr(T* obj) : fPtr(SkSafeRef(obj)) {}
    SkRefPtr(const SkRefPtr& other) : fPtr(SkSafeRef(other.fPtr)) {}
    ~SkRefPtr() { SkSafeUnref(fPtr); }

    /** Share rp's object; delegates to the raw-pointer assignment below. */
    SkRefPtr& operator=(const SkRefPtr& rp) { return *this = rp.fPtr; }

    /** Take a shared ref on obj. Safe when obj is the currently held object:
        the macro refs before it unrefs. */
    SkRefPtr& operator=(T* obj) {
        SkRefCnt_SafeAssign(fPtr, obj);
        return *this;
    }

    T* get() const { return fPtr; }
    T& operator*() const { return *fPtr; }
    T* operator->() const { return fPtr; }

    /** Safe-bool idiom: testable in boolean context without permitting
        accidental conversion to integer types. */
    typedef T* SkRefPtr::*unspecified_bool_type;
    operator unspecified_bool_type() const {
        return fPtr ? &SkRefPtr::fPtr : NULL;
    }

private:
    T* fPtr;
};
263
264 #endif
265