1 /*
2 * Copyright 2012 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8 #include "GrTextureStripAtlas.h"
9 #include "GrContext.h"
10 #include "GrContextPriv.h"
11 #include "GrResourceProvider.h"
12 #include "GrSurfaceContext.h"
13 #include "SkGr.h"
14 #include "SkPixelRef.h"
15 #include "SkTSearch.h"
16
17 #ifdef SK_DEBUG
18 #define VALIDATE this->validate()
19 #else
20 #define VALIDATE
21 #endif
22
// Global hash mapping atlas Descs to their AtlasEntry, so atlases with an
// identical configuration (context, config, dimensions, row height) are shared.
class GrTextureStripAtlas::Hash : public SkTDynamicHash<GrTextureStripAtlas::AtlasEntry,
                                                        GrTextureStripAtlas::Desc> {};

// Monotonically increasing counter; each atlas takes the next value as its
// fCacheKey, which seeds the unique key for its backing texture.
int32_t GrTextureStripAtlas::gCacheCount = 0;

// Lazily created in GetCache(); destroyed in CleanUp() when the last entry goes.
GrTextureStripAtlas::Hash* GrTextureStripAtlas::gAtlasCache = nullptr;
29
GetCache()30 GrTextureStripAtlas::Hash* GrTextureStripAtlas::GetCache() {
31
32 if (nullptr == gAtlasCache) {
33 gAtlasCache = new Hash;
34 }
35
36 return gAtlasCache;
37 }
38
39 // Remove the specified atlas from the cache
CleanUp(const GrContext *,void * info)40 void GrTextureStripAtlas::CleanUp(const GrContext*, void* info) {
41 SkASSERT(info);
42
43 AtlasEntry* entry = static_cast<AtlasEntry*>(info);
44
45 // remove the cache entry
46 GetCache()->remove(entry->fDesc);
47
48 // remove the actual entry
49 delete entry;
50
51 if (0 == GetCache()->count()) {
52 delete gAtlasCache;
53 gAtlasCache = nullptr;
54 }
55 }
56
// Returns the shared atlas matching 'desc', creating (and caching) one if none
// exists yet. The new entry registers CleanUp() on the context so it is
// destroyed with the context.
GrTextureStripAtlas* GrTextureStripAtlas::GetAtlas(const GrTextureStripAtlas::Desc& desc) {
    // Reuse an existing atlas with a matching description when possible.
    if (AtlasEntry* existing = GetCache()->find(desc)) {
        return existing->fAtlas;
    }

    // Otherwise build a fresh atlas, hook its teardown to the context's
    // cleanup list, and publish it in the global cache.
    AtlasEntry* entry = new AtlasEntry;
    entry->fAtlas = new GrTextureStripAtlas(desc);
    entry->fDesc = desc;
    desc.fContext->addCleanUp(CleanUp, entry);
    GetCache()->add(entry);
    return entry->fAtlas;
}
72
// Builds an empty atlas: carves the fHeight-tall texture area into
// fHeight/fRowHeight rows and places every row on the LRU free list. The
// backing texture itself is created lazily in lockTexture() on first use.
GrTextureStripAtlas::GrTextureStripAtlas(GrTextureStripAtlas::Desc desc)
    : fCacheKey(sk_atomic_inc(&gCacheCount))  // unique id -> unique texture key
    , fLockedRows(0)
    , fDesc(desc)
    , fNumRows(desc.fHeight / desc.fRowHeight)
    , fRows(new AtlasRow[fNumRows])
    , fLRUFront(nullptr)
    , fLRUBack(nullptr) {
    // The atlas height must be an exact multiple of the row height.
    SkASSERT(fNumRows * fDesc.fRowHeight == fDesc.fHeight);
    this->initLRU();
    // Precompute 1/height for converting row offsets to normalized texture coords.
    fNormalizedYHeight = SK_Scalar1 / fDesc.fHeight;
    VALIDATE;
}
86
~GrTextureStripAtlas()87 GrTextureStripAtlas::~GrTextureStripAtlas() { delete[] fRows; }
88
lockRow(const SkBitmap & bitmap)89 int GrTextureStripAtlas::lockRow(const SkBitmap& bitmap) {
90 VALIDATE;
91 if (0 == fLockedRows) {
92 this->lockTexture();
93 if (!fTexContext) {
94 return -1;
95 }
96 }
97
98 int key = bitmap.getGenerationID();
99 int rowNumber = -1;
100 int index = this->searchByKey(key);
101
102 if (index >= 0) {
103 // We already have the data in a row, so we can just return that row
104 AtlasRow* row = fKeyTable[index];
105 if (0 == row->fLocks) {
106 this->removeFromLRU(row);
107 }
108 ++row->fLocks;
109 ++fLockedRows;
110
111 // Since all the rows are always stored in a contiguous array, we can save the memory
112 // required for storing row numbers and just compute it with some pointer arithmetic
113 rowNumber = static_cast<int>(row - fRows);
114 } else {
115 // ~index is the index where we will insert the new key to keep things sorted
116 index = ~index;
117
118 // We don't have this data cached, so pick the least recently used row to copy into
119 AtlasRow* row = this->getLRU();
120
121 ++fLockedRows;
122
123 if (nullptr == row) {
124 // force a flush, which should unlock all the rows; then try again
125 fDesc.fContext->contextPriv().flush(nullptr); // tighten this up?
126 row = this->getLRU();
127 if (nullptr == row) {
128 --fLockedRows;
129 return -1;
130 }
131 }
132
133 this->removeFromLRU(row);
134
135 uint32_t oldKey = row->fKey;
136
137 // If we are writing into a row that already held bitmap data, we need to remove the
138 // reference to that genID which is stored in our sorted table of key values.
139 if (oldKey != kEmptyAtlasRowKey) {
140
141 // Find the entry in the list; if it's before the index where we plan on adding the new
142 // entry, we decrement since it will shift elements ahead of it back by one.
143 int oldIndex = this->searchByKey(oldKey);
144 if (oldIndex < index) {
145 --index;
146 }
147
148 fKeyTable.remove(oldIndex);
149 }
150
151 row->fKey = key;
152 row->fLocks = 1;
153 fKeyTable.insert(index, 1, &row);
154 rowNumber = static_cast<int>(row - fRows);
155
156 SkASSERT(bitmap.width() == fDesc.fWidth);
157 SkASSERT(bitmap.height() == fDesc.fRowHeight);
158
159 // Pass in the kDontFlush flag, since we know we're writing to a part of this texture
160 // that is not currently in use
161 fTexContext->writePixels(bitmap.info(), bitmap.getPixels(), bitmap.rowBytes(),
162 0, rowNumber * fDesc.fRowHeight,
163 GrContextPriv::kDontFlush_PixelOpsFlag);
164 }
165
166 SkASSERT(rowNumber >= 0);
167 VALIDATE;
168 return rowNumber;
169 }
170
// Returns a ref to the atlas texture proxy. Only valid while at least one row
// is locked -- fTexContext is reset when the last row unlocks (unlockTexture).
sk_sp<GrTextureProxy> GrTextureStripAtlas::asTextureProxyRef() const {
    return fTexContext->asTextureProxyRef();
}
174
unlockRow(int row)175 void GrTextureStripAtlas::unlockRow(int row) {
176 VALIDATE;
177 --fRows[row].fLocks;
178 --fLockedRows;
179 SkASSERT(fRows[row].fLocks >= 0 && fLockedRows >= 0);
180 if (0 == fRows[row].fLocks) {
181 this->appendLRU(fRows + row);
182 }
183 if (0 == fLockedRows) {
184 this->unlockTexture();
185 }
186 VALIDATE;
187 }
188
getLRU()189 GrTextureStripAtlas::AtlasRow* GrTextureStripAtlas::getLRU() {
190 // Front is least-recently-used
191 AtlasRow* row = fLRUFront;
192 return row;
193 }
194
// Finds or creates the backing texture proxy -- cached in the resource
// provider under a unique key derived from fCacheKey -- and wraps it in a
// surface context so lockRow() can writePixels into it. On allocation failure
// fTexContext remains null and lockRow() reports -1.
void GrTextureStripAtlas::lockTexture() {
    GrSurfaceDesc texDesc;
    texDesc.fOrigin = kTopLeft_GrSurfaceOrigin;
    texDesc.fWidth = fDesc.fWidth;
    texDesc.fHeight = fDesc.fHeight;
    texDesc.fConfig = fDesc.fConfig;

    // One key domain shared by all texture strip atlases; fCacheKey
    // disambiguates individual atlases within it.
    static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
    GrUniqueKey key;
    GrUniqueKey::Builder builder(&key, kDomain, 1);
    builder[0] = static_cast<uint32_t>(fCacheKey);
    builder.finish();

    sk_sp<GrTextureProxy> proxy = fDesc.fContext->resourceProvider()->findProxyByUniqueKey(key);
    if (!proxy) {
        proxy = GrSurfaceProxy::MakeDeferred(fDesc.fContext->resourceProvider(),
                                             texDesc, SkBackingFit::kExact,
                                             SkBudgeted::kYes,
                                             GrResourceProvider::kNoPendingIO_Flag);
        if (!proxy) {
            // Leave fTexContext null; caller detects the failure.
            return;
        }

        fDesc.fContext->resourceProvider()->assignUniqueKeyToProxy(key, proxy.get());
        // This is a new texture, so all of our cache info is now invalid
        this->initLRU();
        fKeyTable.rewind();
    }
    SkASSERT(proxy);
    fTexContext = fDesc.fContext->contextPriv().makeWrappedSurfaceContext(std::move(proxy),
                                                                          nullptr);
}
227
// Drops our reference to the surface context once no rows are locked. The
// proxy itself stays alive in the resource cache under its unique key, so a
// later lockTexture() can find it again with its contents intact.
void GrTextureStripAtlas::unlockTexture() {
    SkASSERT(fTexContext && 0 == fLockedRows);
    fTexContext.reset();
}
232
initLRU()233 void GrTextureStripAtlas::initLRU() {
234 fLRUFront = nullptr;
235 fLRUBack = nullptr;
236 // Initially all the rows are in the LRU list
237 for (int i = 0; i < fNumRows; ++i) {
238 fRows[i].fKey = kEmptyAtlasRowKey;
239 fRows[i].fNext = nullptr;
240 fRows[i].fPrev = nullptr;
241 this->appendLRU(fRows + i);
242 }
243 SkASSERT(nullptr == fLRUFront || nullptr == fLRUFront->fPrev);
244 SkASSERT(nullptr == fLRUBack || nullptr == fLRUBack->fNext);
245 }
246
appendLRU(AtlasRow * row)247 void GrTextureStripAtlas::appendLRU(AtlasRow* row) {
248 SkASSERT(nullptr == row->fPrev && nullptr == row->fNext);
249 if (nullptr == fLRUFront && nullptr == fLRUBack) {
250 fLRUFront = row;
251 fLRUBack = row;
252 } else {
253 row->fPrev = fLRUBack;
254 fLRUBack->fNext = row;
255 fLRUBack = row;
256 }
257 }
258
removeFromLRU(AtlasRow * row)259 void GrTextureStripAtlas::removeFromLRU(AtlasRow* row) {
260 SkASSERT(row);
261 if (row->fNext && row->fPrev) {
262 row->fPrev->fNext = row->fNext;
263 row->fNext->fPrev = row->fPrev;
264 } else {
265 if (nullptr == row->fNext) {
266 SkASSERT(row == fLRUBack);
267 fLRUBack = row->fPrev;
268 if (fLRUBack) {
269 fLRUBack->fNext = nullptr;
270 }
271 }
272 if (nullptr == row->fPrev) {
273 SkASSERT(row == fLRUFront);
274 fLRUFront = row->fNext;
275 if (fLRUFront) {
276 fLRUFront->fPrev = nullptr;
277 }
278 }
279 }
280 row->fNext = nullptr;
281 row->fPrev = nullptr;
282 }
283
// Binary-searches the sorted key table for 'key'. Returns the index of the
// matching row, or the bitwise-not (~) of the would-be insertion index when
// absent -- SkTSearch's convention, relied upon by lockRow() to keep the
// table sorted on insert.
int GrTextureStripAtlas::searchByKey(uint32_t key) {
    AtlasRow target;
    target.fKey = key;
    return SkTSearch<const AtlasRow,
                     GrTextureStripAtlas::KeyLess>((const AtlasRow**)fKeyTable.begin(),
                                                   fKeyTable.count(),
                                                   &target,
                                                   sizeof(AtlasRow*));
}
293
294 #ifdef SK_DEBUG
// Debug-only consistency check, invoked via the VALIDATE macro at the entry
// and exit of the mutating operations. Verifies the sorted key table, the
// LRU list structure, and the lock-count bookkeeping against each other.
void GrTextureStripAtlas::validate() {

    // Our key table should be sorted
    uint32_t prev = 1 > fKeyTable.count() ? 0 : fKeyTable[0]->fKey;
    for (int i = 1; i < fKeyTable.count(); ++i) {
        SkASSERT(prev < fKeyTable[i]->fKey);
        SkASSERT(fKeyTable[i]->fKey != kEmptyAtlasRowKey);
        prev = fKeyTable[i]->fKey;
    }

    int lruCount = 0;
    // Validate LRU pointers, and count LRU entries
    SkASSERT(nullptr == fLRUFront || nullptr == fLRUFront->fPrev);
    SkASSERT(nullptr == fLRUBack || nullptr == fLRUBack->fNext);
    for (AtlasRow* r = fLRUFront; r != nullptr; r = r->fNext) {
        if (nullptr == r->fNext) {
            SkASSERT(r == fLRUBack);
        } else {
            SkASSERT(r->fNext->fPrev == r);
        }
        ++lruCount;
    }

    int rowLocks = 0;
    int freeRows = 0;

    for (int i = 0; i < fNumRows; ++i) {
        rowLocks += fRows[i].fLocks;
        if (0 == fRows[i].fLocks) {
            ++freeRows;
            bool inLRU = false;
            // Step through the LRU and make sure it's present
            // (O(rows^2) overall, but this is debug-only code)
            for (AtlasRow* r = fLRUFront; r != nullptr; r = r->fNext) {
                if (r == &fRows[i]) {
                    inLRU = true;
                    break;
                }
            }
            SkASSERT(inLRU);
        } else {
            // If we are locked, we should have a key
            SkASSERT(kEmptyAtlasRowKey != fRows[i].fKey);
        }

        // If we have a key != kEmptyAtlasRowKey, it should be in the key table
        SkASSERT(fRows[i].fKey == kEmptyAtlasRowKey || this->searchByKey(fRows[i].fKey) >= 0);
    }

    // Our count of locks should equal the sum of row locks, unless we ran out of rows and flushed,
    // in which case we'll have one more lock than recorded in the rows (to represent the pending
    // lock of a row; which ensures we don't unlock the texture prematurely).
    SkASSERT(rowLocks == fLockedRows || rowLocks + 1 == fLockedRows);

    // We should have one lru entry for each free row
    SkASSERT(freeRows == lruCount);

    // If we have locked rows, we should have a locked texture, otherwise
    // it should be unlocked
    if (fLockedRows == 0) {
        SkASSERT(!fTexContext);
    } else {
        SkASSERT(fTexContext);
    }
}
359 #endif
360