/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/GrResourceAllocator.h"

#include "src/gpu/GrDeinstantiateProxyTracker.h"
#include "src/gpu/GrGpuResourcePriv.h"
#include "src/gpu/GrOpList.h"
#include "src/gpu/GrRenderTargetProxy.h"
#include "src/gpu/GrResourceCache.h"
#include "src/gpu/GrResourceProvider.h"
#include "src/gpu/GrSurfacePriv.h"
#include "src/gpu/GrSurfaceProxy.h"
#include "src/gpu/GrSurfaceProxyPriv.h"
#include "src/gpu/GrTextureProxy.h"

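// Overview (a rough sketch, not normative): the allocator is fed the usage interval
// [start op index, end op index] of each GrSurfaceProxy via addInterval(). assign()
// then walks the intervals in increasing-start order (a linear-scan,
// register-allocation-style pass), expiring finished intervals and recycling their
// GrSurfaces for later, non-overlapping intervals. The driving sequence is roughly:
//
//     for each opList:
//         gather proxy intervals via addInterval(...)
//         allocator.markEndOfOpList(opListIndex);
//     allocator.determineRecyclability();
//     while (allocator.assign(&startIndex, &stopIndex, &error)) {
//         // instantiate and execute opLists [startIndex, stopIndex)
//     }
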
#if GR_TRACK_INTERVAL_CREATION
#include <atomic>

uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
    static std::atomic<uint32_t> nextID{1};
    uint32_t id;
    // Loop so we never hand out SK_InvalidUniqueID, even after the counter wraps.
    do {
        id = nextID++;
    } while (id == SK_InvalidUniqueID);
    return id;
}
#endif

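// Hand the actual GrSurface over to the interval's proxy. The interval keeps its own
// ref (fAssignedSurface) so the surface can be recycled when the interval expires.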
void GrResourceAllocator::Interval::assign(sk_sp<GrSurface> s) {
    SkASSERT(!fAssignedSurface);
    fAssignedSurface = s;
    fProxy->priv().assign(std::move(s));
}

void GrResourceAllocator::determineRecyclability() {
    for (Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        if (cur->proxy()->canSkipResourceAllocator()) {
            // These types of proxies can slip in here if they require a stencil buffer
            continue;
        }

        if (cur->uses() >= cur->proxy()->priv().getProxyRefCnt()) {
            // All the refs on the proxy are known to the resource allocator thus no one
            // should be holding onto it outside of Ganesh.
            SkASSERT(cur->uses() == cur->proxy()->priv().getProxyRefCnt());
            cur->markAsRecyclable();
        }
    }
}

void GrResourceAllocator::markEndOfOpList(int opListIndex) {
    SkASSERT(!fAssigned); // We shouldn't be adding any opLists after (or during) assignment

    SkASSERT(fEndOfOpListOpIndices.count() == opListIndex);
    if (!fEndOfOpListOpIndices.empty()) {
        SkASSERT(fEndOfOpListOpIndices.back() < this->curOp());
    }

    fEndOfOpListOpIndices.push_back(this->curOp()); // This is the first op index of the next opList
    SkASSERT(fEndOfOpListOpIndices.count() <= fNumOpLists);
}

GrResourceAllocator::~GrResourceAllocator() {
    SkASSERT(fIntvlList.empty());
    SkASSERT(fActiveIntvls.empty());
    SkASSERT(!fIntvlHash.count());
}

void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
                                      ActualUse actualUse
                                      SkDEBUGCODE(, bool isDirectDstRead)) {

    if (proxy->canSkipResourceAllocator()) {
        // If the proxy is still not instantiated at this point but will need stencil, it will
        // attach its own stencil buffer upon onFlush instantiation.
        if (proxy->isInstantiated()) {
            int minStencilSampleCount = (proxy->asRenderTargetProxy())
                    ? proxy->asRenderTargetProxy()->numStencilSamples()
                    : 0;
            if (minStencilSampleCount) {
                if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(
                        fResourceProvider, proxy->peekSurface(), minStencilSampleCount)) {
                    SkDebugf("WARNING: failed to attach stencil buffer. "
                             "Rendering may be incorrect.\n");
                }
            }
        }
        return;
    }

    SkASSERT(!proxy->priv().ignoredByResourceAllocator());

    SkASSERT(start <= end);
    SkASSERT(!fAssigned); // We shouldn't be adding any intervals after (or during) assignment

    // If a proxy is read only it must refer to a texture with specific content that cannot be
    // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
    // with the same texture.
    if (proxy->readOnly()) {
        // Since we aren't going to add an interval we won't revisit this proxy in assign(). So it
        // must already be instantiated or it must be a lazy proxy that we will instantiate below.
        SkASSERT(proxy->isInstantiated() ||
                 GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState());
    } else {
        if (Interval* intvl = fIntvlHash.find(proxy->uniqueID().asUInt())) {
            // Revise the interval for an existing use
#ifdef SK_DEBUG
            if (0 == start && 0 == end) {
                // This interval is for the initial upload to a deferred proxy. Due to the
                // vagaries of how deferred proxies are collected they can appear as uploads
                // multiple times in a single opList's list and as uploads in several opLists.
                SkASSERT(0 == intvl->start());
            } else if (isDirectDstRead) {
                // Direct reads from the render target itself should occur w/in the existing
                // interval
                SkASSERT(intvl->start() <= start && intvl->end() >= end);
            } else {
                SkASSERT(intvl->end() <= start && intvl->end() <= end);
            }
#endif
            if (ActualUse::kYes == actualUse) {
                intvl->addUse();
            }
            intvl->extendEnd(end);
            return;
        }
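        // No existing interval for this proxy: reuse a recycled Interval from the free
        // list when possible to avoid extra allocations.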
        Interval* newIntvl;
        if (fFreeIntervalList) {
            newIntvl = fFreeIntervalList;
            fFreeIntervalList = newIntvl->next();
            newIntvl->setNext(nullptr);
            newIntvl->resetTo(proxy, start, end);
        } else {
            newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);
        }

        if (ActualUse::kYes == actualUse) {
            newIntvl->addUse();
        }
        fIntvlList.insertByIncreasingStart(newIntvl);
        fIntvlHash.add(newIntvl);
    }

    // Because readOnly proxies do not get a usage interval we must instantiate them here (since it
    // won't occur in GrResourceAllocator::assign)
    if (proxy->readOnly()) {
        // FIXME: remove this once we can do the lazy instantiation from assign instead.
        if (GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState()) {
            if (proxy->priv().doLazyInstantiation(fResourceProvider)) {
                if (proxy->priv().lazyInstantiationType() ==
                    GrSurfaceProxy::LazyInstantiationType::kDeinstantiate) {
                    fDeinstantiateTracker->addProxy(proxy);
                }
            } else {
                fLazyInstantiationError = true;
            }
        }
    }
}

GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
    SkDEBUGCODE(this->validate());

    Interval* temp = fHead;
    if (temp) {
        fHead = temp->next();
        if (!fHead) {
            fTail = nullptr;
        }
        temp->setNext(nullptr);
    }

    SkDEBUGCODE(this->validate());
    return temp;
}

// TODO: fuse this with insertByIncreasingEnd
void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
    SkDEBUGCODE(this->validate());
    SkASSERT(!intvl->next());

    if (!fHead) {
        // 14%
        fHead = fTail = intvl;
    } else if (intvl->start() <= fHead->start()) {
        // 3%
        intvl->setNext(fHead);
        fHead = intvl;
    } else if (fTail->start() <= intvl->start()) {
        // 83%
        fTail->setNext(intvl);
        fTail = intvl;
    } else {
        // almost never
        Interval* prev = fHead;
        Interval* next = prev->next();
        // Advance until 'next' is the first interval starting at or after 'intvl'.
        for (; intvl->start() > next->start(); prev = next, next = next->next()) {
        }

        SkASSERT(next);
        intvl->setNext(next);
        prev->setNext(intvl);
    }

    SkDEBUGCODE(this->validate());
}

// TODO: fuse this with insertByIncreasingStart
void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
    SkDEBUGCODE(this->validate());
    SkASSERT(!intvl->next());

    if (!fHead) {
        // 14%
        fHead = fTail = intvl;
    } else if (intvl->end() <= fHead->end()) {
        // 64%
        intvl->setNext(fHead);
        fHead = intvl;
    } else if (fTail->end() <= intvl->end()) {
        // 3%
        fTail->setNext(intvl);
        fTail = intvl;
    } else {
        // 19% but 81% of those land right after the list's head
        Interval* prev = fHead;
        Interval* next = prev->next();
        // Advance until 'next' is the first interval ending at or after 'intvl'.
        for (; intvl->end() > next->end(); prev = next, next = next->next()) {
        }

        SkASSERT(next);
        intvl->setNext(next);
        prev->setNext(intvl);
    }

    SkDEBUGCODE(this->validate());
}

#ifdef SK_DEBUG
void GrResourceAllocator::IntervalList::validate() const {
    SkASSERT(SkToBool(fHead) == SkToBool(fTail));

    Interval* prev = nullptr;
    // Walk the entire list; 'prev' ends up pointing at the last node.
    for (Interval* cur = fHead; cur; prev = cur, cur = cur->next()) {
    }

    SkASSERT(fTail == prev);
}
#endif

GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::detachAll() {
    Interval* tmp = fHead;
    fHead = nullptr;
    fTail = nullptr;
    return tmp;
}

// 'surface' can be reused. Add it back to the free pool.
void GrResourceAllocator::recycleSurface(sk_sp<GrSurface> surface) {
    const GrScratchKey& key = surface->resourcePriv().getScratchKey();

    if (!key.isValid()) {
        return; // can't do it w/o a valid scratch key
    }

    if (surface->getUniqueKey().isValid()) {
        // If the surface has a unique key we throw it back into the resource cache.
        // If things get really tight 'findSurfaceFor' may pull it back out but there is
        // no need to have it in tight rotation.
        return;
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("putting surface %d back into pool\n", surface->uniqueID().asUInt());
#endif
    // TODO: fix this insertion so we get a more LRU-ish behavior
    fFreePool.insert(key, surface.release());
}

// First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
// If we can't find a usable one, create a new one.
sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy,
                                                     int minStencilSampleCount) {

    if (proxy->asTextureProxy() && proxy->asTextureProxy()->getUniqueKey().isValid()) {
        // First try to reattach to a cached version if the proxy is uniquely keyed
        sk_sp<GrSurface> surface = fResourceProvider->findByUniqueKey<GrSurface>(
                proxy->asTextureProxy()->getUniqueKey());
        if (surface) {
            if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
                                                           minStencilSampleCount)) {
                return nullptr;
            }

            return surface;
        }
    }

    // Next, look in the free pool
    GrScratchKey key;

    proxy->priv().computeScratchKey(&key);

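    // Any free-pool surface that matches the scratch key is acceptable; no further
    // filtering is applied here.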
    auto filter = [] (const GrSurface* s) {
        return true;
    };
    sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
    if (surface) {
        if (SkBudgeted::kYes == proxy->isBudgeted() &&
            GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
            // This gets the job done but isn't quite correct. It would be better to try to
            // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
            surface->resourcePriv().makeBudgeted();
        }

        if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
                                                       minStencilSampleCount)) {
            return nullptr;
        }
        SkASSERT(!surface->getUniqueKey().isValid());
        return surface;
    }

    // Failing that, try to grab a new one from the resource cache
    return proxy->priv().createSurface(fResourceProvider);
}

// Remove any intervals that end before the current index. Return their GrSurfaces
// to the free pool if possible.
void GrResourceAllocator::expire(unsigned int curIndex) {
    while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
        Interval* temp = fActiveIntvls.popHead();
        SkASSERT(!temp->next());

        if (temp->wasAssignedSurface()) {
            sk_sp<GrSurface> surface = temp->detachSurface();

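            // Recyclable surfaces go back into the free pool; otherwise the detached
            // ref is simply dropped when 'surface' goes out of scope.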
            if (temp->isRecyclable()) {
                this->recycleSurface(std::move(surface));
            }
        }

        // Add temp to the free interval list so it can be reused
        SkASSERT(!temp->wasAssignedSurface()); // it had better not have a ref on a surface
        temp->setNext(fFreeIntervalList);
        fFreeIntervalList = temp;
    }
}

bool GrResourceAllocator::onOpListBoundary() const {
    if (fIntvlList.empty()) {
        SkASSERT(fCurOpListIndex+1 <= fNumOpLists);
        // Although technically on an opList boundary there is no need to force an
        // intermediate flush here
        return false;
    }

    const Interval* tmp = fIntvlList.peekHead();
    return fEndOfOpListOpIndices[fCurOpListIndex] <= tmp->start();
}

void GrResourceAllocator::forceIntermediateFlush(int* stopIndex) {
    *stopIndex = fCurOpListIndex+1;

    // This is interrupting the allocation of resources for this flush. We need to
    // proactively clear the active interval list of any intervals that aren't
    // guaranteed to survive the partial flush lest they become zombies (i.e.,
    // holding a deleted surface proxy).
    const Interval* tmp = fIntvlList.peekHead();
    SkASSERT(fEndOfOpListOpIndices[fCurOpListIndex] <= tmp->start());

    fCurOpListIndex++;
    SkASSERT(fCurOpListIndex < fNumOpLists);

    this->expire(tmp->start());
}

bool GrResourceAllocator::assign(int* startIndex, int* stopIndex, AssignError* outError) {
    SkASSERT(outError);
    *outError = fLazyInstantiationError ? AssignError::kFailedProxyInstantiation
                                        : AssignError::kNoError;

    SkASSERT(fNumOpLists == fEndOfOpListOpIndices.count());

    fIntvlHash.reset(); // we don't need the interval hash anymore

    if (fCurOpListIndex >= fEndOfOpListOpIndices.count()) {
        return false; // nothing to render
    }

    *startIndex = fCurOpListIndex;
    *stopIndex = fEndOfOpListOpIndices.count();

    if (fIntvlList.empty()) {
        fCurOpListIndex = fEndOfOpListOpIndices.count();
        return true; // no resources to assign
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("assigning opLists %d through %d out of %d numOpLists\n",
             *startIndex, *stopIndex, fNumOpLists);
    SkDebugf("EndOfOpListIndices: ");
    for (int i = 0; i < fEndOfOpListOpIndices.count(); ++i) {
        SkDebugf("%d ", fEndOfOpListOpIndices[i]);
    }
    SkDebugf("\n");
#endif

    SkDEBUGCODE(fAssigned = true;)

#if GR_ALLOCATION_SPEW
    this->dumpIntervals();
#endif
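    // Linear scan: handle the intervals in increasing-start order. For each interval,
    // first expire every active interval that ended before it starts (recycling their
    // surfaces), then find or create a surface for its proxy.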
    while (Interval* cur = fIntvlList.popHead()) {
        if (fEndOfOpListOpIndices[fCurOpListIndex] <= cur->start()) {
            fCurOpListIndex++;
            SkASSERT(fCurOpListIndex < fNumOpLists);
        }

        this->expire(cur->start());

        int minStencilSampleCount = (cur->proxy()->asRenderTargetProxy())
                ? cur->proxy()->asRenderTargetProxy()->numStencilSamples()
                : 0;

        if (cur->proxy()->isInstantiated()) {
            if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(
                    fResourceProvider, cur->proxy()->peekSurface(), minStencilSampleCount)) {
                *outError = AssignError::kFailedProxyInstantiation;
            }

            fActiveIntvls.insertByIncreasingEnd(cur);

            if (fResourceProvider->overBudget()) {
                // Only force intermediate flushes on opList boundaries
                if (this->onOpListBoundary()) {
                    this->forceIntermediateFlush(stopIndex);
                    return true;
                }
            }

            continue;
        }

        if (GrSurfaceProxy::LazyState::kNot != cur->proxy()->lazyInstantiationState()) {
            if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
                *outError = AssignError::kFailedProxyInstantiation;
            } else {
                if (GrSurfaceProxy::LazyInstantiationType::kDeinstantiate ==
                    cur->proxy()->priv().lazyInstantiationType()) {
                    fDeinstantiateTracker->addProxy(cur->proxy());
                }
            }
        } else if (sk_sp<GrSurface> surface = this->findSurfaceFor(
                           cur->proxy(), minStencilSampleCount)) {
            // TODO: make getUniqueKey virtual on GrSurfaceProxy
            GrTextureProxy* texProxy = cur->proxy()->asTextureProxy();

            if (texProxy && texProxy->getUniqueKey().isValid()) {
                if (!surface->getUniqueKey().isValid()) {
                    fResourceProvider->assignUniqueKeyToResource(texProxy->getUniqueKey(),
                                                                 surface.get());
                }
                SkASSERT(surface->getUniqueKey() == texProxy->getUniqueKey());
            }

#if GR_ALLOCATION_SPEW
            SkDebugf("Assigning %d to %d\n",
                     surface->uniqueID().asUInt(),
                     cur->proxy()->uniqueID().asUInt());
#endif

            cur->assign(std::move(surface));
        } else {
            SkASSERT(!cur->proxy()->isInstantiated());
            *outError = AssignError::kFailedProxyInstantiation;
        }

        fActiveIntvls.insertByIncreasingEnd(cur);

        if (fResourceProvider->overBudget()) {
            // Only force intermediate flushes on opList boundaries
            if (this->onOpListBoundary()) {
                this->forceIntermediateFlush(stopIndex);
                return true;
            }
        }
    }

    // expire all the remaining intervals to drain the active interval list
    this->expire(std::numeric_limits<unsigned int>::max());
    return true;
}


#if GR_ALLOCATION_SPEW
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their range
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - proxyRefs:%d surfaceRefs:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->testingOnly_getBackingRefCnt());
        min = SkTMin(min, cur->start());
        max = SkTMax(max, cur->end());
    }

    // Draw a graph of the usage intervals
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
#endif