/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrResourceAllocator.h"

#include "GrGpuResourcePriv.h"
#include "GrOpList.h"
#include "GrRenderTargetProxy.h"
#include "GrResourceCache.h"
#include "GrResourceProvider.h"
#include "GrSurfacePriv.h"
#include "GrSurfaceProxy.h"
#include "GrSurfaceProxyPriv.h"
#include "GrTextureProxy.h"

#include <limits>

assign(sk_sp<GrSurface> s)20 void GrResourceAllocator::Interval::assign(sk_sp<GrSurface> s) {
21 SkASSERT(!fAssignedSurface);
22 fAssignedSurface = s;
23 fProxy->priv().assign(std::move(s));
24 }
markEndOfOpList(int opListIndex)27 void GrResourceAllocator::markEndOfOpList(int opListIndex) {
28 SkASSERT(!fAssigned); // We shouldn't be adding any opLists after (or during) assignment
29
30 SkASSERT(fEndOfOpListOpIndices.count() == opListIndex);
31 if (!fEndOfOpListOpIndices.empty()) {
32 SkASSERT(fEndOfOpListOpIndices.back() < this->curOp());
33 }
34
35 fEndOfOpListOpIndices.push_back(this->curOp()); // This is the first op index of the next opList
36 }
~GrResourceAllocator()38 GrResourceAllocator::~GrResourceAllocator() {
39 SkASSERT(fIntvlList.empty());
40 SkASSERT(fActiveIntvls.empty());
41 SkASSERT(!fIntvlHash.count());
42 }
// Records that 'proxy' is used over the op index range [start, end]. If the proxy
// already has an interval, the existing interval's end is extended; otherwise a new
// interval is created (reusing a node from the free interval list when possible).
// 'isDirectDstRead' (debug-only) marks reads from the render target itself, which
// must land inside the existing interval.
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end
                                      SkDEBUGCODE(, bool isDirectDstRead)) {
    SkASSERT(start <= end);
    SkASSERT(!fAssigned); // We shouldn't be adding any intervals after (or during) assignment

    if (Interval* intvl = fIntvlHash.find(proxy->uniqueID().asUInt())) {
        // Revise the interval for an existing use
#ifdef SK_DEBUG
        if (0 == start && 0 == end) {
            // This interval is for the initial upload to a deferred proxy. Due to the vagaries
            // of how deferred proxies are collected they can appear as uploads multiple times in a
            // single opLists' list and as uploads in several opLists.
            SkASSERT(0 == intvl->start());
        } else if (isDirectDstRead) {
            // Direct reads from the render target itself should occur w/in the existing interval
            SkASSERT(intvl->start() <= start && intvl->end() >= end);
        } else {
            // A normal new use must begin at or after the current interval's end.
            SkASSERT(intvl->end() <= start && intvl->end() <= end);
        }
#endif
        intvl->extendEnd(end);
        return;
    }

    // No existing interval: build one, preferring a recycled node over a fresh allocation.
    Interval* newIntvl;
    if (fFreeIntervalList) {
        newIntvl = fFreeIntervalList;
        fFreeIntervalList = newIntvl->next();
        newIntvl->resetTo(proxy, start, end);
    } else {
        newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);
    }

    fIntvlList.insertByIncreasingStart(newIntvl);
    fIntvlHash.add(newIntvl);

    if (!fResourceProvider->explicitlyAllocateGPUResources()) {
        // FIXME: remove this once we can do the lazy instantiation from assign instead.
        if (GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState()) {
            proxy->priv().doLazyInstantiation(fResourceProvider);
        }
    }
}
popHead()88 GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
89 Interval* temp = fHead;
90 if (temp) {
91 fHead = temp->next();
92 }
93 return temp;
94 }
96 // TODO: fuse this with insertByIncreasingEnd
insertByIncreasingStart(Interval * intvl)97 void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
98 if (!fHead) {
99 intvl->setNext(nullptr);
100 fHead = intvl;
101 } else if (intvl->start() <= fHead->start()) {
102 intvl->setNext(fHead);
103 fHead = intvl;
104 } else {
105 Interval* prev = fHead;
106 Interval* next = prev->next();
107 for (; next && intvl->start() > next->start(); prev = next, next = next->next()) {
108 }
109 intvl->setNext(next);
110 prev->setNext(intvl);
111 }
112 }
114 // TODO: fuse this with insertByIncreasingStart
insertByIncreasingEnd(Interval * intvl)115 void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
116 if (!fHead) {
117 intvl->setNext(nullptr);
118 fHead = intvl;
119 } else if (intvl->end() <= fHead->end()) {
120 intvl->setNext(fHead);
121 fHead = intvl;
122 } else {
123 Interval* prev = fHead;
124 Interval* next = prev->next();
125 for (; next && intvl->end() > next->end(); prev = next, next = next->next()) {
126 }
127 intvl->setNext(next);
128 prev->setNext(intvl);
129 }
130 }
detachAll()133 GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::detachAll() {
134 Interval* tmp = fHead;
135 fHead = nullptr;
136 return tmp;
137 }
139 // 'surface' can be reused. Add it back to the free pool.
freeUpSurface(sk_sp<GrSurface> surface)140 void GrResourceAllocator::freeUpSurface(sk_sp<GrSurface> surface) {
141 const GrScratchKey &key = surface->resourcePriv().getScratchKey();
142
143 if (!key.isValid()) {
144 return; // can't do it w/o a valid scratch key
145 }
146
147 if (surface->getUniqueKey().isValid()) {
148 // If the surface has a unique key we throw it back into the resource cache.
149 // If things get really tight 'findSurfaceFor' may pull it back out but there is
150 // no need to have it in tight rotation.
151 return;
152 }
153
154 // TODO: fix this insertion so we get a more LRU-ish behavior
155 fFreePool.insert(key, surface.release());
156 }
158 // First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
159 // If we can't find a useable one, create a new one.
findSurfaceFor(const GrSurfaceProxy * proxy,bool needsStencil)160 sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy,
161 bool needsStencil) {
162 // First look in the free pool
163 GrScratchKey key;
164
165 proxy->priv().computeScratchKey(&key);
166
167 auto filter = [&] (const GrSurface* s) {
168 return !proxy->priv().requiresNoPendingIO() || !s->surfacePriv().hasPendingIO();
169 };
170 sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
171 if (surface) {
172 if (SkBudgeted::kYes == proxy->isBudgeted() &&
173 SkBudgeted::kNo == surface->resourcePriv().isBudgeted()) {
174 // This gets the job done but isn't quite correct. It would be better to try to
175 // match budgeted proxies w/ budgeted surface and unbudgeted w/ unbudgeted.
176 surface->resourcePriv().makeBudgeted();
177 }
178
179 GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(), needsStencil);
180 return surface;
181 }
182
183 // Failing that, try to grab a new one from the resource cache
184 return proxy->priv().createSurface(fResourceProvider);
185 }
187 // Remove any intervals that end before the current index. Return their GrSurfaces
188 // to the free pool.
expire(unsigned int curIndex)189 void GrResourceAllocator::expire(unsigned int curIndex) {
190 while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
191 Interval* temp = fActiveIntvls.popHead();
192
193 if (temp->wasAssignedSurface()) {
194 this->freeUpSurface(temp->detachSurface());
195 }
196
197 // Add temp to the free interval list so it can be reused
198 temp->setNext(fFreeIntervalList);
199 fFreeIntervalList = temp;
200 }
201 }
assign(int * startIndex,int * stopIndex,AssignError * outError)203 bool GrResourceAllocator::assign(int* startIndex, int* stopIndex, AssignError* outError) {
204 SkASSERT(outError);
205 *outError = AssignError::kNoError;
206
207 fIntvlHash.reset(); // we don't need the interval hash anymore
208 if (fIntvlList.empty()) {
209 return false; // nothing to render
210 }
211
212 *startIndex = fCurOpListIndex;
213 *stopIndex = fEndOfOpListOpIndices.count();
214
215 if (!fResourceProvider->explicitlyAllocateGPUResources()) {
216 fIntvlList.detachAll(); // arena allocator will clean these up for us
217 return true;
218 }
219
220 SkDEBUGCODE(fAssigned = true;)
221
222 while (Interval* cur = fIntvlList.popHead()) {
223 if (fEndOfOpListOpIndices[fCurOpListIndex] < cur->start()) {
224 fCurOpListIndex++;
225 }
226
227 this->expire(cur->start());
228
229 bool needsStencil = cur->proxy()->asRenderTargetProxy()
230 ? cur->proxy()->asRenderTargetProxy()->needsStencil()
231 : false;
232
233 if (cur->proxy()->priv().isInstantiated()) {
234 GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider,
235 cur->proxy()->priv().peekSurface(),
236 needsStencil);
237
238 fActiveIntvls.insertByIncreasingEnd(cur);
239
240 if (fResourceProvider->overBudget()) {
241 // Only force intermediate draws on opList boundaries
242 if (!fIntvlList.empty() &&
243 fEndOfOpListOpIndices[fCurOpListIndex] < fIntvlList.peekHead()->start()) {
244 *stopIndex = fCurOpListIndex+1;
245 return true;
246 }
247 }
248
249 continue;
250 }
251
252 if (GrSurfaceProxy::LazyState::kNot != cur->proxy()->lazyInstantiationState()) {
253 if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
254 *outError = AssignError::kFailedProxyInstantiation;
255 }
256 } else if (sk_sp<GrSurface> surface = this->findSurfaceFor(cur->proxy(), needsStencil)) {
257 // TODO: make getUniqueKey virtual on GrSurfaceProxy
258 GrTextureProxy* tex = cur->proxy()->asTextureProxy();
259 if (tex && tex->getUniqueKey().isValid()) {
260 fResourceProvider->assignUniqueKeyToResource(tex->getUniqueKey(), surface.get());
261 SkASSERT(surface->getUniqueKey() == tex->getUniqueKey());
262 }
263
264 cur->assign(std::move(surface));
265 } else {
266 SkASSERT(!cur->proxy()->priv().isInstantiated());
267 *outError = AssignError::kFailedProxyInstantiation;
268 }
269
270 fActiveIntvls.insertByIncreasingEnd(cur);
271
272 if (fResourceProvider->overBudget()) {
273 // Only force intermediate draws on opList boundaries
274 if (!fIntvlList.empty() &&
275 fEndOfOpListOpIndices[fCurOpListIndex] < fIntvlList.peekHead()->start()) {
276 *stopIndex = fCurOpListIndex+1;
277 return true;
278 }
279 }
280 }
281
282 // expire all the remaining intervals to drain the active interval list
283 this->expire(std::numeric_limits<unsigned int>::max());
284 return true;
285 }