1 /*
2 * Copyright 2016 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8 #include "src/gpu/GrSurfaceProxy.h"
9 #include "src/gpu/GrSurfaceProxyPriv.h"
10
11 #include "include/gpu/GrRecordingContext.h"
12 #ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
13 #include "include/gpu/vk/GrVulkanTrackerInterface.h"
14 #endif
15 #include "src/core/SkMathPriv.h"
16 #include "src/gpu/GrAttachment.h"
17 #include "src/gpu/GrCaps.h"
18 #include "src/gpu/GrGpuResourcePriv.h"
19 #include "src/gpu/GrImageInfo.h"
20 #include "src/gpu/GrRecordingContextPriv.h"
21 #include "src/gpu/GrResourceProvider.h"
22 #include "src/gpu/GrSurface.h"
23 #include "src/gpu/GrTexture.h"
24 #include "src/gpu/GrTextureRenderTargetProxy.h"
25 #include "src/gpu/SurfaceFillContext.h"
26 #ifdef SKIA_OHOS
27 #include "src/gpu/GrPerfMonitorReporter.h"
28 #endif
29
30 #ifdef SK_DEBUG
31 #include "include/gpu/GrDirectContext.h"
32 #include "src/gpu/GrDirectContextPriv.h"
33 #include "src/gpu/GrRenderTarget.h"
34
is_valid_lazy(const SkISize & dimensions,SkBackingFit fit)35 static bool is_valid_lazy(const SkISize& dimensions, SkBackingFit fit) {
36 // A "fully" lazy proxy's width and height are not known until instantiation time.
37 // So fully lazy proxies are created with width and height < 0. Regular lazy proxies must be
38 // created with positive widths and heights. The width and height are set to 0 only after a
39 // failed instantiation. The former must be "approximate" fit while the latter can be either.
40 return ((dimensions.fWidth < 0 && dimensions.fHeight < 0 && SkBackingFit::kApprox == fit) ||
41 (dimensions.fWidth > 0 && dimensions.fHeight > 0));
42 }
43
is_valid_non_lazy(SkISize dimensions)44 static bool is_valid_non_lazy(SkISize dimensions) {
45 return dimensions.fWidth > 0 && dimensions.fHeight > 0;
46 }
47 #endif
48
49 // emulator mock
50 #ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
51 #ifndef SK_VULKAN
namespace ParallelDebug {
// No-op stand-ins used when the Vulkan image tracker is not compiled in:
// recording a node id does nothing and the "current" node id is always 0.
void RecordNodeId(uint64_t nodeId) {
    static_cast<void>(nodeId);
}
uint64_t GetNodeId() {
    return 0;
}
};
56 #endif
57 #endif
58
59 // OH ISSUE: emulator mock
60 #ifdef SKIA_DFX_FOR_OHOS
61 #ifndef SK_VULKAN
namespace RealAllocConfig {
// No-op stand-ins used when Vulkan is not compiled in: the "real alloc"
// status is always false and setting it is ignored.
bool GetRealAllocStatus() {
    return false;
}
void SetRealAllocStatus(bool ret) {
    static_cast<void>(ret);
}
};
66 #endif
67 #endif
68
69 // Deferred version
// Creates a deferred proxy: dimensions are known up front, but the backing
// GrSurface is created later (at flush/instantiation time).
GrSurfaceProxy::GrSurfaceProxy(const GrBackendFormat& format,
                               SkISize dimensions,
                               SkBackingFit fit,
                               SkBudgeted budgeted,
                               GrProtected isProtected,
                               GrInternalSurfaceFlags surfaceFlags,
                               UseAllocator useAllocator)
        : fSurfaceFlags(surfaceFlags)
        , fFormat(format)
        , fDimensions(dimensions)
        , fFit(fit)
        , fBudgeted(budgeted)
        , fUseAllocator(useAllocator)
#ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
        // Record which node was current at creation time so later allocations
        // can be attributed back to it (OHOS debugging aid).
        , fNodeId(ParallelDebug::GetNodeId())
#endif
#ifdef SKIA_DFX_FOR_OHOS
        // Snapshot of the global "real alloc" flag at creation time.
        , fRealAllocProxy(RealAllocConfig::GetRealAllocStatus())
#endif
        , fIsProtected(isProtected) {
    SkASSERT(fFormat.isValid());
    SkASSERT(is_valid_non_lazy(dimensions));
}
93
94 // Lazy-callback version
// Creates a lazy proxy whose backing surface is produced by 'callback' at
// instantiation time. Fully lazy proxies pass negative dimensions (size known
// only at instantiation and must be approximate fit); partially lazy proxies
// pass positive dimensions (see is_valid_lazy above).
GrSurfaceProxy::GrSurfaceProxy(LazyInstantiateCallback&& callback,
                               const GrBackendFormat& format,
                               SkISize dimensions,
                               SkBackingFit fit,
                               SkBudgeted budgeted,
                               GrProtected isProtected,
                               GrInternalSurfaceFlags surfaceFlags,
                               UseAllocator useAllocator)
        : fSurfaceFlags(surfaceFlags)
        , fFormat(format)
        , fDimensions(dimensions)
        , fFit(fit)
        , fBudgeted(budgeted)
        , fUseAllocator(useAllocator)
        , fLazyInstantiateCallback(std::move(callback))
#ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
        // Record which node was current at creation time for DFX attribution.
        , fNodeId(ParallelDebug::GetNodeId())
#endif
#ifdef SKIA_DFX_FOR_OHOS
        // Snapshot of the global "real alloc" flag at creation time.
        , fRealAllocProxy(RealAllocConfig::GetRealAllocStatus())
#endif
        , fIsProtected(isProtected) {
    SkASSERT(fFormat.isValid());
    SkASSERT(fLazyInstantiateCallback);
    SkASSERT(is_valid_lazy(dimensions, fit));
}
121
122 // Wrapped version
// Wraps an already-instantiated GrSurface. The proxy adopts the surface's
// flags, format, dimensions, budget status, and protection state.
GrSurfaceProxy::GrSurfaceProxy(sk_sp<GrSurface> surface,
                               SkBackingFit fit,
                               UseAllocator useAllocator)
        : fTarget(std::move(surface))
        , fSurfaceFlags(fTarget->flags())
        , fFormat(fTarget->backendFormat())
        , fDimensions(fTarget->dimensions())
        , fFit(fit)
        , fBudgeted(fTarget->resourcePriv().budgetedType() == GrBudgetedType::kBudgeted
                            ? SkBudgeted::kYes
                            : SkBudgeted::kNo)
        , fUseAllocator(useAllocator)
        , fUniqueID(fTarget->uniqueID()) // Note: converting from unique resource ID to a proxy ID!
#ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
        // Record which node was current at creation time for DFX attribution.
        , fNodeId(ParallelDebug::GetNodeId())
#endif
#ifdef SKIA_DFX_FOR_OHOS
        // Snapshot of the global "real alloc" flag at creation time.
        , fRealAllocProxy(RealAllocConfig::GetRealAllocStatus())
#endif
        , fIsProtected(fTarget->isProtected() ? GrProtected::kYes : GrProtected::kNo) {
    SkASSERT(fFormat.isValid());
}
145
~GrSurfaceProxy()146 GrSurfaceProxy::~GrSurfaceProxy() {
147 }
148
149 #ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
150 struct NodeIdHelper {
NodeIdHelperNodeIdHelper151 explicit inline NodeIdHelper(uint64_t nodeId): initNodeId_(ParallelDebug::GetNodeId())
152 {
153 ParallelDebug::RecordNodeId(nodeId);
154 }
~NodeIdHelperNodeIdHelper155 inline ~NodeIdHelper()
156 {
157 ParallelDebug::RecordNodeId(initNodeId_);
158 }
159 uint64_t initNodeId_;
160 };
161 #endif
162
// Creates the backing GrSurface for this (deferred, non-lazy) proxy using the
// resource provider. Returns nullptr on allocation failure. On success the
// proxy's resource tag is propagated to the surface for memory accounting.
sk_sp<GrSurface> GrSurfaceProxy::createSurfaceImpl(GrResourceProvider* resourceProvider,
                                                   int sampleCnt,
                                                   GrRenderable renderable,
                                                   GrMipmapped mipMapped) const {
#ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
    // Temporarily make this proxy's node id the "current" one so resources
    // created below are attributed to it; restored when 'helper' is destroyed.
    NodeIdHelper helper(fNodeId);
#endif
    // Mipmaps are only supported with exact-fit backing stores.
    SkASSERT(mipMapped == GrMipmapped::kNo || fFit == SkBackingFit::kExact);
    SkASSERT(!this->isLazy());
    SkASSERT(!fTarget);
#ifdef SKIA_OHOS
    // Start of the allocation-time measurement reported to the perf monitor.
    int64_t currentTime = GrPerfMonitorReporter::getCurrentTime();
#endif
    sk_sp<GrSurface> surface;
    if (SkBackingFit::kApprox == fFit) {
        // Approx fit: the provider may round dimensions up for reuse.
        surface = resourceProvider->createApproxTexture(fDimensions,
                                                        fFormat,
                                                        fFormat.textureType(),
                                                        renderable,
                                                        sampleCnt,
                                                        fIsProtected);
    } else {
        surface = resourceProvider->createTexture(fDimensions,
                                                  fFormat,
                                                  fFormat.textureType(),
                                                  renderable,
                                                  sampleCnt,
                                                  mipMapped,
                                                  fBudgeted,
                                                  fIsProtected);
    }
    if (!surface) {
        return nullptr;
    }

    if (fGrProxyTag.isGrTagValid()) {
        // Propagate this proxy's ownership tag to the backing surface.
#ifdef SKIA_DFX_FOR_OHOS
        surface->setResourceTag(fGrProxyTag, fRealAllocProxy);
#else
        surface->setResourceTag(fGrProxyTag);
#endif
#ifdef SKIA_OHOS
        // NOTE(review): timing/perf events are reported only when the tag is
        // valid — confirm this is intentional rather than for all allocations.
        int64_t allocTime = GrPerfMonitorReporter::getCurrentTime() - currentTime;
        GrPerfMonitorReporter::GetInstance().recordTextureNode(fGrProxyTag.fName, allocTime);
        GrPerfMonitorReporter::GetInstance().recordTexturePerfEvent(fGrProxyTag.fName,
            fGrProxyTag.fPid, static_cast<int32_t>(resourceProvider->getMaxResourceBytes()),
            static_cast<int32_t>(resourceProvider->getBudgetedResourceBytes()), allocTime);
#endif
    }
    return surface;
}
214
canSkipResourceAllocator() const215 bool GrSurfaceProxy::canSkipResourceAllocator() const {
216 if (fUseAllocator == UseAllocator::kNo) {
217 // Usually an atlas or onFlush proxy
218 return true;
219 }
220
221 auto peek = this->peekSurface();
222 if (!peek) {
223 return false;
224 }
225 // If this resource is already allocated and not recyclable then the resource allocator does
226 // not need to do anything with it.
227 return !peek->resourcePriv().getScratchKey().isValid();
228 }
229
// Takes ownership of 'surface' as this proxy's instantiated backing target.
// 'surface' must be non-null and the proxy must not already be instantiated.
void GrSurfaceProxy::assign(sk_sp<GrSurface> surface) {
    SkASSERT(!fTarget && surface);

    SkDEBUGCODE(this->validateSurface(surface.get());)

    fTarget = std::move(surface);

#ifdef SK_DEBUG
    // A render-target proxy must be backed by an actual render target.
    if (this->asRenderTargetProxy()) {
        SkASSERT(fTarget->asRenderTarget());
    }

    // In order to give DDL users some flexibility in the destination of their DDLs,
    // a DDL's target proxy can be more conservative (and thus require less memory)
    // than the actual GrSurface used to fulfill it.
    if (!this->isDDLTarget() && kInvalidGpuMemorySize != this->getRawGpuMemorySize_debugOnly()) {
        // TODO(11373): Can this check be exact?
        SkASSERT(fTarget->gpuMemorySize() <= this->getRawGpuMemorySize_debugOnly());
    }
#endif
}
251
instantiateImpl(GrResourceProvider * resourceProvider,int sampleCnt,GrRenderable renderable,GrMipmapped mipMapped,const GrUniqueKey * uniqueKey)252 bool GrSurfaceProxy::instantiateImpl(GrResourceProvider* resourceProvider, int sampleCnt,
253 GrRenderable renderable, GrMipmapped mipMapped,
254 const GrUniqueKey* uniqueKey) {
255 SkASSERT(!this->isLazy());
256 if (fTarget) {
257 if (uniqueKey && uniqueKey->isValid()) {
258 SkASSERT(fTarget->getUniqueKey().isValid() && fTarget->getUniqueKey() == *uniqueKey);
259 }
260 return true;
261 }
262
263 sk_sp<GrSurface> surface = this->createSurfaceImpl(resourceProvider, sampleCnt, renderable,
264 mipMapped);
265 if (!surface) {
266 return false;
267 }
268
269 // If there was an invalidation message pending for this key, we might have just processed it,
270 // causing the key (stored on this proxy) to become invalid.
271 if (uniqueKey && uniqueKey->isValid()) {
272 resourceProvider->assignUniqueKeyToResource(*uniqueKey, surface.get());
273 }
274
275 this->assign(std::move(surface));
276
277 return true;
278 }
279
deinstantiate()280 void GrSurfaceProxy::deinstantiate() {
281 SkASSERT(this->isInstantiated());
282 fTarget = nullptr;
283 }
284
computeScratchKey(const GrCaps & caps,GrScratchKey * key) const285 void GrSurfaceProxy::computeScratchKey(const GrCaps& caps, GrScratchKey* key) const {
286 SkASSERT(!this->isFullyLazy());
287 GrRenderable renderable = GrRenderable::kNo;
288 int sampleCount = 1;
289 if (const auto* rtp = this->asRenderTargetProxy()) {
290 renderable = GrRenderable::kYes;
291 sampleCount = rtp->numSamples();
292 }
293
294 const GrTextureProxy* tp = this->asTextureProxy();
295 GrMipmapped mipMapped = GrMipmapped::kNo;
296 if (tp) {
297 mipMapped = tp->mipmapped();
298 }
299
300 GrTexture::ComputeScratchKey(caps, this->backendFormat(), this->backingStoreDimensions(),
301 renderable, sampleCount, mipMapped, fIsProtected, key);
302 }
303
backingStoreDimensions() const304 SkISize GrSurfaceProxy::backingStoreDimensions() const {
305 SkASSERT(!this->isFullyLazy());
306 if (fTarget) {
307 return fTarget->dimensions();
308 }
309
310 if (SkBackingFit::kExact == fFit) {
311 return fDimensions;
312 }
313 return GrResourceProvider::MakeApprox(fDimensions);
314 }
315
isFunctionallyExact() const316 bool GrSurfaceProxy::isFunctionallyExact() const {
317 SkASSERT(!this->isFullyLazy());
318 return fFit == SkBackingFit::kExact ||
319 fDimensions == GrResourceProvider::MakeApprox(fDimensions);
320 }
321
isFormatCompressed(const GrCaps * caps) const322 bool GrSurfaceProxy::isFormatCompressed(const GrCaps* caps) const {
323 return caps->isFormatCompressed(this->backendFormat());
324 }
325
326 #ifdef SK_DEBUG
validate(GrContext_Base * context) const327 void GrSurfaceProxy::validate(GrContext_Base* context) const {
328 if (fTarget) {
329 SkASSERT(fTarget->getContext()->priv().matches(context));
330 }
331 }
332 #endif
333
// Creates a new proxy containing a copy of 'srcRect' from 'src'. A plain
// surface-to-surface copy is attempted first (not possible for external
// textures); failing that, the source texture is drawn/blitted into a new
// render target. Returns nullptr if neither path works. When 'outTask' is
// non-null it receives the render task that performs the copy.
sk_sp<GrSurfaceProxy> GrSurfaceProxy::Copy(GrRecordingContext* rContext,
                                           sk_sp<GrSurfaceProxy> src,
                                           GrSurfaceOrigin origin,
                                           GrMipmapped mipMapped,
                                           SkIRect srcRect,
                                           SkBackingFit fit,
                                           SkBudgeted budgeted,
                                           RectsMustMatch rectsMustMatch,
                                           sk_sp<GrRenderTask>* outTask) {
    SkASSERT(!src->isFullyLazy());
    int width;
    int height;

    SkIPoint dstPoint;
    if (rectsMustMatch == RectsMustMatch::kYes) {
        // Destination mirrors the source layout: same size, same offset.
        width = src->width();
        height = src->height();
        dstPoint = {srcRect.fLeft, srcRect.fTop};
    } else {
        // Destination is just big enough for the copied rect, placed at (0,0).
        width = srcRect.width();
        height = srcRect.height();
        dstPoint = {0, 0};
    }

    // NOTE(review): width/height were captured before this clipping, so a
    // partially out-of-bounds srcRect yields a dst larger than the copied
    // area — confirm callers depend on this.
    if (!srcRect.intersect(SkIRect::MakeSize(src->dimensions()))) {
        return {};
    }
    auto format = src->backendFormat().makeTexture2D();
    SkASSERT(format.isValid());

    if (src->backendFormat().textureType() != GrTextureType::kExternal) {
        // Preferred path: direct surface-to-surface copy.
        GrImageInfo info(GrColorType::kUnknown, kUnknown_SkAlphaType, nullptr, {width, height});
        auto dstContext = rContext->priv().makeSC(info,
                                                  format,
                                                  fit,
                                                  origin,
                                                  GrRenderable::kNo,
                                                  1,
                                                  mipMapped,
                                                  src->isProtected(),
                                                  budgeted);
        sk_sp<GrRenderTask> copyTask;
        if (dstContext && (copyTask = dstContext->copy(src, srcRect, dstPoint))) {
            if (outTask) {
                *outTask = std::move(copyTask);
            }
            return dstContext->asSurfaceProxyRef();
        }
    }
    if (src->asTextureProxy()) {
        // Fallback path: sample the source texture into a new render target.
        auto dstContext = rContext->priv().makeSFC(kUnknown_SkAlphaType,
                                                   nullptr,
                                                   {width, height},
                                                   fit,
                                                   format,
                                                   1,
                                                   mipMapped,
                                                   src->isProtected(),
                                                   GrSwizzle::RGBA(),
                                                   GrSwizzle::RGBA(),
                                                   origin,
                                                   budgeted);
        GrSurfaceProxyView view(std::move(src), origin, GrSwizzle::RGBA());
        if (dstContext && dstContext->blitTexture(std::move(view), srcRect, dstPoint)) {
            if (outTask) {
                *outTask = dstContext->refRenderTask();
            }
            return dstContext->asSurfaceProxyRef();
        }
    }
    // Can't use backend copies or draws.
    return nullptr;
}
407
Copy(GrRecordingContext * context,sk_sp<GrSurfaceProxy> src,GrSurfaceOrigin origin,GrMipmapped mipMapped,SkBackingFit fit,SkBudgeted budgeted,sk_sp<GrRenderTask> * outTask)408 sk_sp<GrSurfaceProxy> GrSurfaceProxy::Copy(GrRecordingContext* context,
409 sk_sp<GrSurfaceProxy> src,
410 GrSurfaceOrigin origin,
411 GrMipmapped mipMapped,
412 SkBackingFit fit,
413 SkBudgeted budgeted,
414 sk_sp<GrRenderTask>* outTask) {
415 SkASSERT(!src->isFullyLazy());
416 auto rect = SkIRect::MakeSize(src->dimensions());
417 return Copy(context,
418 std::move(src),
419 origin,
420 mipMapped,
421 rect,
422 fit,
423 budgeted,
424 RectsMustMatch::kNo,
425 outTask);
426 }
427
428 #if GR_TEST_UTILS
testingOnly_getBackingRefCnt() const429 int32_t GrSurfaceProxy::testingOnly_getBackingRefCnt() const {
430 if (fTarget) {
431 return fTarget->testingOnly_getRefCnt();
432 }
433
434 return -1; // no backing GrSurface
435 }
436
// Testing-only: exposes the proxy's internal surface flags.
GrInternalSurfaceFlags GrSurfaceProxy::testingOnly_getFlags() const {
    return fSurfaceFlags;
}
440
dump() const441 SkString GrSurfaceProxy::dump() const {
442 SkString tmp;
443
444 tmp.appendf("proxyID: %d - surfaceID: %d",
445 this->uniqueID().asUInt(),
446 this->peekSurface() ? this->peekSurface()->uniqueID().asUInt()
447 : -1);
448 return tmp;
449 }
450
451 #endif
452
// Converts this kApprox proxy to kExact. When 'allocatedCaseOnly' is true,
// the conversion is only performed if the proxy is already instantiated
// (converting an uninstantiated proxy would change the flush-time allocation).
void GrSurfaceProxyPriv::exactify(bool allocatedCaseOnly) {
    SkASSERT(!fProxy->isFullyLazy());
    if (this->isExact()) {
        return;
    }

    SkASSERT(SkBackingFit::kApprox == fProxy->fFit);

    if (fProxy->fTarget) {
        // The kApprox but already instantiated case. Setting the proxy's width & height to
        // the instantiated width & height could have side-effects going forward, since we're
        // obliterating the area of interest information. This call (exactify) only used
        // when converting an SkSpecialImage to an SkImage so the proxy shouldn't be
        // used for additional draws.
        fProxy->fDimensions = fProxy->fTarget->dimensions();
        return;
    }

#ifndef SK_CRIPPLE_TEXTURE_REUSE
    // In the post-implicit-allocation world we can't convert this proxy to be exact fit
    // at this point. With explicit allocation switching this to exact will result in a
    // different allocation at flush time. With implicit allocation, allocation would occur
    // at draw time (rather than flush time) so this pathway was encountered less often (if
    // at all).
    if (allocatedCaseOnly) {
        return;
    }
#endif

    // The kApprox uninstantiated case. Making this proxy be exact should be okay.
    // It could mess things up if prior decisions were based on the approximate size.
    fProxy->fFit = SkBackingFit::kExact;
    // fGpuMemorySize is used when caching specialImages for the image filter DAG. If it has
    // already been computed we want to leave it alone so that amount will be removed when
    // the special image goes away. If it hasn't been computed yet it might as well compute the
    // exact amount.
}
490
doLazyInstantiation(GrResourceProvider * resourceProvider)491 bool GrSurfaceProxyPriv::doLazyInstantiation(GrResourceProvider* resourceProvider) {
492 SkASSERT(fProxy->isLazy());
493
494 sk_sp<GrSurface> surface;
495 if (const auto& uniqueKey = fProxy->getUniqueKey(); uniqueKey.isValid()) {
496 // First try to reattach to a cached version if the proxy is uniquely keyed
497 surface = resourceProvider->findByUniqueKey<GrSurface>(uniqueKey);
498 }
499
500 bool syncKey = true;
501 bool releaseCallback = false;
502 if (!surface) {
503 #ifdef SKIA_DFX_FOR_RECORD_VKIMAGE
504 NodeIdHelper helper(nodeId);
505 #endif
506 auto result = fProxy->fLazyInstantiateCallback(resourceProvider, fProxy->callbackDesc());
507 surface = std::move(result.fSurface);
508 syncKey = result.fKeyMode == GrSurfaceProxy::LazyInstantiationKeyMode::kSynced;
509 releaseCallback = surface && result.fReleaseCallback;
510 }
511 if (!surface) {
512 fProxy->fDimensions.setEmpty();
513 return false;
514 }
515
516 if (fProxy->isFullyLazy()) {
517 // This was a fully lazy proxy. We need to fill in the width & height. For partially
518 // lazy proxies we must preserve the original width & height since that indicates
519 // the content area.
520 fProxy->fDimensions = surface->dimensions();
521 }
522
523 SkASSERT(fProxy->width() <= surface->width());
524 SkASSERT(fProxy->height() <= surface->height());
525
526 if (GrTextureProxy* texProxy = fProxy->asTextureProxy()) {
527 texProxy->setTargetKeySync(syncKey);
528 if (syncKey) {
529 const GrUniqueKey& key = texProxy->getUniqueKey();
530 if (key.isValid()) {
531 if (!surface->asTexture()->getUniqueKey().isValid()) {
532 // If 'surface' is newly created, attach the unique key
533 resourceProvider->assignUniqueKeyToResource(key, surface.get());
534 } else {
535 // otherwise we had better have reattached to a cached version
536 SkASSERT(surface->asTexture()->getUniqueKey() == key);
537 }
538 } else {
539 SkASSERT(!surface->getUniqueKey().isValid());
540 }
541 }
542 }
543
544 this->assign(std::move(surface));
545 if (releaseCallback) {
546 fProxy->fLazyInstantiateCallback = nullptr;
547 }
548
549 return true;
550 }
551
552 #ifdef SK_DEBUG
// Debug-only: checks that 'surface' is compatible with this proxy — the
// backend formats must match, then subclass-specific checks run.
void GrSurfaceProxy::validateSurface(const GrSurface* surface) {
    SkASSERTF(surface->backendFormat() == fFormat, "%s != %s",
              surface->backendFormat().toStr().c_str(), fFormat.toStr().c_str());

    // Let the subclass (texture / render-target proxy) run its own checks.
    this->onValidateSurface(surface);
}
559 #endif
560