/*
 * Copyright 2010 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */


#include "src/gpu/GrGpu.h"

#include "include/gpu/GrBackendSemaphore.h"
#include "include/gpu/GrBackendSurface.h"
#include "include/gpu/GrContext.h"
#include "src/core/SkCompressedDataUtils.h"
#include "src/core/SkMathPriv.h"
#include "src/core/SkMipMap.h"
#include "src/gpu/GrAuditTrail.h"
#include "src/gpu/GrCaps.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrDataUtils.h"
#include "src/gpu/GrGpuResourcePriv.h"
#include "src/gpu/GrMesh.h"
#include "src/gpu/GrNativeRect.h"
#include "src/gpu/GrPathRendering.h"
#include "src/gpu/GrPipeline.h"
#include "src/gpu/GrRenderTargetPriv.h"
#include "src/gpu/GrResourceCache.h"
#include "src/gpu/GrResourceProvider.h"
#include "src/gpu/GrSemaphore.h"
#include "src/gpu/GrStencilAttachment.h"
#include "src/gpu/GrStencilSettings.h"
#include "src/gpu/GrSurfacePriv.h"
#include "src/gpu/GrTexturePriv.h"
#include "src/gpu/GrTextureProxyPriv.h"
#include "src/gpu/GrTracing.h"
#include "src/utils/SkJSONWriter.h"

////////////////////////////////////////////////////////////////////////////////

GrGpu::GrGpu(GrContext* context) : fResetBits(kAll_GrBackendState), fContext(context) {}

GrGpu::~GrGpu() {}

void GrGpu::disconnect(DisconnectType) {}

////////////////////////////////////////////////////////////////////////////////

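// Returns true (and fills in copyParams, plus scaleAdjust in the resize case) when a texture must
// be copied before it can be sampled with repeat wrap mode: either its dimensions are not powers
// of two on hardware without NPOT tiling support, or the proxy's format has restricted sampling.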
bool GrGpu::IsACopyNeededForRepeatWrapMode(const GrCaps* caps,
                                           GrTextureProxy* texProxy,
                                           SkISize dimensions,
                                           GrSamplerState::Filter filter,
                                           GrTextureProducer::CopyParams* copyParams,
                                           SkScalar scaleAdjust[2]) {
    if (!caps->npotTextureTileSupport() &&
        (!SkIsPow2(dimensions.width()) || !SkIsPow2(dimensions.height()))) {
        SkASSERT(scaleAdjust);
        copyParams->fDimensions = {SkNextPow2(dimensions.width()), SkNextPow2(dimensions.height())};
        scaleAdjust[0] = ((SkScalar)copyParams->fDimensions.width()) / dimensions.width();
        scaleAdjust[1] = ((SkScalar)copyParams->fDimensions.height()) / dimensions.height();
        switch (filter) {
        case GrSamplerState::Filter::kNearest:
            copyParams->fFilter = GrSamplerState::Filter::kNearest;
            break;
        case GrSamplerState::Filter::kBilerp:
        case GrSamplerState::Filter::kMipMap:
            // We are only ever scaling up, so there is no reason to ever indicate kMipMap.
            copyParams->fFilter = GrSamplerState::Filter::kBilerp;
            break;
        }
        return true;
    }

    if (texProxy) {
        // If the texture format itself doesn't support repeat wrap mode or mipmapping (and
        // those capabilities are required) force a copy.
        if (texProxy->hasRestrictedSampling()) {
            copyParams->fFilter = GrSamplerState::Filter::kNearest;
            copyParams->fDimensions = texProxy->dimensions();
            return true;
        }
    }

    return false;
}

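// Returns true (and fills in copyParams) when mipmapped filtering is requested and supported by
// the caps but the proxy itself is not mipmapped, so a mipped copy must be made.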
bool GrGpu::IsACopyNeededForMips(const GrCaps* caps, const GrTextureProxy* texProxy,
                                 GrSamplerState::Filter filter,
                                 GrTextureProducer::CopyParams* copyParams) {
    SkASSERT(texProxy);
    int mipCount = SkMipMap::ComputeLevelCount(texProxy->width(), texProxy->height());
    bool willNeedMips =
            GrSamplerState::Filter::kMipMap == filter && caps->mipMapSupport() && mipCount;
    // If the texture itself isn't mipmapped (and mipmaps are required) force a copy.
    if (willNeedMips && texProxy->mipMapped() == GrMipMapped::kNo) {
        copyParams->fFilter = GrSamplerState::Filter::kNearest;
        copyParams->fDimensions = texProxy->dimensions();
        return true;
    }

    return false;
}

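// Validates the GrMipLevel array passed alongside a texture upload: each level's row bytes must
// satisfy the caps' row-byte rules, the level dimensions must halve down to 1x1, and either no
// level, only the base level, or every level must supply pixel data.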
static bool validate_texel_levels(SkISize dimensions, GrColorType texelColorType,
                                  const GrMipLevel* texels, int mipLevelCount, const GrCaps* caps) {
    SkASSERT(mipLevelCount > 0);
    bool hasBasePixels = texels[0].fPixels;
    int levelsWithPixelsCnt = 0;
    auto bpp = GrColorTypeBytesPerPixel(texelColorType);
    int w = dimensions.fWidth;
    int h = dimensions.fHeight;
    for (int currentMipLevel = 0; currentMipLevel < mipLevelCount; ++currentMipLevel) {
        if (texels[currentMipLevel].fPixels) {
            const size_t minRowBytes = w * bpp;
            if (caps->writePixelsRowBytesSupport()) {
                if (texels[currentMipLevel].fRowBytes < minRowBytes) {
                    return false;
                }
                if (texels[currentMipLevel].fRowBytes % bpp) {
                    return false;
                }
            } else {
                if (texels[currentMipLevel].fRowBytes != minRowBytes) {
                    return false;
                }
            }
            ++levelsWithPixelsCnt;
        }
        if (w == 1 && h == 1) {
            if (currentMipLevel != mipLevelCount - 1) {
                return false;
            }
        } else {
            w = std::max(w / 2, 1);
            h = std::max(h / 2, 1);
        }
    }
    // Either just a base layer or a full stack is required.
    if (mipLevelCount != 1 && (w != 1 || h != 1)) {
        return false;
    }
    // Can specify just the base, all levels, or no levels.
    if (!hasBasePixels) {
        return levelsWithPixelsCnt == 0;
    }
    return levelsWithPixelsCnt == 1 || levelsWithPixelsCnt == mipLevelCount;
}

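// Shared path for the createTexture() overloads below: rejects compressed formats, validates the
// surface parameters against the caps, resolves the sample count for renderable textures, and
// then dispatches to the backend's onCreateTexture().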
sk_sp<GrTexture> GrGpu::createTextureCommon(SkISize dimensions,
                                            const GrBackendFormat& format,
                                            GrRenderable renderable,
                                            int renderTargetSampleCnt,
                                            SkBudgeted budgeted,
                                            GrProtected isProtected,
                                            int mipLevelCount,
                                            uint32_t levelClearMask) {
    if (this->caps()->isFormatCompressed(format)) {
        // Call GrGpu::createCompressedTexture.
        return nullptr;
    }

    GrMipMapped mipMapped = mipLevelCount > 1 ? GrMipMapped::kYes : GrMipMapped::kNo;
    if (!this->caps()->validateSurfaceParams(dimensions, format, renderable, renderTargetSampleCnt,
                                             mipMapped)) {
        return nullptr;
    }

    if (renderable == GrRenderable::kYes) {
        renderTargetSampleCnt =
                this->caps()->getRenderTargetSampleCount(renderTargetSampleCnt, format);
    }
    // Attempt to catch un- or wrongly initialized sample counts.
    SkASSERT(renderTargetSampleCnt > 0 && renderTargetSampleCnt <= 64);
    this->handleDirtyContext();
    auto tex = this->onCreateTexture(dimensions,
                                     format,
                                     renderable,
                                     renderTargetSampleCnt,
                                     budgeted,
                                     isProtected,
                                     mipLevelCount,
                                     levelClearMask);
    if (tex) {
        SkASSERT(tex->backendFormat() == format);
        SkASSERT(GrRenderable::kNo == renderable || tex->asRenderTarget());
        if (!this->caps()->reuseScratchTextures() && renderable == GrRenderable::kNo) {
            tex->resourcePriv().removeScratchKey();
        }
        fStats.incTextureCreates();
        if (renderTargetSampleCnt > 1 && !this->caps()->msaaResolvesAutomatically()) {
            SkASSERT(GrRenderable::kYes == renderable);
            tex->asRenderTarget()->setRequiresManualMSAAResolve();
        }
    }
    return tex;
}

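// Creates an uninitialized texture. When mipmaps are requested the full level count is derived
// from the larger dimension, and every level is cleared up front if the caps require textures to
// be initialized.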
sk_sp<GrTexture> GrGpu::createTexture(SkISize dimensions,
                                      const GrBackendFormat& format,
                                      GrRenderable renderable,
                                      int renderTargetSampleCnt,
                                      GrMipMapped mipMapped,
                                      SkBudgeted budgeted,
                                      GrProtected isProtected) {
    int mipLevelCount = 1;
    if (mipMapped == GrMipMapped::kYes) {
        mipLevelCount =
                32 - SkCLZ(static_cast<uint32_t>(std::max(dimensions.fWidth, dimensions.fHeight)));
    }
    uint32_t levelClearMask =
            this->caps()->shouldInitializeTextures() ? (1 << mipLevelCount) - 1 : 0;
    auto tex = this->createTextureCommon(dimensions, format, renderable, renderTargetSampleCnt,
                                         budgeted, isProtected, mipLevelCount, levelClearMask);
    if (tex && mipMapped == GrMipMapped::kYes && levelClearMask) {
        tex->texturePriv().markMipMapsClean();
    }
    return tex;
}

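// Creates a texture and uploads the provided texel levels. Levels without pixel data are cleared
// when the caps require initialized textures, and the mip chain is marked clean once every level
// ends up with defined contents.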
sk_sp<GrTexture> GrGpu::createTexture(SkISize dimensions,
                                      const GrBackendFormat& format,
                                      GrRenderable renderable,
                                      int renderTargetSampleCnt,
                                      SkBudgeted budgeted,
                                      GrProtected isProtected,
                                      GrColorType textureColorType,
                                      GrColorType srcColorType,
                                      const GrMipLevel texels[],
                                      int texelLevelCount) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    if (texelLevelCount) {
        if (!validate_texel_levels(dimensions, srcColorType, texels, texelLevelCount,
                                   this->caps())) {
            return nullptr;
        }
    }

    int mipLevelCount = std::max(1, texelLevelCount);
    uint32_t levelClearMask = 0;
    if (this->caps()->shouldInitializeTextures()) {
        if (texelLevelCount) {
            for (int i = 0; i < mipLevelCount; ++i) {
                if (!texels[i].fPixels) {
                    levelClearMask |= static_cast<uint32_t>(1 << i);
                }
            }
        } else {
            levelClearMask = static_cast<uint32_t>((1 << mipLevelCount) - 1);
        }
    }

    auto tex = this->createTextureCommon(dimensions, format, renderable, renderTargetSampleCnt,
                                         budgeted, isProtected, texelLevelCount, levelClearMask);
    if (tex) {
        bool markMipLevelsClean = false;
        // Currently if level 0 does not have pixels then no other level may, as enforced by
        // validate_texel_levels.
        if (texelLevelCount && texels[0].fPixels) {
            if (!this->writePixels(tex.get(), 0, 0, dimensions.fWidth, dimensions.fHeight,
                                   textureColorType, srcColorType, texels, texelLevelCount)) {
                return nullptr;
            }
            // Currently if level 1 of the mip map has pixel data then so must all other levels,
            // as enforced by validate_texel_levels.
            markMipLevelsClean = (texelLevelCount > 1 && !levelClearMask && texels[1].fPixels);
            fStats.incTextureUploads();
        } else if (levelClearMask && mipLevelCount > 1) {
            markMipLevelsClean = true;
        }
        if (markMipLevelsClean) {
            tex->texturePriv().markMipMapsClean();
        }
    }
    return tex;
}

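// Creates a texture from pre-compressed data. The data is required, the format must be
// texturable, and the supplied size must cover the full mip chain implied by mipMapped.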
sk_sp<GrTexture> GrGpu::createCompressedTexture(SkISize dimensions,
                                                const GrBackendFormat& format,
                                                SkBudgeted budgeted,
                                                GrMipMapped mipMapped,
                                                GrProtected isProtected,
                                                const void* data,
                                                size_t dataSize) {
    this->handleDirtyContext();
    if (dimensions.width()  < 1 || dimensions.width()  > this->caps()->maxTextureSize() ||
        dimensions.height() < 1 || dimensions.height() > this->caps()->maxTextureSize()) {
        return nullptr;
    }
    // Note if we relax the requirement that data must be provided then we must check
    // caps()->shouldInitializeTextures() here.
    if (!data) {
        return nullptr;
    }
    if (!this->caps()->isFormatTexturable(format)) {
        return nullptr;
    }

    // TODO: expand CompressedDataIsCorrect to work here too
    SkImage::CompressionType compressionType = this->caps()->compressionType(format);

    if (dataSize < SkCompressedDataSize(compressionType, dimensions, nullptr,
                                        mipMapped == GrMipMapped::kYes)) {
        return nullptr;
    }
    return this->onCreateCompressedTexture(dimensions, format, budgeted, mipMapped, isProtected,
                                           data, dataSize);
}

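// Wraps an externally created backend texture for sampling. The format must be texturable and the
// dimensions must not exceed the maximum texture size.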
sk_sp<GrTexture> GrGpu::wrapBackendTexture(const GrBackendTexture& backendTex,
                                           GrColorType colorType,
                                           GrWrapOwnership ownership, GrWrapCacheable cacheable,
                                           GrIOType ioType) {
    SkASSERT(ioType != kWrite_GrIOType);
    this->handleDirtyContext();

    const GrCaps* caps = this->caps();
    SkASSERT(caps);

    if (!caps->isFormatTexturable(backendTex.getBackendFormat())) {
        return nullptr;
    }
    if (backendTex.width() > caps->maxTextureSize() ||
        backendTex.height() > caps->maxTextureSize()) {
        return nullptr;
    }

    return this->onWrapBackendTexture(backendTex, colorType, ownership, cacheable, ioType);
}

sk_sp<GrTexture> GrGpu::wrapCompressedBackendTexture(const GrBackendTexture& backendTex,
                                                     GrWrapOwnership ownership,
                                                     GrWrapCacheable cacheable) {
    this->handleDirtyContext();

    const GrCaps* caps = this->caps();
    SkASSERT(caps);

    if (!caps->isFormatTexturable(backendTex.getBackendFormat())) {
        return nullptr;
    }
    if (backendTex.width() > caps->maxTextureSize() ||
        backendTex.height() > caps->maxTextureSize()) {
        return nullptr;
    }

    return this->onWrapCompressedBackendTexture(backendTex, ownership, cacheable);
}

sk_sp<GrTexture> GrGpu::wrapRenderableBackendTexture(const GrBackendTexture& backendTex,
                                                     int sampleCnt, GrColorType colorType,
                                                     GrWrapOwnership ownership,
                                                     GrWrapCacheable cacheable) {
    this->handleDirtyContext();
    if (sampleCnt < 1) {
        return nullptr;
    }

    const GrCaps* caps = this->caps();

    if (!caps->isFormatTexturable(backendTex.getBackendFormat()) ||
        !caps->isFormatRenderable(backendTex.getBackendFormat(), sampleCnt)) {
        return nullptr;
    }

    if (backendTex.width() > caps->maxRenderTargetSize() ||
        backendTex.height() > caps->maxRenderTargetSize()) {
        return nullptr;
    }
    sk_sp<GrTexture> tex = this->onWrapRenderableBackendTexture(backendTex, sampleCnt, colorType,
                                                                ownership, cacheable);
    SkASSERT(!tex || tex->asRenderTarget());
    if (tex && sampleCnt > 1 && !caps->msaaResolvesAutomatically()) {
        tex->asRenderTarget()->setRequiresManualMSAAResolve();
    }
    return tex;
}

sk_sp<GrRenderTarget> GrGpu::wrapBackendRenderTarget(const GrBackendRenderTarget& backendRT,
                                                     GrColorType colorType) {
    this->handleDirtyContext();

    const GrCaps* caps = this->caps();

    if (!caps->isFormatRenderable(backendRT.getBackendFormat(), backendRT.sampleCnt())) {
        return nullptr;
    }

    sk_sp<GrRenderTarget> rt = this->onWrapBackendRenderTarget(backendRT, colorType);
    if (rt && backendRT.isFramebufferOnly()) {
        rt->setFramebufferOnly();
    }
    return rt;
}

sk_sp<GrRenderTarget> GrGpu::wrapBackendTextureAsRenderTarget(const GrBackendTexture& backendTex,
                                                              int sampleCnt,
                                                              GrColorType colorType) {
    this->handleDirtyContext();

    const GrCaps* caps = this->caps();

    int maxSize = caps->maxTextureSize();
    if (backendTex.width() > maxSize || backendTex.height() > maxSize) {
        return nullptr;
    }

    if (!caps->isFormatRenderable(backendTex.getBackendFormat(), sampleCnt)) {
        return nullptr;
    }

    auto rt = this->onWrapBackendTextureAsRenderTarget(backendTex, sampleCnt, colorType);
    if (rt && sampleCnt > 1 && !this->caps()->msaaResolvesAutomatically()) {
        rt->setRequiresManualMSAAResolve();
    }
    return rt;
}

sk_sp<GrRenderTarget> GrGpu::wrapVulkanSecondaryCBAsRenderTarget(const SkImageInfo& imageInfo,
                                                                 const GrVkDrawableInfo& vkInfo) {
    return this->onWrapVulkanSecondaryCBAsRenderTarget(imageInfo, vkInfo);
}

sk_sp<GrRenderTarget> GrGpu::onWrapVulkanSecondaryCBAsRenderTarget(const SkImageInfo& imageInfo,
                                                                   const GrVkDrawableInfo& vkInfo) {
    // This is only supported on Vulkan so we default to returning nullptr here
    return nullptr;
}

sk_sp<GrGpuBuffer> GrGpu::createBuffer(size_t size, GrGpuBufferType intendedType,
                                       GrAccessPattern accessPattern, const void* data) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    this->handleDirtyContext();
    sk_sp<GrGpuBuffer> buffer = this->onCreateBuffer(size, intendedType, accessPattern, data);
    if (!this->caps()->reuseScratchBuffers()) {
        buffer->resourcePriv().removeScratchKey();
    }
    return buffer;
}

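// Copies srcRect from src into dst at dstPoint. Fails for read-only destinations; the actual copy
// is delegated to the backend's onCopySurface().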
bool GrGpu::copySurface(GrSurface* dst, GrSurface* src, const SkIRect& srcRect,
                        const SkIPoint& dstPoint) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(dst && src);
    SkASSERT(!src->framebufferOnly());

    if (dst->readOnly()) {
        return false;
    }

    this->handleDirtyContext();

    return this->onCopySurface(dst, src, srcRect, dstPoint);
}

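// Reads a rectangle of pixels from a surface into buffer. The read rect must lie within the
// surface, and rowBytes must satisfy the caps' row-byte rules for the destination color type.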
bool GrGpu::readPixels(GrSurface* surface, int left, int top, int width, int height,
                       GrColorType surfaceColorType, GrColorType dstColorType, void* buffer,
                       size_t rowBytes) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(surface);
    SkASSERT(!surface->framebufferOnly());
    SkASSERT(this->caps()->isFormatTexturable(surface->backendFormat()));

    auto subRect = SkIRect::MakeXYWH(left, top, width, height);
    auto bounds  = SkIRect::MakeWH(surface->width(), surface->height());
    if (!bounds.contains(subRect)) {
        return false;
    }

    size_t minRowBytes = SkToSizeT(GrColorTypeBytesPerPixel(dstColorType) * width);
    if (!this->caps()->readPixelsRowBytesSupport()) {
        if (rowBytes != minRowBytes) {
            return false;
        }
    } else {
        if (rowBytes < minRowBytes) {
            return false;
        }
        if (rowBytes % GrColorTypeBytesPerPixel(dstColorType)) {
            return false;
        }
    }

    this->handleDirtyContext();

    return this->onReadPixels(surface, left, top, width, height, surfaceColorType, dstColorType,
                              buffer, rowBytes);
}

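// Uploads texel data to a surface. A single-level write may target a sub-rect of the surface; a
// mipped write must cover the entire surface. On success the affected region is marked dirty via
// didWriteToSurface().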
bool GrGpu::writePixels(GrSurface* surface, int left, int top, int width, int height,
                        GrColorType surfaceColorType, GrColorType srcColorType,
                        const GrMipLevel texels[], int mipLevelCount, bool prepForTexSampling) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    ATRACE_ANDROID_FRAMEWORK_ALWAYS("texture_upload");
    SkASSERT(surface);
    SkASSERT(!surface->framebufferOnly());
    SkASSERT(this->caps()->isFormatTexturableAndUploadable(surfaceColorType,
                                                           surface->backendFormat()));

    if (surface->readOnly()) {
        return false;
    }

    if (mipLevelCount == 0) {
        return false;
    } else if (mipLevelCount == 1) {
        // We require that if we are not mipped, then the write region is contained in the surface.
        auto subRect = SkIRect::MakeXYWH(left, top, width, height);
        auto bounds  = SkIRect::MakeWH(surface->width(), surface->height());
        if (!bounds.contains(subRect)) {
            return false;
        }
    } else if (0 != left || 0 != top || width != surface->width() || height != surface->height()) {
        // We require that if the texels are mipped, then the write region is the entire surface.
        return false;
    }

    if (!validate_texel_levels({width, height}, srcColorType, texels, mipLevelCount,
                               this->caps())) {
        return false;
    }

    this->handleDirtyContext();
    if (this->onWritePixels(surface, left, top, width, height, surfaceColorType, srcColorType,
                            texels, mipLevelCount, prepForTexSampling)) {
        SkIRect rect = SkIRect::MakeXYWH(left, top, width, height);
        this->didWriteToSurface(surface, kTopLeft_GrSurfaceOrigin, &rect, mipLevelCount);
        fStats.incTextureUploads();
        return true;
    }
    return false;
}

bool GrGpu::transferPixelsTo(GrTexture* texture, int left, int top, int width, int height,
                             GrColorType textureColorType, GrColorType bufferColorType,
                             GrGpuBuffer* transferBuffer, size_t offset, size_t rowBytes) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(texture);
    SkASSERT(transferBuffer);
    SkASSERT(this->caps()->isFormatTexturableAndUploadable(textureColorType,
                                                           texture->backendFormat()));

    if (texture->readOnly()) {
        return false;
    }

    // We require that the write region is contained in the texture
    SkIRect subRect = SkIRect::MakeXYWH(left, top, width, height);
    SkIRect bounds = SkIRect::MakeWH(texture->width(), texture->height());
    if (!bounds.contains(subRect)) {
        return false;
    }

    size_t bpp = GrColorTypeBytesPerPixel(bufferColorType);
    if (this->caps()->writePixelsRowBytesSupport()) {
        if (rowBytes < SkToSizeT(bpp * width)) {
            return false;
        }
        if (rowBytes % bpp) {
            return false;
        }
    } else {
        if (rowBytes != SkToSizeT(bpp * width)) {
            return false;
        }
    }

    this->handleDirtyContext();
    if (this->onTransferPixelsTo(texture, left, top, width, height, textureColorType,
                                 bufferColorType, transferBuffer, offset, rowBytes)) {
        SkIRect rect = SkIRect::MakeXYWH(left, top, width, height);
        this->didWriteToSurface(texture, kTopLeft_GrSurfaceOrigin, &rect);
        fStats.incTransfersToTexture();

        return true;
    }
    return false;
}

bool GrGpu::transferPixelsFrom(GrSurface* surface, int left, int top, int width, int height,
                               GrColorType surfaceColorType, GrColorType bufferColorType,
                               GrGpuBuffer* transferBuffer, size_t offset) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(surface);
    SkASSERT(transferBuffer);
    SkASSERT(this->caps()->isFormatTexturable(surface->backendFormat()));

#ifdef SK_DEBUG
    auto supportedRead = this->caps()->supportedReadPixelsColorType(
            surfaceColorType, surface->backendFormat(), bufferColorType);
    SkASSERT(supportedRead.fOffsetAlignmentForTransferBuffer);
    SkASSERT(offset % supportedRead.fOffsetAlignmentForTransferBuffer == 0);
#endif

    // We require that the read region is contained in the surface
    SkIRect subRect = SkIRect::MakeXYWH(left, top, width, height);
    SkIRect bounds = SkIRect::MakeWH(surface->width(), surface->height());
    if (!bounds.contains(subRect)) {
        return false;
    }

    this->handleDirtyContext();
    if (this->onTransferPixelsFrom(surface, left, top, width, height, surfaceColorType,
                                   bufferColorType, transferBuffer, offset)) {
        fStats.incTransfersFromSurface();
        return true;
    }
    return false;
}

bool GrGpu::regenerateMipMapLevels(GrTexture* texture) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(texture);
    SkASSERT(this->caps()->mipMapSupport());
    SkASSERT(texture->texturePriv().mipMapped() == GrMipMapped::kYes);
    if (!texture->texturePriv().mipMapsAreDirty()) {
        // This can happen when the proxy expects mipmaps to be dirty, but they are not dirty on the
        // actual target. This may be caused by things that the drawingManager could not predict,
        // i.e., ops that don't draw anything, aborting a draw for exceptional circumstances, etc.
        // NOTE: This goes away once we quit tracking mipmap state on the actual texture.
        return true;
    }
    if (texture->readOnly()) {
        return false;
    }
    if (this->onRegenerateMipMapLevels(texture)) {
        texture->texturePriv().markMipMapsClean();
        return true;
    }
    return false;
}

void GrGpu::resetTextureBindings() {
    this->handleDirtyContext();
    this->onResetTextureBindings();
}

void GrGpu::resolveRenderTarget(GrRenderTarget* target, const SkIRect& resolveRect,
                                ForExternalIO forExternalIO) {
    SkASSERT(target);
    this->handleDirtyContext();
    this->onResolveRenderTarget(target, resolveRect, forExternalIO);
}

void GrGpu::didWriteToSurface(GrSurface* surface, GrSurfaceOrigin origin, const SkIRect* bounds,
                              uint32_t mipLevels) const {
    SkASSERT(surface);
    SkASSERT(!surface->readOnly());
    // Mark any MIP chain and resolve buffer as dirty if and only if there is a non-empty bounds.
    if (nullptr == bounds || !bounds->isEmpty()) {
        GrTexture* texture = surface->asTexture();
        if (texture && 1 == mipLevels) {
            texture->texturePriv().markMipMapsDirty();
        }
    }
}

int GrGpu::findOrAssignSamplePatternKey(GrRenderTarget* renderTarget) {
    SkASSERT(this->caps()->sampleLocationsSupport());
    SkASSERT(renderTarget->numSamples() > 1 ||
             (renderTarget->renderTargetPriv().getStencilAttachment() &&
              renderTarget->renderTargetPriv().getStencilAttachment()->numSamples() > 1));

    SkSTArray<16, SkPoint> sampleLocations;
    this->querySampleLocations(renderTarget, &sampleLocations);
    return fSamplePatternDictionary.findOrAssignSamplePatternKey(sampleLocations);
}

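// Flushes pending work for the given proxies and signals any client-provided or newly created
// semaphores. If the flush or semaphore creation fails, semaphores created here take over
// ownership of their backend objects so they still get deleted, and kNo is returned.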
GrSemaphoresSubmitted GrGpu::finishFlush(GrSurfaceProxy* proxies[],
                                         int n,
                                         SkSurface::BackendSurfaceAccess access,
                                         const GrFlushInfo& info,
                                         const GrPrepareForExternalIORequests& externalRequests) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    this->stats()->incNumFinishFlushes();
    GrResourceProvider* resourceProvider = fContext->priv().resourceProvider();

    struct SemaphoreInfo {
        std::unique_ptr<GrSemaphore> fSemaphore;
        bool fDidCreate = false;
    };

    bool failedSemaphoreCreation = false;
    std::unique_ptr<SemaphoreInfo[]> semaphoreInfos(new SemaphoreInfo[info.fNumSemaphores]);
    if (this->caps()->semaphoreSupport() && info.fNumSemaphores) {
        for (int i = 0; i < info.fNumSemaphores && !failedSemaphoreCreation; ++i) {
            if (info.fSignalSemaphores[i].isInitialized()) {
                semaphoreInfos[i].fSemaphore = resourceProvider->wrapBackendSemaphore(
                        info.fSignalSemaphores[i],
                        GrResourceProvider::SemaphoreWrapType::kWillSignal,
                        kBorrow_GrWrapOwnership);
            } else {
                semaphoreInfos[i].fSemaphore = resourceProvider->makeSemaphore(false);
                semaphoreInfos[i].fDidCreate = true;
            }
            if (!semaphoreInfos[i].fSemaphore) {
                semaphoreInfos[i].fDidCreate = false;
                failedSemaphoreCreation = true;
            }
        }
        if (!failedSemaphoreCreation) {
            for (int i = 0; i < info.fNumSemaphores && !failedSemaphoreCreation; ++i) {
                this->insertSemaphore(semaphoreInfos[i].fSemaphore.get());
            }
        }
    }

    // We always want to try flushing, so do that before checking if we failed semaphore creation.
    if (!this->onFinishFlush(proxies, n, access, info, externalRequests) ||
        failedSemaphoreCreation) {
        // If we didn't do the flush or failed semaphore creations then none of the semaphores were
        // submitted. Therefore the client can't wait on any of the semaphores. Additionally any
        // semaphores we created here the client is not responsible for deleting so we must make
        // sure they get deleted. We do this by changing the ownership from borrowed to owned.
        for (int i = 0; i < info.fNumSemaphores; ++i) {
            if (semaphoreInfos[i].fDidCreate) {
                SkASSERT(semaphoreInfos[i].fSemaphore);
                semaphoreInfos[i].fSemaphore->setIsOwned();
            }
        }
        return GrSemaphoresSubmitted::kNo;
    }

    for (int i = 0; i < info.fNumSemaphores; ++i) {
        if (!info.fSignalSemaphores[i].isInitialized()) {
            SkASSERT(semaphoreInfos[i].fSemaphore);
            info.fSignalSemaphores[i] = semaphoreInfos[i].fSemaphore->backendSemaphore();
        }
    }

    return this->caps()->semaphoreSupport() ? GrSemaphoresSubmitted::kYes
                                            : GrSemaphoresSubmitted::kNo;
}

#ifdef SK_ENABLE_DUMP_GPU
void GrGpu::dumpJSON(SkJSONWriter* writer) const {
    writer->beginObject();

    // TODO: Is there anything useful in the base class to dump here?

    this->onDumpJSON(writer);

    writer->endObject();
}
#else
void GrGpu::dumpJSON(SkJSONWriter* writer) const { }
#endif

#if GR_TEST_UTILS

#if GR_GPU_STATS
void GrGpu::Stats::dump(SkString* out) {
    out->appendf("Render Target Binds: %d\n", fRenderTargetBinds);
    out->appendf("Shader Compilations: %d\n", fShaderCompilations);
    out->appendf("Textures Created: %d\n", fTextureCreates);
    out->appendf("Texture Uploads: %d\n", fTextureUploads);
    out->appendf("Transfers to Texture: %d\n", fTransfersToTexture);
    out->appendf("Transfers from Surface: %d\n", fTransfersFromSurface);
    out->appendf("Stencil Buffer Creates: %d\n", fStencilAttachmentCreates);
    out->appendf("Number of draws: %d\n", fNumDraws);
    out->appendf("Number of Scratch Textures reused: %d\n", fNumScratchTexturesReused);
}

void GrGpu::Stats::dumpKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) {
    keys->push_back(SkString("render_target_binds")); values->push_back(fRenderTargetBinds);
    keys->push_back(SkString("shader_compilations")); values->push_back(fShaderCompilations);
}

#endif // GR_GPU_STATS
#endif // GR_TEST_UTILS

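// Validates pixmap-backed BackendTextureData against the requested dimensions and mip state:
// every expected level must be present with halved dimensions and a consistent color type.
// Color-fill data is always accepted here, and compressed data is rejected.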
bool GrGpu::MipMapsAreCorrect(SkISize dimensions,
                              GrMipMapped mipMapped,
                              const BackendTextureData* data) {
    int numMipLevels = 1;
    if (mipMapped == GrMipMapped::kYes) {
        numMipLevels = SkMipMap::ComputeLevelCount(dimensions.width(), dimensions.height()) + 1;
    }

    if (!data || data->type() == BackendTextureData::Type::kColor) {
        return true;
    }

    if (data->type() == BackendTextureData::Type::kCompressed) {
        return false;  // This should be going through CompressedDataIsCorrect
    }

    SkASSERT(data->type() == BackendTextureData::Type::kPixmaps);

    if (data->pixmap(0).dimensions() != dimensions) {
        return false;
    }

    SkColorType colorType = data->pixmap(0).colorType();
    for (int i = 1; i < numMipLevels; ++i) {
        dimensions = {std::max(1, dimensions.width()/2), std::max(1, dimensions.height()/2)};
        if (dimensions != data->pixmap(i).dimensions()) {
            return false;
        }
        if (colorType != data->pixmap(i).colorType()) {
            return false;
        }
    }
    return true;
}

bool GrGpu::CompressedDataIsCorrect(SkISize dimensions, SkImage::CompressionType compressionType,
                                    GrMipMapped mipMapped, const BackendTextureData* data) {

    if (!data || data->type() == BackendTextureData::Type::kColor) {
        return true;
    }

    if (data->type() == BackendTextureData::Type::kPixmaps) {
        return false;
    }

    SkASSERT(data->type() == BackendTextureData::Type::kCompressed);

    size_t computedSize = SkCompressedDataSize(compressionType, dimensions,
                                               nullptr, mipMapped == GrMipMapped::kYes);

    return computedSize == data->compressedSize();
}

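// Creates an uncompressed backend texture after validating the format, color type compatibility,
// dimensions, mipmap support, and any supplied pixmap data, then defers to the backend
// implementation.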
GrBackendTexture GrGpu::createBackendTexture(SkISize dimensions,
                                             const GrBackendFormat& format,
                                             GrRenderable renderable,
                                             GrMipMapped mipMapped,
                                             GrProtected isProtected,
                                             const BackendTextureData* data) {
    const GrCaps* caps = this->caps();

    if (!format.isValid()) {
        return {};
    }

    if (caps->isFormatCompressed(format)) {
        // Compressed formats must go through the createCompressedBackendTexture API
        return {};
    }

    if (data && data->type() == BackendTextureData::Type::kPixmaps) {
        auto ct = SkColorTypeToGrColorType(data->pixmap(0).colorType());
        if (!caps->areColorTypeAndFormatCompatible(ct, format)) {
            return {};
        }
    }

    if (dimensions.isEmpty() || dimensions.width()  > caps->maxTextureSize() ||
                                dimensions.height() > caps->maxTextureSize()) {
        return {};
    }

    if (mipMapped == GrMipMapped::kYes && !this->caps()->mipMapSupport()) {
        return {};
    }

    if (!MipMapsAreCorrect(dimensions, mipMapped, data)) {
        return {};
    }

    return this->onCreateBackendTexture(dimensions, format, renderable, mipMapped,
                                        isProtected, data);
}

GrBackendTexture GrGpu::createCompressedBackendTexture(SkISize dimensions,
                                                       const GrBackendFormat& format,
                                                       GrMipMapped mipMapped,
                                                       GrProtected isProtected,
                                                       const BackendTextureData* data) {
    const GrCaps* caps = this->caps();

    if (!format.isValid()) {
        return {};
    }

    SkImage::CompressionType compressionType = caps->compressionType(format);
    if (compressionType == SkImage::CompressionType::kNone) {
        // Uncompressed formats must go through the createBackendTexture API
        return {};
    }

    if (dimensions.isEmpty() ||
        dimensions.width()  > caps->maxTextureSize() ||
        dimensions.height() > caps->maxTextureSize()) {
        return {};
    }

    if (mipMapped == GrMipMapped::kYes && !this->caps()->mipMapSupport()) {
        return {};
    }

    if (!CompressedDataIsCorrect(dimensions, compressionType, mipMapped, data)) {
        return {};
    }

    return this->onCreateCompressedBackendTexture(dimensions, format, mipMapped,
                                                  isProtected, data);
}
902