/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/mock/GrMockGpu.h"

#include "src/gpu/GrThreadSafePipelineBuilder.h"
#include "src/gpu/mock/GrMockAttachment.h"
#include "src/gpu/mock/GrMockBuffer.h"
#include "src/gpu/mock/GrMockCaps.h"
#include "src/gpu/mock/GrMockOpsRenderPass.h"
#include "src/gpu/mock/GrMockTexture.h"

#include <atomic>

int GrMockGpu::NextInternalTextureID() {
    static std::atomic<int> nextID{1};
    int id;
    do {
        id = nextID.fetch_add(1, std::memory_order_relaxed);
    } while (0 == id);  // Reserve 0 for an invalid ID.
    return id;
}

int GrMockGpu::NextExternalTextureID() {
    // We use negative ints for the "testing only external textures" so they can easily be
    // identified when debugging.
    static std::atomic<int> nextID{-1};
    return nextID.fetch_add(-1, std::memory_order_relaxed);
}

int GrMockGpu::NextInternalRenderTargetID() {
    // We start off with large numbers to differentiate from texture IDs, even though they're
    // technically in a different space.
    static std::atomic<int> nextID{SK_MaxS32};
    return nextID.fetch_add(-1, std::memory_order_relaxed);
}

int GrMockGpu::NextExternalRenderTargetID() {
    // We use large negative ints for the "testing only external render targets" so they can easily
    // be identified when debugging.
    static std::atomic<int> nextID{SK_MinS32};
    return nextID.fetch_add(1, std::memory_order_relaxed);
}

sk_sp<GrGpu> GrMockGpu::Make(const GrMockOptions* mockOptions,
                             const GrContextOptions& contextOptions, GrDirectContext* direct) {
    static const GrMockOptions kDefaultOptions = GrMockOptions();
    if (!mockOptions) {
        mockOptions = &kDefaultOptions;
    }
    return sk_sp<GrGpu>(new GrMockGpu(direct, *mockOptions, contextOptions));
}

GrOpsRenderPass* GrMockGpu::onGetOpsRenderPass(GrRenderTarget* rt,
                                               bool /*useMSAASurface*/,
                                               GrAttachment*,
                                               GrSurfaceOrigin origin,
                                               const SkIRect& bounds,
                                               const GrOpsRenderPass::LoadAndStoreInfo& colorInfo,
                                               const GrOpsRenderPass::StencilLoadAndStoreInfo&,
                                               const SkTArray<GrSurfaceProxy*,true>& sampledProxies,
                                               GrXferBarrierFlags renderPassXferBarriers) {
    return new GrMockOpsRenderPass(this, rt, origin, colorInfo);
}

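// Tallies the render pass's draws into fStats, then takes ownership of and deletes the pass.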
void GrMockGpu::submit(GrOpsRenderPass* renderPass) {
    for (int i = 0; i < static_cast<GrMockOpsRenderPass*>(renderPass)->numDraws(); ++i) {
        fStats.incNumDraws();
    }
    delete renderPass;
}

GrMockGpu::GrMockGpu(GrDirectContext* direct, const GrMockOptions& options,
                     const GrContextOptions& contextOptions)
        : INHERITED(direct)
        , fMockOptions(options) {
    this->initCapsAndCompiler(sk_make_sp<GrMockCaps>(contextOptions, options));
}

GrMockGpu::~GrMockGpu() {}

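// The mock backend has no thread-safe pipeline builder, so both accessors return null.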
GrThreadSafePipelineBuilder* GrMockGpu::pipelineBuilder() {
    return nullptr;
}

sk_sp<GrThreadSafePipelineBuilder> GrMockGpu::refPipelineBuilder() {
    return nullptr;
}

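// Mints fresh internal IDs and returns either a plain texture or a texture/render target combo.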
sk_sp<GrTexture> GrMockGpu::onCreateTexture(SkISize dimensions,
                                            const GrBackendFormat& format,
                                            GrRenderable renderable,
                                            int renderTargetSampleCnt,
                                            SkBudgeted budgeted,
                                            GrProtected isProtected,
                                            int mipLevelCount,
                                            uint32_t levelClearMask) {
    if (fMockOptions.fFailTextureAllocations) {
        return nullptr;
    }

    // Compressed formats should go through onCreateCompressedTexture
    SkASSERT(format.asMockCompressionType() == SkImage::CompressionType::kNone);

    GrColorType ct = format.asMockColorType();
    SkASSERT(ct != GrColorType::kUnknown);

    GrMipmapStatus mipmapStatus =
            mipLevelCount > 1 ? GrMipmapStatus::kDirty : GrMipmapStatus::kNotAllocated;
    GrMockTextureInfo texInfo(ct, SkImage::CompressionType::kNone, NextInternalTextureID());
    if (renderable == GrRenderable::kYes) {
        GrMockRenderTargetInfo rtInfo(ct, NextInternalRenderTargetID());
        return sk_sp<GrTexture>(new GrMockTextureRenderTarget(this, budgeted, dimensions,
                                                              renderTargetSampleCnt, isProtected,
                                                              mipmapStatus, texInfo, rtInfo));
    }
    return sk_sp<GrTexture>(
            new GrMockTexture(this, budgeted, dimensions, isProtected, mipmapStatus, texInfo));
}

// TODO: why no 'isProtected' ?!
sk_sp<GrTexture> GrMockGpu::onCreateCompressedTexture(SkISize dimensions,
                                                      const GrBackendFormat& format,
                                                      SkBudgeted budgeted,
                                                      GrMipmapped mipMapped,
                                                      GrProtected isProtected,
                                                      const void* data, size_t dataSize) {
    if (fMockOptions.fFailTextureAllocations) {
        return nullptr;
    }

#ifdef SK_DEBUG
    // Uncompressed formats should go through onCreateTexture
    SkImage::CompressionType compression = format.asMockCompressionType();
    SkASSERT(compression != SkImage::CompressionType::kNone);
#endif

    GrMipmapStatus mipmapStatus = (mipMapped == GrMipmapped::kYes)
                                                                ? GrMipmapStatus::kValid
                                                                : GrMipmapStatus::kNotAllocated;
    GrMockTextureInfo texInfo(GrColorType::kUnknown,
                              format.asMockCompressionType(),
                              NextInternalTextureID());

    return sk_sp<GrTexture>(
            new GrMockTexture(this, budgeted, dimensions, isProtected, mipmapStatus, texInfo));
}

sk_sp<GrTexture> GrMockGpu::onWrapBackendTexture(const GrBackendTexture& tex,
                                                 GrWrapOwnership ownership,
                                                 GrWrapCacheable wrapType,
                                                 GrIOType ioType) {
    GrMockTextureInfo texInfo;
    SkAssertResult(tex.getMockTextureInfo(&texInfo));

    SkImage::CompressionType compression = texInfo.compressionType();
    if (compression != SkImage::CompressionType::kNone) {
        return nullptr;
    }

    GrMipmapStatus mipmapStatus = tex.hasMipmaps() ? GrMipmapStatus::kValid
                                                   : GrMipmapStatus::kNotAllocated;
    auto isProtected = GrProtected(tex.isProtected());
    return sk_sp<GrTexture>(new GrMockTexture(this, tex.dimensions(), isProtected, mipmapStatus,
                                              texInfo, wrapType, ioType));
}

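// Wrapping compressed backend textures is not supported by the mock backend.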
sk_sp<GrTexture> GrMockGpu::onWrapCompressedBackendTexture(const GrBackendTexture& tex,
                                                           GrWrapOwnership ownership,
                                                           GrWrapCacheable wrapType) {
    return nullptr;
}

sk_sp<GrTexture> GrMockGpu::onWrapRenderableBackendTexture(const GrBackendTexture& tex,
                                                           int sampleCnt,
                                                           GrWrapOwnership ownership,
                                                           GrWrapCacheable cacheable) {
    GrMockTextureInfo texInfo;
    SkAssertResult(tex.getMockTextureInfo(&texInfo));
    SkASSERT(texInfo.compressionType() == SkImage::CompressionType::kNone);

    GrMipmapStatus mipmapStatus =
            tex.hasMipmaps() ? GrMipmapStatus::kValid : GrMipmapStatus::kNotAllocated;

    // The client gave us the texture ID but we supply the render target ID.
    GrMockRenderTargetInfo rtInfo(texInfo.colorType(), NextInternalRenderTargetID());

    auto isProtected = GrProtected(tex.isProtected());
    return sk_sp<GrTexture>(new GrMockTextureRenderTarget(this, tex.dimensions(), sampleCnt,
                                                          isProtected, mipmapStatus, texInfo,
                                                          rtInfo, cacheable));
}

sk_sp<GrRenderTarget> GrMockGpu::onWrapBackendRenderTarget(const GrBackendRenderTarget& rt) {
    GrMockRenderTargetInfo info;
    SkAssertResult(rt.getMockRenderTargetInfo(&info));

    auto isProtected = GrProtected(rt.isProtected());
    return sk_sp<GrRenderTarget>(new GrMockRenderTarget(this, GrMockRenderTarget::kWrapped,
                                                        rt.dimensions(), rt.sampleCnt(),
                                                        isProtected, info));
}

sk_sp<GrGpuBuffer> GrMockGpu::onCreateBuffer(size_t sizeInBytes, GrGpuBufferType type,
                                             GrAccessPattern accessPattern, const void*) {
    return sk_sp<GrGpuBuffer>(new GrMockBuffer(this, sizeInBytes, type, accessPattern));
}

sk_sp<GrAttachment> GrMockGpu::makeStencilAttachment(const GrBackendFormat& /*colorFormat*/,
                                                     SkISize dimensions, int numStencilSamples) {
    fStats.incStencilAttachmentCreates();
    return sk_sp<GrAttachment>(
            new GrMockAttachment(this, dimensions, GrAttachment::UsageFlags::kStencilAttachment,
                                 numStencilSamples));
}

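// Testing-only backend textures receive external (negative) IDs and are tracked until deleted.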
GrBackendTexture GrMockGpu::onCreateBackendTexture(SkISize dimensions,
                                                   const GrBackendFormat& format,
                                                   GrRenderable,
                                                   GrMipmapped mipMapped,
                                                   GrProtected) {
    SkImage::CompressionType compression = format.asMockCompressionType();
    if (compression != SkImage::CompressionType::kNone) {
        return {}; // should go through onCreateCompressedBackendTexture
    }

    auto colorType = format.asMockColorType();
    if (!this->caps()->isFormatTexturable(format)) {
        return GrBackendTexture();  // invalid
    }

    GrMockTextureInfo info(colorType, SkImage::CompressionType::kNone, NextExternalTextureID());

    fOutstandingTestingOnlyTextureIDs.add(info.id());
    return GrBackendTexture(dimensions.width(), dimensions.height(), mipMapped, info);
}

GrBackendTexture GrMockGpu::onCreateCompressedBackendTexture(
        SkISize dimensions, const GrBackendFormat& format, GrMipmapped mipMapped,
        GrProtected) {
    SkImage::CompressionType compression = format.asMockCompressionType();
    if (compression == SkImage::CompressionType::kNone) {
        return {}; // should go through onCreateBackendTexture
    }

    if (!this->caps()->isFormatTexturable(format)) {
        return {};
    }

    GrMockTextureInfo info(GrColorType::kUnknown, compression, NextExternalTextureID());

    fOutstandingTestingOnlyTextureIDs.add(info.id());
    return GrBackendTexture(dimensions.width(), dimensions.height(), mipMapped, info);
}

void GrMockGpu::deleteBackendTexture(const GrBackendTexture& tex) {
    SkASSERT(GrBackendApi::kMock == tex.backend());

    GrMockTextureInfo info;
    if (tex.getMockTextureInfo(&info)) {
        fOutstandingTestingOnlyTextureIDs.remove(info.id());
    }
}

#if GR_TEST_UTILS
bool GrMockGpu::isTestingOnlyBackendTexture(const GrBackendTexture& tex) const {
    SkASSERT(GrBackendApi::kMock == tex.backend());

    GrMockTextureInfo info;
    if (!tex.getMockTextureInfo(&info)) {
        return false;
    }

    return fOutstandingTestingOnlyTextureIDs.contains(info.id());
}

GrBackendRenderTarget GrMockGpu::createTestingOnlyBackendRenderTarget(SkISize dimensions,
                                                                      GrColorType colorType,
                                                                      int sampleCnt,
                                                                      GrProtected) {
    GrMockRenderTargetInfo info(colorType, NextExternalRenderTargetID());
    static constexpr int kStencilBits = 8;
    return GrBackendRenderTarget(dimensions.width(), dimensions.height(), sampleCnt, kStencilBits,
                                 info);
}

void GrMockGpu::deleteTestingOnlyBackendRenderTarget(const GrBackendRenderTarget&) {}
#endif