• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2017 Google Inc.
3  *
4  * Use of this source code is governed by a BSD-style license that can be
5  * found in the LICENSE file.
6  */
7 
8 #include "include/core/SkTypes.h"
9 #include "tests/Test.h"
10 
11 #include "include/core/SkMatrix.h"
12 #include "include/core/SkRect.h"
13 #include "include/gpu/GrTexture.h"
14 #include "include/gpu/mock/GrMockTypes.h"
15 #include "include/private/GrRecordingContext.h"
16 #include "src/core/SkExchange.h"
17 #include "src/core/SkPathPriv.h"
18 #include "src/gpu/GrClip.h"
19 #include "src/gpu/GrContextPriv.h"
20 #include "src/gpu/GrDrawingManager.h"
21 #include "src/gpu/GrPaint.h"
22 #include "src/gpu/GrPathRenderer.h"
23 #include "src/gpu/GrRecordingContextPriv.h"
24 #include "src/gpu/GrRenderTargetContext.h"
25 #include "src/gpu/GrRenderTargetContextPriv.h"
26 #include "src/gpu/ccpr/GrCCPathCache.h"
27 #include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
28 #include "src/gpu/geometry/GrShape.h"
29 #include "tools/ToolUtils.h"
30 
31 #include <cmath>
32 
// Side length, in pixels, of the square render target every test below draws into.
static constexpr int kCanvasSize = 100;

// Whether coverage counting is enabled on the mock context (kNo exercises the MSAA variant).
enum class DoCoverageCount { kNo = false, kYes };
// Whether paths are drawn stroked (as hairlines) instead of filled.
enum class DoStroke { kNo = false, kYes };
// GrClip implementation backed by a CCPR clip processor. Lets the tests below route a clip path
// through GrCoverageCountingPathRenderer::makeClipProcessor().
class CCPRClip : public GrClip {
public:
    CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {}

private:
    // Installs a CCPR coverage FP that evaluates fPath's coverage over the entire render target.
    bool apply(GrRecordingContext* context, GrRenderTargetContext* rtc, bool useHWAA,
               bool hasUserStencilSettings, GrAppliedClip* out, SkRect* bounds) const override {
        out->addCoverageFP(fCCPR->makeClipProcessor(rtc->priv().testingOnly_getOpListID(), fPath,
                                                    SkIRect::MakeWH(rtc->width(), rtc->height()),
                                                    *context->priv().caps()));
        return true;
    }
    // Never report a trivial containment or round-rect form; this forces apply() above to run.
    bool quickContains(const SkRect&) const final { return false; }
    bool isRRect(const SkRect& rtBounds, SkRRect* rr, GrAA*) const final { return false; }
    // Conservative bounds are the whole (width x height) target; a path clip is never a simple
    // intersection-of-rects, so *iior is always false.
    void getConservativeBounds(int width, int height, SkIRect* rect, bool* iior) const final {
        rect->set(0, 0, width, height);
        if (iior) {
            *iior = false;
        }
    }
    GrCoverageCountingPathRenderer* const fCCPR;  // Not owned; outlives this clip in these tests.
    const SkPath fPath;                           // The clip geometry (held by value/ref-counted).
};
61 
62 class CCPRPathDrawer {
63 public:
CCPRPathDrawer(sk_sp<GrContext> ctx,skiatest::Reporter * reporter,DoStroke doStroke)64     CCPRPathDrawer(sk_sp<GrContext> ctx, skiatest::Reporter* reporter, DoStroke doStroke)
65             : fCtx(ctx)
66             , fCCPR(fCtx->priv().drawingManager()->getCoverageCountingPathRenderer())
67             , fRTC(fCtx->priv().makeDeferredRenderTargetContext(
68                       SkBackingFit::kExact, kCanvasSize, kCanvasSize, GrColorType::kRGBA_8888,
69                       nullptr))
70             , fDoStroke(DoStroke::kYes == doStroke) {
71         if (!fCCPR) {
72             ERRORF(reporter, "ccpr not enabled in GrContext for ccpr tests");
73         }
74         if (!fRTC) {
75             ERRORF(reporter, "failed to create GrRenderTargetContext for ccpr tests");
76         }
77     }
78 
ctx() const79     GrContext* ctx() const { return fCtx.get(); }
ccpr() const80     GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }
81 
valid() const82     bool valid() const { return fCCPR && fRTC; }
clear() const83     void clear() const { fRTC->clear(nullptr, SK_PMColor4fTRANSPARENT,
84                                      GrRenderTargetContext::CanClearFullscreen::kYes); }
destroyGrContext()85     void destroyGrContext() {
86         SkASSERT(fRTC->unique());
87         SkASSERT(fCtx->unique());
88         fRTC.reset();
89         fCCPR = nullptr;
90         fCtx.reset();
91     }
92 
drawPath(const SkPath & path,const SkMatrix & matrix=SkMatrix::I ()) const93     void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
94         SkASSERT(this->valid());
95 
96         GrPaint paint;
97         paint.setColor4f({ 0, 1, 0, 1 });
98 
99         GrNoClip noClip;
100         SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);
101 
102         GrShape shape;
103         if (!fDoStroke) {
104             shape = GrShape(path);
105         } else {
106             // Use hairlines for now, since they are the only stroke type that doesn't require a
107             // rigid-body transform. The CCPR stroke code makes no distinction between hairlines
108             // and regular strokes other than how it decides the device-space stroke width.
109             SkStrokeRec stroke(SkStrokeRec::kHairline_InitStyle);
110             stroke.setStrokeParams(SkPaint::kRound_Cap, SkPaint::kMiter_Join, 4);
111             shape = GrShape(path, GrStyle(stroke, nullptr));
112         }
113 
114         fCCPR->testingOnly_drawPathDirectly({
115                 fCtx.get(), std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(), &noClip,
116                 &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
117     }
118 
clipFullscreenRect(SkPath clipPath,SkPMColor4f color={ 0, 1, 0, 1 })119     void clipFullscreenRect(SkPath clipPath, SkPMColor4f color = { 0, 1, 0, 1 }) {
120         SkASSERT(this->valid());
121 
122         GrPaint paint;
123         paint.setColor4f(color);
124 
125         fRTC->drawRect(CCPRClip(fCCPR, clipPath), std::move(paint), GrAA::kYes, SkMatrix::I(),
126                        SkRect::MakeIWH(kCanvasSize, kCanvasSize));
127     }
128 
flush() const129     void flush() const {
130         SkASSERT(this->valid());
131         fCtx->flush();
132     }
133 
134 private:
135     sk_sp<GrContext> fCtx;
136     GrCoverageCountingPathRenderer* fCCPR;
137     sk_sp<GrRenderTargetContext> fRTC;
138     const bool fDoStroke;
139 };
140 
141 class CCPRTest {
142 public:
run(skiatest::Reporter * reporter,DoCoverageCount doCoverageCount,DoStroke doStroke)143     void run(skiatest::Reporter* reporter, DoCoverageCount doCoverageCount, DoStroke doStroke) {
144         GrMockOptions mockOptions;
145         mockOptions.fInstanceAttribSupport = true;
146         mockOptions.fHalfFloatVertexAttributeSupport = true;
147         mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag;
148         mockOptions.fConfigOptions[(int)GrColorType::kAlpha_F16].fRenderability =
149                 GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
150         mockOptions.fConfigOptions[(int)GrColorType::kAlpha_F16].fTexturable = true;
151         mockOptions.fConfigOptions[(int)GrColorType::kAlpha_8].fRenderability =
152                 GrMockOptions::ConfigOptions::Renderability::kMSAA;
153         mockOptions.fConfigOptions[(int)GrColorType::kAlpha_8].fTexturable = true;
154         mockOptions.fGeometryShaderSupport = true;
155         mockOptions.fIntegerSupport = true;
156         mockOptions.fFlatInterpolationSupport = true;
157 
158         GrContextOptions ctxOptions;
159         ctxOptions.fDisableCoverageCountingPaths = (DoCoverageCount::kNo == doCoverageCount);
160         ctxOptions.fAllowPathMaskCaching = false;
161         ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;
162 
163         this->customizeOptions(&mockOptions, &ctxOptions);
164 
165         sk_sp<GrContext> mockContext = GrContext::MakeMock(&mockOptions, ctxOptions);
166         if (!mockContext) {
167             ERRORF(reporter, "could not create mock context");
168             return;
169         }
170         if (!mockContext->unique()) {
171             ERRORF(reporter, "mock context is not unique");
172             return;
173         }
174 
175         CCPRPathDrawer ccpr(skstd::exchange(mockContext, nullptr), reporter, doStroke);
176         if (!ccpr.valid()) {
177             return;
178         }
179 
180         fPath.moveTo(0, 0);
181         fPath.cubicTo(50, 50, 0, 50, 50, 0);
182         this->onRun(reporter, ccpr);
183     }
184 
~CCPRTest()185     virtual ~CCPRTest() {}
186 
187 protected:
customizeOptions(GrMockOptions *,GrContextOptions *)188     virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {}
189     virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0;
190 
191     SkPath fPath;
192 };
193 
// Registers a CCPRTest subclass as a GPU unit test and runs it under every supported
// coverage-count/stroke combination.
#define DEF_CCPR_TEST(name) \
    DEF_GPUTEST(name, reporter, /* options */) { \
        name test; \
        test.run(reporter, DoCoverageCount::kYes, DoStroke::kNo); \
        test.run(reporter, DoCoverageCount::kYes, DoStroke::kYes); \
        test.run(reporter, DoCoverageCount::kNo, DoStroke::kNo); \
        /* FIXME: test.run(reporter, DoCoverageCount::kNo, DoStroke::kYes) once supported. */ \
    }
202 
// Verifies CCPR releases its refs on drawn/clipped paths at the right times: after a flush, and
// when the context is destroyed with work still pending.
class CCPR_cleanup : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        // fPath must start out with no refs other than ours.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
        }
        // Queued CCPR ops hold refs until the flush consumes them.
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure clip paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed when we delete the context without flushing.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));

        ccpr.destroyGrContext();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_cleanup)
235 
// Same checks as CCPR_cleanup, but with texture allocations forced to fail — cleanup must still
// happen even when atlas textures can never be created.
class CCPR_cleanupWithTexAllocFail : public CCPR_cleanup {
    void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
        mockOptions->fFailTextureAllocations = true;
    }
};
DEF_CCPR_TEST(CCPR_cleanupWithTexAllocFail)
242 
// Verifies CCPR ops unregister themselves (and drop their path refs) when culled before they ever
// execute — by a fullscreen clear, or by destroying the context.
class CCPR_unregisterCulledOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure Ops get unregistered from CCPR when culled early.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR Op.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).

        // Ensure Op unregisters work when we delete the context without flushing.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.destroyGrContext(); // Should not crash (DrawPathsOp should have unregistered itself).
    }
};
DEF_CCPR_TEST(CCPR_unregisterCulledOps)
263 
264 class CCPR_parseEmptyPath : public CCPRTest {
onRun(skiatest::Reporter * reporter,CCPRPathDrawer & ccpr)265     void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
266         REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
267 
268         // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with
269         // an empty path.
270         SkPath largeOutsidePath;
271         largeOutsidePath.moveTo(-1e30f, -1e30f);
272         largeOutsidePath.lineTo(-1e30f, +1e30f);
273         largeOutsidePath.lineTo(-1e10f, +1e30f);
274         ccpr.drawPath(largeOutsidePath);
275 
276         // Normally an empty path is culled before reaching ccpr, however we use a back door for
277         // testing so this path will make it.
278         SkPath emptyPath;
279         SkASSERT(emptyPath.isEmpty());
280         ccpr.drawPath(emptyPath);
281 
282         // This is the test. It will exercise various internal asserts and verify we do not crash.
283         ccpr.flush();
284 
285         // Now try again with clips.
286         ccpr.clipFullscreenRect(largeOutsidePath);
287         ccpr.clipFullscreenRect(emptyPath);
288         ccpr.flush();
289 
290         // ... and both.
291         ccpr.drawPath(largeOutsidePath);
292         ccpr.clipFullscreenRect(largeOutsidePath);
293         ccpr.drawPath(emptyPath);
294         ccpr.clipFullscreenRect(emptyPath);
295         ccpr.flush();
296     }
297 };
DEF_CCPR_TEST(CCPR_parseEmptyPath)298 DEF_CCPR_TEST(CCPR_parseEmptyPath)
299 
300 static int get_mock_texture_id(const GrTexture* texture) {
301     const GrBackendTexture& backingTexture = texture->getBackendTexture();
302     SkASSERT(GrBackendApi::kMock == backingTexture.backend());
303 
304     if (!backingTexture.isValid()) {
305         return 0;
306     }
307 
308     GrMockTextureInfo info;
309     backingTexture.getMockTextureInfo(&info);
310     return info.fID;
311 }
312 
313 // Base class for cache path unit tests.
class CCPRCacheTest : public CCPRTest {
protected:
    // Registers as an onFlush callback in order to snag the CCPR per-flush resources and note the
    // texture IDs.
    class RecordLastMockAtlasIDs : public GrOnFlushCallbackObject {
    public:
        RecordLastMockAtlasIDs(sk_sp<GrCoverageCountingPathRenderer> ccpr) : fCCPR(ccpr) {}

        // Mock texture ID of the copy atlas from the most recent flush (0 = none).
        int lastCopyAtlasID() const { return fLastCopyAtlasID; }
        // Mock texture ID of the rendered atlas from the most recent flush (0 = none).
        int lastRenderedAtlasID() const { return fLastRenderedAtlasID; }

        void preFlush(GrOnFlushResourceProvider*, const uint32_t* opListIDs, int numOpListIDs,
                      SkTArray<sk_sp<GrRenderTargetContext>>* out) override {
            // Reset both IDs first so a flush with no CCPR work reports 0/0.
            fLastRenderedAtlasID = fLastCopyAtlasID = 0;

            const GrCCPerFlushResources* resources = fCCPR->testingOnly_getCurrentFlushResources();
            if (!resources) {
                return;
            }

            if (const GrTexture* tex = resources->testingOnly_frontCopyAtlasTexture()) {
                fLastCopyAtlasID = get_mock_texture_id(tex);
            }
            if (const GrTexture* tex = resources->testingOnly_frontRenderedAtlasTexture()) {
                fLastRenderedAtlasID = get_mock_texture_id(tex);
            }
        }

        void postFlush(GrDeferredUploadToken, const uint32_t*, int) override {}

    private:
        sk_sp<GrCoverageCountingPathRenderer> fCCPR;
        int fLastCopyAtlasID = 0;
        int fLastRenderedAtlasID = 0;
    };

    // Builds 350 random star paths whose verb counts exceed GrShape's key-from-data limit, so each
    // path is cached by its own identity rather than by its point data.
    CCPRCacheTest() {
        static constexpr int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};

        SkRandom rand;
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            int numPts = rand.nextRangeU(GrShape::kMaxKeyFromDataVerbCnt + 1,
                                         GrShape::kMaxKeyFromDataVerbCnt * 2);
            // Pick a prime step different from numPts so the star's edges form a single cycle.
            int step;
            do {
                step = primes[rand.nextU() % SK_ARRAY_COUNT(primes)];
            } while (step == numPts);
            fPaths[i] = ToolUtils::make_star(SkRect::MakeLTRB(0, 0, 1, 1), numPts, step);
        }
    }

    // Convenience overload: draw every path under a single matrix, then flush.
    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix& m) {
        this->drawPathsAndFlush(ccpr, &m, 1);
    }
    // Draws all fPaths (cycling through 'matrices'), re-draws a random subset, then flushes.
    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix* matrices, int numMatrices) {
        // Draw all the paths.
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            ccpr.drawPath(fPaths[i], matrices[i % numMatrices]);
        }
        // Re-draw a few paths, to test the case where a cache entry is hit more than once in a
        // single flush.
        SkRandom rand;
        int duplicateIndices[10];
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            duplicateIndices[i] = rand.nextULessThan(SK_ARRAY_COUNT(fPaths));
        }
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            for (size_t j = 0; j <= i; ++j) {
                int idx = duplicateIndices[j];
                ccpr.drawPath(fPaths[idx], matrices[idx % numMatrices]);
            }
        }
        ccpr.flush();
    }

private:
    // Cache tests need path mask caching on (the base fixture disables it).
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    // Wraps the subclass body with atlas-ID recording installed on the context.
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) final {
        RecordLastMockAtlasIDs atlasIDRecorder(sk_ref_sp(ccpr.ccpr()));
        ccpr.ctx()->priv().addOnFlushCallbackObject(&atlasIDRecorder);

        this->onRun(reporter, ccpr, atlasIDRecorder);

        // The recorder is stack-allocated; it must be flushed out and removed before returning.
        ccpr.ctx()->priv().testingOnly_flushAndRemoveOnFlushCallbackObject(&atlasIDRecorder);
    }

    // Subclass test body, with access to the per-flush atlas IDs.
    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
                       const RecordLastMockAtlasIDs&) = 0;

protected:
    SkPath fPaths[350];
};
409 
410 // Ensures ccpr always reuses the same atlas texture in the animation use case.
class CCPR_cache_animationAtlasReuse : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        // Center the unit-square stars on the canvas at 80x80.
        SkMatrix m = SkMatrix::MakeTrans(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5,-.5);
        this->drawPathsAndFlush(ccpr, m);

        // First flush: everything rendered fresh, nothing copied.
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        const int atlasID = atlasIDRecorder.lastRenderedAtlasID();

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 12; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas. This makes sure that on the subsequent draw, after an atlas has been cached
            // and is then invalidated since the matrix will change, that the same underlying
            // texture object is still reused for the next atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, atlasIDRecorder.lastRenderedAtlasID() == atlasID);
            }
        }

        // Do the last draw again. (On draw 3 they should get copied to an 8-bit atlas.)
        this->drawPathsAndFlush(ccpr, m);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        // Now double-check that everything continues to hit the cache as expected when the matrix
        // doesn't change.
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_animationAtlasReuse)
455 
// Ensures the path cache recycles its entry objects (same heap allocations) when entries are
// invalidated by a matrix change and re-created on the next draw.
class CCPR_cache_recycleEntries : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        // Center the unit-square stars on the canvas at 80x80.
        SkMatrix m = SkMatrix::MakeTrans(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5,-.5);

        auto cache = ccpr.ccpr()->testingOnly_getPathCache();
        REPORTER_ASSERT(reporter, cache);

        const auto& lru = cache->testingOnly_getLRU();

        // Entry pointers captured on the first iteration; later iterations must see the same ones.
        SkTArray<const void*> expectedPtrs;

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 5; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }

            int idx = 0;
            for (const GrCCPathCacheEntry* entry : lru) {
                if (0 == i) {
                    expectedPtrs.push_back(entry);
                } else {
                    // The same pointer should have been recycled for the new matrix.
                    REPORTER_ASSERT(reporter, entry == expectedPtrs[idx]);
                }
                ++idx;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_recycleEntries)
498 
499 // Ensures mostly-visible paths get their full mask cached.
class CCPR_cache_mostlyVisible : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[3] = {
            SkMatrix::MakeScale(kCanvasSize/2, kCanvasSize/2), // Fully visible.
            SkMatrix::MakeScale(kCanvasSize * 1.25, kCanvasSize * 1.25), // Mostly visible.
            SkMatrix::MakeScale(kCanvasSize * 1.5, kCanvasSize * 1.5), // Mostly NOT visible.
        };

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 3);
            if (2 == i) {
                // The mostly-visible paths should still get cached.
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            // Ensure mostly NOT-visible paths never get cached.
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // Clear the path cache.
        this->drawPathsAndFlush(ccpr, SkMatrix::I());

        // Now only draw the fully/mostly visible ones.
        for (int i = 0; i < 2; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // On draw 3 they should get copied to an 8-bit atlas.
        this->drawPathsAndFlush(ccpr, matrices, 2);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        // Everything is now cached in the A8 atlas; further draws render/copy nothing.
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }

        // Draw a different part of the path to ensure the full mask was cached.
        matrices[1].postTranslate(SkScalarFloorToInt(kCanvasSize * -.25f),
                                  SkScalarFloorToInt(kCanvasSize * -.25f));
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_mostlyVisible)
553 
554 // Ensures GrContext::performDeferredCleanup works.
555 class CCPR_cache_deferredCleanup : public CCPRCacheTest {
onRun(skiatest::Reporter * reporter,CCPRPathDrawer & ccpr,const RecordLastMockAtlasIDs & atlasIDRecorder)556     void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
557                const RecordLastMockAtlasIDs& atlasIDRecorder) override {
558         SkMatrix m = SkMatrix::MakeScale(20, 20);
559         int lastRenderedAtlasID = 0;
560 
561         for (int i = 0; i < 5; ++i) {
562             this->drawPathsAndFlush(ccpr, m);
563             REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
564             REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
565             int renderedAtlasID = atlasIDRecorder.lastRenderedAtlasID();
566             REPORTER_ASSERT(reporter, renderedAtlasID != lastRenderedAtlasID);
567             lastRenderedAtlasID = renderedAtlasID;
568 
569             this->drawPathsAndFlush(ccpr, m);
570             REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
571             REPORTER_ASSERT(reporter, lastRenderedAtlasID == atlasIDRecorder.lastRenderedAtlasID());
572 
573             // On draw 3 they should get copied to an 8-bit atlas.
574             this->drawPathsAndFlush(ccpr, m);
575             REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
576             REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
577 
578             for (int i = 0; i < 10; ++i) {
579                 this->drawPathsAndFlush(ccpr, m);
580                 REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
581                 REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
582             }
583 
584             ccpr.ctx()->performDeferredCleanup(std::chrono::milliseconds(0));
585         }
586     }
587 };
588 DEF_CCPR_TEST(CCPR_cache_deferredCleanup)
589 
590 // Verifies the cache/hash table internals.
class CCPR_cache_hashTable : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        using CoverageType = GrCCAtlas::CoverageType;
        SkMatrix m = SkMatrix::MakeScale(20, 20);

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            // Draw 3 (i == 2) is when cached masks get copied into the 8-bit atlas.
            if (2 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            // Rendering only happens on the first two draws; afterward everything is cached.
            if (i < 2) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            auto cache = ccpr.ccpr()->testingOnly_getPathCache();
            REPORTER_ASSERT(reporter, cache);

            const auto& hash = cache->testingOnly_getHashTable();
            const auto& lru = cache->testingOnly_getLRU();
            int count = 0;
            // Every LRU entry must be findable in the hash table (and, via the final count check,
            // vice versa).
            for (GrCCPathCacheEntry* entry : lru) {
                auto* node = hash.find(entry->cacheKey());
                REPORTER_ASSERT(reporter, node);
                REPORTER_ASSERT(reporter, node->entry() == entry);
                // No flush is in progress here, so nothing may hold on-flush refs.
                REPORTER_ASSERT(reporter, 0 == entry->testingOnly_peekOnFlushRefCnt());
                REPORTER_ASSERT(reporter, entry->unique());
                if (0 == i) {
                    // After a single draw, nothing has a cached atlas yet.
                    REPORTER_ASSERT(reporter, !entry->cachedAtlas());
                } else {
                    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
                    REPORTER_ASSERT(reporter, cachedAtlas);
                    if (1 == i) {
                        // Still in the originally-rendered atlas format after draw 2.
                        REPORTER_ASSERT(reporter, ccpr.ccpr()->coverageType()
                                                          == cachedAtlas->coverageType());
                    } else {
                        // From draw 3 onward: copied into the A8 literal-coverage atlas.
                        REPORTER_ASSERT(reporter, CoverageType::kA8_LiteralCoverage
                                                          == cachedAtlas->coverageType());
                    }
                    REPORTER_ASSERT(reporter, cachedAtlas->textureKey().isValid());
                    // The actual proxy should not be held past the end of a flush.
                    REPORTER_ASSERT(reporter, !cachedAtlas->getOnFlushProxy());
                    REPORTER_ASSERT(reporter, 0 == cachedAtlas->testingOnly_peekOnFlushRefCnt());
                }
                ++count;
            }
            REPORTER_ASSERT(reporter, hash.count() == count);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_hashTable)
646 
647 // Ensures paths get cached even when using a sporadic flushing pattern and drawing out of order
648 // (a la Chrome tiles).
class CCPR_cache_multiFlush : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        static constexpr int kNumPaths = SK_ARRAY_COUNT(fPaths);
        static constexpr int kBigPrimes[] = {
                9323, 11059, 22993, 38749, 45127, 53147, 64853, 77969, 83269, 99989};

        SkRandom rand;
        SkMatrix m = SkMatrix::I();

        // Each outer iteration is one full "pass" that visits every path once, in a scrambled
        // order, flushing at random intervals along the way.
        for (size_t i = 0; i < SK_ARRAY_COUNT(kBigPrimes); ++i) {
            int prime = kBigPrimes[i];
            int endPathIdx = (int)rand.nextULessThan(kNumPaths);
            int pathIdx = endPathIdx;
            int nextFlush = rand.nextRangeU(1, 47);
            for (int j = 0; j < kNumPaths; ++j) {
                // Stepping by a prime that doesn't divide kNumPaths visits each index exactly
                // once before returning to endPathIdx.
                pathIdx = (pathIdx + prime) % kNumPaths;
                int repeat = rand.nextRangeU(1, 3);
                for (int k = 0; k < repeat; ++k) {
                    ccpr.drawPath(fPaths[pathIdx], m);
                }
                if (nextFlush == j) {
                    ccpr.flush();
                    // The paths are small enough that we should never copy to an A8 atlas.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    if (i < 2) {
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
                    } else {
                        // After two full passes, every path should be cached: nothing renders.
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
                    }
                    nextFlush = SkTMin(j + (int)rand.nextRangeU(1, 29), kNumPaths - 1);
                }
            }
            SkASSERT(endPathIdx == pathIdx % kNumPaths);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_multiFlush)
687 
688 // Ensures a path drawn over mutiple tiles gets cached.
689 class CCPR_cache_multiTileCache : public CCPRCacheTest {
onRun(skiatest::Reporter * reporter,CCPRPathDrawer & ccpr,const RecordLastMockAtlasIDs & atlasIDRecorder)690     void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
691                const RecordLastMockAtlasIDs& atlasIDRecorder) override {
692         // Make sure a path drawn over 9 tiles gets cached (1 tile out of 9 is >10% visibility).
693         const SkMatrix m0 = SkMatrix::MakeScale(kCanvasSize*3, kCanvasSize*3);
694         const SkPath p0 = fPaths[0];
695         for (int i = 0; i < 9; ++i) {
696             static constexpr int kRowOrder[9] = {0,1,1,0,2,2,2,1,0};
697             static constexpr int kColumnOrder[9] = {0,0,1,1,0,1,2,2,2};
698 
699             SkMatrix tileM = m0;
700             tileM.postTranslate(-kCanvasSize * kColumnOrder[i], -kCanvasSize * kRowOrder[i]);
701             ccpr.drawPath(p0, tileM);
702             ccpr.flush();
703             if (i < 5) {
704                 REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
705                 REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
706             } else if (5 == i) {
707                 REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
708                 REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
709             } else {
710                 REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
711                 REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
712             }
713         }
714 
715         // Now make sure paths don't get cached when visibility is <10% for every draw (12 tiles).
716         const SkMatrix m1 = SkMatrix::MakeScale(kCanvasSize*4, kCanvasSize*3);
717         const SkPath p1 = fPaths[1];
718         for (int row = 0; row < 3; ++row) {
719             for (int col = 0; col < 4; ++col) {
720                 SkMatrix tileM = m1;
721                 tileM.postTranslate(-kCanvasSize * col, -kCanvasSize * row);
722                 ccpr.drawPath(p1, tileM);
723                 ccpr.flush();
724                 REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
725                 REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
726             }
727         }
728 
729         // Double-check the cache is still intact.
730         ccpr.drawPath(p0, m0);
731         ccpr.flush();
732         REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
733         REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
734 
735         ccpr.drawPath(p1, m1);
736         ccpr.flush();
737         REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
738         REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
739     }
740 };
741 DEF_CCPR_TEST(CCPR_cache_multiTileCache)
742 
743 // This test exercises CCPR's cache capabilities by drawing many paths with two different
744 // transformation matrices. We then vary the matrices independently by whole and partial pixels,
745 // and verify the caching behaved as expected.
class CCPR_cache_partialInvalidate : public CCPRCacheTest {
    // Enable path mask caching so this test can exercise cache invalidation at all.
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    // Scale applied to the test paths via preScale below (assumes fPaths are roughly
    // unit-space — TODO confirm against CCPRCacheTest's path setup).
    static constexpr int kPathSize = 4;

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        // Two transforms: one near the top-left of the canvas, one near the bottom-right. These
        // are the matrices we perturb independently below.
        SkMatrix matrices[2] = {
            SkMatrix::MakeTrans(5, 5),
            SkMatrix::MakeTrans(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
        };
        matrices[0].preScale(kPathSize, kPathSize);
        matrices[1].preScale(kPathSize, kPathSize);

        int firstAtlasID = 0;

        // 4*3*2 iterations: three test cases (testIdx) x two repetitions each (repetitionIdx),
        // cycled through four times; the very first iteration is a warm-up.
        for (int iterIdx = 0; iterIdx < 4*3*2; ++iterIdx) {
            this->drawPathsAndFlush(ccpr, matrices, 2);

            if (0 == iterIdx) {
                // First iteration: just note the ID of the stashed atlas and continue.
                firstAtlasID = atlasIDRecorder.lastRenderedAtlasID();
                REPORTER_ASSERT(reporter, 0 != firstAtlasID);
                continue;
            }

            // Decode which scenario we are in and whether this is its 1st or 2nd repetition.
            int testIdx = (iterIdx/2) % 3;
            int repetitionIdx = iterIdx % 2;
            switch (testIdx) {
                case 0:
                    if (0 == repetitionIdx) {
                        // This is the big test. New paths were drawn twice last round. On hit 2
                        // (last time), 'firstAtlasID' was cached as a 16-bit atlas. Now, on hit 3,
                        // these paths should be copied out of 'firstAtlasID', and into an A8 atlas.
                        // THEN: we should recycle 'firstAtlasID' and reuse that same texture to
                        // render the new masks.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                        // This is hit 2 for the new masks. Next time they will be copied to an A8
                        // atlas.
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    }

                    if (1 == repetitionIdx) {
                        // Integer translates: all path masks stay valid.
                        matrices[0].preTranslate(-1, -1);
                        matrices[1].preTranslate(1, 1);
                    }
                    break;

                case 1:
                    if (0 == repetitionIdx) {
                        // New paths were drawn twice last round. The third hit (now) they should be
                        // copied to an A8 atlas.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    }

                    // This draw should have gotten 100% cache hits; we only did integer translates
                    // last time (or none if it was the first flush). Therefore, everything should
                    // have been cached.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

                    if (1 == repetitionIdx) {
                        // Invalidate even path masks.
                        matrices[0].preTranslate(1.6f, 1.4f);
                    }
                    break;

                case 2:
                    // No new masks to copy from last time; it had 100% cache hits.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());

                    // Even path masks were invalidated last iteration by a subpixel translate.
                    // They should have been re-rendered this time in the original 'firstAtlasID'
                    // texture.
                    REPORTER_ASSERT(reporter,
                                    atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);

                    if (1 == repetitionIdx) {
                        // Invalidate odd path masks.
                        matrices[1].preTranslate(-1.4f, -1.6f);
                    }
                    break;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_partialInvalidate)
842 
843 class CCPR_unrefPerOpListPathsBeforeOps : public CCPRTest {
onRun(skiatest::Reporter * reporter,CCPRPathDrawer & ccpr)844     void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
845         REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
846         for (int i = 0; i < 10000; ++i) {
847             // Draw enough paths to make the arena allocator hit the heap.
848             ccpr.drawPath(fPath);
849         }
850 
851         // Unref the GrCCPerOpListPaths object.
852         auto perOpListPathsMap = ccpr.ccpr()->detachPendingPaths();
853         perOpListPathsMap.clear();
854 
855         // Now delete the Op and all its draws.
856         REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
857         ccpr.flush();
858         REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
859     }
860 };
861 DEF_CCPR_TEST(CCPR_unrefPerOpListPathsBeforeOps)
862 
863 class CCPRRenderingTest {
864 public:
run(skiatest::Reporter * reporter,GrContext * ctx,DoStroke doStroke) const865     void run(skiatest::Reporter* reporter, GrContext* ctx, DoStroke doStroke) const {
866         if (auto ccpr = ctx->priv().drawingManager()->getCoverageCountingPathRenderer()) {
867             if (DoStroke::kYes == doStroke &&
868                 GrCCAtlas::CoverageType::kA8_Multisample == ccpr->coverageType()) {
869                 return;  // Stroking is not yet supported for multisample.
870             }
871             CCPRPathDrawer drawer(sk_ref_sp(ctx), reporter, doStroke);
872             if (!drawer.valid()) {
873                 return;
874             }
875             this->onRun(reporter, drawer);
876         }
877     }
878 
~CCPRRenderingTest()879     virtual ~CCPRRenderingTest() {}
880 
881 protected:
882     virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0;
883 };
884 
// Instantiates a GPU test that runs 'name' twice: once with filled paths and once with stroked
// paths. (CCPRRenderingTest::run skips the stroked pass when strokes are unsupported.)
#define DEF_CCPR_RENDERING_TEST(name) \
    DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
        name test; \
        test.run(reporter, ctxInfo.grContext(), DoStroke::kNo); \
        test.run(reporter, ctxInfo.grContext(), DoStroke::kYes); \
    }
891 
892 class CCPR_busyPath : public CCPRRenderingTest {
onRun(skiatest::Reporter * reporter,const CCPRPathDrawer & ccpr) const893     void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override {
894         static constexpr int kNumBusyVerbs = 1 << 17;
895         ccpr.clear();
896         SkPath busyPath;
897         busyPath.moveTo(0, 0); // top left
898         busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right
899         for (int i = 2; i < kNumBusyVerbs; ++i) {
900             float offset = i * ((float)kCanvasSize / kNumBusyVerbs);
901             busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen
902         }
903         ccpr.drawPath(busyPath);
904 
905         ccpr.flush(); // If this doesn't crash, the test passed.
906                       // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in
907                       // your platform's GrGLCaps.
908     }
909 };
910 DEF_CCPR_RENDERING_TEST(CCPR_busyPath)
911