/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkTypes.h"
#include "Test.h"

#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrClip.h"
#include "GrDrawingManager.h"
#include "GrPathRenderer.h"
#include "GrPaint.h"
#include "GrRenderTargetContext.h"
#include "GrRenderTargetContextPriv.h"
#include "GrShape.h"
#include "GrTexture.h"
#include "SkExchange.h"
#include "SkMatrix.h"
#include "SkPathPriv.h"
#include "SkRandom.h"
#include "SkRect.h"
#include "SkTArray.h"
#include "sk_tool_utils.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"
#include "ccpr/GrCCPathCache.h"
#include "mock/GrMockTypes.h"

#include <cmath>

static constexpr int kCanvasSize = 100;

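// Test-only GrClip implementation that routes its path through CCPR's clip processor, so the
// tests below can exercise CCPR's clip path handling.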
class CCPRClip : public GrClip {
public:
    CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {}

private:
    bool apply(GrContext* context, GrRenderTargetContext* rtc, bool, bool, GrAppliedClip* out,
               SkRect* bounds) const override {
        out->addCoverageFP(fCCPR->makeClipProcessor(rtc->priv().testingOnly_getOpListID(), fPath,
                                                    SkIRect::MakeWH(rtc->width(), rtc->height()),
                                                    rtc->width(), rtc->height(),
                                                    *context->contextPriv().caps()));
        return true;
    }
    bool quickContains(const SkRect&) const final { return false; }
    bool isRRect(const SkRect& rtBounds, SkRRect* rr, GrAA*) const final { return false; }
    void getConservativeBounds(int width, int height, SkIRect* rect, bool* iior) const final {
        rect->set(0, 0, width, height);
        if (iior) {
            *iior = false;
        }
    }
    GrCoverageCountingPathRenderer* const fCCPR;
    const SkPath fPath;
};

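// Draws paths directly through CCPR (bypassing the normal path renderer chain) into a deferred
// render target context, as either fills or hairline strokes.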
class CCPRPathDrawer {
public:
    CCPRPathDrawer(sk_sp<GrContext> ctx, skiatest::Reporter* reporter, bool doStroke)
            : fCtx(ctx)
            , fCCPR(fCtx->contextPriv().drawingManager()->getCoverageCountingPathRenderer())
            , fRTC(fCtx->contextPriv().makeDeferredRenderTargetContext(
                ctx->contextPriv().caps()->getBackendFormatFromColorType(kRGBA_8888_SkColorType),
                SkBackingFit::kExact, kCanvasSize, kCanvasSize, kRGBA_8888_GrPixelConfig,
                nullptr))
            , fDoStroke(doStroke) {
        if (!fCCPR) {
            ERRORF(reporter, "ccpr not enabled in GrContext for ccpr tests");
        }
        if (!fRTC) {
            ERRORF(reporter, "failed to create GrRenderTargetContext for ccpr tests");
        }
    }

    GrContext* ctx() const { return fCtx.get(); }
    GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }

    bool valid() const { return fCCPR && fRTC; }
    void clear() const { fRTC->clear(nullptr, SK_PMColor4fTRANSPARENT,
                                     GrRenderTargetContext::CanClearFullscreen::kYes); }
    void destroyGrContext() {
        SkASSERT(fRTC->unique());
        SkASSERT(fCtx->unique());
        fRTC.reset();
        fCCPR = nullptr;
        fCtx.reset();
    }

    void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f({ 0, 1, 0, 1 });

        GrNoClip noClip;
        SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);

        GrShape shape;
        if (!fDoStroke) {
            shape = GrShape(path);
        } else {
            // Use hairlines for now, since they are the only stroke type that doesn't require a
            // rigid-body transform. The CCPR stroke code makes no distinction between hairlines
            // and regular strokes other than how it decides the device-space stroke width.
            SkStrokeRec stroke(SkStrokeRec::kHairline_InitStyle);
            stroke.setStrokeParams(SkPaint::kRound_Cap, SkPaint::kMiter_Join, 4);
            shape = GrShape(path, GrStyle(stroke, nullptr));
        }

        fCCPR->testingOnly_drawPathDirectly({
                fCtx.get(), std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(), &noClip,
                &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
    }

    void clipFullscreenRect(SkPath clipPath, SkPMColor4f color = { 0, 1, 0, 1 }) {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(color);

        fRTC->drawRect(CCPRClip(fCCPR, clipPath), std::move(paint), GrAA::kYes, SkMatrix::I(),
                       SkRect::MakeIWH(kCanvasSize, kCanvasSize));
    }

    void flush() const {
        SkASSERT(this->valid());
        fCtx->flush();
    }

private:
    sk_sp<GrContext> fCtx;
    GrCoverageCountingPathRenderer* fCCPR;
    sk_sp<GrRenderTargetContext> fRTC;
    const bool fDoStroke;
};

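// Base class for CCPR unit tests. Sets up a mock GrContext with only the coverage-counting path
// renderer enabled, then hands a CCPRPathDrawer to the subclass's onRun().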
class CCPRTest {
public:
    void run(skiatest::Reporter* reporter, bool doStroke) {
        GrMockOptions mockOptions;
        mockOptions.fInstanceAttribSupport = true;
        mockOptions.fHalfFloatVertexAttributeSupport = true;
        mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag;
        mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fTexturable = true;
        mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fTexturable = true;
        mockOptions.fGeometryShaderSupport = true;
        mockOptions.fIntegerSupport = true;
        mockOptions.fFlatInterpolationSupport = true;

        GrContextOptions ctxOptions;
        ctxOptions.fAllowPathMaskCaching = false;
        ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;

        this->customizeOptions(&mockOptions, &ctxOptions);

        sk_sp<GrContext> mockContext = GrContext::MakeMock(&mockOptions, ctxOptions);
        if (!mockContext) {
            ERRORF(reporter, "could not create mock context");
            return;
        }
        if (!mockContext->unique()) {
            ERRORF(reporter, "mock context is not unique");
            return;
        }

        CCPRPathDrawer ccpr(skstd::exchange(mockContext, nullptr), reporter, doStroke);
        if (!ccpr.valid()) {
            return;
        }

        fPath.moveTo(0, 0);
        fPath.cubicTo(50, 50, 0, 50, 50, 0);
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRTest() {}

protected:
    virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {}
    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0;

    SkPath fPath;
};

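// Runs a CCPRTest subclass twice: once with fills and once with (hairline) strokes.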
#define DEF_CCPR_TEST(name) \
    DEF_GPUTEST(name, reporter, /* options */) { \
        name test; \
        test.run(reporter, false); \
        test.run(reporter, true); \
    }

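// Ensures CCPR releases its path refs after flushing, and when the context is destroyed without
// a flush.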
class CCPR_cleanup : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure clip paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed when we delete the context without flushing.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));

        ccpr.destroyGrContext();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_cleanup)

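// Same as CCPR_cleanup, but with every texture allocation failing.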
class CCPR_cleanupWithTexAllocFail : public CCPR_cleanup {
    void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
        mockOptions->fFailTextureAllocations = true;
    }
};
DEF_CCPR_TEST(CCPR_cleanupWithTexAllocFail)

class CCPR_unregisterCulledOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure Ops get unregistered from CCPR when culled early.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR Op.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).

        // Ensure Op unregisters work when we delete the context without flushing.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.destroyGrContext(); // Should not crash (DrawPathsOp should have unregistered itself).
    }
};
DEF_CCPR_TEST(CCPR_unregisterCulledOps)

class CCPR_parseEmptyPath : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with
        // an empty path.
        SkPath largeOutsidePath;
        largeOutsidePath.moveTo(-1e30f, -1e30f);
        largeOutsidePath.lineTo(-1e30f, +1e30f);
        largeOutsidePath.lineTo(-1e10f, +1e30f);
        ccpr.drawPath(largeOutsidePath);

        // Normally an empty path is culled before reaching ccpr; however, we use a back door for
        // testing, so this path will make it all the way through.
        SkPath emptyPath;
        SkASSERT(emptyPath.isEmpty());
        ccpr.drawPath(emptyPath);

        // This is the test. It will exercise various internal asserts and verify we do not crash.
        ccpr.flush();

        // Now try again with clips.
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();

        // ... and both.
        ccpr.drawPath(largeOutsidePath);
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.drawPath(emptyPath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();
    }
};
DEF_CCPR_TEST(CCPR_parseEmptyPath)

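// Returns the mock texture ID backing 'texture', or 0 if the backing texture is invalid.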
static int get_mock_texture_id(const GrTexture* texture) {
    const GrBackendTexture& backingTexture = texture->getBackendTexture();
    SkASSERT(GrBackendApi::kMock == backingTexture.backend());

    if (!backingTexture.isValid()) {
        return 0;
    }

    GrMockTextureInfo info;
    backingTexture.getMockTextureInfo(&info);
    return info.fID;
}

// Base class for path cache unit tests.
class CCPRCacheTest : public CCPRTest {
protected:
    // Registers as an onFlush callback in order to snag the CCPR per-flush resources and note the
    // texture IDs.
    class RecordLastMockAtlasIDs : public GrOnFlushCallbackObject {
    public:
        RecordLastMockAtlasIDs(sk_sp<GrCoverageCountingPathRenderer> ccpr) : fCCPR(ccpr) {}

        int lastCopyAtlasID() const { return fLastCopyAtlasID; }
        int lastRenderedAtlasID() const { return fLastRenderedAtlasID; }

        void preFlush(GrOnFlushResourceProvider*, const uint32_t* opListIDs, int numOpListIDs,
                      SkTArray<sk_sp<GrRenderTargetContext>>* out) override {
            fLastRenderedAtlasID = fLastCopyAtlasID = 0;

            const GrCCPerFlushResources* resources = fCCPR->testingOnly_getCurrentFlushResources();
            if (!resources) {
                return;
            }

            if (const GrTexture* tex = resources->testingOnly_frontCopyAtlasTexture()) {
                fLastCopyAtlasID = get_mock_texture_id(tex);
            }
            if (const GrTexture* tex = resources->testingOnly_frontRenderedAtlasTexture()) {
                fLastRenderedAtlasID = get_mock_texture_id(tex);
            }
        }

        void postFlush(GrDeferredUploadToken, const uint32_t*, int) override {}

    private:
        sk_sp<GrCoverageCountingPathRenderer> fCCPR;
        int fLastCopyAtlasID = 0;
        int fLastRenderedAtlasID = 0;
    };

    CCPRCacheTest() {
        static constexpr int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};

        SkRandom rand;
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            int numPts = rand.nextRangeU(GrShape::kMaxKeyFromDataVerbCnt + 1,
                                         GrShape::kMaxKeyFromDataVerbCnt * 2);
            int step;
            do {
                step = primes[rand.nextU() % SK_ARRAY_COUNT(primes)];
            } while (step == numPts);
            fPaths[i] = sk_tool_utils::make_star(SkRect::MakeLTRB(0,0,1,1), numPts, step);
        }
    }

    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix& m) {
        this->drawPathsAndFlush(ccpr, &m, 1);
    }
    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix* matrices, int numMatrices) {
        // Draw all the paths.
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            ccpr.drawPath(fPaths[i], matrices[i % numMatrices]);
        }
        // Re-draw a few paths, to test the case where a cache entry is hit more than once in a
        // single flush.
        SkRandom rand;
        int duplicateIndices[10];
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            duplicateIndices[i] = rand.nextULessThan(SK_ARRAY_COUNT(fPaths));
        }
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            for (size_t j = 0; j <= i; ++j) {
                int idx = duplicateIndices[j];
                ccpr.drawPath(fPaths[idx], matrices[idx % numMatrices]);
            }
        }
        ccpr.flush();
    }

private:
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) final {
        RecordLastMockAtlasIDs atlasIDRecorder(sk_ref_sp(ccpr.ccpr()));
        ccpr.ctx()->contextPriv().addOnFlushCallbackObject(&atlasIDRecorder);

        this->onRun(reporter, ccpr, atlasIDRecorder);

        ccpr.ctx()->contextPriv().testingOnly_flushAndRemoveOnFlushCallbackObject(&atlasIDRecorder);
    }

    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
                       const RecordLastMockAtlasIDs&) = 0;

protected:
    SkPath fPaths[350];
};

// Ensures ccpr always reuses the same atlas texture in the animation use case.
class CCPR_cache_animationAtlasReuse : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::MakeTrans(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5, -.5);
        this->drawPathsAndFlush(ccpr, m);

        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        const int atlasID = atlasIDRecorder.lastRenderedAtlasID();

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 12; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas. This makes sure that on the subsequent draw, after an atlas has been cached
            // and is then invalidated since the matrix will change, that the same underlying
            // texture object is still reused for the next atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, atlasIDRecorder.lastRenderedAtlasID() == atlasID);
            }
        }

        // Do the last draw again. (On draw 3 they should get copied to an 8-bit atlas.)
        this->drawPathsAndFlush(ccpr, m);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        // Now double-check that everything continues to hit the cache as expected when the matrix
        // doesn't change.
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_animationAtlasReuse)

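// Ensures the path cache recycles the same entry objects when paths are re-drawn with a new
// matrix.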
class CCPR_cache_recycleEntries : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::MakeTrans(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5, -.5);

        auto cache = ccpr.ccpr()->testingOnly_getPathCache();
        REPORTER_ASSERT(reporter, cache);

        const auto& lru = cache->testingOnly_getLRU();

        SkTArray<const void*> expectedPtrs;

        for (int i = 0; i < 5; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }

            int idx = 0;
            for (const GrCCPathCacheEntry* entry : lru) {
                if (0 == i) {
                    expectedPtrs.push_back(entry);
                } else {
                    // The same pointer should have been recycled for the new matrix.
                    REPORTER_ASSERT(reporter, entry == expectedPtrs[idx]);
                }
                ++idx;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_recycleEntries)

// Ensures mostly-visible paths get their full mask cached.
class CCPR_cache_mostlyVisible : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[3] = {
            SkMatrix::MakeScale(kCanvasSize/2, kCanvasSize/2), // Fully visible.
            SkMatrix::MakeScale(kCanvasSize * 1.25, kCanvasSize * 1.25), // Mostly visible.
            SkMatrix::MakeScale(kCanvasSize * 1.5, kCanvasSize * 1.5), // Mostly NOT visible.
        };

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 3);
            if (2 == i) {
                // The mostly-visible paths should still get cached.
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            // Ensure mostly NOT-visible paths never get cached.
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // Clear the path cache.
        this->drawPathsAndFlush(ccpr, SkMatrix::I());

        // Now only draw the fully/mostly visible ones.
        for (int i = 0; i < 2; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // On draw 3 they should get copied to an 8-bit atlas.
        this->drawPathsAndFlush(ccpr, matrices, 2);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }

        // Draw a different part of the path to ensure the full mask was cached.
        matrices[1].postTranslate(SkScalarFloorToInt(kCanvasSize * -.25f),
                                  SkScalarFloorToInt(kCanvasSize * -.25f));
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_mostlyVisible)

// Ensures GrContext::performDeferredCleanup works.
class CCPR_cache_deferredCleanup : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::MakeScale(20, 20);
        int lastRenderedAtlasID = 0;

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            int renderedAtlasID = atlasIDRecorder.lastRenderedAtlasID();
            REPORTER_ASSERT(reporter, renderedAtlasID != lastRenderedAtlasID);
            lastRenderedAtlasID = renderedAtlasID;

            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, lastRenderedAtlasID == atlasIDRecorder.lastRenderedAtlasID());

            // On draw 3 they should get copied to an 8-bit atlas.
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

            for (int j = 0; j < 10; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            ccpr.ctx()->performDeferredCleanup(std::chrono::milliseconds(0));
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_deferredCleanup)

// Verifies the cache/hash table internals.
class CCPR_cache_hashTable : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        using CoverageType = GrCCAtlas::CoverageType;
        SkMatrix m = SkMatrix::MakeScale(20, 20);

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            if (2 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            if (i < 2) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            auto cache = ccpr.ccpr()->testingOnly_getPathCache();
            REPORTER_ASSERT(reporter, cache);

            const auto& hash = cache->testingOnly_getHashTable();
            const auto& lru = cache->testingOnly_getLRU();
            int count = 0;
            for (GrCCPathCacheEntry* entry : lru) {
                auto* node = hash.find(entry->cacheKey());
                REPORTER_ASSERT(reporter, node);
                REPORTER_ASSERT(reporter, node->entry() == entry);
                REPORTER_ASSERT(reporter, 0 == entry->testingOnly_peekOnFlushRefCnt());
                REPORTER_ASSERT(reporter, entry->unique());
                if (0 == i) {
                    REPORTER_ASSERT(reporter, !entry->cachedAtlas());
                } else {
                    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
                    REPORTER_ASSERT(reporter, cachedAtlas);
                    if (1 == i) {
                        REPORTER_ASSERT(reporter, CoverageType::kFP16_CoverageCount
                                                          == cachedAtlas->coverageType());
                    } else {
                        REPORTER_ASSERT(reporter, CoverageType::kA8_LiteralCoverage
                                                          == cachedAtlas->coverageType());
                    }
                    REPORTER_ASSERT(reporter, cachedAtlas->textureKey().isValid());
                    // The actual proxy should not be held past the end of a flush.
                    REPORTER_ASSERT(reporter, !cachedAtlas->getOnFlushProxy());
                    REPORTER_ASSERT(reporter, 0 == cachedAtlas->testingOnly_peekOnFlushRefCnt());
                }
                ++count;
            }
            REPORTER_ASSERT(reporter, hash.count() == count);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_hashTable)

// Ensures paths get cached even when using a sporadic flushing pattern and drawing out of order
// (a la Chrome tiles).
class CCPR_cache_multiFlush : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        static constexpr int kNumPaths = SK_ARRAY_COUNT(fPaths);
        static constexpr int kBigPrimes[] = {
                9323, 11059, 22993, 38749, 45127, 53147, 64853, 77969, 83269, 99989};

        SkRandom rand;
        SkMatrix m = SkMatrix::I();

        for (size_t i = 0; i < SK_ARRAY_COUNT(kBigPrimes); ++i) {
            int prime = kBigPrimes[i];
            int endPathIdx = (int)rand.nextULessThan(kNumPaths);
            int pathIdx = endPathIdx;
            int nextFlush = rand.nextRangeU(1, 47);
            for (int j = 0; j < kNumPaths; ++j) {
                pathIdx = (pathIdx + prime) % kNumPaths;
                int repeat = rand.nextRangeU(1, 3);
                for (int k = 0; k < repeat; ++k) {
                    ccpr.drawPath(fPaths[pathIdx], m);
                }
                if (nextFlush == j) {
                    ccpr.flush();
                    // The paths are small enough that we should never copy to an A8 atlas.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    if (i < 2) {
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
                    }
                    nextFlush = SkTMin(j + (int)rand.nextRangeU(1, 29), kNumPaths - 1);
                }
            }
            SkASSERT(endPathIdx == pathIdx % kNumPaths);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_multiFlush)

// Ensures a path drawn over multiple tiles gets cached.
class CCPR_cache_multiTileCache : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        // Make sure a path drawn over 9 tiles gets cached (1 tile out of 9 is >10% visibility).
        const SkMatrix m0 = SkMatrix::MakeScale(kCanvasSize*3, kCanvasSize*3);
        const SkPath p0 = fPaths[0];
        for (int i = 0; i < 9; ++i) {
            static constexpr int kRowOrder[9] = {0,1,1,0,2,2,2,1,0};
            static constexpr int kColumnOrder[9] = {0,0,1,1,0,1,2,2,2};

            SkMatrix tileM = m0;
            tileM.postTranslate(-kCanvasSize * kColumnOrder[i], -kCanvasSize * kRowOrder[i]);
            ccpr.drawPath(p0, tileM);
            ccpr.flush();
            if (i < 5) {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else if (5 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }
        }

        // Now make sure paths don't get cached when visibility is <10% for every draw (12 tiles).
        const SkMatrix m1 = SkMatrix::MakeScale(kCanvasSize*4, kCanvasSize*3);
        const SkPath p1 = fPaths[1];
        for (int row = 0; row < 3; ++row) {
            for (int col = 0; col < 4; ++col) {
                SkMatrix tileM = m1;
                tileM.postTranslate(-kCanvasSize * col, -kCanvasSize * row);
                ccpr.drawPath(p1, tileM);
                ccpr.flush();
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }
        }

        // Double-check the cache is still intact.
        ccpr.drawPath(p0, m0);
        ccpr.flush();
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        ccpr.drawPath(p1, m1);
        ccpr.flush();
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
    }
};
DEF_CCPR_TEST(CCPR_cache_multiTileCache)

// This test exercises CCPR's cache capabilities by drawing many paths with two different
// transformation matrices. We then vary the matrices independently by whole and partial pixels,
// and verify the caching behaved as expected.
class CCPR_cache_partialInvalidate : public CCPRCacheTest {
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    static constexpr int kPathSize = 4;

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[2] = {
            SkMatrix::MakeTrans(5, 5),
            SkMatrix::MakeTrans(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
        };
        matrices[0].preScale(kPathSize, kPathSize);
        matrices[1].preScale(kPathSize, kPathSize);

        int firstAtlasID = 0;

        for (int iterIdx = 0; iterIdx < 4*3*2; ++iterIdx) {
            this->drawPathsAndFlush(ccpr, matrices, 2);

            if (0 == iterIdx) {
                // First iteration: just note the ID of the stashed atlas and continue.
                firstAtlasID = atlasIDRecorder.lastRenderedAtlasID();
                REPORTER_ASSERT(reporter, 0 != firstAtlasID);
                continue;
            }

            int testIdx = (iterIdx/2) % 3;
            int repetitionIdx = iterIdx % 2;
            switch (testIdx) {
                case 0:
                    if (0 == repetitionIdx) {
                        // This is the big test. New paths were drawn twice last round. On hit 2
                        // (last time), 'firstAtlasID' was cached as a 16-bit atlas. Now, on hit 3,
                        // these paths should be copied out of 'firstAtlasID', and into an A8 atlas.
                        // THEN: we should recycle 'firstAtlasID' and reuse that same texture to
                        // render the new masks.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                        // This is hit 2 for the new masks. Next time they will be copied to an A8
                        // atlas.
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    }

                    if (1 == repetitionIdx) {
                        // Integer translates: all path masks stay valid.
                        matrices[0].preTranslate(-1, -1);
                        matrices[1].preTranslate(1, 1);
                    }
                    break;

                case 1:
                    if (0 == repetitionIdx) {
                        // New paths were drawn twice last round. The third hit (now) they should be
                        // copied to an A8 atlas.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    }

                    // This draw should have gotten 100% cache hits; we only did integer translates
                    // last time (or none if it was the first flush). Therefore, everything should
                    // have been cached.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

                    if (1 == repetitionIdx) {
                        // Invalidate even path masks.
                        matrices[0].preTranslate(1.6f, 1.4f);
                    }
                    break;

                case 2:
                    // No new masks to copy from last time; it had 100% cache hits.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());

                    // Even path masks were invalidated last iteration by a subpixel translate.
                    // They should have been re-rendered this time in the original 'firstAtlasID'
                    // texture.
                    REPORTER_ASSERT(reporter,
                                    atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);

                    if (1 == repetitionIdx) {
                        // Invalidate odd path masks.
                        matrices[1].preTranslate(-1.4f, -1.6f);
                    }
                    break;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_partialInvalidate)

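// Ensures paths still get unreffed if the GrCCPerOpListPaths objects are released before the Ops
// that reference them execute.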
class CCPR_unrefPerOpListPathsBeforeOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        for (int i = 0; i < 10000; ++i) {
            // Draw enough paths to make the arena allocator hit the heap.
            ccpr.drawPath(fPath);
        }

        // Unref the GrCCPerOpListPaths object.
        auto perOpListPathsMap = ccpr.ccpr()->detachPendingPaths();
        perOpListPathsMap.clear();

        // Now delete the Op and all its draws.
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_unrefPerOpListPathsBeforeOps)

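// Base class for tests that render with CCPR on a real (non-mock) GPU context.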
class CCPRRenderingTest {
public:
    void run(skiatest::Reporter* reporter, GrContext* ctx, bool doStroke) const {
        if (!ctx->contextPriv().drawingManager()->getCoverageCountingPathRenderer()) {
            return; // CCPR is not enabled on this GPU.
        }
        CCPRPathDrawer ccpr(sk_ref_sp(ctx), reporter, doStroke);
        if (!ccpr.valid()) {
            return;
        }
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRRenderingTest() {}

protected:
    virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0;
};

#define DEF_CCPR_RENDERING_TEST(name) \
    DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
        name test; \
        test.run(reporter, ctxInfo.grContext(), false); \
        test.run(reporter, ctxInfo.grContext(), true); \
    }

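// Stress-tests CCPR with a path containing 2^14 verbs, most of them offscreen, to flush out
// per-draw instance limits in the driver.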
class CCPR_busyPath : public CCPRRenderingTest {
    void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override {
        static constexpr int kNumBusyVerbs = 1 << 14;
        ccpr.clear();
        SkPath busyPath;
        busyPath.moveTo(0, 0); // top left
        busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right
        for (int i = 2; i < kNumBusyVerbs; ++i) {
            float offset = i * ((float)kCanvasSize / kNumBusyVerbs);
            busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen
        }
        ccpr.drawPath(busyPath);

        ccpr.flush(); // If this doesn't crash, the test passed.
                      // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in
                      // your platform's GrGLCaps.
    }
};
DEF_CCPR_RENDERING_TEST(CCPR_busyPath)