/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkCanvas.h"
#include "include/core/SkSpan.h"
#include "include/core/SkSurface.h"
#include "include/gpu/GrDirectContext.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/GrGpu.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/GrResourceAllocator.h"
#include "src/gpu/GrResourceProviderPriv.h"
#include "src/gpu/GrSurfaceProxyPriv.h"
#include "src/gpu/GrTexture.h"
#include "src/gpu/GrTextureProxy.h"
#include "tests/Test.h"
#include "tools/gpu/ManagedBackendTexture.h"

namespace {
struct ProxyParams {
    int fSize;
    GrRenderable fRenderable;
    GrColorType fColorType;
    SkBackingFit fFit;
    int fSampleCnt;
    SkBudgeted fBudgeted;
    enum Kind {
        kDeferred,
        kBackend,
        kFullyLazy,
        kLazy,
        kInstantiated
    };
    Kind fKind;
    GrUniqueKey fUniqueKey = GrUniqueKey();
    // TODO: do we care about mipmapping
};

constexpr GrRenderable kRT = GrRenderable::kYes;
constexpr GrRenderable kNotRT = GrRenderable::kNo;

constexpr GrColorType kRGBA = GrColorType::kRGBA_8888;
constexpr GrColorType kAlpha = GrColorType::kAlpha_8;

constexpr SkBackingFit kE = SkBackingFit::kExact;
constexpr SkBackingFit kA = SkBackingFit::kApprox;

constexpr SkBudgeted kNotB = SkBudgeted::kNo;
constexpr SkBudgeted kB = SkBudgeted::kYes;

constexpr ProxyParams::Kind kDeferred = ProxyParams::Kind::kDeferred;
constexpr ProxyParams::Kind kBackend = ProxyParams::Kind::kBackend;
constexpr ProxyParams::Kind kInstantiated = ProxyParams::Kind::kInstantiated;
constexpr ProxyParams::Kind kLazy = ProxyParams::Kind::kLazy;
constexpr ProxyParams::Kind kFullyLazy = ProxyParams::Kind::kFullyLazy;
}  // anonymous namespace

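// Creates a deferred (not yet instantiated) proxy with the size, renderability, sample count,
// backing fit, and budget status taken from the ProxyParams.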
static sk_sp<GrSurfaceProxy> make_deferred(GrProxyProvider* proxyProvider, const GrCaps* caps,
                                           const ProxyParams& p) {
    const GrBackendFormat format = caps->getDefaultBackendFormat(p.fColorType, p.fRenderable);
    return proxyProvider->createProxy(format, {p.fSize, p.fSize}, p.fRenderable, p.fSampleCnt,
                                      GrMipmapped::kNo, p.fFit, p.fBudgeted, GrProtected::kNo);
}

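// Creates a backend texture through ManagedBackendTexture and wraps it in a borrowed,
// uncacheable, read-only texture proxy. Returns nullptr if the backend texture can't be made.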
static sk_sp<GrSurfaceProxy> make_backend(GrDirectContext* dContext, const ProxyParams& p) {
    GrProxyProvider* proxyProvider = dContext->priv().proxyProvider();

    SkColorType skColorType = GrColorTypeToSkColorType(p.fColorType);
    SkASSERT(SkColorType::kUnknown_SkColorType != skColorType);

    auto mbet = sk_gpu_test::ManagedBackendTexture::MakeWithoutData(
            dContext, p.fSize, p.fSize, skColorType, GrMipmapped::kNo, GrRenderable::kNo);

    if (!mbet) {
        return nullptr;
    }

    return proxyProvider->wrapBackendTexture(mbet->texture(),
                                             kBorrow_GrWrapOwnership,
                                             GrWrapCacheable::kNo,
                                             kRead_GrIOType,
                                             mbet->refCountedCallback());
}

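// Creates a fully lazy proxy (dimensions unknown until instantiation) whose instantiation
// callback allocates a p.fSize x p.fSize texture.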
static sk_sp<GrSurfaceProxy> make_fully_lazy(GrProxyProvider* proxyProvider, const GrCaps* caps,
                                             const ProxyParams& p) {
    const GrBackendFormat format = caps->getDefaultBackendFormat(p.fColorType, p.fRenderable);
    auto cb = [p](GrResourceProvider* provider, const GrSurfaceProxy::LazySurfaceDesc& desc) {
        auto tex = provider->createTexture({p.fSize, p.fSize}, desc.fFormat,
                                           desc.fTextureType,
                                           desc.fRenderable, desc.fSampleCnt,
                                           desc.fMipmapped, desc.fBudgeted,
                                           desc.fProtected);
        return GrSurfaceProxy::LazyCallbackResult(std::move(tex));
    };
    return GrProxyProvider::MakeFullyLazyProxy(std::move(cb), format, p.fRenderable, p.fSampleCnt,
                                               GrProtected::kNo, *caps,
                                               GrSurfaceProxy::UseAllocator::kYes);
}

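// Creates a lazy proxy with known dimensions; its callback allocates the texture from the
// LazySurfaceDesc supplied at instantiation time.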
static sk_sp<GrSurfaceProxy> make_lazy(GrProxyProvider* proxyProvider, const GrCaps* caps,
                                       const ProxyParams& p) {
    const GrBackendFormat format = caps->getDefaultBackendFormat(p.fColorType, p.fRenderable);
    auto cb = [](GrResourceProvider* provider, const GrSurfaceProxy::LazySurfaceDesc& desc) {
        auto tex = provider->createTexture(desc.fDimensions, desc.fFormat,
                                           desc.fTextureType,
                                           desc.fRenderable, desc.fSampleCnt,
                                           desc.fMipmapped, desc.fBudgeted,
                                           desc.fProtected);
        return GrSurfaceProxy::LazyCallbackResult(std::move(tex));
    };
    return proxyProvider->createLazyProxy(std::move(cb), format, {p.fSize, p.fSize},
                                          GrMipmapped::kNo, GrMipmapStatus::kNotAllocated,
                                          GrInternalSurfaceFlags::kNone,
                                          p.fFit, p.fBudgeted, GrProtected::kNo,
                                          GrSurfaceProxy::UseAllocator::kYes);
}

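// Creates a proxy of the kind requested by ProxyParams::fKind. kInstantiated proxies are
// deferred proxies that are immediately given a backing surface. If the params carry a valid
// unique key, it is assigned to the resulting texture proxy.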
static sk_sp<GrSurfaceProxy> make_proxy(GrDirectContext* dContext, const ProxyParams& p) {
    GrProxyProvider* proxyProvider = dContext->priv().proxyProvider();
    const GrCaps* caps = dContext->priv().caps();
    sk_sp<GrSurfaceProxy> proxy;
    switch (p.fKind) {
        case ProxyParams::kDeferred:
            proxy = make_deferred(proxyProvider, caps, p);
            break;
        case ProxyParams::kBackend:
            proxy = make_backend(dContext, p);
            break;
        case ProxyParams::kFullyLazy:
            proxy = make_fully_lazy(proxyProvider, caps, p);
            break;
        case ProxyParams::kLazy:
            proxy = make_lazy(proxyProvider, caps, p);
            break;
        case ProxyParams::kInstantiated:
            proxy = make_deferred(proxyProvider, caps, p);
            if (proxy) {
                auto surf = proxy->priv().createSurface(dContext->priv().resourceProvider());
                proxy->priv().assign(std::move(surf));
            }
            break;
    }
    if (proxy && p.fUniqueKey.isValid()) {
        SkASSERT(proxy->asTextureProxy());
        proxyProvider->assignUniqueKeyToProxy(p.fUniqueKey, proxy->asTextureProxy());
    }
    return proxy;
}

// Basic test that two proxies with overlapping intervals and compatible descriptors are
// assigned different GrSurfaces.
static void overlap_test(skiatest::Reporter* reporter, GrDirectContext* dContext,
                         sk_sp<GrSurfaceProxy> p1, sk_sp<GrSurfaceProxy> p2,
                         bool expectedResult) {
    GrResourceAllocator alloc(dContext);

    alloc.addInterval(p1.get(), 0, 4, GrResourceAllocator::ActualUse::kYes);
    alloc.incOps();
    alloc.addInterval(p2.get(), 1, 2, GrResourceAllocator::ActualUse::kYes);
    alloc.incOps();

    REPORTER_ASSERT(reporter, alloc.planAssignment());
    REPORTER_ASSERT(reporter, alloc.makeBudgetHeadroom());
    REPORTER_ASSERT(reporter, alloc.assign());

    REPORTER_ASSERT(reporter, p1->peekSurface());
    REPORTER_ASSERT(reporter, p2->peekSurface());
    bool doTheBackingStoresMatch = p1->underlyingUniqueID() == p2->underlyingUniqueID();
    REPORTER_ASSERT(reporter, expectedResult == doTheBackingStoresMatch);
}

// Test various cases when two proxies do not have overlapping intervals.
// This mainly acts as a test of the ResourceAllocator's free pool.
static void non_overlap_test(skiatest::Reporter* reporter, GrDirectContext* dContext,
                             sk_sp<GrSurfaceProxy> p1, sk_sp<GrSurfaceProxy> p2,
                             bool expectedResult) {
    GrResourceAllocator alloc(dContext);

    alloc.incOps();
    alloc.incOps();
    alloc.incOps();
    alloc.incOps();
    alloc.incOps();
    alloc.incOps();

    alloc.addInterval(p1.get(), 0, 2, GrResourceAllocator::ActualUse::kYes);
    alloc.addInterval(p2.get(), 3, 5, GrResourceAllocator::ActualUse::kYes);

    REPORTER_ASSERT(reporter, alloc.planAssignment());
    REPORTER_ASSERT(reporter, alloc.makeBudgetHeadroom());
    REPORTER_ASSERT(reporter, alloc.assign());

    REPORTER_ASSERT(reporter, p1->peekSurface());
    REPORTER_ASSERT(reporter, p2->peekSurface());
    bool doTheBackingStoresMatch = p1->underlyingUniqueID() == p2->underlyingUniqueID();
    REPORTER_ASSERT(reporter, expectedResult == doTheBackingStoresMatch);
}

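// Runs the overlap and non-overlap cases above over combinations of renderability, backing fit,
// size, sample count, and color type.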
DEF_GPUTEST_FOR_RENDERING_CONTEXTS(ResourceAllocatorTest, reporter, ctxInfo) {
    auto dContext = ctxInfo.directContext();
    const GrCaps* caps = dContext->priv().caps();

    struct TestCase {
        ProxyParams fP1;
        ProxyParams fP2;
        bool fExpectation;
    };

    constexpr bool kShare = true;
    constexpr bool kDontShare = false;

    // Non-RT GrSurfaces are never recycled on some platforms.
    bool kConditionallyShare = caps->reuseScratchTextures();

    static const TestCase overlappingTests[] = {
            // Two proxies with overlapping intervals and compatible descriptors should never share
            // RT version
            {{64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             {64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             kDontShare},
            // non-RT version
            {{64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
             {64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
             kDontShare},
    };

    for (size_t i = 0; i < SK_ARRAY_COUNT(overlappingTests); i++) {
        const TestCase& test = overlappingTests[i];
        sk_sp<GrSurfaceProxy> p1 = make_proxy(dContext, test.fP1);
        sk_sp<GrSurfaceProxy> p2 = make_proxy(dContext, test.fP2);
        reporter->push(SkStringPrintf("case %d", SkToInt(i)));
        overlap_test(reporter, dContext, std::move(p1), std::move(p2), test.fExpectation);
        reporter->pop();
    }

    auto beFormat = caps->getDefaultBackendFormat(GrColorType::kRGBA_8888, GrRenderable::kYes);
    int k2 = caps->getRenderTargetSampleCount(2, beFormat);
    int k4 = caps->getRenderTargetSampleCount(4, beFormat);

    static const TestCase nonOverlappingTests[] = {
            // Two non-overlapping intervals w/ compatible proxies should share
            // both same size & approx
            {{64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             {64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             kShare},
            {{64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
             {64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
             kConditionallyShare},
            // different sizes but still approx
            {{64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             {50, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             kShare},
            {{64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
             {50, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
             kConditionallyShare},
            // same sizes but exact
            {{64, kRT, kRGBA, kE, 1, kNotB, kDeferred},
             {64, kRT, kRGBA, kE, 1, kNotB, kDeferred},
             kShare},
            {{64, kNotRT, kRGBA, kE, 1, kNotB, kDeferred},
             {64, kNotRT, kRGBA, kE, 1, kNotB, kDeferred},
             kConditionallyShare},
            // Two non-overlapping intervals w/ different exact sizes should not share
            {{56, kRT, kRGBA, kE, 1, kNotB, kDeferred},
             {54, kRT, kRGBA, kE, 1, kNotB, kDeferred},
             kDontShare},
            // Two non-overlapping intervals w/ _very different_ approx sizes should not share
            {{255, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             {127, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             kDontShare},
            // Two non-overlapping intervals w/ different MSAA sample counts should not share
            // (unless the caps map both requests to the same actual sample count)
            {{64, kRT, kRGBA, kA, k2, kNotB, kDeferred},
             {64, kRT, kRGBA, kA, k4, kNotB, kDeferred},
             k2 == k4},
            // Two non-overlapping intervals w/ different color types should not share
            {{64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             {64, kRT, kAlpha, kA, 1, kNotB, kDeferred},
             kDontShare},
            // Two non-overlapping intervals w/ different RT classifications should never share
            {{64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             {64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
             kDontShare},
            {{64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
             {64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             kDontShare},
            // Two non-overlapping intervals w/ different origins should share
            {{64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             {64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
             kShare},
            // Wrapped backend textures should never be reused
            {{64, kNotRT, kRGBA, kE, 1, kNotB, kBackend},
             {64, kNotRT, kRGBA, kE, 1, kNotB, kDeferred},
             kDontShare}
    };

    for (size_t i = 0; i < SK_ARRAY_COUNT(nonOverlappingTests); i++) {
        const TestCase& test = nonOverlappingTests[i];
        sk_sp<GrSurfaceProxy> p1 = make_proxy(dContext, test.fP1);
        sk_sp<GrSurfaceProxy> p2 = make_proxy(dContext, test.fP2);

        if (!p1 || !p2) {
            continue; // creation can fail (e.g., for msaa4 on iOS)
        }

        reporter->push(SkStringPrintf("case %d", SkToInt(i)));
        non_overlap_test(reporter, dContext, std::move(p1), std::move(p2),
                         test.fExpectation);
        reporter->pop();
    }
}

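// Creates a budgeted 1024x1024 RGBA render-target surface and clears it; used by the stress
// test below to generate GPU work.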
static void draw(GrRecordingContext* rContext) {
    SkImageInfo ii = SkImageInfo::Make(1024, 1024, kRGBA_8888_SkColorType, kPremul_SkAlphaType);

    sk_sp<SkSurface> s = SkSurface::MakeRenderTarget(rContext, SkBudgeted::kYes,
                                                     ii, 1, kTopLeft_GrSurfaceOrigin, nullptr);

    SkCanvas* c = s->getCanvas();

    c->clear(SK_ColorBLACK);
}

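// Forces the cache limit to zero so every allocation is over budget, then issues several draws
// to exercise the allocator's handling of a permanently over-budget cache.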
DEF_GPUTEST_FOR_RENDERING_CONTEXTS(ResourceAllocatorStressTest, reporter, ctxInfo) {
    auto context = ctxInfo.directContext();

    size_t maxBytes = context->getResourceCacheLimit();

    context->setResourceCacheLimit(0); // We'll always be overbudget

    draw(context);
    draw(context);
    draw(context);
    draw(context);
    context->flushAndSubmit();

    context->setResourceCacheLimit(maxBytes);
}

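// A proxy (created later from fParams) plus the inclusive op range [fStart, fEnd] over which
// it is used.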
struct Interval {
    ProxyParams fParams;
    int fStart;
    int fEnd;
    sk_sp<GrSurfaceProxy> fProxy = nullptr;
};

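// One memory-budget scenario: the cache budget, resources pre-seeded into the cache (split into
// purgeable and unpurgeable), the intervals to allocate, and whether they are expected to fit.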
struct TestCase {
    const char* fName;
    bool fShouldFit;
    size_t fBudget;
    SkTArray<ProxyParams> fPurgeableResourcesInCache = {};
    SkTArray<ProxyParams> fUnpurgeableResourcesInCache = {};
    SkTArray<Interval> fIntervals;
};

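// Resets the cache to the test's budget, seeds it with the requested purgeable and unpurgeable
// resources, registers the intervals with a GrResourceAllocator, and verifies that
// makeBudgetHeadroom() succeeds exactly when the test expects the intervals to fit.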
static void memory_budget_test(skiatest::Reporter* reporter,
                               GrDirectContext* dContext,
                               const TestCase& test) {
    // Reset cache.
    auto cache = dContext->priv().getResourceCache();
    cache->releaseAll();
    cache->setLimit(test.fBudget);

    // Add purgeable entries.
    size_t expectedPurgeableBytes = 0;
    SkTArray<sk_sp<GrSurface>> purgeableSurfaces;
    for (auto& params : test.fPurgeableResourcesInCache) {
        SkASSERT(params.fKind == kInstantiated);
        sk_sp<GrSurfaceProxy> proxy = make_proxy(dContext, params);
        REPORTER_ASSERT(reporter, proxy->peekSurface());
        expectedPurgeableBytes += proxy->gpuMemorySize();
        purgeableSurfaces.push_back(sk_ref_sp(proxy->peekSurface()));
    }
    purgeableSurfaces.reset();
    REPORTER_ASSERT(reporter, expectedPurgeableBytes == cache->getPurgeableBytes(),
                    "%zu", cache->getPurgeableBytes());

    // Add unpurgeable entries.
    size_t expectedUnpurgeableBytes = 0;
    SkTArray<sk_sp<GrSurface>> unpurgeableSurfaces;
    for (auto& params : test.fUnpurgeableResourcesInCache) {
        SkASSERT(params.fKind == kInstantiated);
        sk_sp<GrSurfaceProxy> proxy = make_proxy(dContext, params);
        REPORTER_ASSERT(reporter, proxy->peekSurface());
        expectedUnpurgeableBytes += proxy->gpuMemorySize();
        unpurgeableSurfaces.push_back(sk_ref_sp(proxy->peekSurface()));
    }

    auto unpurgeableBytes = cache->getBudgetedResourceBytes() - cache->getPurgeableBytes();
    REPORTER_ASSERT(reporter, expectedUnpurgeableBytes == unpurgeableBytes,
                    "%zu", unpurgeableBytes);

    // Add intervals and test.
    GrResourceAllocator alloc(dContext);
    for (auto& interval : test.fIntervals) {
        for (int i = interval.fStart; i <= interval.fEnd; i++) {
            alloc.incOps();
        }
        alloc.addInterval(interval.fProxy.get(), interval.fStart, interval.fEnd,
                          GrResourceAllocator::ActualUse::kYes);
    }
    REPORTER_ASSERT(reporter, alloc.planAssignment());
    REPORTER_ASSERT(reporter, alloc.makeBudgetHeadroom() == test.fShouldFit);
}

DEF_GPUTEST_FOR_RENDERING_CONTEXTS(ResourceAllocatorMemoryBudgetTest, reporter, ctxInfo) {
    auto dContext = ctxInfo.directContext();

    constexpr bool kUnder = true;
    constexpr bool kOver = false;
    constexpr size_t kRGBA64Bytes = 4 * 64 * 64;
    const ProxyParams kProxy64 = {64, kRT, kRGBA, kE, 1, kB, kDeferred};
    const ProxyParams kProxy64NotBudgeted = {64, kRT, kRGBA, kE, 1, kNotB, kDeferred};
    const ProxyParams kProxy64Lazy = {64, kRT, kRGBA, kE, 1, kB, kLazy};
    const ProxyParams kProxy64FullyLazy = {64, kRT, kRGBA, kE, 1, kB, kFullyLazy};
    const ProxyParams kProxy32Instantiated = {32, kRT, kRGBA, kE, 1, kB, kInstantiated};
    const ProxyParams kProxy64Instantiated = {64, kRT, kRGBA, kE, 1, kB, kInstantiated};

    TestCase tests[] = {
            {"empty DAG", kUnder, 0, {}, {}, {}},
            {"unbudgeted", kUnder, 0, {}, {}, {{kProxy64NotBudgeted, 0, 2}}},
            {"basic", kUnder, kRGBA64Bytes, {}, {}, {{kProxy64, 0, 2}}},
            {"basic, over", kOver, kRGBA64Bytes - 1, {}, {}, {{kProxy64, 0, 2}}},
            {"shared", kUnder, kRGBA64Bytes, {}, {},
             {
                 {kProxy64, 0, 2},
                 {kProxy64, 3, 5},
             }},
            {"retrieved from cache", kUnder, kRGBA64Bytes,
             /* purgeable */ {kProxy64Instantiated},
             /* unpurgeable */ {},
             {
                 {kProxy64, 0, 2}
             }},
            {"purge 4", kUnder, kRGBA64Bytes,
             /* purgeable */ {
                 kProxy32Instantiated,
                 kProxy32Instantiated,
                 kProxy32Instantiated,
                 kProxy32Instantiated
             },
             /* unpurgeable */ {},
             {
                 {kProxy64, 0, 2}
             }},
            {"dont purge what we've reserved", kOver, kRGBA64Bytes,
             /* purgeable */ {kProxy64Instantiated},
             /* unpurgeable */ {},
             {
                 {kProxy64, 0, 2},
                 {kProxy64, 1, 3}
             }},
            {"unpurgeable", kOver, kRGBA64Bytes,
             /* purgeable */ {},
             /* unpurgeable */ {kProxy64Instantiated},
             {
                 {kProxy64, 0, 2}
             }},
            {"lazy", kUnder, kRGBA64Bytes,
             /* purgeable */ {},
             /* unpurgeable */ {},
             {
                 {kProxy64Lazy, 0, 2}
             }},
            {"lazy, over", kOver, kRGBA64Bytes - 1,
             /* purgeable */ {},
             /* unpurgeable */ {},
             {
                 {kProxy64Lazy, 0, 2}
             }},
            {"fully-lazy", kUnder, kRGBA64Bytes,
             /* purgeable */ {},
             /* unpurgeable */ {},
             {
                 {kProxy64FullyLazy, 0, 2}
             }},
            {"fully-lazy, over", kOver, kRGBA64Bytes - 1,
             /* purgeable */ {},
             /* unpurgeable */ {},
             {
                 {kProxy64FullyLazy, 0, 2}
             }},
    };
    SkString match("");
    for (size_t i = 0; i < SK_ARRAY_COUNT(tests); i++) {
        TestCase& test = tests[i];
        if (match.isEmpty() || match == SkString(test.fName)) {
            // Create proxies
            for (Interval& interval : test.fIntervals) {
                interval.fProxy = make_proxy(dContext, interval.fParams);
            }
            reporter->push(SkString(test.fName));
            memory_budget_test(reporter, dContext, test);
            reporter->pop();
        }
    }
}