/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ganesh/ops/AtlasPathRenderer.h"

#include "src/base/SkVx.h"
#include "src/core/SkIPoint16.h"
#include "src/gpu/ganesh/GrCaps.h"
#include "src/gpu/ganesh/GrClip.h"
#include "src/gpu/ganesh/GrDirectContextPriv.h"
#include "src/gpu/ganesh/GrTexture.h"
#include "src/gpu/ganesh/SurfaceDrawContext.h"
#include "src/gpu/ganesh/effects/GrModulateAtlasCoverageEffect.h"
#include "src/gpu/ganesh/geometry/GrStyledShape.h"
#include "src/gpu/ganesh/ops/AtlasRenderTask.h"
#include "src/gpu/ganesh/ops/DrawAtlasPathOp.h"
#include "src/gpu/ganesh/ops/TessellationPathRenderer.h"
#include "src/gpu/ganesh/tessellate/GrTessellationShader.h"

namespace {

// Returns the rect [topLeftFloor, botRightCeil], which is the rect [r] rounded out to integer
// boundaries.
std::pair<skvx::float2, skvx::float2> round_out(const SkRect& r) {
    return {floor(skvx::float2::Load(&r.fLeft)),
            ceil(skvx::float2::Load(&r.fRight))};
}

// Returns whether the given proxyOwner uses the atlasProxy.
template<typename T> bool refs_atlas(const T* proxyOwner, const GrSurfaceProxy* atlasProxy) {
    bool refsAtlas = false;
    auto checkForAtlasRef = [atlasProxy, &refsAtlas](GrSurfaceProxy* proxy, GrMipmapped) {
        if (proxy == atlasProxy) {
            refsAtlas = true;
        }
    };
    if (proxyOwner) {
        proxyOwner->visitProxies(checkForAtlasRef);
    }
    return refsAtlas;
}

bool is_visible(const SkRect& pathDevBounds, const SkIRect& clipBounds) {
    auto pathTopLeft = skvx::float2::Load(&pathDevBounds.fLeft);
    auto pathBotRight = skvx::float2::Load(&pathDevBounds.fRight);
    // Empty paths are never visible. Phrase this as a NOT of positive logic so we also return
    // false in the case of NaN.
    if (!all(pathTopLeft < pathBotRight)) {
        return false;
    }
    auto clipTopLeft = skvx::cast<float>(skvx::int2::Load(&clipBounds.fLeft));
    auto clipBotRight = skvx::cast<float>(skvx::int2::Load(&clipBounds.fRight));
    static_assert(sizeof(clipBounds) == sizeof(clipTopLeft) + sizeof(clipBotRight));
    return all(pathTopLeft < clipBotRight) && all(pathBotRight > clipTopLeft);
}

#ifdef SK_DEBUG
// Ensures the atlas dependencies are set up such that each atlas will be totally out of service
// before we render the next one in line. This means there will only ever be one atlas active at a
// time and that they can all share the same texture.
void validate_atlas_dependencies(const SkTArray<sk_sp<skgpu::v1::AtlasRenderTask>>& atlasTasks) {
    for (int i = atlasTasks.size() - 1; i >= 1; --i) {
        auto atlasTask = atlasTasks[i].get();
        auto previousAtlasTask = atlasTasks[i - 1].get();
        // Double check that atlasTask depends on every dependent of its previous atlas. If this
        // fires it might mean previousAtlasTask gained a new dependent after atlasTask came into
        // service (maybe by an op that hadn't yet been added to an opsTask when we registered the
        // new atlas with the drawingManager).
        for (GrRenderTask* previousAtlasUser : previousAtlasTask->dependents()) {
            SkASSERT(atlasTask->dependsOn(previousAtlasUser));
        }
    }
}
#endif

}  // anonymous namespace

namespace skgpu::v1 {

constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;
constexpr static int kAtlasInitialSize = 512;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 256px high rectanizer band.
constexpr static int kAtlasMaxPathHeight = 256;

// If we have MSAA to fall back on, paths are already fast enough that we really only benefit from
// atlasing when they are very small.
constexpr static int kAtlasMaxPathHeightWithMSAAFallback = 128;

// http://skbug.com/12291 -- The way GrDynamicAtlas works, a single 2048x1 path is given an entire
// 2048x2048 atlas with draw bounds of 2048x1025. Limit the max width to 1024 to avoid this
// landmine until it's resolved.
constexpr static int kAtlasMaxPathWidth = 1024;
bool AtlasPathRenderer::IsSupported(GrRecordingContext* rContext) {
#ifdef SK_BUILD_FOR_IOS
    // b/195095846: There is a bug with the atlas path renderer on OpenGL iOS. Disable until we can
    // investigate.
    if (rContext->backend() == GrBackendApi::kOpenGL) {
        return false;
    }
#endif
#ifdef SK_BUILD_FOR_WIN
    // http://skbug.com/13519 There is a bug with the atlas path renderer on Direct3D, running on
    // Radeon hardware and possibly others. Disable until we can investigate.
    if (rContext->backend() == GrBackendApi::kDirect3D) {
        return false;
    }
#endif
    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    return rContext->asDirectContext() &&  // The atlas doesn't support DDL yet.
           caps.internalMultisampleCount(atlasFormat) > 1 &&
           // GrAtlasRenderTask currently requires tessellation. In the future it could use the
           // default path renderer when tessellation isn't available.
           TessellationPathRenderer::IsSupported(caps);
}

sk_sp<AtlasPathRenderer> AtlasPathRenderer::Make(GrRecordingContext* rContext) {
    return IsSupported(rContext)
            ? sk_sp<AtlasPathRenderer>(new AtlasPathRenderer(rContext->asDirectContext()))
            : nullptr;
}
AtlasPathRenderer::AtlasPathRenderer(GrDirectContext* dContext) {
    SkASSERT(IsSupported(dContext));
    const GrCaps& caps = *dContext->priv().caps();
#if GR_TEST_UTILS
    fAtlasMaxSize = dContext->priv().options().fMaxTextureAtlasSize;
#else
    fAtlasMaxSize = 2048;
#endif
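    // Round the max atlas size down to a power of two, not exceeding the caps' max preferred
    // render target size, then derive the max path width and the (power-of-two) initial atlas
    // size from it.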
    fAtlasMaxSize = SkPrevPow2(std::min(fAtlasMaxSize, (float)caps.maxPreferredRenderTargetSize()));
    fAtlasMaxPathWidth = std::min((float)kAtlasMaxPathWidth, fAtlasMaxSize);
    fAtlasInitialSize = SkNextPow2(std::min(kAtlasInitialSize, (int)fAtlasMaxSize));
}

bool AtlasPathRenderer::pathFitsInAtlas(const SkRect& pathDevBounds,
                                        GrAAType fallbackAAType) const {
    SkASSERT(fallbackAAType != GrAAType::kNone);  // The atlas doesn't support non-AA.
    float atlasMaxPathHeight_p2 = (fallbackAAType == GrAAType::kMSAA)
            ? kAtlasMaxPathHeightWithMSAAFallback * kAtlasMaxPathHeightWithMSAAFallback
            : kAtlasMaxPathHeight * kAtlasMaxPathHeight;
    auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
    auto size = botRightCeil - topLeftFloor;
    return // Ensure the path's largest dimension fits in the atlas.
           all(size <= fAtlasMaxPathWidth) &&
           // Since we will transpose tall skinny paths, limiting to atlasMaxPathHeight^2 pixels
           // guarantees heightInAtlas <= atlasMaxPathHeight, while also allowing paths that are
           // very wide and short.
           size[0] * size[1] <= atlasMaxPathHeight_p2;
}

void AtlasPathRenderer::AtlasPathKey::set(const SkMatrix& m, const SkPath& path) {
    fPathGenID = path.getGenerationID();
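    // Only the affine components of the matrix are stored; perspective matrices are rejected in
    // onCanDrawPath() and makeAtlasClipEffect() before a path ever reaches the atlas.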
    fAffineMatrix[0] = m.getScaleX();
    fAffineMatrix[1] = m.getSkewX();
    fAffineMatrix[2] = m.getTranslateX();
    fAffineMatrix[3] = m.getSkewY();
    fAffineMatrix[4] = m.getScaleY();
    fAffineMatrix[5] = m.getTranslateY();
    fFillRule = (uint32_t)GrFillRuleForSkPath(path);  // Fill rule doesn't affect the path's genID.
}

bool AtlasPathRenderer::addPathToAtlas(GrRecordingContext* rContext,
                                       const SkMatrix& viewMatrix,
                                       const SkPath& path,
                                       const SkRect& pathDevBounds,
                                       SkIRect* devIBounds,
                                       SkIPoint16* locationInAtlas,
                                       bool* transposedInAtlas,
                                       const DrawRefsAtlasCallback& drawRefsAtlasCallback) {
    SkASSERT(!viewMatrix.hasPerspective());  // See onCanDrawPath().

    pathDevBounds.roundOut(devIBounds);
#ifdef SK_DEBUG
    // is_visible() should have guaranteed the path's bounds were representable as ints, since clip
    // bounds within the max render target size are nowhere near INT_MAX.
    auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
    SkASSERT(all(skvx::cast<float>(skvx::int2::Load(&devIBounds->fLeft)) == topLeftFloor));
    SkASSERT(all(skvx::cast<float>(skvx::int2::Load(&devIBounds->fRight)) == botRightCeil));
#endif

    int widthInAtlas = devIBounds->width();
    int heightInAtlas = devIBounds->height();
    // is_visible() should have guaranteed the path's bounds were non-empty.
    SkASSERT(widthInAtlas > 0 && heightInAtlas > 0);

    if (SkNextPow2(widthInAtlas) == SkNextPow2(heightInAtlas)) {
        // Both dimensions go to the same pow2 band in the atlas. Use the larger dimension as
        // height for more efficient packing.
        *transposedInAtlas = widthInAtlas > heightInAtlas;
    } else {
        // Both dimensions go to different pow2 bands in the atlas. Use the smaller pow2 band for
        // most efficient packing.
        *transposedInAtlas = heightInAtlas > widthInAtlas;
    }
    if (*transposedInAtlas) {
        std::swap(heightInAtlas, widthInAtlas);
    }
    // pathFitsInAtlas() should have guaranteed these constraints on the path size.
    SkASSERT(widthInAtlas <= (int)fAtlasMaxPathWidth);
    SkASSERT(heightInAtlas <= kAtlasMaxPathHeight);

    // Check if this path is already in the atlas. This is mainly for clip paths.
    AtlasPathKey atlasPathKey;
    if (!path.isVolatile()) {
        atlasPathKey.set(viewMatrix, path);
        if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
            *locationInAtlas = *existingLocation;
            return true;
        }
    }

    if (fAtlasRenderTasks.empty() ||
        !fAtlasRenderTasks.back()->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                           heightInAtlas, *transposedInAtlas, locationInAtlas)) {
        // We either don't have an atlas yet or the current one is full. Try to replace it.
        auto currentAtlasTask = (!fAtlasRenderTasks.empty()) ? fAtlasRenderTasks.back().get()
                                                             : nullptr;
        if (currentAtlasTask &&
            drawRefsAtlasCallback &&
            drawRefsAtlasCallback(currentAtlasTask->atlasProxy())) {
            // The draw already refs the current atlas. Give up. Otherwise the draw would ref two
            // different atlases and they couldn't share a texture.
            return false;
        }
        // Replace the atlas with a new one.
        auto dynamicAtlas = std::make_unique<GrDynamicAtlas>(
                kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes,
                SkISize{fAtlasInitialSize, fAtlasInitialSize}, fAtlasMaxSize,
                *rContext->priv().caps(), kAtlasAlgorithm);
        auto newAtlasTask = sk_make_sp<AtlasRenderTask>(rContext,
                                                        sk_make_sp<GrArenas>(),
                                                        std::move(dynamicAtlas));
        rContext->priv().drawingManager()->addAtlasTask(newAtlasTask, currentAtlasTask);
        SkAssertResult(newAtlasTask->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                             heightInAtlas, *transposedInAtlas, locationInAtlas));
        fAtlasRenderTasks.push_back(std::move(newAtlasTask));
        fAtlasPathCache.reset();
    }

    // Remember this path's location in the atlas, in case it gets drawn again.
    if (!path.isVolatile()) {
        fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
    }
    return true;
}

PathRenderer::CanDrawPath AtlasPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
#ifdef SK_DEBUG
    if (!fAtlasRenderTasks.empty()) {
        // args.fPaint should NEVER reference our current atlas. If it does, it means somebody
        // intercepted a clip FP meant for a different op and will cause rendering artifacts.
        const GrSurfaceProxy* atlasProxy = fAtlasRenderTasks.back()->atlasProxy();
        SkASSERT(!refs_atlas(args.fPaint->getColorFragmentProcessor(), atlasProxy));
        SkASSERT(!refs_atlas(args.fPaint->getCoverageFragmentProcessor(), atlasProxy));
    }
    SkASSERT(!args.fHasUserStencilSettings);  // See onGetStencilSupport().
#endif
    bool canDrawPath = args.fShape->style().isSimpleFill() &&
#ifdef SK_DISABLE_ATLAS_PATH_RENDERER_WITH_COVERAGE_AA
                       // The MSAA requirement is a temporary limitation in order to preserve
                       // functionality for refactoring. TODO: Allow kCoverage AA types.
                       args.fAAType == GrAAType::kMSAA &&
#else
                       args.fAAType != GrAAType::kNone &&
#endif
                       // Non-DMSAA convex paths should be handled by the convex tessellator.
                       // (With DMSAA we continue to use the atlas for these paths in order to
                       // avoid triggering MSAA.)
                       (args.fProxy->numSamples() == 1 || !args.fShape->knownToBeConvex()) &&
                       !args.fShape->style().hasPathEffect() &&
                       !args.fViewMatrix->hasPerspective() &&
                       this->pathFitsInAtlas(args.fViewMatrix->mapRect(args.fShape->bounds()),
                                             args.fAAType);
    return canDrawPath ? CanDrawPath::kYes : CanDrawPath::kNo;
}

bool AtlasPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkPath path;
    args.fShape->asPath(&path);

    const SkRect pathDevBounds = args.fViewMatrix->mapRect(args.fShape->bounds());
    SkASSERT(this->pathFitsInAtlas(pathDevBounds, args.fAAType));

    if (!is_visible(pathDevBounds, args.fClip->getConservativeBounds())) {
        // The path is empty or outside the clip. No mask is needed.
        if (path.isInverseFillType()) {
            args.fSurfaceDrawContext->drawPaint(args.fClip, std::move(args.fPaint),
                                                *args.fViewMatrix);
        }
        return true;
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    SkAssertResult(this->addPathToAtlas(args.fContext, *args.fViewMatrix, path, pathDevBounds,
                                        &devIBounds, &locationInAtlas, &transposedInAtlas,
                                        nullptr/*DrawRefsAtlasCallback -- see onCanDrawPath()*/));

    const SkIRect& fillBounds = args.fShape->inverseFilled()
            ? (args.fClip
                    ? args.fClip->getConservativeBounds()
                    : args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsIRect())
            : devIBounds;
    const GrCaps& caps = *args.fSurfaceDrawContext->caps();
    auto op = GrOp::Make<DrawAtlasPathOp>(args.fContext,
                                          args.fSurfaceDrawContext->arenaAlloc(),
                                          fillBounds, *args.fViewMatrix,
                                          std::move(args.fPaint), locationInAtlas,
                                          devIBounds, transposedInAtlas,
                                          fAtlasRenderTasks.back()->readView(caps),
                                          args.fShape->inverseFilled());
    args.fSurfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    return true;
}

GrFPResult AtlasPathRenderer::makeAtlasClipEffect(const SurfaceDrawContext* sdc,
                                                  const GrOp* opBeingClipped,
                                                  std::unique_ptr<GrFragmentProcessor> inputFP,
                                                  const SkIRect& drawBounds,
                                                  const SkMatrix& viewMatrix,
                                                  const SkPath& path) {
    if (viewMatrix.hasPerspective()) {
        return GrFPFailure(std::move(inputFP));
    }

    const SkRect pathDevBounds = viewMatrix.mapRect(path.getBounds());
    if (!is_visible(pathDevBounds, drawBounds)) {
        // The path is empty or outside the drawBounds. No mask is needed. We explicitly allow the
        // returned successful "fp" to be null in case this bypassed atlas clip effect was the
        // first clip to be processed by the clip stack (at which point inputFP is null).
        return path.isInverseFillType() ? GrFPNullableSuccess(std::move(inputFP))
                                        : GrFPFailure(std::move(inputFP));
    }

    auto fallbackAAType = (sdc->numSamples() > 1 || sdc->canUseDynamicMSAA()) ? GrAAType::kMSAA
                                                                              : GrAAType::kCoverage;
    if (!this->pathFitsInAtlas(pathDevBounds, fallbackAAType)) {
        // The path is too big.
        return GrFPFailure(std::move(inputFP));
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // Called if the atlas runs out of room, to determine if it's safe to create a new one. (Draws
    // can never access more than one atlas.)
    auto drawRefsAtlasCallback = [opBeingClipped, &inputFP](const GrSurfaceProxy* atlasProxy) {
        return refs_atlas(opBeingClipped, atlasProxy) ||
               refs_atlas(inputFP.get(), atlasProxy);
    };
    // addPathToAtlas() ignores inverseness of the fill. See GrAtlasRenderTask::getAtlasUberPath().
    if (!this->addPathToAtlas(sdc->recordingContext(), viewMatrix, path, pathDevBounds, &devIBounds,
                              &locationInAtlas, &transposedInAtlas, drawRefsAtlasCallback)) {
        // The atlas ran out of room and we were unable to start a new one.
        return GrFPFailure(std::move(inputFP));
    }

    SkMatrix atlasMatrix;
    auto [atlasX, atlasY] = locationInAtlas;
    if (!transposedInAtlas) {
        atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
    } else {
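        // The path was transposed in the atlas, so swap the x and y axes when mapping device
        // space into atlas space.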
        atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
                           1, 0, atlasY - devIBounds.left(),
                           0, 0, 1);
    }
    auto flags = GrModulateAtlasCoverageEffect::Flags::kNone;
    if (path.isInverseFillType()) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kInvertCoverage;
    }
    if (!devIBounds.contains(drawBounds)) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kCheckBounds;
        // At this point in time we expect callers to tighten the scissor for "kIntersect" clips,
        // as opposed to us having to check the path bounds. Feel free to remove this assert if
        // that ever changes.
        SkASSERT(path.isInverseFillType());
    }
    GrSurfaceProxyView atlasView = fAtlasRenderTasks.back()->readView(*sdc->caps());
    return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageEffect>(flags, std::move(inputFP),
                                                                       std::move(atlasView),
                                                                       atlasMatrix, devIBounds));
}

bool AtlasPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP) {
    if (fAtlasRenderTasks.empty()) {
        SkASSERT(fAtlasPathCache.count() == 0);
        return true;
    }

    // Verify the atlases can all share the same texture.
    SkDEBUGCODE(validate_atlas_dependencies(fAtlasRenderTasks);)

    bool successful;

#if GR_TEST_UTILS
    if (onFlushRP->failFlushTimeCallbacks()) {
        successful = false;
    } else
#endif
    {
        // TODO: it seems like this path renderer's backing-texture reuse could be greatly
        // improved. Please see skbug.com/13298.

        // Instantiate the first atlas.
        successful = fAtlasRenderTasks[0]->instantiate(onFlushRP);

        // Instantiate the remaining atlases.
        GrTexture* firstAtlas = fAtlasRenderTasks[0]->atlasProxy()->peekTexture();
        SkASSERT(firstAtlas);
        for (int i = 1; successful && i < fAtlasRenderTasks.size(); ++i) {
            auto atlasTask = fAtlasRenderTasks[i].get();
            if (atlasTask->atlasProxy()->backingStoreDimensions() == firstAtlas->dimensions()) {
                successful &= atlasTask->instantiate(onFlushRP, sk_ref_sp(firstAtlas));
            } else {
                // The atlases are expected to all be full size except possibly the final one.
                SkASSERT(i == fAtlasRenderTasks.size() - 1);
                SkASSERT(atlasTask->atlasProxy()->backingStoreDimensions().area() <
                         firstAtlas->dimensions().area());
                // TODO: Recycle the larger atlas texture anyway?
                successful &= atlasTask->instantiate(onFlushRP);
            }
        }
    }

    // Reset all atlas data.
    fAtlasRenderTasks.clear();
    fAtlasPathCache.reset();
    return successful;
}

}  // namespace skgpu::v1