/*
 * Copyright 2023 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
#include "src/gpu/graphite/render/CoverageMaskRenderStep.h"

#include "src/gpu/graphite/ContextUtils.h"
#include "src/gpu/graphite/DrawParams.h"
#include "src/gpu/graphite/DrawWriter.h"
#include "src/gpu/graphite/PathAtlas.h"
#include "src/gpu/graphite/geom/CoverageMaskShape.h"
#include "src/gpu/graphite/render/CommonDepthStencilSettings.h"

namespace skgpu::graphite {

// The device origin is applied *before* the maskToDeviceRemainder matrix so that it can be
// combined with the mask atlas origin. This is necessary so that the mask bounds can be inset or
// outset for clamping w/o affecting the alignment of the mask sampling.
static skvx::float2 get_device_translation(const SkM44& localToDevice) {
    float m00 = localToDevice.rc(0,0), m01 = localToDevice.rc(0,1);
    float m10 = localToDevice.rc(1,0), m11 = localToDevice.rc(1,1);

    float det = m00*m11 - m01*m10;
    if (SkScalarNearlyZero(det)) {
        // We can't extract any pre-translation, since the upper 2x2 is not invertible. Return (0,0)
        // so that the maskToDeviceRemainder matrix remains the full transform.
        return {0.f, 0.f};
    }

    // Calculate inv([[m00,m01][m10,m11]])*[[m30][m31]] to get the pre-remainder device translation.
    float tx = localToDevice.rc(0,3), ty = localToDevice.rc(1,3);
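    // With the upper-left 2x2 written as M and the translation as t, the pre-remainder translation
    // is M^-1 * t. Using inv([[m00,m01],[m10,m11]]) = 1/det * [[m11,-m01],[-m10,m00]], that is
    // ((m11*tx - m01*ty)/det, (m00*ty - m10*tx)/det), computed below with one SIMD multiply.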
    skvx::float4 invT = skvx::float4{m11, -m10, -m01, m00} * skvx::float4{tx,tx,ty,ty};
    return (invT.xy() + invT.zw()) / det;
}

CoverageMaskRenderStep::CoverageMaskRenderStep()
        : RenderStep("CoverageMaskRenderStep",
                     "",
                     // The mask will have AA outsets baked in, but the original bounds for clipping
                     // still require the outset for analytic coverage.
                     Flags::kPerformsShading | Flags::kHasTextures | Flags::kEmitsCoverage |
                     Flags::kOutsetBoundsForAA,
                     /*uniforms=*/{{"maskToDeviceRemainder", SkSLType::kFloat3x3}},
                     PrimitiveType::kTriangleStrip,
                     kDirectDepthGreaterPass,
                     /*vertexAttrs=*/{},
                     /*instanceAttrs=*/
                     // Draw bounds and mask bounds are normalized relative to the mask texture,
                     // but 'drawBounds' is stored in float since the coords may map outside of
                     // [0,1] for inverse-filled masks. 'drawBounds' is relative to the logical mask
                     // entry's origin, while 'maskBoundsIn' is atlas-relative. Inverse fills swap
                     // the order in 'maskBoundsIn' to be RBLT.
                     {{"drawBounds", VertexAttribType::kFloat4, SkSLType::kFloat4},  // ltrb
                      {"maskBoundsIn", VertexAttribType::kUShort4_norm, SkSLType::kFloat4},
                      // Remaining translation extracted from the actual 'maskToDevice' transform.
                      {"deviceOrigin", VertexAttribType::kFloat2, SkSLType::kFloat2},
                      {"depth"     , VertexAttribType::kFloat, SkSLType::kFloat},
                      {"ssboIndices", VertexAttribType::kUShort2, SkSLType::kUShort2},
                      // deviceToLocal matrix for producing local coords for shader evaluation
                      {"mat0", VertexAttribType::kFloat3, SkSLType::kFloat3},
                      {"mat1", VertexAttribType::kFloat3, SkSLType::kFloat3},
                      {"mat2", VertexAttribType::kFloat3, SkSLType::kFloat3}},
                     /*varyings=*/
                     {// `maskBounds` are the atlas-relative, sorted bounds of the coverage mask.
                      // `textureCoords` are the atlas-relative UV coordinates of the draw, which
                      // can spill beyond `maskBounds` for inverse fills.
                      // TODO: maskBounds is constant for all fragments for a given instance,
                      // could we store them in the draw's SSBO?
                      {"maskBounds"   , SkSLType::kFloat4},
                      {"textureCoords", SkSLType::kFloat2},
                      // 'invert' is set to 0 to use unmodified coverage, and set to 1 for "1-c".
                      {"invert", SkSLType::kHalf}}) {}

std::string CoverageMaskRenderStep::vertexSkSL() const {
    // Returns the body of a vertex function, which must define a float4 devPosition variable and
    // must write to an already-defined float2 stepLocalCoords variable.
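    // sk_VertexID 0..3 maps to the quad corners (0,0), (0,1), (1,0), (1,1) via (id >> 1, id & 1),
    // matching the 4-vertex triangle strip instances appended in writeVertices().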
    return "float4 devPosition = coverage_mask_vertex_fn("
                    "float2(sk_VertexID >> 1, sk_VertexID & 1), "
                    "maskToDeviceRemainder, drawBounds, maskBoundsIn, deviceOrigin, "
                    "depth, float3x3(mat0, mat1, mat2), "
                    "maskBounds, textureCoords, invert, stepLocalCoords);\n";
}

std::string CoverageMaskRenderStep::texturesAndSamplersSkSL(
        const ResourceBindingRequirements& bindingReqs, int* nextBindingIndex) const {
    return EmitSamplerLayout(bindingReqs, nextBindingIndex) + " sampler2D pathAtlas;";
}

const char* CoverageMaskRenderStep::fragmentCoverageSkSL() const {
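    // The clamp keeps sampling within the mask entry's (inset or outset) bounds, and 'invert'
    // selects 1 - coverage for inverse fills.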
    return R"(
        half c = sample(pathAtlas, clamp(textureCoords, maskBounds.LT, maskBounds.RB)).r;
        outputCoverage = half4(mix(c, 1 - c, invert));
    )";
}

void CoverageMaskRenderStep::writeVertices(DrawWriter* dw,
                                           const DrawParams& params,
                                           skvx::ushort2 ssboIndices) const {
    const CoverageMaskShape& coverageMask = params.geometry().coverageMaskShape();
    const TextureProxy* proxy = coverageMask.textureProxy();
    SkASSERT(proxy);

    // A quad is a 4-vertex instance. The coordinates are derived from the vertex IDs.
    DrawWriter::Instances instances(*dw, {}, {}, 4);

    // The device origin is the translation extracted from the mask-to-device matrix so
    // that the remaining matrix uniform has less variance between draws.
    const auto& maskToDevice = params.transform().matrix();
    skvx::float2 deviceOrigin = get_device_translation(maskToDevice);

    // Relative to mask space (device origin and mask-to-device remainder must be applied in shader)
    skvx::float4 maskBounds = coverageMask.bounds().ltrb();
    skvx::float4 drawBounds;

    if (coverageMask.inverted()) {
        // Only mask filters trigger complex transforms, and they are never inverse filled. Since
        // we know this is an inverted mask, we can exactly map the draw's clip bounds to mask
        // space so that the clip is still fully covered without branching in the vertex shader.
        SkASSERT(maskToDevice == SkM44::Translate(deviceOrigin.x(), deviceOrigin.y()));
        drawBounds = params.clip().drawBounds().makeOffset(-deviceOrigin).ltrb();

        // If the mask is fully clipped out, then the shape's mask info should be (0,0,0,0).
        // If it's not fully clipped out, then the mask info should be non-empty.
        SkASSERT(!params.clip().transformedShapeBounds().isEmptyNegativeOrNaN() ^
                 all(maskBounds == 0.f));

        if (params.clip().transformedShapeBounds().isEmptyNegativeOrNaN()) {
            // The inversion check is strict inequality, so (0,0,0,0) would not be detected. Adjust
            // to (0,0,1/2,1/2) to restrict sampling to the top-left quarter of the top-left pixel,
            // which should have a value of 0 regardless of filtering mode.
            maskBounds = skvx::float4{0.f, 0.f, 0.5f, 0.5f};
        } else {
            // Add a 1/2px outset to the mask bounds so that clamped coordinates sample the texel
            // center of the padding around the atlas entry.
            maskBounds += skvx::float4{-0.5f, -0.5f, 0.5f, 0.5f};
        }

        // Store the bounds as RBLT so that the 'maskBoundsIn' attribute has xy > zw to detect
        // inverse fill.
        maskBounds = skvx::shuffle<2,3,0,1>(maskBounds);
    } else {
        // If we aren't inverted, then the originally assigned values don't need to be adjusted, but
        // also ensure the mask isn't empty (otherwise the draw should have been skipped earlier).
        SkASSERT(!coverageMask.bounds().isEmptyNegativeOrNaN());
        SkASSERT(all(maskBounds.xy() < maskBounds.zw()));

        // Since the mask bounds and draw bounds are 1-to-1 with each other, the clamping of texture
        // coords is mostly a formality. We inset the mask bounds by 1/2px so that we clamp to the
        // texel center of the outer row/column of the mask. This should be a no-op for nearest
        // sampling but prevents any linear sampling from incorporating adjacent data; for atlases
        // this would just be 0, but for non-atlas coverage masks that might not have padding this
        // avoids filtering unknown values in an approx-fit texture.
        drawBounds = maskBounds;
        maskBounds -= skvx::float4{-0.5f, -0.5f, 0.5f, 0.5f};
    }

    // Move 'drawBounds' and 'maskBounds' into the atlas coordinate space, then adjust the
    // device translation to undo the atlas origin automatically in the vertex shader.
    skvx::float2 textureOrigin = skvx::cast<float>(coverageMask.textureOrigin());
    maskBounds += textureOrigin.xyxy();
    drawBounds += textureOrigin.xyxy();
    deviceOrigin -= textureOrigin;

    // Normalize drawBounds and maskBounds after possibly correcting drawBounds for inverse fills.
    // The maskToDevice matrix uniform will handle de-normalizing drawBounds for vertex positions.
    auto atlasSizeInv = skvx::float2{1.f / proxy->dimensions().width(),
                                     1.f / proxy->dimensions().height()};
    drawBounds *= atlasSizeInv.xyxy();
    maskBounds *= atlasSizeInv.xyxy();
    deviceOrigin *= atlasSizeInv;

    // Since the mask bounds define normalized texels of the texture, we can encode them as
    // ushort_norm without losing precision to save space.
    SkASSERT(all((maskBounds >= 0.f) & (maskBounds <= 1.f)));
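    // The +0.5 rounds to the nearest ushort value, since the cast to uint16_t below truncates.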
    maskBounds = 65535.f * maskBounds + 0.5f;

    const SkM44& m = coverageMask.deviceToLocal();
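    // Pack deviceToLocal into a 3x3 by dropping its z row and column (keeping rows/columns 0, 1,
    // and 3), which is all that's needed to map 2D device positions to 2D local coordinates.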
    instances.append(1) << drawBounds << skvx::cast<uint16_t>(maskBounds) << deviceOrigin
                        << params.order().depthAsFloat() << ssboIndices
                        << m.rc(0,0) << m.rc(1,0) << m.rc(3,0)   // mat0
                        << m.rc(0,1) << m.rc(1,1) << m.rc(3,1)   // mat1
                        << m.rc(0,3) << m.rc(1,3) << m.rc(3,3);  // mat2
}

void CoverageMaskRenderStep::writeUniformsAndTextures(const DrawParams& params,
                                                      PipelineDataGatherer* gatherer) const {
    SkDEBUGCODE(UniformExpectationsValidator uev(gatherer, this->uniforms());)

    const CoverageMaskShape& coverageMask = params.geometry().coverageMaskShape();
    const TextureProxy* proxy = coverageMask.textureProxy();
    SkASSERT(proxy);

    // Most coverage masks are aligned with the device pixels, so the params' transform is an
    // integer translation matrix. This translation is extracted as an instance attribute so that
    // the remaining transform has a much lower frequency of changing (only complex-transformed
    // mask filters).
    skvx::float2 deviceOrigin = get_device_translation(params.transform().matrix());
    SkMatrix maskToDevice = params.transform().matrix().asM33();
    maskToDevice.preTranslate(-deviceOrigin.x(), -deviceOrigin.y());
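    // 'maskToDevice' now holds just the remainder: the original transform equals
    // remainder * translate(deviceOrigin), with deviceOrigin applied per-instance instead.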

    // The mask coordinates in the vertex shader will be normalized, so scale by the proxy size
    // to get back to Skia's texel-based coords.
    maskToDevice.preScale(proxy->dimensions().width(), proxy->dimensions().height());

    // Write uniforms:
    gatherer->write(maskToDevice);

    // Write textures and samplers:
    const bool pixelAligned =
            params.transform().type() <= Transform::Type::kSimpleRectStaysRect &&
            params.transform().maxScaleFactor() == 1.f &&
            all(deviceOrigin == floor(deviceOrigin + SK_ScalarNearlyZero));
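    // A pixel-aligned mask (axis-aligned, unscaled, integer translation) is reproduced exactly by
    // nearest sampling; otherwise linear filtering smooths the resampled mask.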
    constexpr SkTileMode kTileModes[2] = {SkTileMode::kClamp, SkTileMode::kClamp};
    gatherer->add(pixelAligned ? SkFilterMode::kNearest : SkFilterMode::kLinear,
                  kTileModes, sk_ref_sp(proxy));
}

}  // namespace skgpu::graphite