/*
 * Copyright 2023 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
#include "src/gpu/graphite/render/CoverageMaskRenderStep.h"

#include "include/core/SkM44.h"
#include "include/core/SkMatrix.h"
#include "include/core/SkRefCnt.h"
#include "include/core/SkSamplingOptions.h"
#include "include/core/SkScalar.h"
#include "include/core/SkSize.h"
#include "include/core/SkTileMode.h"
#include "include/private/base/SkAssert.h"
#include "include/private/base/SkDebug.h"
#include "src/base/SkEnumBitMask.h"
#include "src/core/SkSLTypeShared.h"
#include "src/gpu/BufferWriter.h"
#include "src/gpu/graphite/Attribute.h"
#include "src/gpu/graphite/ContextUtils.h"
#include "src/gpu/graphite/DrawOrder.h"
#include "src/gpu/graphite/DrawParams.h"
#include "src/gpu/graphite/DrawTypes.h"
#include "src/gpu/graphite/DrawWriter.h"
#include "src/gpu/graphite/PipelineData.h"
#include "src/gpu/graphite/TextureProxy.h"
#include "src/gpu/graphite/geom/CoverageMaskShape.h"
#include "src/gpu/graphite/geom/Geometry.h"
#include "src/gpu/graphite/geom/Rect.h"
#include "src/gpu/graphite/geom/Transform.h"
#include "src/gpu/graphite/render/CommonDepthStencilSettings.h"

#include <cstdint>

namespace skgpu::graphite {

// The device origin is applied *before* the maskToDeviceRemainder matrix so that it can be
// combined with the mask atlas origin. This is necessary so that the mask bounds can be inset or
// outset for clamping w/o affecting the alignment of the mask sampling.
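//
// Concretely, if the upper-left 2x2 of 'localToDevice' is A and its translation column is t, the
// helper below returns d = A^-1 * t (when A is invertible). The remainder localToDevice * T(-d)
// then has no translation, and the vertex shader recovers the full transform by evaluating
// remainder * (position + deviceOrigin).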
static skvx::float2 get_device_translation(const SkM44& localToDevice) {
    float m00 = localToDevice.rc(0,0), m01 = localToDevice.rc(0,1);
    float m10 = localToDevice.rc(1,0), m11 = localToDevice.rc(1,1);

    float det = m00*m11 - m01*m10;
    if (SkScalarNearlyZero(det)) {
        // We can't extract any pre-translation, since the upper 2x2 is not invertible. Return (0,0)
        // so that the maskToDeviceRemainder matrix remains the full transform.
        return {0.f, 0.f};
    }

    // Calculate inv([[m00,m01][m10,m11]])*[[m30][m31]] to get the pre-remainder device translation.
    float tx = localToDevice.rc(0,3), ty = localToDevice.rc(1,3);
    skvx::float4 invT = skvx::float4{m11, -m10, -m01, m00} * skvx::float4{tx,tx,ty,ty};
    return (invT.xy() + invT.zw()) / det;
}

CoverageMaskRenderStep::CoverageMaskRenderStep()
        : RenderStep(RenderStepID::kCoverageMask,
                     // The mask will have AA outsets baked in, but the original bounds for clipping
                     // still require the outset for analytic coverage.
                     Flags::kPerformsShading | Flags::kHasTextures | Flags::kEmitsCoverage |
                     Flags::kOutsetBoundsForAA,
                     /*uniforms=*/{{"maskToDeviceRemainder", SkSLType::kFloat3x3}},
                     PrimitiveType::kTriangleStrip,
                     kDirectDepthGreaterPass,
                     /*vertexAttrs=*/{},
                     /*instanceAttrs=*/
                     // Draw bounds and mask bounds are normalized relative to the mask texture,
                     // but 'drawBounds' is stored as floats since the coords may map outside of
                     // [0,1] for inverse-filled masks. 'drawBounds' is relative to the logical mask
                     // entry's origin, while 'maskBoundsIn' is atlas-relative. Inverse fills swap
                     // the order in 'maskBoundsIn' to be RBLT.
                     {{"drawBounds", VertexAttribType::kFloat4 , SkSLType::kFloat4},  // ltrb
                      {"maskBoundsIn", VertexAttribType::kUShort4_norm, SkSLType::kFloat4},
                      // Remaining translation extracted from the actual 'maskToDevice' transform.
                      {"deviceOrigin", VertexAttribType::kFloat2, SkSLType::kFloat2},
                      {"depth"     , VertexAttribType::kFloat, SkSLType::kFloat},
                      {"ssboIndices", VertexAttribType::kUInt2, SkSLType::kUInt2},
                      // deviceToLocal matrix for producing local coords for shader evaluation
                      {"mat0", VertexAttribType::kFloat3, SkSLType::kFloat3},
                      {"mat1", VertexAttribType::kFloat3, SkSLType::kFloat3},
                      {"mat2", VertexAttribType::kFloat3, SkSLType::kFloat3}},
                     /*varyings=*/
                     {// `maskBounds` are the atlas-relative, sorted bounds of the coverage mask.
                      // `textureCoords` are the atlas-relative UV coordinates of the draw, which
                      // can spill beyond `maskBounds` for inverse fills.
                      // TODO: maskBounds is constant for all fragments for a given instance,
                      // could we store them in the draw's SSBO?
                      {"maskBounds"   , SkSLType::kFloat4},
                      {"textureCoords", SkSLType::kFloat2},
                      // 'invert' is set to 0 to use unmodified coverage, and set to 1 for "1-c".
                      {"invert", SkSLType::kHalf}}) {}

std::string CoverageMaskRenderStep::vertexSkSL() const {
    // Returns the body of a vertex function, which must define a float4 devPosition variable and
    // must write to an already-defined float2 stepLocalCoords variable.
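    // float2(sk_VertexID >> 1, sk_VertexID & 1) maps vertex IDs 0..3 to the unit-square corners
    // (0,0), (0,1), (1,0), (1,1), i.e. the per-instance quad drawn as a triangle strip.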
    return "float4 devPosition = coverage_mask_vertex_fn("
                    "float2(sk_VertexID >> 1, sk_VertexID & 1), "
                    "maskToDeviceRemainder, drawBounds, maskBoundsIn, deviceOrigin, "
                    "depth, float3x3(mat0, mat1, mat2), "
                    "maskBounds, textureCoords, invert, stepLocalCoords);\n";
}

std::string CoverageMaskRenderStep::texturesAndSamplersSkSL(
        const ResourceBindingRequirements& bindingReqs, int* nextBindingIndex) const {
    return EmitSamplerLayout(bindingReqs, nextBindingIndex) + " sampler2D pathAtlas;";
}

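// The fragment shader samples the path atlas with coords clamped to the sorted mask bounds, and
// mix(c, 1 - c, invert) leaves coverage as-is when 'invert' is 0 and flips it to 1 - c when
// 'invert' is 1, producing coverage for inverse fills.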
const char* CoverageMaskRenderStep::fragmentCoverageSkSL() const {
    return
        "half c = sample(pathAtlas, clamp(textureCoords, maskBounds.LT, maskBounds.RB)).r;\n"
        "outputCoverage = half4(mix(c, 1 - c, invert));\n";
}

void CoverageMaskRenderStep::writeVertices(DrawWriter* dw,
                                           const DrawParams& params,
                                           skvx::uint2 ssboIndices) const {
    const CoverageMaskShape& coverageMask = params.geometry().coverageMaskShape();
    const TextureProxy* proxy = coverageMask.textureProxy();
    SkASSERT(proxy);

    // A quad is a 4-vertex instance. The coordinates are derived from the vertex IDs.
    DrawWriter::Instances instances(*dw, {}, {}, 4);

    // The device origin is the translation extracted from the mask-to-device matrix so
    // that the remaining matrix uniform has less variance between draws.
    const auto& maskToDevice = params.transform().matrix();
    skvx::float2 deviceOrigin = get_device_translation(maskToDevice);

    // Relative to mask space (device origin and mask-to-device remainder must be applied in shader)
    skvx::float4 maskBounds = coverageMask.bounds().ltrb();
    skvx::float4 drawBounds;

    if (coverageMask.inverted()) {
        // Only mask filters trigger complex transforms, and they are never inverse filled. Since
        // this is an inverted mask, we can exactly map the draw's clip bounds to mask space so
        // that the clip is still fully covered without branching in the vertex shader.
        SkASSERT(maskToDevice == SkM44::Translate(deviceOrigin.x(), deviceOrigin.y()));
        drawBounds = params.clip().drawBounds().makeOffset(-deviceOrigin).ltrb();

        // If the mask is fully clipped out, then the shape's mask info should be (0,0,0,0).
        // If it's not fully clipped out, then the mask info should be non-empty.
        SkASSERT(!params.clip().transformedShapeBounds().isEmptyNegativeOrNaN() ^
                 all(maskBounds == 0.f));

        if (params.clip().transformedShapeBounds().isEmptyNegativeOrNaN()) {
            // The inversion check is strict inequality, so (0,0,0,0) would not be detected. Adjust
            // to (0,0,1/2,1/2) to restrict sampling to the top-left quarter of the top-left pixel,
            // which should have a value of 0 regardless of filtering mode.
            maskBounds = skvx::float4{0.f, 0.f, 0.5f, 0.5f};
        } else {
            // Add a 1/2px outset to the mask bounds so that clamped coordinates sample the texel
            // center of the padding around the atlas entry.
            maskBounds += skvx::float4{-0.5f, -0.5f, 0.5f, 0.5f};
        }

        // Store RBLT so that the 'maskBoundsIn' attribute has xy > zw to detect inverse fill.
        maskBounds = skvx::shuffle<2,3,0,1>(maskBounds);
    } else {
        // If the mask isn't inverted, the originally assigned values don't need to be adjusted, but
        // also ensure the mask isn't empty (otherwise the draw should have been skipped earlier).
        SkASSERT(!coverageMask.bounds().isEmptyNegativeOrNaN());
        SkASSERT(all(maskBounds.xy() < maskBounds.zw()));

        // Since the mask bounds and draw bounds are 1-to-1 with each other, the clamping of texture
        // coords is mostly a formality. We inset the mask bounds by 1/2px so that we clamp to the
        // texel center of the outer row/column of the mask. This should be a no-op for nearest
        // sampling, but it prevents linear sampling from incorporating adjacent data; for atlases
        // that data is just 0, but for non-atlas coverage masks that might not have padding, this
        // avoids filtering unknown values in an approx-fit texture.
        drawBounds = maskBounds;
        maskBounds -= skvx::float4{-0.5f, -0.5f, 0.5f, 0.5f};
    }

    // Move 'drawBounds' and 'maskBounds' into the atlas coordinate space, then adjust the
    // device translation to undo the atlas origin automatically in the vertex shader.
    skvx::float2 textureOrigin = skvx::cast<float>(coverageMask.textureOrigin());
    maskBounds += textureOrigin.xyxy();
    drawBounds += textureOrigin.xyxy();
    deviceOrigin -= textureOrigin;

    // Normalize drawBounds and maskBounds after possibly correcting drawBounds for inverse fills.
    // The maskToDevice matrix uniform will handle de-normalizing drawBounds for vertex positions.
    auto atlasSizeInv = skvx::float2{1.f / proxy->dimensions().width(),
                                     1.f / proxy->dimensions().height()};
    drawBounds *= atlasSizeInv.xyxy();
    maskBounds *= atlasSizeInv.xyxy();
    deviceOrigin *= atlasSizeInv;

    // Since the mask bounds define normalized texels of the texture, we can encode them as
    // ushort_norm without losing precision to save space.
    SkASSERT(all((maskBounds >= 0.f) & (maskBounds <= 1.f)));
    maskBounds = 65535.f * maskBounds + 0.5f;
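    // Note: the uint16_t cast in the append below truncates, so the +0.5 rounds to the nearest
    // ushort; kUShort4_norm maps the attribute back to [0,1] when the vertex shader reads it.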

    const SkM44& m = coverageMask.deviceToLocal();
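    // mat0/mat1/mat2 are columns of a 3x3 matrix taken from rows {0,1,3} and columns {0,1,3} of
    // the 4x4 deviceToLocal, i.e. its x/y/perspective parts with z dropped, which is enough to
    // reconstruct 2D local coords in the vertex shader.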
    instances.append(1) << drawBounds << skvx::cast<uint16_t>(maskBounds) << deviceOrigin
                        << params.order().depthAsFloat() << ssboIndices
                        << m.rc(0,0) << m.rc(1,0) << m.rc(3,0)   // mat0
                        << m.rc(0,1) << m.rc(1,1) << m.rc(3,1)   // mat1
                        << m.rc(0,3) << m.rc(1,3) << m.rc(3,3);  // mat2
}

void CoverageMaskRenderStep::writeUniformsAndTextures(const DrawParams& params,
                                                      PipelineDataGatherer* gatherer) const {
    SkDEBUGCODE(UniformExpectationsValidator uev(gatherer, this->uniforms());)

    const CoverageMaskShape& coverageMask = params.geometry().coverageMaskShape();
    const TextureProxy* proxy = coverageMask.textureProxy();
    SkASSERT(proxy);

    // Most coverage masks are aligned with the device pixels, so the params' transform is an
    // integer translation matrix. This translation is extracted as an instance attribute so that
    // the remaining transform changes much less frequently (only for complex-transformed mask
    // filters).
    skvx::float2 deviceOrigin = get_device_translation(params.transform().matrix());
    SkMatrix maskToDevice = params.transform().matrix().asM33();
    maskToDevice.preTranslate(-deviceOrigin.x(), -deviceOrigin.y());

    // The mask coordinates in the vertex shader will be normalized, so scale by the proxy size
    // to get back to Skia's texel-based coords.
    maskToDevice.preScale(proxy->dimensions().width(), proxy->dimensions().height());

    // Write uniforms:
    gatherer->write(maskToDevice);

    // Write textures and samplers:
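    // Pixel-aligned masks (rect-stays-rect transform, unit scale, ~integer translation) can be
    // sampled with nearest filtering to reproduce the mask texels exactly; any other transform
    // uses linear filtering so the resampled coverage stays smooth.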
    const bool pixelAligned =
            params.transform().type() <= Transform::Type::kSimpleRectStaysRect &&
            params.transform().maxScaleFactor() == 1.f &&
            all(deviceOrigin == floor(deviceOrigin + SK_ScalarNearlyZero));
    gatherer->add(sk_ref_sp(proxy), {pixelAligned ? SkFilterMode::kNearest : SkFilterMode::kLinear,
                                     SkTileMode::kClamp});
}

}  // namespace skgpu::graphite