/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPerFlushResources_DEFINED
#define GrCCPerFlushResources_DEFINED

#include "src/gpu/GrNonAtomicRef.h"
#include "src/gpu/ccpr/GrCCAtlas.h"
#include "src/gpu/ccpr/GrCCFiller.h"
#include "src/gpu/ccpr/GrCCPathProcessor.h"
#include "src/gpu/ccpr/GrCCStroker.h"
#include "src/gpu/ccpr/GrStencilAtlasOp.h"

class GrCCPathCache;
class GrCCPathCacheEntry;
class GrOctoBounds;
class GrOnFlushResourceProvider;
class GrShape;

/**
 * This struct counts values that help us preallocate buffers for rendered path geometry.
 */
struct GrCCRenderedPathStats {
    int fMaxPointsPerPath = 0;
    int fNumTotalSkPoints = 0;
    int fNumTotalSkVerbs = 0;
    int fNumTotalConicWeights = 0;

    void statPath(const SkPath&);
};

/**
 * This struct encapsulates the minimum and desired requirements for the GPU resources required by
 * CCPR in a given flush.
 */
struct GrCCPerFlushResourceSpecs {
    static constexpr int kFillIdx = 0;
    static constexpr int kStrokeIdx = 1;

    int fNumCachedPaths = 0;

    int fNumCopiedPaths[2] = {0, 0};
    GrCCRenderedPathStats fCopyPathStats[2];
    GrCCAtlas::Specs fCopyAtlasSpecs;

    int fNumRenderedPaths[2] = {0, 0};
    int fNumClipPaths = 0;
    GrCCRenderedPathStats fRenderedPathStats[2];
    GrCCAtlas::Specs fRenderedAtlasSpecs;

    bool isEmpty() const {
        return 0 == fNumCachedPaths + fNumCopiedPaths[kFillIdx] + fNumCopiedPaths[kStrokeIdx] +
                    fNumRenderedPaths[kFillIdx] + fNumRenderedPaths[kStrokeIdx] + fNumClipPaths;
    }
    // Converts the copies to normal cached draws.
    void cancelCopies();
};
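
// A minimal usage sketch (an assumption for illustration, not an actual CCPR call site): tally one
// fill path into the specs before a flush, then check whether any per-flush work exists. The
// locals "specs" and "skPath" are hypothetical.
//
//     GrCCPerFlushResourceSpecs specs;
//     ++specs.fNumRenderedPaths[GrCCPerFlushResourceSpecs::kFillIdx];
//     specs.fRenderedPathStats[GrCCPerFlushResourceSpecs::kFillIdx].statPath(skPath);
//     if (!specs.isEmpty()) {
//         // Size the per-flush GPU buffers and atlases from "specs".
//     }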

/**
 * This class wraps all the GPU resources that CCPR builds at flush time. It is allocated in CCPR's
 * preFlush() method, and referenced by all the GrCCPerOpListPaths objects that are being flushed.
 * It is deleted in postFlush() once all the flushing GrCCPerOpListPaths objects are deleted.
 */
class GrCCPerFlushResources : public GrNonAtomicRef<GrCCPerFlushResources> {
public:
    GrCCPerFlushResources(
            GrOnFlushResourceProvider*, GrCCAtlas::CoverageType, const GrCCPerFlushResourceSpecs&);

    bool isMapped() const { return SkToBool(fPathInstanceData); }

    GrCCAtlas::CoverageType renderedPathCoverageType() const {
        return fRenderedAtlasStack.coverageType();
    }

    // Copies a coverage-counted path out of the given texture proxy, and into a cached, 8-bit,
    // literal coverage atlas. Updates the cache entry to reference the new atlas.
    void upgradeEntryToLiteralCoverageAtlas(GrCCPathCache*, GrOnFlushResourceProvider*,
                                            GrCCPathCacheEntry*, GrFillRule);

    // These two methods render a path into a temporary coverage count atlas. See
    // GrCCPathProcessor::Instance for a description of the outputs.
    //
    // strokeDevWidth must be 0 for fills, 1 for hairlines, or the stroke width in device-space
    // pixels for non-hairline strokes (implicitly requiring a rigid-body transform).
    GrCCAtlas* renderShapeInAtlas(
            const SkIRect& clipIBounds, const SkMatrix&, const GrShape&, float strokeDevWidth,
            GrOctoBounds*, SkIRect* devIBounds, SkIVector* devToAtlasOffset);
    const GrCCAtlas* renderDeviceSpacePathInAtlas(
            const SkIRect& clipIBounds, const SkPath& devPath, const SkIRect& devPathIBounds,
            GrFillRule fillRule, SkIVector* devToAtlasOffset);
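
    // A rough sketch (an assumption, not an actual call site) of how a caller might derive
    // strokeDevWidth for renderShapeInAtlas() from a rigid-body view matrix and an SkStrokeRec.
    // The locals "stroke", "viewMatrix", "shape", etc. are hypothetical.
    //
    //     float strokeDevWidth = 0;  // 0 selects a fill.
    //     if (SkStrokeRec::kHairline_Style == stroke.getStyle()) {
    //         strokeDevWidth = 1;  // Hairlines always render one device-space pixel wide.
    //     } else if (SkStrokeRec::kFill_Style != stroke.getStyle()) {
    //         // A rigid-body matrix scales uniformly, so its scale factor maps the stroke width
    //         // into device space.
    //         strokeDevWidth = viewMatrix.getMaxScale() * stroke.getWidth();
    //     }
    //     GrCCAtlas* atlas = resources->renderShapeInAtlas(
    //             clipIBounds, viewMatrix, shape, strokeDevWidth, &octoBounds, &devIBounds,
    //             &devToAtlasOffset);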

    // Returns the index in instanceBuffer() of the next instance that will be added by
    // appendDrawPathInstance().
    int nextPathInstanceIdx() const { return fNextPathInstanceIdx; }

    // Appends an instance to instanceBuffer() that will draw a path to the destination render
    // target. The caller is responsible for calling set() on the returned instance, for keeping
    // track of its atlas and index (see nextPathInstanceIdx()), and for issuing the actual draw
    // call.
    GrCCPathProcessor::Instance& appendDrawPathInstance() {
        SkASSERT(this->isMapped());
        SkASSERT(fNextPathInstanceIdx < fEndPathInstance);
        return fPathInstanceData[fNextPathInstanceIdx++];
    }
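
    // A minimal sketch (the bookkeeping around it is assumed, not prescribed by this header) of
    // the caller responsibilities described above: remember the instance's index, fill it in via
    // set(), and note which atlas the path landed in for the eventual draw call.
    //
    //     int instanceIdx = resources->nextPathInstanceIdx();
    //     GrCCPathProcessor::Instance& instance = resources->appendDrawPathInstance();
    //     instance.set(/* octo bounds, dev-to-atlas offset, color, fill rule; see
    //                     GrCCPathProcessor::Instance */);
    //     // Keep "instanceIdx" and the GrCCAtlas* from renderShapeInAtlas() together; both are
    //     // needed when the actual draw is issued after finalize().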

    // Finishes off the GPU buffers and renders the atlas(es).
    bool finalize(GrOnFlushResourceProvider*, SkTArray<sk_sp<GrRenderTargetContext>>* out);

    // Accessors used by draw calls, once the resources have been finalized.
    const GrCCFiller& filler() const { SkASSERT(!this->isMapped()); return fFiller; }
    const GrCCStroker& stroker() const { SkASSERT(!this->isMapped()); return fStroker; }
    sk_sp<const GrGpuBuffer> refIndexBuffer() const {
        SkASSERT(!this->isMapped());
        return fIndexBuffer;
    }
    sk_sp<const GrGpuBuffer> refVertexBuffer() const {
        SkASSERT(!this->isMapped());
        return fVertexBuffer;
    }
    sk_sp<const GrGpuBuffer> refInstanceBuffer() const {
        SkASSERT(!this->isMapped());
        return fInstanceBuffer;
    }
    sk_sp<const GrGpuBuffer> refStencilResolveBuffer() const {
        SkASSERT(!this->isMapped());
        return fStencilResolveBuffer;
    }
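
    // A rough end-to-end sketch (an assumption that simplifies the lifecycle from the class
    // comment) of how these pieces fit together across one flush. "onFlushRP", "coverageType",
    // "specs" and "atlasRTCs" are hypothetical locals.
    //
    //     // In preFlush(): allocate the resources, then render each flushed path.
    //     auto resources = sk_make_sp<GrCCPerFlushResources>(onFlushRP, coverageType, specs);
    //     // ... renderShapeInAtlas() / appendDrawPathInstance() per path ...
    //
    //     SkSTArray<2, sk_sp<GrRenderTargetContext>> atlasRTCs;
    //     if (!resources->finalize(onFlushRP, &atlasRTCs)) {
    //         return;  // Finalizing failed; drop this flush's CCPR work.
    //     }
    //     // Draw calls can now use refIndexBuffer(), refVertexBuffer() and refInstanceBuffer().
    //     // In postFlush(): the last flushing GrCCPerOpListPaths releasing its ref deletes the
    //     // resources.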

private:
    void recordCopyPathInstance(const GrCCPathCacheEntry&, const SkIVector& newAtlasOffset,
                                GrFillRule, sk_sp<GrTextureProxy> srcProxy);
    void placeRenderedPathInAtlas(
            const SkIRect& clippedPathIBounds, GrScissorTest, SkIVector* devToAtlasOffset);

    // In MSAA mode we record an additional instance per path that draws a rectangle on top of
    // its corresponding path in the atlas and resolves stencil winding values to coverage.
    void recordStencilResolveInstance(
            const SkIRect& clippedPathIBounds, const SkIVector& devToAtlasOffset, GrFillRule);

    const SkAutoSTArray<32, SkPoint> fLocalDevPtsBuffer;
    GrCCFiller fFiller;
    GrCCStroker fStroker;
    GrCCAtlasStack fCopyAtlasStack;
    GrCCAtlasStack fRenderedAtlasStack;

    const sk_sp<const GrGpuBuffer> fIndexBuffer;
    const sk_sp<const GrGpuBuffer> fVertexBuffer;
    const sk_sp<GrGpuBuffer> fInstanceBuffer;

    GrCCPathProcessor::Instance* fPathInstanceData = nullptr;
    int fNextCopyInstanceIdx;
    SkDEBUGCODE(int fEndCopyInstance);
    int fNextPathInstanceIdx;
    int fBasePathInstanceIdx;
    SkDEBUGCODE(int fEndPathInstance);

    // Represents a range of copy-path instances that all share the same source proxy. (i.e. Draw
    // instances that copy a path mask from a 16-bit coverage count atlas into an 8-bit literal
    // coverage atlas.)
    struct CopyPathRange {
        CopyPathRange() = default;
        CopyPathRange(sk_sp<GrTextureProxy> srcProxy, int count)
                : fSrcProxy(std::move(srcProxy)), fCount(count) {}
        sk_sp<GrTextureProxy> fSrcProxy;
        int fCount;
    };

    SkSTArray<4, CopyPathRange> fCopyPathRanges;
    int fCurrCopyAtlasRangesIdx = 0;

    // This is a list of coverage count atlas textures that have been invalidated due to us
    // copying their paths into new 8-bit literal coverage atlases. Since copying is finished by
    // the time we begin rendering new atlases, we can recycle these textures for the rendered
    // atlases rather than allocating new texture objects upon instantiation.
    SkSTArray<2, sk_sp<GrTexture>> fRecyclableAtlasTextures;

    // Used in MSAA mode to make an intermediate draw that resolves stencil winding values to
    // coverage.
    sk_sp<GrGpuBuffer> fStencilResolveBuffer;
    GrStencilAtlasOp::ResolveRectInstance* fStencilResolveInstanceData = nullptr;
    int fNextStencilResolveInstanceIdx = 0;
    SkDEBUGCODE(int fEndStencilResolveInstance);

public:
#ifdef SK_DEBUG
    void debugOnly_didReuseRenderedPath() {
        if (GrCCAtlas::CoverageType::kA8_Multisample == this->renderedPathCoverageType()) {
            --fEndStencilResolveInstance;
        }
    }
#endif
    const GrTexture* testingOnly_frontCopyAtlasTexture() const;
    const GrTexture* testingOnly_frontRenderedAtlasTexture() const;
};

inline void GrCCRenderedPathStats::statPath(const SkPath& path) {
    fMaxPointsPerPath = SkTMax(fMaxPointsPerPath, path.countPoints());
    fNumTotalSkPoints += path.countPoints();
    fNumTotalSkVerbs += path.countVerbs();
    fNumTotalConicWeights += SkPathPriv::ConicWeightCnt(path);
}

#endif