/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPerFlushResources_DEFINED
#define GrCCPerFlushResources_DEFINED

#include "GrNonAtomicRef.h"
#include "ccpr/GrCCAtlas.h"
#include "ccpr/GrCCFiller.h"
#include "ccpr/GrCCPathProcessor.h"
#include "ccpr/GrCCStroker.h"

class GrCCPathCache;
class GrCCPathCacheEntry;
class GrOnFlushResourceProvider;
class GrShape;

/**
 * This struct counts values that help us preallocate buffers for rendered path geometry.
 */
struct GrCCRenderedPathStats {
    // Running maximum of SkPath::countPoints() over all paths passed to statPath().
    int fMaxPointsPerPath = 0;
    // Running totals over all statted paths.
    int fNumTotalSkPoints = 0;
    int fNumTotalSkVerbs = 0;
    int fNumTotalConicWeights = 0;

    // Accumulates the given path's point/verb/conic counts into the totals above.
    void statPath(const SkPath&);
};
33
/**
 * This struct encapsulates the minimum and desired requirements for the GPU resources required by
 * CCPR in a given flush.
 */
struct GrCCPerFlushResourceSpecs {
    // Indices into the two-element arrays below: slot 0 counts fills, slot 1 counts strokes.
    static constexpr int kFillIdx = 0;
    static constexpr int kStrokeIdx = 1;

    int fNumCachedPaths = 0;

    // Paths to be copied out of a coverage-count atlas into a literal coverage atlas.
    int fNumCopiedPaths[2] = {0, 0};
    GrCCRenderedPathStats fCopyPathStats[2];
    GrCCAtlas::Specs fCopyAtlasSpecs;

    // Paths to be rendered fresh this flush.
    int fNumRenderedPaths[2] = {0, 0};
    int fNumClipPaths = 0;
    GrCCRenderedPathStats fRenderedPathStats[2];
    GrCCAtlas::Specs fRenderedAtlasSpecs;

    // True when this flush has no CCPR work at all: no cached, copied, rendered, or clip paths.
    // (All the counters are non-negative, so a zero sum means every term is zero.)
    bool isEmpty() const {
        return 0 == fNumCachedPaths + fNumCopiedPaths[kFillIdx] + fNumCopiedPaths[kStrokeIdx] +
                    fNumRenderedPaths[kFillIdx] + fNumRenderedPaths[kStrokeIdx] + fNumClipPaths;
    }
    // Converts the copies to normal cached draws.
    void cancelCopies();
};
60
/**
 * This class wraps all the GPU resources that CCPR builds at flush time. It is allocated in CCPR's
 * preFlush() method, and referenced by all the GrCCPerOpListPaths objects that are being flushed.
 * It is deleted in postFlush() once all the flushing GrCCPerOpListPaths objects are deleted.
 */
class GrCCPerFlushResources : public GrNonAtomicRef<GrCCPerFlushResources> {
public:
    GrCCPerFlushResources(GrOnFlushResourceProvider*, const GrCCPerFlushResourceSpecs&);

    // True while fPathInstanceData points at the (CPU-writable) instance data, i.e. before the
    // buffers are finished off. The draw-time accessors below assert the opposite state.
    bool isMapped() const { return SkToBool(fPathInstanceData); }

    // Copies a coverage-counted path out of the given texture proxy, and into a cached, 8-bit,
    // literal coverage atlas. Updates the cache entry to reference the new atlas.
    void upgradeEntryToLiteralCoverageAtlas(GrCCPathCache*, GrOnFlushResourceProvider*,
                                            GrCCPathCacheEntry*, GrCCPathProcessor::DoEvenOddFill);

    // These two methods render a path into a temporary coverage count atlas. See
    // GrCCPathProcessor::Instance for a description of the outputs.
    //
    // strokeDevWidth must be 0 for fills, 1 for hairlines, or the stroke width in device-space
    // pixels for non-hairline strokes (implicitly requiring a rigid-body transform).
    GrCCAtlas* renderShapeInAtlas(const SkIRect& clipIBounds, const SkMatrix&, const GrShape&,
                                  float strokeDevWidth, SkRect* devBounds, SkRect* devBounds45,
                                  SkIRect* devIBounds, SkIVector* devToAtlasOffset);
    const GrCCAtlas* renderDeviceSpacePathInAtlas(const SkIRect& clipIBounds, const SkPath& devPath,
                                                  const SkIRect& devPathIBounds,
                                                  SkIVector* devToAtlasOffset);

    // Returns the index in instanceBuffer() of the next instance that will be added by
    // appendDrawPathInstance().
    int nextPathInstanceIdx() const { return fNextPathInstanceIdx; }

    // Appends an instance to instanceBuffer() that will draw a path to the destination render
    // target. The caller is responsible to call set() on the returned instance, to keep track of
    // its atlas and index (see nextPathInstanceIdx()), and to issue the actual draw call.
    GrCCPathProcessor::Instance& appendDrawPathInstance() {
        SkASSERT(this->isMapped());
        SkASSERT(fNextPathInstanceIdx < fEndPathInstance);  // Debug-only overflow guard.
        return fPathInstanceData[fNextPathInstanceIdx++];
    }

    // Finishes off the GPU buffers and renders the atlas(es).
    bool finalize(GrOnFlushResourceProvider*, SkTArray<sk_sp<GrRenderTargetContext>>* out);

    // Accessors used by draw calls, once the resources have been finalized. All of them assert
    // that the instance data is no longer mapped.
    const GrCCFiller& filler() const { SkASSERT(!this->isMapped()); return fFiller; }
    const GrCCStroker& stroker() const { SkASSERT(!this->isMapped()); return fStroker; }
    sk_sp<const GrBuffer> refIndexBuffer() const {
        SkASSERT(!this->isMapped());
        return fIndexBuffer;
    }
    sk_sp<const GrBuffer> refVertexBuffer() const {
        SkASSERT(!this->isMapped());
        return fVertexBuffer;
    }
    sk_sp<const GrBuffer> refInstanceBuffer() const {
        SkASSERT(!this->isMapped());
        return fInstanceBuffer;
    }

private:
    void recordCopyPathInstance(const GrCCPathCacheEntry&, const SkIVector& newAtlasOffset,
                                GrCCPathProcessor::DoEvenOddFill, sk_sp<GrTextureProxy> srcProxy);
    bool placeRenderedPathInAtlas(const SkIRect& clipIBounds, const SkIRect& pathIBounds,
                                  GrScissorTest*, SkIRect* clippedPathIBounds,
                                  SkIVector* devToAtlasOffset);

    const SkAutoSTArray<32, SkPoint> fLocalDevPtsBuffer;
    GrCCFiller fFiller;
    GrCCStroker fStroker;
    GrCCAtlasStack fCopyAtlasStack;
    GrCCAtlasStack fRenderedAtlasStack;

    const sk_sp<const GrBuffer> fIndexBuffer;
    const sk_sp<const GrBuffer> fVertexBuffer;
    const sk_sp<GrBuffer> fInstanceBuffer;

    // Non-null exactly while isMapped() is true; appendDrawPathInstance() writes through it.
    GrCCPathProcessor::Instance* fPathInstanceData = nullptr;
    int fNextCopyInstanceIdx;
    SkDEBUGCODE(int fEndCopyInstance);  // Debug-only bound for copy instances (checked in .cpp).
    int fNextPathInstanceIdx;
    SkDEBUGCODE(int fEndPathInstance);  // Debug-only bound asserted in appendDrawPathInstance().

    // Represents a range of copy-path instances that all share the same source proxy. (i.e. Draw
    // instances that copy a path mask from a 16-bit coverage count atlas into an 8-bit literal
    // coverage atlas.)
    struct CopyPathRange {
        CopyPathRange() = default;
        CopyPathRange(sk_sp<GrTextureProxy> srcProxy, int count)
                : fSrcProxy(std::move(srcProxy)), fCount(count) {}
        sk_sp<GrTextureProxy> fSrcProxy;
        int fCount;
    };

    SkSTArray<4, CopyPathRange> fCopyPathRanges;
    int fCurrCopyAtlasRangesIdx = 0;

    // This is a list of coverage count atlas textures that have been invalidated due to us copying
    // their paths into new 8-bit literal coverage atlases. Since copying is finished by the time
    // we begin rendering new atlases, we can recycle these textures for the rendered atlases rather
    // than allocating new texture objects upon instantiation.
    SkSTArray<2, sk_sp<GrTexture>> fRecyclableAtlasTextures;

public:
    const GrTexture* testingOnly_frontCopyAtlasTexture() const;
    const GrTexture* testingOnly_frontRenderedAtlasTexture() const;
};
statPath(const SkPath & path)169 inline void GrCCRenderedPathStats::statPath(const SkPath& path) {
170 fMaxPointsPerPath = SkTMax(fMaxPointsPerPath, path.countPoints());
171 fNumTotalSkPoints += path.countPoints();
172 fNumTotalSkVerbs += path.countVerbs();
173 fNumTotalConicWeights += SkPathPriv::ConicWeightCnt(path);
174 }

#endif
