1 /*
2 * Copyright 2019 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8 #include "include/gpu/GrRecordingContext.h"
9
10 #include "include/gpu/GrContextThreadSafeProxy.h"
11 #include "src/core/SkArenaAlloc.h"
12 #include "src/gpu/GrAuditTrail.h"
13 #include "src/gpu/GrCaps.h"
14 #include "src/gpu/GrContextThreadSafeProxyPriv.h"
15 #include "src/gpu/GrDrawingManager.h"
16 #include "src/gpu/GrMemoryPool.h"
17 #include "src/gpu/GrProgramDesc.h"
18 #include "src/gpu/GrProxyProvider.h"
19 #include "src/gpu/GrRecordingContextPriv.h"
20 #include "src/gpu/SkGr.h"
21 #include "src/gpu/SurfaceContext.h"
22 #include "src/gpu/effects/GrSkSLFP.h"
23 #include "src/gpu/text/GrTextBlob.h"
24 #include "src/gpu/text/GrTextBlobCache.h"
25
26 #if SK_GPU_V1
27 #include "src/gpu/ops/AtlasTextOp.h"
28 #endif
29
ProgramData(std::unique_ptr<const GrProgramDesc> desc,const GrProgramInfo * info)30 GrRecordingContext::ProgramData::ProgramData(std::unique_ptr<const GrProgramDesc> desc,
31 const GrProgramInfo* info)
32 : fDesc(std::move(desc))
33 , fInfo(info) {
34 }
35
ProgramData(ProgramData && other)36 GrRecordingContext::ProgramData::ProgramData(ProgramData&& other)
37 : fDesc(std::move(other.fDesc))
38 , fInfo(other.fInfo) {
39 }
40
// Defaulted out-of-line. NOTE(review): presumably defined here so the unique_ptr to
// GrProgramDesc destructs where that type is complete — confirm against the header.
GrRecordingContext::ProgramData::~ProgramData() = default;
42
GrRecordingContext(sk_sp<GrContextThreadSafeProxy> proxy,bool ddlRecording)43 GrRecordingContext::GrRecordingContext(sk_sp<GrContextThreadSafeProxy> proxy, bool ddlRecording)
44 : INHERITED(std::move(proxy))
45 , fAuditTrail(new GrAuditTrail())
46 , fArenas(ddlRecording) {
47 fProxyProvider = std::make_unique<GrProxyProvider>(this);
48 }
49
GrRecordingContext::~GrRecordingContext() {
#if SK_GPU_V1
    // Release AtlasTextOp's cached memory. NOTE(review): this appears to be a
    // process-wide cache cleared on every context destruction — confirm intent.
    skgpu::v1::AtlasTextOp::ClearCache();
#endif
}
55
maxSurfaceSampleCountForColorType(SkColorType colorType) const56 int GrRecordingContext::maxSurfaceSampleCountForColorType(SkColorType colorType) const {
57 GrBackendFormat format =
58 this->caps()->getDefaultBackendFormat(SkColorTypeToGrColorType(colorType),
59 GrRenderable::kYes);
60 return this->caps()->maxRenderTargetSampleCount(format);
61 }
62
// Second-phase initialization: sets up the drawing manager (and, for v1, its path
// renderer chain options). Returns false if the base class fails to initialize.
bool GrRecordingContext::init() {
    if (!INHERITED::init()) {
        return false;
    }

#if SK_GPU_V1
    skgpu::v1::PathRendererChain::Options prcOptions;
    prcOptions.fAllowPathMaskCaching = this->options().fAllowPathMaskCaching;
#if GR_TEST_UTILS
    prcOptions.fGpuPathRenderers = this->options().fGpuPathRenderers;
#endif
    // FIXME: Once this is removed from Chrome and Android, rename to fEnable"".
    if (this->options().fDisableDistanceFieldPaths) {
        prcOptions.fGpuPathRenderers &= ~GpuPathRenderers::kSmall;
    }
#endif

    // Reordering render tasks to reduce opsTask splitting is on by default; it is
    // forced off when the backend can't tolerate reordering, and otherwise only an
    // explicit kNo in the context options disables it.
    bool reduceOpsTaskSplitting;
    if (this->caps()->avoidReorderingRenderTasks()) {
        reduceOpsTaskSplitting = false;
    } else {
        reduceOpsTaskSplitting =
                GrContextOptions::Enable::kNo != this->options().fReduceOpsTaskSplitting;
    }

    fDrawingManager.reset(new GrDrawingManager(this,
#if SK_GPU_V1
                                               prcOptions,
#endif
                                               reduceOpsTaskSplitting));
    return true;
}
95
// Abandons the context: the base class marks it abandoned first, then the drawing
// manager (and all recorded work) is torn down.
void GrRecordingContext::abandonContext() {
    INHERITED::abandonContext();

    this->destroyDrawingManager();
}
101
drawingManager()102 GrDrawingManager* GrRecordingContext::drawingManager() {
103 return fDrawingManager.get();
104 }
105
destroyDrawingManager()106 void GrRecordingContext::destroyDrawingManager() {
107 fDrawingManager.reset();
108 }
109
Arenas(SkArenaAlloc * recordTimeAllocator,GrSubRunAllocator * subRunAllocator)110 GrRecordingContext::Arenas::Arenas(SkArenaAlloc* recordTimeAllocator,
111 GrSubRunAllocator* subRunAllocator)
112 : fRecordTimeAllocator(recordTimeAllocator)
113 , fRecordTimeSubRunAllocator(subRunAllocator) {
114 // OwnedArenas should instantiate these before passing the bare pointer off to this struct.
115 SkASSERT(subRunAllocator);
116 }
117
118 // Must be defined here so that std::unique_ptr can see the sizes of the various pools, otherwise
119 // it can't generate a default destructor for them.
OwnedArenas(bool ddlRecording)120 GrRecordingContext::OwnedArenas::OwnedArenas(bool ddlRecording) : fDDLRecording(ddlRecording) {}
~OwnedArenas()121 GrRecordingContext::OwnedArenas::~OwnedArenas() {}
122
operator =(OwnedArenas && a)123 GrRecordingContext::OwnedArenas& GrRecordingContext::OwnedArenas::operator=(OwnedArenas&& a) {
124 fDDLRecording = a.fDDLRecording;
125 fRecordTimeAllocator = std::move(a.fRecordTimeAllocator);
126 fRecordTimeSubRunAllocator = std::move(a.fRecordTimeSubRunAllocator);
127 return *this;
128 }
129
get()130 GrRecordingContext::Arenas GrRecordingContext::OwnedArenas::get() {
131 if (!fRecordTimeAllocator && fDDLRecording) {
132 // TODO: empirically determine a better number for SkArenaAlloc's firstHeapAllocation param
133 fRecordTimeAllocator = std::make_unique<SkArenaAlloc>(1024);
134 }
135
136 if (!fRecordTimeSubRunAllocator) {
137 fRecordTimeSubRunAllocator = std::make_unique<GrSubRunAllocator>();
138 }
139
140 return {fRecordTimeAllocator.get(), fRecordTimeSubRunAllocator.get()};
141 }
142
// Hands the context's arenas to the caller. Returns an rvalue reference to the
// member itself, so the caller is expected to move-construct/assign from the result
// immediately; fArenas is left in a moved-from (valid but empty) state.
GrRecordingContext::OwnedArenas&& GrRecordingContext::detachArenas() {
    return std::move(fArenas);
}
146
getTextBlobCache()147 GrTextBlobCache* GrRecordingContext::getTextBlobCache() {
148 return fThreadSafeProxy->priv().getTextBlobCache();
149 }
150
getTextBlobCache() const151 const GrTextBlobCache* GrRecordingContext::getTextBlobCache() const {
152 return fThreadSafeProxy->priv().getTextBlobCache();
153 }
154
threadSafeCache()155 GrThreadSafeCache* GrRecordingContext::threadSafeCache() {
156 return fThreadSafeProxy->priv().threadSafeCache();
157 }
158
threadSafeCache() const159 const GrThreadSafeCache* GrRecordingContext::threadSafeCache() const {
160 return fThreadSafeProxy->priv().threadSafeCache();
161 }
162
addOnFlushCallbackObject(GrOnFlushCallbackObject * onFlushCBObject)163 void GrRecordingContext::addOnFlushCallbackObject(GrOnFlushCallbackObject* onFlushCBObject) {
164 this->drawingManager()->addOnFlushCallbackObject(onFlushCBObject);
165 }
166
167 ////////////////////////////////////////////////////////////////////////////////
168
maxTextureSize() const169 int GrRecordingContext::maxTextureSize() const { return this->caps()->maxTextureSize(); }
170
maxRenderTargetSize() const171 int GrRecordingContext::maxRenderTargetSize() const { return this->caps()->maxRenderTargetSize(); }
172
colorTypeSupportedAsImage(SkColorType colorType) const173 bool GrRecordingContext::colorTypeSupportedAsImage(SkColorType colorType) const {
174 GrBackendFormat format =
175 this->caps()->getDefaultBackendFormat(SkColorTypeToGrColorType(colorType),
176 GrRenderable::kNo);
177 return format.isValid();
178 }
179
180 ///////////////////////////////////////////////////////////////////////////////////////////////////
181
#ifdef SK_ENABLE_DUMP_GPU
#include "src/utils/SkJSONWriter.h"

// Emits this context's stats as a JSON object. Only the path-mask counters are
// written, and only when GR_GPU_STATS is compiled in; otherwise the object is empty.
void GrRecordingContext::dumpJSON(SkJSONWriter* writer) const {
    writer->beginObject();

#if GR_GPU_STATS
    writer->appendS32("path_masks_generated", this->stats()->numPathMasksGenerated());
    writer->appendS32("path_mask_cache_hits", this->stats()->numPathMaskCacheHits());
#endif

    writer->endObject();
}
#else
// Dump support compiled out: keep the symbol so callers still link, but do nothing.
void GrRecordingContext::dumpJSON(SkJSONWriter*) const { }
#endif
198
199 #if GR_TEST_UTILS
200
201 #if GR_GPU_STATS
202
dump(SkString * out) const203 void GrRecordingContext::Stats::dump(SkString* out) const {
204 out->appendf("Num Path Masks Generated: %d\n", fNumPathMasksGenerated);
205 out->appendf("Num Path Mask Cache Hits: %d\n", fNumPathMaskCacheHits);
206 }
207
dumpKeyValuePairs(SkTArray<SkString> * keys,SkTArray<double> * values) const208 void GrRecordingContext::Stats::dumpKeyValuePairs(SkTArray<SkString>* keys,
209 SkTArray<double>* values) const {
210 keys->push_back(SkString("path_masks_generated"));
211 values->push_back(fNumPathMasksGenerated);
212
213 keys->push_back(SkString("path_mask_cache_hits"));
214 values->push_back(fNumPathMaskCacheHits);
215 }
216
dumpKeyValuePairs(SkTArray<SkString> * keys,SkTArray<double> * values) const217 void GrRecordingContext::DMSAAStats::dumpKeyValuePairs(SkTArray<SkString>* keys,
218 SkTArray<double>* values) const {
219 keys->push_back(SkString("dmsaa_render_passes"));
220 values->push_back(fNumRenderPasses);
221
222 keys->push_back(SkString("dmsaa_multisample_render_passes"));
223 values->push_back(fNumMultisampleRenderPasses);
224
225 for (const auto& [name, count] : fTriggerCounts) {
226 keys->push_back(SkStringPrintf("dmsaa_trigger_%s", name.c_str()));
227 values->push_back(count);
228 }
229 }
230
dump() const231 void GrRecordingContext::DMSAAStats::dump() const {
232 SkDebugf("DMSAA Render Passes: %d\n", fNumRenderPasses);
233 SkDebugf("DMSAA Multisample Render Passes: %d\n", fNumMultisampleRenderPasses);
234 if (!fTriggerCounts.empty()) {
235 SkDebugf("DMSAA Triggers:\n");
236 for (const auto& [name, count] : fTriggerCounts) {
237 SkDebugf(" %s: %d\n", name.c_str(), count);
238 }
239 }
240 }
241
merge(const DMSAAStats & stats)242 void GrRecordingContext::DMSAAStats::merge(const DMSAAStats& stats) {
243 fNumRenderPasses += stats.fNumRenderPasses;
244 fNumMultisampleRenderPasses += stats.fNumMultisampleRenderPasses;
245 for (const auto& [name, count] : stats.fTriggerCounts) {
246 fTriggerCounts[name] += count;
247 }
248 }
249
250 #endif // GR_GPU_STATS
251 #endif // GR_TEST_UTILS
252
253