/*
 * Copyright 2019 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/gpu/GrRecordingContext.h"

#include "include/gpu/GrContextThreadSafeProxy.h"
#include "src/core/SkArenaAlloc.h"
#include "src/gpu/GrAuditTrail.h"
#include "src/gpu/GrCaps.h"
#include "src/gpu/GrContextThreadSafeProxyPriv.h"
#include "src/gpu/GrDrawingManager.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrProgramDesc.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrSurfaceContext.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/SkGr.h"
#include "src/gpu/effects/GrSkSLFP.h"
#include "src/gpu/ops/GrAtlasTextOp.h"
#include "src/gpu/text/GrTextBlob.h"
#include "src/gpu/text/GrTextBlobCache.h"

GrRecordingContext::ProgramData::ProgramData(std::unique_ptr<const GrProgramDesc> desc,
                                             const GrProgramInfo* info)
        : fDesc(std::move(desc))
        , fInfo(info) {
}

GrRecordingContext::ProgramData::ProgramData(ProgramData&& other)
        : fDesc(std::move(other.fDesc))
        , fInfo(other.fInfo) {
}

GrRecordingContext::ProgramData::~ProgramData() = default;

GrRecordingContext::GrRecordingContext(sk_sp<GrContextThreadSafeProxy> proxy, bool ddlRecording)
        : INHERITED(std::move(proxy))
        , fAuditTrail(new GrAuditTrail())
        , fArenas(ddlRecording) {
    fProxyProvider = std::make_unique<GrProxyProvider>(this);
}

GrRecordingContext::~GrRecordingContext() {
    GrAtlasTextOp::ClearCache();
}

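// Returns the maximum MSAA sample count for rendering to the default backend format of this
// color type; a color type that cannot be rendered to should yield 0.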
int GrRecordingContext::maxSurfaceSampleCountForColorType(SkColorType colorType) const {
    GrBackendFormat format =
            this->caps()->getDefaultBackendFormat(SkColorTypeToGrColorType(colorType),
                                                  GrRenderable::kYes);
    return this->caps()->maxRenderTargetSampleCount(format);
}

bool GrRecordingContext::init() {
    if (!INHERITED::init()) {
        return false;
    }

    GrPathRendererChain::Options prcOptions;
    prcOptions.fAllowPathMaskCaching = this->options().fAllowPathMaskCaching;
#if GR_TEST_UTILS
    prcOptions.fGpuPathRenderers = this->options().fGpuPathRenderers;
#endif
    // FIXME: Once this is removed from Chrome and Android, rename it to an fEnable... option.
    if (this->options().fDisableDistanceFieldPaths) {
        prcOptions.fGpuPathRenderers &= ~GpuPathRenderers::kSmall;
    }

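    // Resolve the tri-state fReduceOpsTaskSplitting option: the caps bit always wins; otherwise
    // Enable::kYes/kNo decide, and Enable::kDefault leaves the reduction disabled here.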
    bool reduceOpsTaskSplitting = false;
    if (this->caps()->avoidReorderingRenderTasks()) {
        reduceOpsTaskSplitting = false;
    } else if (GrContextOptions::Enable::kYes == this->options().fReduceOpsTaskSplitting) {
        reduceOpsTaskSplitting = true;
    } else if (GrContextOptions::Enable::kNo == this->options().fReduceOpsTaskSplitting) {
        reduceOpsTaskSplitting = false;
    }
    fDrawingManager.reset(new GrDrawingManager(this,
                                               prcOptions,
                                               reduceOpsTaskSplitting));
    return true;
}

void GrRecordingContext::abandonContext() {
    INHERITED::abandonContext();

    this->destroyDrawingManager();
}

GrDrawingManager* GrRecordingContext::drawingManager() {
    return fDrawingManager.get();
}

void GrRecordingContext::destroyDrawingManager() {
    fDrawingManager.reset();
}

GrRecordingContext::Arenas::Arenas(SkArenaAlloc* recordTimeAllocator,
                                   GrSubRunAllocator* subRunAllocator)
        : fRecordTimeAllocator(recordTimeAllocator)
        , fRecordTimeSubRunAllocator(subRunAllocator) {
    // OwnedArenas should instantiate these before passing the bare pointer off to this struct.
    SkASSERT(subRunAllocator);
}

// Must be defined here so that std::unique_ptr can see the sizes of the various pools, otherwise
// it can't generate a default destructor for them.
GrRecordingContext::OwnedArenas::OwnedArenas(bool ddlRecording) : fDDLRecording(ddlRecording) {}
GrRecordingContext::OwnedArenas::~OwnedArenas() {}
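
// Illustrative sketch (not part of Skia): the same unique_ptr-to-incomplete-type pattern in
// isolation, with hypothetical names. The destructor must be defined out of line, in a file
// where the pointee is a complete type, so unique_ptr can instantiate its deleter.
//
//     // owned_pools.h
//     class Pool;                                   // forward declaration only
//     class OwnedPools {
//     public:
//         OwnedPools();
//         ~OwnedPools();                            // declared here, defined in the .cpp
//     private:
//         std::unique_ptr<Pool> fPool;
//     };
//
//     // owned_pools.cpp
//     class Pool { /* ... */ };                     // Pool is complete here
//     OwnedPools::OwnedPools() : fPool(std::make_unique<Pool>()) {}
//     OwnedPools::~OwnedPools() = default;          // unique_ptr<Pool>'s deleter compiles here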

GrRecordingContext::OwnedArenas& GrRecordingContext::OwnedArenas::operator=(OwnedArenas&& a) {
    fDDLRecording = a.fDDLRecording;
    fRecordTimeAllocator = std::move(a.fRecordTimeAllocator);
    fRecordTimeSubRunAllocator = std::move(a.fRecordTimeSubRunAllocator);
    return *this;
}

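// Lazily creates the arenas on first use: the record-time SkArenaAlloc is only needed while
// recording a DDL, and the sub-run allocator is created whenever it is missing.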
GrRecordingContext::Arenas GrRecordingContext::OwnedArenas::get() {
    if (!fRecordTimeAllocator && fDDLRecording) {
        // TODO: empirically determine a better number for SkArenaAlloc's firstHeapAllocation param
        fRecordTimeAllocator = std::make_unique<SkArenaAlloc>(1024);
    }

    if (!fRecordTimeSubRunAllocator) {
        fRecordTimeSubRunAllocator = std::make_unique<GrSubRunAllocator>();
    }

    return {fRecordTimeAllocator.get(), fRecordTimeSubRunAllocator.get()};
}

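// Moves ownership of the record-time arenas out of this context, e.g. so allocations made while
// recording can outlive the recording context itself.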
GrRecordingContext::OwnedArenas&& GrRecordingContext::detachArenas() {
    return std::move(fArenas);
}

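// These caches live on the GrContextThreadSafeProxy, so the same caches can be shared by every
// context that wraps that proxy (e.g. a direct context and its DDL-recording contexts).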
GrTextBlobCache* GrRecordingContext::getTextBlobCache() {
    return fThreadSafeProxy->priv().getTextBlobCache();
}

const GrTextBlobCache* GrRecordingContext::getTextBlobCache() const {
    return fThreadSafeProxy->priv().getTextBlobCache();
}

GrThreadSafeCache* GrRecordingContext::threadSafeCache() {
    return fThreadSafeProxy->priv().threadSafeCache();
}

const GrThreadSafeCache* GrRecordingContext::threadSafeCache() const {
    return fThreadSafeProxy->priv().threadSafeCache();
}

void GrRecordingContext::addOnFlushCallbackObject(GrOnFlushCallbackObject* onFlushCBObject) {
    this->drawingManager()->addOnFlushCallbackObject(onFlushCBObject);
}

////////////////////////////////////////////////////////////////////////////////

int GrRecordingContext::maxTextureSize() const { return this->caps()->maxTextureSize(); }

int GrRecordingContext::maxRenderTargetSize() const { return this->caps()->maxRenderTargetSize(); }

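// A color type can back an SkImage if the backend reports a valid default format for it;
// renderability is not required, hence GrRenderable::kNo below.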
bool GrRecordingContext::colorTypeSupportedAsImage(SkColorType colorType) const {
    GrBackendFormat format =
            this->caps()->getDefaultBackendFormat(SkColorTypeToGrColorType(colorType),
                                                  GrRenderable::kNo);
    return format.isValid();
}

///////////////////////////////////////////////////////////////////////////////////////////////////
sk_sp<const GrCaps> GrRecordingContextPriv::refCaps() const {
    return fContext->refCaps();
}

void GrRecordingContextPriv::addOnFlushCallbackObject(GrOnFlushCallbackObject* onFlushCBObject) {
    fContext->addOnFlushCallbackObject(onFlushCBObject);
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#ifdef SK_ENABLE_DUMP_GPU
#include "src/utils/SkJSONWriter.h"

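// Emits this context's GPU stats as a JSON object. Illustrative shape only (values made up):
//     {"path_masks_generated": 12, "path_mask_cache_hits": 48}
// Without GR_GPU_STATS the object is empty.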
void GrRecordingContext::dumpJSON(SkJSONWriter* writer) const {
    writer->beginObject();

#if GR_GPU_STATS
    writer->appendS32("path_masks_generated", this->stats()->numPathMasksGenerated());
    writer->appendS32("path_mask_cache_hits", this->stats()->numPathMaskCacheHits());
#endif

    writer->endObject();
}
#else
void GrRecordingContext::dumpJSON(SkJSONWriter*) const { }
#endif

#if GR_TEST_UTILS

#if GR_GPU_STATS

void GrRecordingContext::Stats::dump(SkString* out) {
    out->appendf("Num Path Masks Generated: %d\n", fNumPathMasksGenerated);
    out->appendf("Num Path Mask Cache Hits: %d\n", fNumPathMaskCacheHits);
}

void GrRecordingContext::Stats::dumpKeyValuePairs(SkTArray<SkString>* keys,
                                                  SkTArray<double>* values) {
    keys->push_back(SkString("path_masks_generated"));
    values->push_back(fNumPathMasksGenerated);

    keys->push_back(SkString("path_mask_cache_hits"));
    values->push_back(fNumPathMaskCacheHits);
}

void GrRecordingContext::DMSAAStats::dumpKeyValuePairs(SkTArray<SkString>* keys,
                                                       SkTArray<double>* values) const {
    keys->push_back(SkString("dmsaa_render_passes"));
    values->push_back(fNumRenderPasses);

    keys->push_back(SkString("dmsaa_multisample_render_passes"));
    values->push_back(fNumMultisampleRenderPasses);

    for (const auto& [name, count] : fTriggerCounts) {
        keys->push_back(SkStringPrintf("dmsaa_trigger_%s", name.c_str()));
        values->push_back(count);
    }
}

void GrRecordingContext::DMSAAStats::dump() const {
    SkDebugf("DMSAA Render Passes: %d\n", fNumRenderPasses);
    SkDebugf("DMSAA Multisample Render Passes: %d\n", fNumMultisampleRenderPasses);
    if (!fTriggerCounts.empty()) {
        SkDebugf("DMSAA Triggers:\n");
        for (const auto& [name, count] : fTriggerCounts) {
            SkDebugf("    %s: %d\n", name.c_str(), count);
        }
    }
}

void GrRecordingContext::DMSAAStats::merge(const DMSAAStats& stats) {
    fNumRenderPasses += stats.fNumRenderPasses;
    fNumMultisampleRenderPasses += stats.fNumMultisampleRenderPasses;
    for (const auto& [name, count] : stats.fTriggerCounts) {
        fTriggerCounts[name] += count;
    }
}

#endif // GR_GPU_STATS
#endif // GR_TEST_UTILS