1 /*
2 * Copyright 2019 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8 #ifndef GrRecordingContext_DEFINED
9 #define GrRecordingContext_DEFINED
10
#include "include/core/SkRefCnt.h"
#include "include/core/SkTypes.h"
#include "include/private/base/SkTArray.h"
#include "include/private/gpu/ganesh/GrImageContext.h"

#include <memory>

#if GR_GPU_STATS && GR_TEST_UTILS
#include <map>
#include <string>
#endif
20
21 class GrAuditTrail;
22 class GrBackendFormat;
23 class GrDrawingManager;
24 class GrOnFlushCallbackObject;
25 class GrMemoryPool;
26 class GrProgramDesc;
27 class GrProgramInfo;
28 class GrProxyProvider;
29 class GrRecordingContextPriv;
30 class GrSurfaceProxy;
31 class GrThreadSafeCache;
32 class SkArenaAlloc;
33 class SkCapabilities;
34 class SkJSONWriter;
35
36 namespace sktext::gpu {
37 class SubRunAllocator;
38 class TextBlobRedrawCoordinator;
39 }
40
41 #if GR_TEST_UTILS
42 class SkString;
43 #endif
44
45 class GrRecordingContext : public GrImageContext {
46 public:
47 ~GrRecordingContext() override;
48
defaultBackendFormat(SkColorType ct,GrRenderable renderable)49 SK_API GrBackendFormat defaultBackendFormat(SkColorType ct, GrRenderable renderable) const {
50 return INHERITED::defaultBackendFormat(ct, renderable);
51 }
52
53 /**
54 * Reports whether the GrDirectContext associated with this GrRecordingContext is abandoned.
55 * When called on a GrDirectContext it may actively check whether the underlying 3D API
56 * device/context has been disconnected before reporting the status. If so, calling this
57 * method will transition the GrDirectContext to the abandoned state.
58 */
abandoned()59 bool abandoned() override { return INHERITED::abandoned(); }
60
61 /*
62 * Can a SkSurface be created with the given color type. To check whether MSAA is supported
63 * use maxSurfaceSampleCountForColorType().
64 */
colorTypeSupportedAsSurface(SkColorType colorType)65 SK_API bool colorTypeSupportedAsSurface(SkColorType colorType) const {
66 if (kR16G16_unorm_SkColorType == colorType ||
67 kA16_unorm_SkColorType == colorType ||
68 kA16_float_SkColorType == colorType ||
69 kR16G16_float_SkColorType == colorType ||
70 kR16G16B16A16_unorm_SkColorType == colorType ||
71 kGray_8_SkColorType == colorType) {
72 return false;
73 }
74
75 return this->maxSurfaceSampleCountForColorType(colorType) > 0;
76 }
77
78 /**
79 * Gets the maximum supported texture size.
80 */
81 SK_API int maxTextureSize() const;
82
83 /**
84 * Gets the maximum supported render target size.
85 */
86 SK_API int maxRenderTargetSize() const;
87
88 /**
89 * Can a SkImage be created with the given color type.
90 */
91 SK_API bool colorTypeSupportedAsImage(SkColorType) const;
92
93 /**
94 * Gets the maximum supported sample count for a color type. 1 is returned if only non-MSAA
95 * rendering is supported for the color type. 0 is returned if rendering to this color type
96 * is not supported at all.
97 */
maxSurfaceSampleCountForColorType(SkColorType colorType)98 SK_API int maxSurfaceSampleCountForColorType(SkColorType colorType) const {
99 return INHERITED::maxSurfaceSampleCountForColorType(colorType);
100 }
101
102 SK_API sk_sp<const SkCapabilities> skCapabilities() const;
103
104 // Provides access to functions that aren't part of the public API.
105 GrRecordingContextPriv priv();
106 const GrRecordingContextPriv priv() const; // NOLINT(readability-const-return-type)
107
108 // The collection of specialized memory arenas for different types of data recorded by a
109 // GrRecordingContext. Arenas does not maintain ownership of the pools it groups together.
110 class Arenas {
111 public:
112 Arenas(SkArenaAlloc*, sktext::gpu::SubRunAllocator*);
113
114 // For storing pipelines and other complex data as-needed by ops
recordTimeAllocator()115 SkArenaAlloc* recordTimeAllocator() { return fRecordTimeAllocator; }
116
117 // For storing GrTextBlob SubRuns
recordTimeSubRunAllocator()118 sktext::gpu::SubRunAllocator* recordTimeSubRunAllocator() {
119 return fRecordTimeSubRunAllocator;
120 }
121
122 private:
123 SkArenaAlloc* fRecordTimeAllocator;
124 sktext::gpu::SubRunAllocator* fRecordTimeSubRunAllocator;
125 };
126
127 protected:
128 friend class GrRecordingContextPriv; // for hidden functions
129 friend class SkDeferredDisplayList; // for OwnedArenas
130 friend class SkDeferredDisplayListPriv; // for ProgramData
131
132 // Like Arenas, but preserves ownership of the underlying pools.
133 class OwnedArenas {
134 public:
135 OwnedArenas(bool ddlRecording);
136 ~OwnedArenas();
137
138 Arenas get();
139
140 OwnedArenas& operator=(OwnedArenas&&);
141
142 private:
143 bool fDDLRecording;
144 std::unique_ptr<SkArenaAlloc> fRecordTimeAllocator;
145 std::unique_ptr<sktext::gpu::SubRunAllocator> fRecordTimeSubRunAllocator;
146 };
147
148 GrRecordingContext(sk_sp<GrContextThreadSafeProxy>, bool ddlRecording);
149
150 bool init() override;
151
152 void abandonContext() override;
153
154 GrDrawingManager* drawingManager();
155
156 // There is no going back from this method. It should only be called to control the timing
157 // during abandon or destruction of the context.
158 void destroyDrawingManager();
159
arenas()160 Arenas arenas() { return fArenas.get(); }
161 // This entry point should only be used for DDL creation where we want the ops' lifetime to
162 // match that of the DDL.
163 OwnedArenas&& detachArenas();
164
proxyProvider()165 GrProxyProvider* proxyProvider() { return fProxyProvider.get(); }
proxyProvider()166 const GrProxyProvider* proxyProvider() const { return fProxyProvider.get(); }
167
168 struct ProgramData {
169 ProgramData(std::unique_ptr<const GrProgramDesc>, const GrProgramInfo*);
170 ProgramData(ProgramData&&); // for SkTArray
171 ProgramData(const ProgramData&) = delete;
172 ~ProgramData();
173
descProgramData174 const GrProgramDesc& desc() const { return *fDesc; }
infoProgramData175 const GrProgramInfo& info() const { return *fInfo; }
176
177 private:
178 // TODO: store the GrProgramDescs in the 'fRecordTimeData' arena
179 std::unique_ptr<const GrProgramDesc> fDesc;
180 // The program infos should be stored in 'fRecordTimeData' so do not need to be ref
181 // counted or deleted in the destructor.
182 const GrProgramInfo* fInfo = nullptr;
183 };
184
185 // This entry point gives the recording context a chance to cache the provided
186 // programInfo. The DDL context takes this opportunity to store programInfos as a sidecar
187 // to the DDL.
recordProgramInfo(const GrProgramInfo *)188 virtual void recordProgramInfo(const GrProgramInfo*) {}
189 // This asks the recording context to return any programInfos it may have collected
190 // via the 'recordProgramInfo' call. It is up to the caller to ensure that the lifetime
191 // of the programInfos matches the intended use. For example, in DDL-record mode it
192 // is known that all the programInfos will have been allocated in an arena with the
193 // same lifetime at the DDL itself.
detachProgramData(SkTArray<ProgramData> *)194 virtual void detachProgramData(SkTArray<ProgramData>*) {}
195
196 sktext::gpu::TextBlobRedrawCoordinator* getTextBlobRedrawCoordinator();
197 const sktext::gpu::TextBlobRedrawCoordinator* getTextBlobRedrawCoordinator() const;
198
199 GrThreadSafeCache* threadSafeCache();
200 const GrThreadSafeCache* threadSafeCache() const;
201
202 /**
203 * Registers an object for flush-related callbacks. (See GrOnFlushCallbackObject.)
204 *
205 * NOTE: the drawing manager tracks this object as a raw pointer; it is up to the caller to
206 * ensure its lifetime is tied to that of the context.
207 */
208 void addOnFlushCallbackObject(GrOnFlushCallbackObject*);
209
asRecordingContext()210 GrRecordingContext* asRecordingContext() override { return this; }
211
212 class Stats {
213 public:
214 Stats() = default;
215
216 #if GR_GPU_STATS
reset()217 void reset() { *this = {}; }
218
numPathMasksGenerated()219 int numPathMasksGenerated() const { return fNumPathMasksGenerated; }
incNumPathMasksGenerated()220 void incNumPathMasksGenerated() { fNumPathMasksGenerated++; }
221
numPathMaskCacheHits()222 int numPathMaskCacheHits() const { return fNumPathMaskCacheHits; }
incNumPathMasksCacheHits()223 void incNumPathMasksCacheHits() { fNumPathMaskCacheHits++; }
224
225 #if GR_TEST_UTILS
226 void dump(SkString* out) const;
227 void dumpKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) const;
228 #endif
229
230 private:
231 int fNumPathMasksGenerated{0};
232 int fNumPathMaskCacheHits{0};
233
234 #else // GR_GPU_STATS
incNumPathMasksGenerated()235 void incNumPathMasksGenerated() {}
incNumPathMasksCacheHits()236 void incNumPathMasksCacheHits() {}
237
238 #if GR_TEST_UTILS
dump(SkString *)239 void dump(SkString*) const {}
dumpKeyValuePairs(SkTArray<SkString> * keys,SkTArray<double> * values)240 void dumpKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) const {}
241 #endif
242 #endif // GR_GPU_STATS
243 } fStats;
244
245 #if GR_GPU_STATS && GR_TEST_UTILS
246 struct DMSAAStats {
247 void dumpKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) const;
248 void dump() const;
249 void merge(const DMSAAStats&);
250 int fNumRenderPasses = 0;
251 int fNumMultisampleRenderPasses = 0;
252 std::map<std::string, int> fTriggerCounts;
253 };
254
255 DMSAAStats fDMSAAStats;
256 #endif
257
stats()258 Stats* stats() { return &fStats; }
stats()259 const Stats* stats() const { return &fStats; }
260 void dumpJSON(SkJSONWriter*) const;
261
262 protected:
263 // Delete last in case other objects call it during destruction.
264 std::unique_ptr<GrAuditTrail> fAuditTrail;
265
266 private:
267 OwnedArenas fArenas;
268
269 std::unique_ptr<GrDrawingManager> fDrawingManager;
270 std::unique_ptr<GrProxyProvider> fProxyProvider;
271
272 #if GR_TEST_UTILS
273 int fSuppressWarningMessages = 0;
274 #endif
275
276 using INHERITED = GrImageContext;
277 };
278
279 /**
280 * Safely cast a possibly-null base context to direct context.
281 */
GrAsDirectContext(GrContext_Base * base)282 static inline GrDirectContext* GrAsDirectContext(GrContext_Base* base) {
283 return base ? base->asDirectContext() : nullptr;
284 }
285
286 #endif
287