/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/gpu/GrDirectContext.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "tools/gpu/GrContextFactory.h"
#ifdef SK_GL
#include "tools/gpu/gl/GLTestContext.h"
#endif

#if SK_ANGLE
#include "tools/gpu/gl/angle/GLTestContext_angle.h"
#endif
#include "tools/gpu/gl/command_buffer/GLTestContext_command_buffer.h"
#ifdef SK_VULKAN
#include "tools/gpu/vk/VkTestContext.h"
#endif
#ifdef SK_METAL
#include "tools/gpu/mtl/MtlTestContext.h"
#endif
#ifdef SK_DIRECT3D
#include "tools/gpu/d3d/D3DTestContext.h"
#endif
#ifdef SK_DAWN
#include "tools/gpu/dawn/DawnTestContext.h"
#endif
#include "src/gpu/GrCaps.h"
#include "tools/gpu/mock/MockTestContext.h"
#if defined(SK_BUILD_FOR_WIN) && defined(SK_ENABLE_DISCRETE_GPU)
extern "C" {
// NVIDIA documents that the presence and value of this symbol programmatically enable the high
// performance GPU in laptops with switchable graphics.
// https://docs.nvidia.com/gameworks/content/technologies/desktop/optimus.htm
// In testing, merely exporting this symbol, even with a value of 0, still selects the NVIDIA GPU.
__declspec(dllexport) unsigned long NvOptimusEnablement = 0x00000001;

// AMD has a similar mechanism, although this is untested since we don't have an AMD laptop.
// https://community.amd.com/thread/169965
__declspec(dllexport) int AmdPowerXpressRequestHighPerformance = 1;
}
#endif

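// GrContextFactory creates and caches one (TestContext, GrDirectContext) pair per requested
// configuration; repeated requests for the same configuration return the cached pair. A rough,
// illustrative sketch of how a test might use it (the exact context types available depend on the
// build configuration):
//
//     sk_gpu_test::GrContextFactory factory;
//     sk_gpu_test::ContextInfo info =
//             factory.getContextInfo(sk_gpu_test::GrContextFactory::kGL_ContextType);
//     if (GrDirectContext* dContext = info.directContext()) {
//         // ... record and submit GPU work against dContext ...
//     }
//     // All contexts are destroyed when the factory goes out of scope.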
namespace sk_gpu_test {
GrContextFactory::GrContextFactory() { }

GrContextFactory::GrContextFactory(const GrContextOptions& opts)
        : fGlobalOptions(opts) {
}

GrContextFactory::~GrContextFactory() {
    this->destroyContexts();
}

void GrContextFactory::destroyContexts() {
    // We must delete the test contexts in reverse order so that any child context is finished
    // and deleted before its parent context. This relies on the fact that when we make a new
    // context we append it to the end of the fContexts array.
    // TODO: Look into keeping a dependency DAG for contexts and deletion order.
    for (int i = fContexts.count() - 1; i >= 0; --i) {
        Context& context = fContexts[i];
        SkScopeExit restore(nullptr);
        if (context.fTestContext) {
            restore = context.fTestContext->makeCurrentAndAutoRestore();
        }
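        // If anyone outside the factory still holds a reference to this GrDirectContext, we can't
        // fully destroy it; release its GPU resources and abandon it so the remaining owners are
        // left with an inert context.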
        if (!context.fGrContext->unique()) {
            context.fGrContext->releaseResourcesAndAbandonContext();
            context.fAbandoned = true;
        }
        context.fGrContext->unref();
        delete context.fTestContext;
    }
    fContexts.reset();
}

void GrContextFactory::abandonContexts() {
    // We must abandon the test contexts in reverse order so that any child context is finished
    // and abandoned before its parent context. This relies on the fact that when we make a new
    // context we append it to the end of the fContexts array.
    // TODO: Look into keeping a dependency DAG for contexts and deletion order.
    for (int i = fContexts.count() - 1; i >= 0; --i) {
        Context& context = fContexts[i];
        if (!context.fAbandoned) {
            if (context.fTestContext) {
                auto restore = context.fTestContext->makeCurrentAndAutoRestore();
                context.fTestContext->testAbandon();
            }
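            // Vulkan and Dawn require the GrDirectContext to be abandoned before the TestContext
            // is deleted; the other backends are abandoned after the TestContext is deleted.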
            GrBackendApi api = context.fGrContext->backend();
            bool requiresEarlyAbandon = api == GrBackendApi::kVulkan || api == GrBackendApi::kDawn;
            if (requiresEarlyAbandon) {
                context.fGrContext->abandonContext();
            }
            if (context.fTestContext) {
                delete context.fTestContext;
                context.fTestContext = nullptr;
            }
            if (!requiresEarlyAbandon) {
                context.fGrContext->abandonContext();
            }
            context.fAbandoned = true;
        }
    }
}

void GrContextFactory::releaseResourcesAndAbandonContexts() {
    // We must abandon the test contexts in reverse order so that any child context is finished
    // and abandoned before its parent context. This relies on the fact that when we make a new
    // context we append it to the end of the fContexts array.
    // TODO: Look into keeping a dependency DAG for contexts and deletion order.
    for (int i = fContexts.count() - 1; i >= 0; --i) {
        Context& context = fContexts[i];
        SkScopeExit restore(nullptr);
        if (!context.fAbandoned) {
            if (context.fTestContext) {
                restore = context.fTestContext->makeCurrentAndAutoRestore();
            }
            context.fGrContext->releaseResourcesAndAbandonContext();
            if (context.fTestContext) {
                delete context.fTestContext;
                context.fTestContext = nullptr;
            }
            context.fAbandoned = true;
        }
    }
}

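// Convenience overload that returns only the GrDirectContext for the requested configuration.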
GrDirectContext* GrContextFactory::get(ContextType type, ContextOverrides overrides) {
    return this->getContextInfo(type, overrides).directContext();
}

ContextInfo GrContextFactory::getContextInfoInternal(ContextType type, ContextOverrides overrides,
                                                     GrDirectContext* shareContext,
                                                     uint32_t shareIndex) {
    // (shareIndex != 0) -> (shareContext != nullptr)
    SkASSERT((shareIndex == 0) || (shareContext != nullptr));

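    // First check whether we already have a live context matching this exact configuration; if
    // so, make it current and return it.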
    for (int i = 0; i < fContexts.count(); ++i) {
        Context& context = fContexts[i];
        if (context.fType == type &&
            context.fOverrides == overrides &&
            context.fShareContext == shareContext &&
            context.fShareIndex == shareIndex &&
            !context.fAbandoned) {
            context.fTestContext->makeCurrent();
            return ContextInfo(context.fType, context.fTestContext, context.fGrContext,
                               context.fOptions);
        }
    }

    // If we're trying to create a context in a share group, find the primary context
    Context* primaryContext = nullptr;
    if (shareContext) {
        for (int i = 0; i < fContexts.count(); ++i) {
            if (!fContexts[i].fAbandoned && fContexts[i].fGrContext == shareContext) {
                primaryContext = &fContexts[i];
                break;
            }
        }
        SkASSERT(primaryContext && primaryContext->fType == type);
    }

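    // No cached match: create a new TestContext for the requested backend, then make a
    // GrDirectContext from it below.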
    std::unique_ptr<TestContext> testCtx;
    GrBackendApi backend = ContextTypeBackend(type);
    switch (backend) {
#ifdef SK_GL
        case GrBackendApi::kOpenGL: {
            GLTestContext* glShareContext = primaryContext
                ? static_cast<GLTestContext*>(primaryContext->fTestContext) : nullptr;
            GLTestContext* glCtx;
            switch (type) {
                case kGL_ContextType:
                    glCtx = CreatePlatformGLTestContext(kGL_GrGLStandard, glShareContext);
                    break;
                case kGLES_ContextType:
                    glCtx = CreatePlatformGLTestContext(kGLES_GrGLStandard, glShareContext);
                    break;
#if SK_ANGLE
                case kANGLE_D3D9_ES2_ContextType:
                    glCtx = MakeANGLETestContext(ANGLEBackend::kD3D9, ANGLEContextVersion::kES2,
                                                 glShareContext).release();
                    // Chrome only runs on D3D9 with NVIDIA using 2012-or-earlier drivers
                    // (<= 269.73). We get shader link failures when testing this backend with
                    // more recent drivers.
                    if (glCtx) {
                        GrGLDriverInfo info = GrGLGetDriverInfo(glCtx->gl());
                        if (info.fANGLEVendor == GrGLVendor::kNVIDIA) {
                            delete glCtx;
                            return ContextInfo();
                        }
                    }
                    break;
                case kANGLE_D3D11_ES2_ContextType:
                    glCtx = MakeANGLETestContext(ANGLEBackend::kD3D11, ANGLEContextVersion::kES2,
                                                 glShareContext).release();
                    break;
                case kANGLE_D3D11_ES3_ContextType:
                    glCtx = MakeANGLETestContext(ANGLEBackend::kD3D11, ANGLEContextVersion::kES3,
                                                 glShareContext).release();
                    break;
                case kANGLE_GL_ES2_ContextType:
                    glCtx = MakeANGLETestContext(ANGLEBackend::kOpenGL, ANGLEContextVersion::kES2,
                                                 glShareContext).release();
                    break;
                case kANGLE_GL_ES3_ContextType:
                    glCtx = MakeANGLETestContext(ANGLEBackend::kOpenGL, ANGLEContextVersion::kES3,
                                                 glShareContext).release();
                    break;
#endif
#ifndef SK_NO_COMMAND_BUFFER
                case kCommandBuffer_ES2_ContextType:
                    glCtx = CommandBufferGLTestContext::Create(2, glShareContext);
                    break;
                case kCommandBuffer_ES3_ContextType:
                    glCtx = CommandBufferGLTestContext::Create(3, glShareContext);
                    break;
#endif
                default:
                    return ContextInfo();
            }
            if (!glCtx) {
                return ContextInfo();
            }
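            // If requested, masquerade as an ES2 context so tests can exercise the GLES 2.0 /
            // GLSL ES 1.00 code paths on an ES3-capable driver.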
            if (glCtx->gl()->fStandard == kGLES_GrGLStandard &&
                (overrides & ContextOverrides::kFakeGLESVersionAs2)) {
                glCtx->overrideVersion("OpenGL ES 2.0", "OpenGL ES GLSL ES 1.00");
            }
            testCtx.reset(glCtx);
            break;
        }
#endif // SK_GL
#ifdef SK_VULKAN
        case GrBackendApi::kVulkan: {
            VkTestContext* vkSharedContext = primaryContext
                ? static_cast<VkTestContext*>(primaryContext->fTestContext) : nullptr;
            SkASSERT(kVulkan_ContextType == type);
            testCtx.reset(CreatePlatformVkTestContext(vkSharedContext));
            if (!testCtx) {
                return ContextInfo();
            }
#ifdef SK_GL
            // We previously had an issue where the VkDevice destruction would occasionally hang
            // on systems with NVIDIA GPUs and having an existing GL context fixed it. Now (Feb
            // 2022) we still need the GL context to keep Vulkan/TSAN bots from running incredibly
            // slow. Perhaps this prevents repeated driver loading/unloading? Note that keeping
            // a persistent VkTestContext around instead was tried and did not work.
            if (!fSentinelGLContext) {
                fSentinelGLContext.reset(CreatePlatformGLTestContext(kGL_GrGLStandard));
                if (!fSentinelGLContext) {
                    fSentinelGLContext.reset(CreatePlatformGLTestContext(kGLES_GrGLStandard));
                }
            }
#endif
            break;
        }
#endif
#ifdef SK_METAL
        case GrBackendApi::kMetal: {
            MtlTestContext* mtlSharedContext = primaryContext
                    ? static_cast<MtlTestContext*>(primaryContext->fTestContext) : nullptr;
            SkASSERT(kMetal_ContextType == type);
            testCtx.reset(CreatePlatformMtlTestContext(mtlSharedContext));
            if (!testCtx) {
                return ContextInfo();
            }
            break;
        }
#endif
#ifdef SK_DIRECT3D
        case GrBackendApi::kDirect3D: {
            D3DTestContext* d3dSharedContext = primaryContext
                    ? static_cast<D3DTestContext*>(primaryContext->fTestContext) : nullptr;
            SkASSERT(kDirect3D_ContextType == type);
            testCtx.reset(CreatePlatformD3DTestContext(d3dSharedContext));
            if (!testCtx) {
                return ContextInfo();
            }
            break;
        }
#endif
#ifdef SK_DAWN
        case GrBackendApi::kDawn: {
            DawnTestContext* dawnSharedContext = primaryContext
                    ? static_cast<DawnTestContext*>(primaryContext->fTestContext) : nullptr;
            testCtx.reset(CreatePlatformDawnTestContext(dawnSharedContext));
            if (!testCtx) {
                return ContextInfo();
            }
            break;
        }
#endif
        case GrBackendApi::kMock: {
            TestContext* sharedContext = primaryContext ? primaryContext->fTestContext : nullptr;
            SkASSERT(kMock_ContextType == type);
            testCtx.reset(CreateMockTestContext(sharedContext));
            if (!testCtx) {
                return ContextInfo();
            }
            break;
        }
        default:
            return ContextInfo();
    }

    SkASSERT(testCtx && testCtx->backend() == backend);
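    // Layer any per-context overrides on top of the factory-wide options before creating the
    // GrDirectContext.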
    GrContextOptions grOptions = fGlobalOptions;
    if (ContextOverrides::kAvoidStencilBuffers & overrides) {
        grOptions.fAvoidStencilBuffers = true;
    }
    if (ContextOverrides::kReducedShaders & overrides) {
        grOptions.fReducedShaderVariations = true;
    }
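    // Create the GrDirectContext with the test context made current; the scope exit restores
    // whatever context was current before.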
    sk_sp<GrDirectContext> grCtx;
    {
        auto restore = testCtx->makeCurrentAndAutoRestore();
        grCtx = testCtx->makeContext(grOptions);
    }
    if (!grCtx) {
        return ContextInfo();
    }

    if (shareContext) {
        SkASSERT(grCtx->directContextID() != shareContext->directContextID());
    }

    // We must always add new contexts by pushing to the back so that when we delete them we
    // delete them in the reverse of the order in which they were made.
    Context& context = fContexts.push_back();
    context.fBackend = backend;
    context.fTestContext = testCtx.release();
    context.fGrContext = SkRef(grCtx.get());
    context.fType = type;
    context.fOverrides = overrides;
    context.fAbandoned = false;
    context.fShareContext = shareContext;
    context.fShareIndex = shareIndex;
    context.fOptions = grOptions;
    context.fTestContext->makeCurrent();
    return ContextInfo(context.fType, context.fTestContext, context.fGrContext, context.fOptions);
}

ContextInfo GrContextFactory::getContextInfo(ContextType type, ContextOverrides overrides) {
    return this->getContextInfoInternal(type, overrides, nullptr, 0);
}

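// Returns a context that lives in the same share group as an existing context created by this
// factory; shareIndex distinguishes multiple shared contexts derived from the same primary
// context.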
ContextInfo GrContextFactory::getSharedContextInfo(GrDirectContext* shareContext,
                                                   uint32_t shareIndex) {
    SkASSERT(shareContext);
    for (int i = 0; i < fContexts.count(); ++i) {
        if (!fContexts[i].fAbandoned && fContexts[i].fGrContext == shareContext) {
            return this->getContextInfoInternal(fContexts[i].fType, fContexts[i].fOverrides,
                                                shareContext, shareIndex);
        }
    }

    return ContextInfo();
}

}  // namespace sk_gpu_test