1 /*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8 // This is a GPU-backend specific test. It relies on static initializers to work
9
10 #include "include/core/SkTypes.h"
11
12 #if defined(SK_GANESH) && defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26 && defined(SK_VULKAN)
13
14 #include "include/core/SkBitmap.h"
15 #include "include/core/SkCanvas.h"
16 #include "include/core/SkColorSpace.h"
17 #include "include/core/SkImage.h"
18 #include "include/core/SkSurface.h"
19 #include "include/gpu/GrBackendSemaphore.h"
20 #include "include/gpu/GrDirectContext.h"
21 #include "include/gpu/GrTypes.h"
22 #include "include/gpu/MutableTextureState.h"
23 #include "include/gpu/ganesh/SkImageGanesh.h"
24 #include "include/gpu/ganesh/SkSurfaceGanesh.h"
25 #include "include/gpu/ganesh/gl/GrGLBackendSurface.h"
26 #include "include/gpu/ganesh/vk/GrVkBackendSemaphore.h"
27 #include "include/gpu/ganesh/vk/GrVkBackendSurface.h"
28 #include "include/gpu/ganesh/vk/GrVkDirectContext.h"
29 #include "include/gpu/vk/VulkanBackendContext.h"
30 #include "include/gpu/vk/VulkanExtensions.h"
31 #include "include/gpu/vk/VulkanMemoryAllocator.h"
32 #include "include/gpu/vk/VulkanMutableTextureState.h"
33 #include "src/base/SkAutoMalloc.h"
34 #include "src/gpu/ganesh/GrDirectContextPriv.h"
35 #include "src/gpu/ganesh/GrGpu.h"
36 #include "src/gpu/ganesh/GrProxyProvider.h"
37 #include "src/gpu/ganesh/SkGr.h"
38 #include "src/gpu/ganesh/gl/GrGLDefines.h"
39 #include "src/gpu/ganesh/gl/GrGLUtil.h"
40 #include "tests/Test.h"
41 #include "tools/gpu/GrContextFactory.h"
42 #include "tools/gpu/vk/VkTestUtils.h"
43
44 #include <android/hardware_buffer.h>
45 #include <cinttypes>
46
47 #include <EGL/egl.h>
48 #include <EGL/eglext.h>
49 #include <GLES/gl.h>
50 #include <GLES/glext.h>
51
// Pixel dimensions of every AHardwareBuffer, SkImage, and SkSurface in these tests.
static const int DEV_W = 16, DEV_H = 16;
53
// Abstract interface implemented once for GL (EGL) and once for Vulkan. Each
// backend imports an AHardwareBuffer for reading or writing and provides the
// semaphore plumbing used to synchronize work between two contexts.
class BaseTestHelper {
public:
    virtual ~BaseTestHelper() {}

    // Sets up the backend context. Returns false if a required API, extension,
    // or proc address is unavailable (the test is then skipped).
    virtual bool init(skiatest::Reporter* reporter) = 0;

    // Frees every resource this helper created.
    virtual void cleanup() = 0;
    // This is used to release a surface back to the external queue in vulkan
    virtual void releaseSurfaceToExternal(SkSurface*) = 0;
    // Destroys the backend image/texture created by an import call.
    virtual void releaseImage() = 0;

    // Wraps the AHardwareBuffer in a backend texture and returns it as an SkImage.
    virtual sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                       AHardwareBuffer* buffer) = 0;
    // Wraps the AHardwareBuffer in a backend texture and returns it as a
    // renderable SkSurface.
    virtual sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                          AHardwareBuffer* buffer) = 0;

    // Blocks the CPU until all previously submitted GPU work has finished.
    virtual void doClientSync() = 0;
    // Flushes the surface's work and signals a semaphore/fence; its exported fd
    // is stored in fFdHandle (see getFdHandle()).
    virtual bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) = 0;
    // Imports the fd exported by the other helper and queues a GPU-side wait on
    // it before further work on the surface's context.
    virtual bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                          sk_sp<SkSurface>) = 0;

    // Makes this helper's context current (no-op for Vulkan).
    virtual void makeCurrent() = 0;

    virtual GrDirectContext* directContext() = 0;

    // fd of the most recently exported semaphore/fence.
    int getFdHandle() { return fFdHandle; }

protected:
    BaseTestHelper() {}

    int fFdHandle = 0;
};
86
87 #ifdef SK_GL
// GL-backed helper: imports the AHardwareBuffer through an EGLImage bound to a
// GL texture; cross-context synchronization uses EGL_ANDROID_native_fence_sync.
class EGLTestHelper : public BaseTestHelper {
public:
    EGLTestHelper(const GrContextOptions& options) : fFactory(options) {}

    ~EGLTestHelper() override {}

    void releaseImage() override {
        this->makeCurrent();
        if (!fGLCtx) {
            return;
        }
        // Destroy the EGLImage and the GL texture it was attached to.
        if (EGL_NO_IMAGE_KHR != fImage) {
            fGLCtx->destroyEGLImage(fImage);
            fImage = EGL_NO_IMAGE_KHR;
        }
        if (fTexID) {
            GR_GL_CALL(fGLCtx->gl(), DeleteTextures(1, &fTexID));
            fTexID = 0;
        }
    }

    // GL needs no explicit queue-family release; nothing to do.
    void releaseSurfaceToExternal(SkSurface*) override {}

    void cleanup() override {
        this->releaseImage();
    }

    bool init(skiatest::Reporter* reporter) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;
    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    void doClientSync() override;
    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    void makeCurrent() override { fGLCtx->makeCurrent(); }

    GrDirectContext* directContext() override { return fDirectContext; }

private:
    // Shared path for read and write imports: wraps |buffer| in an EGLImage
    // (fImage) and attaches it to a freshly created GL texture (fTexID).
    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer);

    // Function-pointer types for the EGL/GLES extension entry points we need;
    // all are resolved in init() via eglGetProcAddress.
    typedef EGLClientBuffer (*EGLGetNativeClientBufferANDROIDProc)(const struct AHardwareBuffer*);
    typedef EGLImageKHR (*EGLCreateImageKHRProc)(EGLDisplay, EGLContext, EGLenum, EGLClientBuffer,
                                                 const EGLint*);
    typedef void (*EGLImageTargetTexture2DOESProc)(EGLenum, void*);
    EGLGetNativeClientBufferANDROIDProc fEGLGetNativeClientBufferANDROID;
    EGLCreateImageKHRProc fEGLCreateImageKHR;
    EGLImageTargetTexture2DOESProc fEGLImageTargetTexture2DOES;

    PFNEGLCREATESYNCKHRPROC fEGLCreateSyncKHR;
    PFNEGLWAITSYNCKHRPROC fEGLWaitSyncKHR;
    PFNEGLGETSYNCATTRIBKHRPROC fEGLGetSyncAttribKHR;
    PFNEGLDUPNATIVEFENCEFDANDROIDPROC fEGLDupNativeFenceFDANDROID;
    PFNEGLDESTROYSYNCKHRPROC fEGLDestroySyncKHR;

    EGLImageKHR fImage = EGL_NO_IMAGE_KHR;
    GrGLuint fTexID = 0;

    sk_gpu_test::GrContextFactory fFactory;
    sk_gpu_test::ContextInfo fGLESContextInfo;

    sk_gpu_test::GLTestContext* fGLCtx = nullptr;
    GrDirectContext* fDirectContext = nullptr;
};
157
init(skiatest::Reporter * reporter)158 bool EGLTestHelper::init(skiatest::Reporter* reporter) {
159 fGLESContextInfo = fFactory.getContextInfo(skgpu::ContextType::kGLES);
160 fDirectContext = fGLESContextInfo.directContext();
161 fGLCtx = fGLESContextInfo.glContext();
162 if (!fDirectContext || !fGLCtx) {
163 return false;
164 }
165
166 if (kGLES_GrGLStandard != fGLCtx->gl()->fStandard) {
167 return false;
168 }
169
170 // Confirm we have egl and the needed extensions
171 if (!fGLCtx->gl()->hasExtension("EGL_KHR_image") ||
172 !fGLCtx->gl()->hasExtension("EGL_ANDROID_get_native_client_buffer") ||
173 !fGLCtx->gl()->hasExtension("GL_OES_EGL_image_external") ||
174 !fGLCtx->gl()->hasExtension("GL_OES_EGL_image") ||
175 !fGLCtx->gl()->hasExtension("EGL_KHR_fence_sync") ||
176 !fGLCtx->gl()->hasExtension("EGL_ANDROID_native_fence_sync")) {
177 return false;
178 }
179
180 fEGLGetNativeClientBufferANDROID =
181 (EGLGetNativeClientBufferANDROIDProc) eglGetProcAddress("eglGetNativeClientBufferANDROID");
182 if (!fEGLGetNativeClientBufferANDROID) {
183 ERRORF(reporter, "Failed to get the eglGetNativeClientBufferAndroid proc");
184 return false;
185 }
186
187 fEGLCreateImageKHR = (EGLCreateImageKHRProc) eglGetProcAddress("eglCreateImageKHR");
188 if (!fEGLCreateImageKHR) {
189 ERRORF(reporter, "Failed to get the proc eglCreateImageKHR");
190 return false;
191 }
192
193 fEGLImageTargetTexture2DOES =
194 (EGLImageTargetTexture2DOESProc) eglGetProcAddress("glEGLImageTargetTexture2DOES");
195 if (!fEGLImageTargetTexture2DOES) {
196 ERRORF(reporter, "Failed to get the proc EGLImageTargetTexture2DOES");
197 return false;
198 }
199
200 fEGLCreateSyncKHR = (PFNEGLCREATESYNCKHRPROC) eglGetProcAddress("eglCreateSyncKHR");
201 if (!fEGLCreateSyncKHR) {
202 ERRORF(reporter, "Failed to get the proc eglCreateSyncKHR");
203 return false;
204
205 }
206 fEGLWaitSyncKHR = (PFNEGLWAITSYNCKHRPROC) eglGetProcAddress("eglWaitSyncKHR");
207 if (!fEGLWaitSyncKHR) {
208 ERRORF(reporter, "Failed to get the proc eglWaitSyncKHR");
209 return false;
210
211 }
212 fEGLGetSyncAttribKHR = (PFNEGLGETSYNCATTRIBKHRPROC) eglGetProcAddress("eglGetSyncAttribKHR");
213 if (!fEGLGetSyncAttribKHR) {
214 ERRORF(reporter, "Failed to get the proc eglGetSyncAttribKHR");
215 return false;
216
217 }
218 fEGLDupNativeFenceFDANDROID =
219 (PFNEGLDUPNATIVEFENCEFDANDROIDPROC) eglGetProcAddress("eglDupNativeFenceFDANDROID");
220 if (!fEGLDupNativeFenceFDANDROID) {
221 ERRORF(reporter, "Failed to get the proc eglDupNativeFenceFDANDROID");
222 return false;
223
224 }
225 fEGLDestroySyncKHR = (PFNEGLDESTROYSYNCKHRPROC) eglGetProcAddress("eglDestroySyncKHR");
226 if (!fEGLDestroySyncKHR) {
227 ERRORF(reporter, "Failed to get the proc eglDestroySyncKHR");
228 return false;
229
230 }
231
232 return true;
233 }
234
importHardwareBuffer(skiatest::Reporter * reporter,AHardwareBuffer * buffer)235 bool EGLTestHelper::importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer) {
236 while (fGLCtx->gl()->fFunctions.fGetError() != GR_GL_NO_ERROR) {}
237
238 EGLClientBuffer eglClientBuffer = fEGLGetNativeClientBufferANDROID(buffer);
239 EGLint eglAttribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
240 EGL_NONE };
241 EGLDisplay eglDisplay = eglGetCurrentDisplay();
242 fImage = fEGLCreateImageKHR(eglDisplay, EGL_NO_CONTEXT,
243 EGL_NATIVE_BUFFER_ANDROID,
244 eglClientBuffer, eglAttribs);
245 if (EGL_NO_IMAGE_KHR == fImage) {
246 SkDebugf("Could not create EGL image, err = (%#x)\n", (int) eglGetError() );
247 return false;
248 }
249
250 GR_GL_CALL(fGLCtx->gl(), GenTextures(1, &fTexID));
251 if (!fTexID) {
252 ERRORF(reporter, "Failed to create GL Texture");
253 return false;
254 }
255 GR_GL_CALL_NOERRCHECK(fGLCtx->gl(), BindTexture(GR_GL_TEXTURE_2D, fTexID));
256 if (fGLCtx->gl()->fFunctions.fGetError() != GR_GL_NO_ERROR) {
257 ERRORF(reporter, "Failed to bind GL Texture");
258 return false;
259 }
260
261 fEGLImageTargetTexture2DOES(GL_TEXTURE_2D, fImage);
262 if (GrGLenum error = fGLCtx->gl()->fFunctions.fGetError(); error != GR_GL_NO_ERROR) {
263 ERRORF(reporter, "EGLImageTargetTexture2DOES failed (%#x)", (int) error);
264 return false;
265 }
266
267 fDirectContext->resetContext(kTextureBinding_GrGLBackendState);
268 return true;
269 }
270
importHardwareBufferForRead(skiatest::Reporter * reporter,AHardwareBuffer * buffer)271 sk_sp<SkImage> EGLTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
272 AHardwareBuffer* buffer) {
273 if (!this->importHardwareBuffer(reporter, buffer)) {
274 return nullptr;
275 }
276 GrGLTextureInfo textureInfo;
277 textureInfo.fTarget = GR_GL_TEXTURE_2D;
278 textureInfo.fID = fTexID;
279 textureInfo.fFormat = GR_GL_RGBA8;
280
281 auto backendTex = GrBackendTextures::MakeGL(DEV_W, DEV_H, skgpu::Mipmapped::kNo, textureInfo);
282 REPORTER_ASSERT(reporter, backendTex.isValid());
283
284 sk_sp<SkImage> image = SkImages::BorrowTextureFrom(fDirectContext,
285 backendTex,
286 kTopLeft_GrSurfaceOrigin,
287 kRGBA_8888_SkColorType,
288 kPremul_SkAlphaType,
289 nullptr);
290
291 if (!image) {
292 ERRORF(reporter, "Failed to make wrapped GL SkImage");
293 return nullptr;
294 }
295
296 return image;
297 }
298
importHardwareBufferForWrite(skiatest::Reporter * reporter,AHardwareBuffer * buffer)299 sk_sp<SkSurface> EGLTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
300 AHardwareBuffer* buffer) {
301 if (!this->importHardwareBuffer(reporter, buffer)) {
302 return nullptr;
303 }
304 GrGLTextureInfo textureInfo;
305 textureInfo.fTarget = GR_GL_TEXTURE_2D;
306 textureInfo.fID = fTexID;
307 textureInfo.fFormat = GR_GL_RGBA8;
308
309 auto backendTex = GrBackendTextures::MakeGL(DEV_W, DEV_H, skgpu::Mipmapped::kNo, textureInfo);
310 REPORTER_ASSERT(reporter, backendTex.isValid());
311
312 sk_sp<SkSurface> surface = SkSurfaces::WrapBackendTexture(fDirectContext,
313 backendTex,
314 kTopLeft_GrSurfaceOrigin,
315 0,
316 kRGBA_8888_SkColorType,
317 nullptr,
318 nullptr);
319
320 if (!surface) {
321 ERRORF(reporter, "Failed to make wrapped GL SkSurface");
322 return nullptr;
323 }
324
325 return surface;
326 }
327
flushSurfaceAndSignalSemaphore(skiatest::Reporter * reporter,sk_sp<SkSurface> surface)328 bool EGLTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
329 sk_sp<SkSurface> surface) {
330 skgpu::ganesh::FlushAndSubmit(surface);
331
332 EGLDisplay eglDisplay = eglGetCurrentDisplay();
333 EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr);
334 if (EGL_NO_SYNC_KHR == eglsync) {
335 ERRORF(reporter, "Failed to create EGLSync for EGL_SYNC_NATIVE_FENCE_ANDROID\n");
336 return false;
337 }
338
339 GR_GL_CALL(fGLCtx->gl(), Flush());
340 fFdHandle = fEGLDupNativeFenceFDANDROID(eglDisplay, eglsync);
341
342 EGLint result = fEGLDestroySyncKHR(eglDisplay, eglsync);
343 if (EGL_TRUE != result) {
344 ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
345 return false;
346 }
347
348 return true;
349 }
350
importAndWaitOnSemaphore(skiatest::Reporter * reporter,int fdHandle,sk_sp<SkSurface> surface)351 bool EGLTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
352 sk_sp<SkSurface> surface) {
353 EGLDisplay eglDisplay = eglGetCurrentDisplay();
354 EGLint attr[] = {
355 EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fdHandle,
356 EGL_NONE
357 };
358 EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, attr);
359 if (EGL_NO_SYNC_KHR == eglsync) {
360 ERRORF(reporter,
361 "Failed to create EGLSync when importing EGL_SYNC_NATIVE_FENCE_FD_ANDROID\n");
362 return false;
363 }
364 EGLint result = fEGLWaitSyncKHR(eglDisplay, eglsync, 0);
365 if (EGL_TRUE != result) {
366 ERRORF(reporter, "Failed called to eglWaitSyncKHR, error: %d\n", result);
367 // Don't return false yet, try to delete the sync first
368 }
369 result = fEGLDestroySyncKHR(eglDisplay, eglsync);
370 if (EGL_TRUE != result) {
371 ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
372 return false;
373 }
374 return true;
375 }
376
doClientSync()377 void EGLTestHelper::doClientSync() {
378 this->directContext()->flush();
379 this->directContext()->submit(GrSyncCpu::kYes);
380 }
381 #endif // SK_GL
382
// Declares a member holding the named Vulkan entry point, e.g.
// DECLARE_VK_PROC(DestroyImage) expands to: PFN_vkDestroyImage fVkDestroyImage.
#define DECLARE_VK_PROC(name) PFN_vk##name fVk##name

// Resolves an instance-level proc into fVk##name via fBackendContext.fGetProc.
// Usable only where `reporter` and `getProc` are in scope; reports and returns
// false from the enclosing function if the proc cannot be acquired.
#define ACQUIRE_INST_VK_PROC(name) \
    do { \
    fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, fBackendContext.fInstance,\
                                                       VK_NULL_HANDLE)); \
    if (fVk##name == nullptr) { \
        ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name); \
        return false; \
    } \
    } while(false)

// Same as ACQUIRE_INST_VK_PROC, but resolves a device-level proc using fDevice.
#define ACQUIRE_DEVICE_VK_PROC(name) \
    do { \
    fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, VK_NULL_HANDLE, fDevice)); \
    if (fVk##name == nullptr) { \
        ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name); \
        return false; \
    } \
    } while(false)
403
// Vulkan-backed helper: imports the AHardwareBuffer as external memory bound to
// a VkImage; cross-context synchronization uses sync-fd external semaphores
// (VK_KHR_external_semaphore_fd).
class VulkanTestHelper : public BaseTestHelper {
public:
    VulkanTestHelper() {}

    // NOTE(review): the destructor does not invoke cleanup(); presumably the
    // test calls cleanup() explicitly before destruction — confirm at call sites.
    ~VulkanTestHelper() override {}

    void releaseImage() override {
        if (VK_NULL_HANDLE == fDevice) {
            return;
        }
        if (fImage != VK_NULL_HANDLE) {
            fVkDestroyImage(fDevice, fImage, nullptr);
            fImage = VK_NULL_HANDLE;
        }

        if (fMemory != VK_NULL_HANDLE) {
            fVkFreeMemory(fDevice, fMemory, nullptr);
            fMemory = VK_NULL_HANDLE;
        }
    }

    // Transitions the surface's backing image to VK_QUEUE_FAMILY_EXTERNAL so a
    // different context/API may take ownership of it.
    void releaseSurfaceToExternal(SkSurface* surface) override {
        skgpu::MutableTextureState newState = skgpu::MutableTextureStates::MakeVulkan(
                VK_IMAGE_LAYOUT_UNDEFINED, VK_QUEUE_FAMILY_EXTERNAL);
        fDirectContext->flush(surface, {}, &newState);
    }

    // Tears everything down in dependency order: context first, then image and
    // semaphore, then allocator, device, debug callback, and finally instance.
    void cleanup() override {
        fDirectContext.reset();
        this->releaseImage();
        if (fSignalSemaphore != VK_NULL_HANDLE) {
            fVkDestroySemaphore(fDevice, fSignalSemaphore, nullptr);
            fSignalSemaphore = VK_NULL_HANDLE;
        }
        // The allocator must release its memory before the device is destroyed.
        fBackendContext.fMemoryAllocator.reset();
        if (fDevice != VK_NULL_HANDLE) {
            fVkDeviceWaitIdle(fDevice);
            fVkDestroyDevice(fDevice, nullptr);
            fDevice = VK_NULL_HANDLE;
        }
#ifdef SK_ENABLE_VK_LAYERS
        if (fDebugCallback != VK_NULL_HANDLE) {
            fDestroyDebugCallback(fBackendContext.fInstance, fDebugCallback, nullptr);
        }
#endif
        if (fBackendContext.fInstance != VK_NULL_HANDLE) {
            fVkDestroyInstance(fBackendContext.fInstance, nullptr);
            fBackendContext.fInstance = VK_NULL_HANDLE;
        }

        delete fExtensions;

        // Frees the pNext chain hanging off fFeatures before deleting it.
        sk_gpu_test::FreeVulkanFeaturesStructs(fFeatures);
        delete fFeatures;
    }

    bool init(skiatest::Reporter* reporter) override;

    // CPU-blocking sync: submits pending work and waits for it to finish.
    void doClientSync() override {
        if (!fDirectContext) {
            return;
        }

        fDirectContext->submit(GrSyncCpu::kYes);
    }

    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;

    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    // Vulkan has no notion of a "current" context; nothing to do.
    void makeCurrent() override {}

    GrDirectContext* directContext() override { return fDirectContext.get(); }

private:
    // Verifies the device supports optimally-tiled RGBA8 AHardwareBuffer
    // imports at least DEV_W x DEV_H in size.
    bool checkOptimalHardwareBuffer(skiatest::Reporter* reporter);

    // Creates fImage/fMemory from |buffer| and fills |outImageInfo| so the
    // image can be wrapped in a GrBackendTexture.
    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer, bool forWrite,
                              GrVkImageInfo* outImageInfo);

    // Creates an exportable (sync-fd) binary semaphore for flush to signal.
    bool setupSemaphoreForSignaling(skiatest::Reporter* reporter, GrBackendSemaphore*);
    // Exports the signaled semaphore as a sync fd into fFdHandle.
    bool exportSemaphore(skiatest::Reporter* reporter, const GrBackendSemaphore&);

    // Entry points resolved in init() via the ACQUIRE_*_VK_PROC macros.
    DECLARE_VK_PROC(DestroyInstance);
    DECLARE_VK_PROC(DeviceWaitIdle);
    DECLARE_VK_PROC(DestroyDevice);

    DECLARE_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);
    DECLARE_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    DECLARE_VK_PROC(GetPhysicalDeviceMemoryProperties2);

    DECLARE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);

    DECLARE_VK_PROC(CreateImage);
    DECLARE_VK_PROC(GetImageMemoryRequirements2);
    DECLARE_VK_PROC(DestroyImage);

    DECLARE_VK_PROC(AllocateMemory);
    DECLARE_VK_PROC(BindImageMemory2);
    DECLARE_VK_PROC(FreeMemory);

    DECLARE_VK_PROC(CreateSemaphore);
    DECLARE_VK_PROC(GetSemaphoreFdKHR);
    DECLARE_VK_PROC(ImportSemaphoreFdKHR);
    DECLARE_VK_PROC(DestroySemaphore);

    VkImage fImage = VK_NULL_HANDLE;
    VkDeviceMemory fMemory = VK_NULL_HANDLE;

    skgpu::VulkanExtensions* fExtensions = nullptr;
    VkPhysicalDeviceFeatures2* fFeatures = nullptr;
    VkDebugReportCallbackEXT fDebugCallback = VK_NULL_HANDLE;
    PFN_vkDestroyDebugReportCallbackEXT fDestroyDebugCallback = nullptr;

    // We hold on to the semaphore so we can delete once the GPU is done.
    VkSemaphore fSignalSemaphore = VK_NULL_HANDLE;

    VkDevice fDevice = VK_NULL_HANDLE;

    skgpu::VulkanBackendContext fBackendContext;
    sk_sp<GrDirectContext> fDirectContext;
};
532
// Creates the Vulkan instance/device, verifies the required extensions, and
// resolves all entry points. Returns false silently for missing extensions
// (test skipped), with an error for any other failure.
bool VulkanTestHelper::init(skiatest::Reporter* reporter) {
    // Load the Vulkan library and grab vkGetInstanceProcAddr.
    PFN_vkGetInstanceProcAddr instProc;
    if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc)) {
        return false;
    }

    // Heap-allocated so they outlive init(); both are deleted in cleanup().
    fExtensions = new skgpu::VulkanExtensions();
    fFeatures = new VkPhysicalDeviceFeatures2;
    memset(fFeatures, 0, sizeof(VkPhysicalDeviceFeatures2));
    fFeatures->sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    fFeatures->pNext = nullptr;

    fBackendContext.fInstance = VK_NULL_HANDLE;
    fBackendContext.fDevice = VK_NULL_HANDLE;

    if (!sk_gpu_test::CreateVkBackendContext(instProc, &fBackendContext, fExtensions,
                                             fFeatures, &fDebugCallback)) {
        return false;
    }
    fDevice = fBackendContext.fDevice;
    // `getProc` is referenced by the ACQUIRE_*_VK_PROC macros below.
    auto getProc = fBackendContext.fGetProc;

    if (fDebugCallback != VK_NULL_HANDLE) {
        fDestroyDebugCallback = (PFN_vkDestroyDebugReportCallbackEXT) instProc(
                fBackendContext.fInstance, "vkDestroyDebugReportCallbackEXT");
    }

    ACQUIRE_INST_VK_PROC(DestroyInstance);
    ACQUIRE_INST_VK_PROC(DeviceWaitIdle);
    ACQUIRE_INST_VK_PROC(DestroyDevice);

    // Extensions this test depends on; missing ones skip the test silently.
    if (!fExtensions->hasExtension(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
                                   2)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, 1)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1)) {
        // NOTE(review): the bail-out below is deliberately commented out, making
        // this check a no-op. Presumably some devices lack the extension yet
        // still pass — confirm intent before re-enabling.
        // return false;
    }

    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceMemoryProperties2);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);

    ACQUIRE_DEVICE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);

    ACQUIRE_DEVICE_VK_PROC(CreateImage);
    ACQUIRE_DEVICE_VK_PROC(GetImageMemoryRequirements2);
    ACQUIRE_DEVICE_VK_PROC(DestroyImage);

    ACQUIRE_DEVICE_VK_PROC(AllocateMemory);
    ACQUIRE_DEVICE_VK_PROC(BindImageMemory2);
    ACQUIRE_DEVICE_VK_PROC(FreeMemory);

    ACQUIRE_DEVICE_VK_PROC(CreateSemaphore);
    ACQUIRE_DEVICE_VK_PROC(GetSemaphoreFdKHR);
    ACQUIRE_DEVICE_VK_PROC(ImportSemaphoreFdKHR);
    ACQUIRE_DEVICE_VK_PROC(DestroySemaphore);

    fDirectContext = GrDirectContexts::MakeVulkan(fBackendContext);
    REPORTER_ASSERT(reporter, fDirectContext.get());
    if (!fDirectContext) {
        return false;
    }

    return this->checkOptimalHardwareBuffer(reporter);
}
605
checkOptimalHardwareBuffer(skiatest::Reporter * reporter)606 bool VulkanTestHelper::checkOptimalHardwareBuffer(skiatest::Reporter* reporter) {
607 VkResult err;
608
609 VkPhysicalDeviceExternalImageFormatInfo externalImageFormatInfo;
610 externalImageFormatInfo.sType =
611 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
612 externalImageFormatInfo.pNext = nullptr;
613 externalImageFormatInfo.handleType =
614 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
615 //externalImageFormatInfo.handType = 0x80;
616
617 // We will create the hardware buffer with gpu sampled so these usages should all be valid
618 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
619 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
620 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
621 VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
622 imageFormatInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
623 imageFormatInfo.pNext = &externalImageFormatInfo;
624 imageFormatInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
625 imageFormatInfo.type = VK_IMAGE_TYPE_2D;
626 imageFormatInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
627 imageFormatInfo.usage = usageFlags;
628 imageFormatInfo.flags = 0;
629
630 VkAndroidHardwareBufferUsageANDROID hwbUsage;
631 hwbUsage.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
632 hwbUsage.pNext = nullptr;
633
634 VkExternalImageFormatProperties externalImgFormatProps;
635 externalImgFormatProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
636 externalImgFormatProps.pNext = &hwbUsage;
637
638 VkImageFormatProperties2 imgFormProps;
639 imgFormProps.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
640 imgFormProps.pNext = &externalImgFormatProps;
641
642 err = fVkGetPhysicalDeviceImageFormatProperties2(fBackendContext.fPhysicalDevice,
643 &imageFormatInfo, &imgFormProps);
644 if (VK_SUCCESS != err) {
645 ERRORF(reporter, "vkGetPhysicalDeviceImageFormatProperites failed, err: %d", err);
646 return false;
647 }
648
649 const VkImageFormatProperties& imageFormatProperties = imgFormProps.imageFormatProperties;
650 REPORTER_ASSERT(reporter, DEV_W <= imageFormatProperties.maxExtent.width);
651 REPORTER_ASSERT(reporter, DEV_H <= imageFormatProperties.maxExtent.height);
652
653 const VkExternalMemoryProperties& externalImageFormatProps =
654 externalImgFormatProps.externalMemoryProperties;
655 REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT &
656 externalImageFormatProps.externalMemoryFeatures));
657 REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT &
658 externalImageFormatProps.externalMemoryFeatures));
659
660 REPORTER_ASSERT(reporter, SkToBool(AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE &
661 hwbUsage.androidHardwareBufferUsage));
662
663 return true;
664 }
665
importHardwareBuffer(skiatest::Reporter * reporter,AHardwareBuffer * buffer,bool forWrite,GrVkImageInfo * outImageInfo)666 bool VulkanTestHelper::importHardwareBuffer(skiatest::Reporter* reporter,
667 AHardwareBuffer* buffer,
668 bool forWrite,
669 GrVkImageInfo* outImageInfo) {
670 VkResult err;
671
672 VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
673 hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
674 hwbFormatProps.pNext = nullptr;
675
676 VkAndroidHardwareBufferPropertiesANDROID hwbProps;
677 hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
678 hwbProps.pNext = &hwbFormatProps;
679
680 err = fVkGetAndroidHardwareBufferPropertiesANDROID(fDevice, buffer, &hwbProps);
681 if (VK_SUCCESS != err) {
682 ERRORF(reporter, "GetAndroidHardwareBufferPropertiesAndroid failed, err: %d", err);
683 return false;
684 }
685
686 REPORTER_ASSERT(reporter, VK_FORMAT_R8G8B8A8_UNORM == hwbFormatProps.format);
687 REPORTER_ASSERT(reporter,
688 SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
689 SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
690 SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
691 if (forWrite) {
692 REPORTER_ASSERT(reporter,
693 SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT & hwbFormatProps.formatFeatures));
694
695 }
696
697 bool useExternalFormat = VK_FORMAT_UNDEFINED == hwbFormatProps.format;
698 const VkExternalFormatANDROID externalFormatInfo {
699 VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, // sType
700 nullptr, // pNext
701 useExternalFormat ? hwbFormatProps.externalFormat : 0, // externalFormat
702 };
703
704 const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
705 VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, // sType
706 &externalFormatInfo, // pNext
707 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, // handleTypes
708 };
709
710 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
711 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
712 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
713 if (forWrite) {
714 usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
715 }
716
717 const VkImageCreateInfo imageCreateInfo = {
718 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType
719 &externalMemoryImageInfo, // pNext
720 0, // VkImageCreateFlags
721 VK_IMAGE_TYPE_2D, // VkImageType
722 hwbFormatProps.format, // VkFormat
723 { DEV_W, DEV_H, 1 }, // VkExtent3D
724 1, // mipLevels
725 1, // arrayLayers
726 VK_SAMPLE_COUNT_1_BIT, // samples
727 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling
728 usageFlags, // VkImageUsageFlags
729 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode
730 0, // queueFamilyCount
731 0, // pQueueFamilyIndices
732 VK_IMAGE_LAYOUT_UNDEFINED, // initialLayout
733 };
734
735 err = fVkCreateImage(fDevice, &imageCreateInfo, nullptr, &fImage);
736 if (VK_SUCCESS != err) {
737 ERRORF(reporter, "Create Image failed, err: %d", err);
738 return false;
739 }
740
741 VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
742 phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
743 phyDevMemProps.pNext = nullptr;
744
745 uint32_t typeIndex = 0;
746 uint32_t heapIndex = 0;
747 bool foundHeap = false;
748 fVkGetPhysicalDeviceMemoryProperties2(fBackendContext.fPhysicalDevice, &phyDevMemProps);
749 uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
750 for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
751 if (hwbProps.memoryTypeBits & (1 << i)) {
752 const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
753 uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
754 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
755 if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
756 typeIndex = i;
757 heapIndex = pdmp.memoryTypes[i].heapIndex;
758 REPORTER_ASSERT(reporter, heapIndex < pdmp.memoryHeapCount);
759 foundHeap = true;
760 }
761 }
762 }
763
764 // Fallback to align with GrAHardwareBufferUtils
765 if (!foundHeap && hwbProps.memoryTypeBits) {
766 typeIndex = ffs(hwbProps.memoryTypeBits) - 1;
767 foundHeap = true;
768 }
769
770 if (!foundHeap) {
771 ERRORF(reporter, "Failed to find valid heap for imported memory");
772 return false;
773 }
774
775 VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
776 hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
777 hwbImportInfo.pNext = nullptr;
778 hwbImportInfo.buffer = buffer;
779
780 VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
781 dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
782 dedicatedAllocInfo.pNext = &hwbImportInfo;
783 dedicatedAllocInfo.image = fImage;
784 dedicatedAllocInfo.buffer = VK_NULL_HANDLE;
785
786 VkMemoryAllocateInfo allocInfo = {
787 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // sType
788 &dedicatedAllocInfo, // pNext
789 hwbProps.allocationSize, // allocationSize
790 typeIndex, // memoryTypeIndex
791 };
792
793 err = fVkAllocateMemory(fDevice, &allocInfo, nullptr, &fMemory);
794 if (VK_SUCCESS != err) {
795 ERRORF(reporter, "AllocateMemory failed for imported buffer, err: %d", err);
796 return false;
797 }
798
799 VkBindImageMemoryInfo bindImageInfo;
800 bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
801 bindImageInfo.pNext = nullptr;
802 bindImageInfo.image = fImage;
803 bindImageInfo.memory = fMemory;
804 bindImageInfo.memoryOffset = 0;
805
806 err = fVkBindImageMemory2(fDevice, 1, &bindImageInfo);
807 if (VK_SUCCESS != err) {
808 ERRORF(reporter, "BindImageMemory failed for imported buffer, err: %d", err);
809 return false;
810 }
811
812 skgpu::VulkanAlloc alloc;
813 alloc.fMemory = fMemory;
814 alloc.fOffset = 0;
815 alloc.fSize = hwbProps.allocationSize;
816 alloc.fFlags = 0;
817
818 outImageInfo->fImage = fImage;
819 outImageInfo->fAlloc = alloc;
820 outImageInfo->fImageTiling = VK_IMAGE_TILING_OPTIMAL;
821 outImageInfo->fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
822 outImageInfo->fFormat = VK_FORMAT_R8G8B8A8_UNORM;
823 outImageInfo->fImageUsageFlags = usageFlags;
824 outImageInfo->fLevelCount = 1;
825 outImageInfo->fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
826 return true;
827 }
828
importHardwareBufferForRead(skiatest::Reporter * reporter,AHardwareBuffer * buffer)829 sk_sp<SkImage> VulkanTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
830 AHardwareBuffer* buffer) {
831 GrVkImageInfo imageInfo;
832 if (!this->importHardwareBuffer(reporter, buffer, false, &imageInfo)) {
833 return nullptr;
834 }
835
836 auto backendTex = GrBackendTextures::MakeVk(DEV_W, DEV_H, imageInfo);
837
838 sk_sp<SkImage> wrappedImage = SkImages::BorrowTextureFrom(fDirectContext.get(),
839 backendTex,
840 kTopLeft_GrSurfaceOrigin,
841 kRGBA_8888_SkColorType,
842 kPremul_SkAlphaType,
843 nullptr);
844
845 if (!wrappedImage.get()) {
846 ERRORF(reporter, "Failed to create wrapped Vulkan SkImage");
847 return nullptr;
848 }
849
850 return wrappedImage;
851 }
852
flushSurfaceAndSignalSemaphore(skiatest::Reporter * reporter,sk_sp<SkSurface> surface)853 bool VulkanTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
854 sk_sp<SkSurface> surface) {
855 this->releaseSurfaceToExternal(surface.get());
856 surface.reset();
857 GrBackendSemaphore semaphore;
858 if (!this->setupSemaphoreForSignaling(reporter, &semaphore)) {
859 return false;
860 }
861 GrFlushInfo info;
862 info.fNumSemaphores = 1;
863 info.fSignalSemaphores = &semaphore;
864 GrSemaphoresSubmitted submitted = fDirectContext->flush(info);
865 fDirectContext->submit();
866 if (GrSemaphoresSubmitted::kNo == submitted) {
867 ERRORF(reporter, "Failing call to flush on GrDirectContext");
868 return false;
869 }
870 SkASSERT(semaphore.isInitialized());
871 if (!this->exportSemaphore(reporter, semaphore)) {
872 return false;
873 }
874 return true;
875 }
876
setupSemaphoreForSignaling(skiatest::Reporter * reporter,GrBackendSemaphore * beSemaphore)877 bool VulkanTestHelper::setupSemaphoreForSignaling(skiatest::Reporter* reporter,
878 GrBackendSemaphore* beSemaphore) {
879 // Query supported info
880 VkPhysicalDeviceExternalSemaphoreInfo exSemInfo;
881 exSemInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
882 exSemInfo.pNext = nullptr;
883 exSemInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
884
885 VkExternalSemaphoreProperties exSemProps;
886 exSemProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
887 exSemProps.pNext = nullptr;
888
889 fVkGetPhysicalDeviceExternalSemaphoreProperties(fBackendContext.fPhysicalDevice, &exSemInfo,
890 &exSemProps);
891
892 if (!SkToBool(exSemProps.exportFromImportedHandleTypes &
893 VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
894 ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as exportFromImportedHandleTypes");
895 return false;
896 }
897 if (!SkToBool(exSemProps.compatibleHandleTypes &
898 VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
899 ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as compatibleHandleTypes");
900 return false;
901 }
902 if (!SkToBool(exSemProps.externalSemaphoreFeatures &
903 VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT) ||
904 !SkToBool(exSemProps.externalSemaphoreFeatures &
905 VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT)) {
906 ERRORF(reporter, "HANDLE_TYPE_SYNC_FD doesn't support export and import feature");
907 return false;
908 }
909
910 VkExportSemaphoreCreateInfo exportInfo;
911 exportInfo.sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
912 exportInfo.pNext = nullptr;
913 exportInfo.handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
914
915 VkSemaphoreCreateInfo semaphoreInfo;
916 semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
917 semaphoreInfo.pNext = &exportInfo;
918 semaphoreInfo.flags = 0;
919
920 VkSemaphore semaphore;
921 VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
922 if (VK_SUCCESS != err) {
923 ERRORF(reporter, "Failed to create signal semaphore, err: %d", err);
924 return false;
925 }
926 *beSemaphore = GrBackendSemaphores::MakeVk(semaphore);
927 return true;
928 }
929
exportSemaphore(skiatest::Reporter * reporter,const GrBackendSemaphore & beSemaphore)930 bool VulkanTestHelper::exportSemaphore(skiatest::Reporter* reporter,
931 const GrBackendSemaphore& beSemaphore) {
932 VkSemaphore semaphore = GrBackendSemaphores::GetVkSemaphore(beSemaphore);
933 if (VK_NULL_HANDLE == semaphore) {
934 ERRORF(reporter, "Invalid vulkan handle in export call");
935 return false;
936 }
937
938 VkSemaphoreGetFdInfoKHR getFdInfo;
939 getFdInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
940 getFdInfo.pNext = nullptr;
941 getFdInfo.semaphore = semaphore;
942 getFdInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
943
944 VkResult err = fVkGetSemaphoreFdKHR(fDevice, &getFdInfo, &fFdHandle);
945 if (VK_SUCCESS != err) {
946 ERRORF(reporter, "Failed to export signal semaphore, err: %d", err);
947 return false;
948 }
949 fSignalSemaphore = semaphore;
950 return true;
951 }
952
importAndWaitOnSemaphore(skiatest::Reporter * reporter,int fdHandle,sk_sp<SkSurface> surface)953 bool VulkanTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
954 sk_sp<SkSurface> surface) {
955 VkSemaphoreCreateInfo semaphoreInfo;
956 semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
957 semaphoreInfo.pNext = nullptr;
958 semaphoreInfo.flags = 0;
959
960 VkSemaphore semaphore;
961 VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
962 if (VK_SUCCESS != err) {
963 ERRORF(reporter, "Failed to create import semaphore, err: %d", err);
964 return false;
965 }
966
967 VkImportSemaphoreFdInfoKHR importInfo;
968 importInfo.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
969 importInfo.pNext = nullptr;
970 importInfo.semaphore = semaphore;
971 importInfo.flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT;
972 importInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
973 importInfo.fd = fdHandle;
974
975 err = fVkImportSemaphoreFdKHR(fDevice, &importInfo);
976 if (VK_SUCCESS != err) {
977 ERRORF(reporter, "Failed to import semaphore, err: %d", err);
978 return false;
979 }
980
981 GrBackendSemaphore beSemaphore = GrBackendSemaphores::MakeVk(semaphore);
982 if (!surface->wait(1, &beSemaphore)) {
983 ERRORF(reporter, "Failed to add wait semaphore to surface");
984 fVkDestroySemaphore(fDevice, semaphore, nullptr);
985 return false;
986 }
987 return true;
988 }
989
importHardwareBufferForWrite(skiatest::Reporter * reporter,AHardwareBuffer * buffer)990 sk_sp<SkSurface> VulkanTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
991 AHardwareBuffer* buffer) {
992 GrVkImageInfo imageInfo;
993 if (!this->importHardwareBuffer(reporter, buffer, true, &imageInfo)) {
994 return nullptr;
995 }
996
997 auto backendTex = GrBackendTextures::MakeVk(DEV_W, DEV_H, imageInfo);
998
999 sk_sp<SkSurface> surface = SkSurfaces::WrapBackendTexture(fDirectContext.get(),
1000 backendTex,
1001 kTopLeft_GrSurfaceOrigin,
1002 0,
1003 kRGBA_8888_SkColorType,
1004 nullptr,
1005 nullptr);
1006
1007 if (!surface.get()) {
1008 ERRORF(reporter, "Failed to create wrapped Vulkan SkSurface");
1009 return nullptr;
1010 }
1011
1012 return surface;
1013 }
1014
get_src_color(int x,int y)1015 static SkPMColor get_src_color(int x, int y) {
1016 SkASSERT(x >= 0 && x < DEV_W);
1017 SkASSERT(y >= 0 && y < DEV_H);
1018
1019 U8CPU r = x;
1020 U8CPU g = y;
1021 U8CPU b = 0xc;
1022
1023 U8CPU a = 0xff;
1024 switch ((x+y) % 5) {
1025 case 0:
1026 a = 0xff;
1027 break;
1028 case 1:
1029 a = 0x80;
1030 break;
1031 case 2:
1032 a = 0xCC;
1033 break;
1034 case 4:
1035 a = 0x01;
1036 break;
1037 case 3:
1038 a = 0x00;
1039 break;
1040 }
1041 a = 0xff;
1042 return SkPremultiplyARGBInline(a, r, g, b);
1043 }
1044
make_src_bitmap()1045 static SkBitmap make_src_bitmap() {
1046 static SkBitmap bmp;
1047 if (bmp.isNull()) {
1048 bmp.allocN32Pixels(DEV_W, DEV_H);
1049 intptr_t pixels = reinterpret_cast<intptr_t>(bmp.getPixels());
1050 for (int y = 0; y < DEV_H; ++y) {
1051 for (int x = 0; x < DEV_W; ++x) {
1052 SkPMColor* pixel = reinterpret_cast<SkPMColor*>(
1053 pixels + y * bmp.rowBytes() + x * bmp.bytesPerPixel());
1054 *pixel = get_src_color(x, y);
1055 }
1056 }
1057 }
1058 return bmp;
1059 }
1060
check_read(skiatest::Reporter * reporter,const SkBitmap & srcBitmap,const SkBitmap & dstBitmap)1061 static bool check_read(skiatest::Reporter* reporter, const SkBitmap& srcBitmap,
1062 const SkBitmap& dstBitmap) {
1063 bool result = true;
1064 for (int y = 0; y < DEV_H && result; ++y) {
1065 for (int x = 0; x < DEV_W && result; ++x) {
1066 const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
1067 const uint32_t dstPixel = *dstBitmap.getAddr32(x, y);
1068 if (srcPixel != dstPixel) {
1069 ERRORF(reporter, "Expected readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
1070 x, y, srcPixel, dstPixel);
1071 result = false;
1072 } /*else {
1073 ERRORF(reporter, "Got good readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
1074 x, y, srcPixel, dstPixel);
1075
1076 }*/
1077 }
1078 }
1079 return result;
1080 }
1081
cleanup_resources(BaseTestHelper * srcHelper,BaseTestHelper * dstHelper,AHardwareBuffer * buffer)1082 static void cleanup_resources(BaseTestHelper* srcHelper, BaseTestHelper* dstHelper,
1083 AHardwareBuffer* buffer) {
1084 if (srcHelper) {
1085 srcHelper->cleanup();
1086 }
1087 if (dstHelper) {
1088 dstHelper->cleanup();
1089 }
1090 if (buffer) {
1091 AHardwareBuffer_release(buffer);
1092 }
1093 }
1094
// Which API produces (writes) the AHardwareBuffer contents in run_test().
enum class SrcType {
    kCPU,     // write pixels directly via AHardwareBuffer_lock
    kEGL,     // render through an EGL/GLES context
    kVulkan,  // render through a Vulkan context
};

// Which API consumes (reads) the AHardwareBuffer contents in run_test().
enum class DstType {
    kEGL,
    kVulkan,
};
1105
// End-to-end AHardwareBuffer sharing test.
//
// Fills an AHardwareBuffer with known pixels via |srcType| (direct CPU
// upload, EGL rendering, or Vulkan rendering), imports the same buffer into a
// |dstType| backend (EGL or Vulkan), draws it into a fresh surface, and reads
// back to verify the pixels survived the round trip. When |shareSyncs| is
// true the src side signals an exported sync-fd semaphore that the dst side
// imports and waits on, instead of doing a CPU-side client sync.
void run_test(skiatest::Reporter* reporter, const GrContextOptions& options,
              SrcType srcType, DstType dstType, bool shareSyncs) {
    if (SrcType::kCPU == srcType && shareSyncs) {
        // We don't currently test this since we don't do any syncs in this case.
        return;
    }
    std::unique_ptr<BaseTestHelper> srcHelper;
    std::unique_ptr<BaseTestHelper> dstHelper;
    AHardwareBuffer* buffer = nullptr;
    // srcHelper stays null for SrcType::kCPU; that path writes the buffer
    // directly below instead of going through a GPU helper.
    if (SrcType::kVulkan == srcType) {
        srcHelper.reset(new VulkanTestHelper());
    } else if (SrcType::kEGL == srcType) {
#ifdef SK_GL
        srcHelper.reset(new EGLTestHelper(options));
#else
        SkASSERTF(false, "SrcType::kEGL used without OpenGL support.");
#endif
    }
    if (srcHelper) {
        if (!srcHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    if (DstType::kVulkan == dstType) {
        dstHelper.reset(new VulkanTestHelper());
    } else {
#ifdef SK_GL
        SkASSERT(DstType::kEGL == dstType);
        dstHelper.reset(new EGLTestHelper(options));
#else
        SkASSERTF(false, "DstType::kEGL used without OpenGL support.");
#endif
    }
    if (dstHelper) {
        if (!dstHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Setup SkBitmaps
    ///////////////////////////////////////////////////////////////////////////

    SkBitmap srcBitmap = make_src_bitmap();
    SkBitmap dstBitmapSurface;
    dstBitmapSurface.allocN32Pixels(DEV_W, DEV_H);
    SkBitmap dstBitmapFinal;
    dstBitmapFinal.allocN32Pixels(DEV_W, DEV_H);

    ///////////////////////////////////////////////////////////////////////////
    // Setup AHardwareBuffer
    ///////////////////////////////////////////////////////////////////////////

    AHardwareBuffer_Desc hwbDesc;
    hwbDesc.width = DEV_W;
    hwbDesc.height = DEV_H;
    hwbDesc.layers = 1;
    // CPU source needs CPU write access; GPU sources render into the buffer,
    // so they need color-output usage instead.
    if (SrcType::kCPU == srcType) {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
    } else {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_NEVER |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
                        AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
    }
    hwbDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
    // The following three are not used in the allocate
    hwbDesc.stride = 0;
    hwbDesc.rfu0= 0;
    hwbDesc.rfu1= 0;

    if (int error = AHardwareBuffer_allocate(&hwbDesc, &buffer)) {
        ERRORF(reporter, "Failed to allocated hardware buffer, error: %d", error);
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (SrcType::kCPU == srcType) {
        // Get actual desc for allocated buffer so we know the stride for uploading cpu data.
        AHardwareBuffer_describe(buffer, &hwbDesc);

        uint32_t* bufferAddr;
        if (AHardwareBuffer_lock(buffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, nullptr,
                                 reinterpret_cast<void**>(&bufferAddr))) {
            ERRORF(reporter, "Failed to lock hardware buffer");
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        // Copy row by row: hwbDesc.stride is in pixels (per the NDK
        // AHardwareBuffer docs), hence the uint32_t* arithmetic.
        int bbp = srcBitmap.bytesPerPixel();
        uint32_t* src = (uint32_t*)srcBitmap.getPixels();
        uint32_t* dst = bufferAddr;
        for (int y = 0; y < DEV_H; ++y) {
            memcpy(dst, src, DEV_W * bbp);
            src += DEV_W;
            dst += hwbDesc.stride;
        }

        // Sanity-check the upload by reading the mapped buffer back.
        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
                uint32_t dstPixel = bufferAddr[y * hwbDesc.stride + x];
                if (srcPixel != dstPixel) {
                    ERRORF(reporter, "CPU HWB Expected readpix (%d, %d) value 0x%08x, got 0x%08x.",
                           x, y, srcPixel, dstPixel);
                }
            }
        }

        AHardwareBuffer_unlock(buffer, nullptr);

    } else {
        // GPU source: wrap the HWB as a surface and render the bitmap into it.
        srcHelper->makeCurrent();
        sk_sp<SkSurface> surface = srcHelper->importHardwareBufferForWrite(reporter, buffer);

        if (!surface) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        sk_sp<SkImage> srcBmpImage = SkImages::RasterFromBitmap(srcBitmap);
        surface->getCanvas()->drawImage(srcBmpImage, 0, 0);

        // If we are testing sharing of syncs, don't do a read here since it forces sychronization
        // to occur.
        if (!shareSyncs) {
            bool readResult = surface->readPixels(dstBitmapSurface, 0, 0);
            if (!readResult) {
                ERRORF(reporter, "Read Pixels on surface failed");
                surface.reset();
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
            REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapSurface));
        }

        ///////////////////////////////////////////////////////////////////////////
        // Cleanup GL/EGL and add syncs
        ///////////////////////////////////////////////////////////////////////////

        if (shareSyncs) {
            // Export a sync-fd semaphore for the dst side to wait on.
            if (!srcHelper->flushSurfaceAndSignalSemaphore(reporter, std::move(surface))) {
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
        } else {
            // No shared sync: release to external and do a CPU client sync
            // so the dst side sees completed writes.
            srcHelper->releaseSurfaceToExternal(surface.get());
            srcHelper->doClientSync();
            surface.reset();
            srcHelper->releaseImage();
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Import the HWB into backend and draw it to a surface
    ///////////////////////////////////////////////////////////////////////////

    dstHelper->makeCurrent();
    sk_sp<SkImage> wrappedImage = dstHelper->importHardwareBufferForRead(reporter, buffer);

    if (!wrappedImage) {
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    auto direct = dstHelper->directContext();

    // Make SkSurface to render wrapped HWB into.
    SkImageInfo imageInfo = SkImageInfo::Make(DEV_W, DEV_H, kRGBA_8888_SkColorType,
                                              kPremul_SkAlphaType, nullptr);

    sk_sp<SkSurface> dstSurf = SkSurfaces::RenderTarget(
            direct, skgpu::Budgeted::kNo, imageInfo, 0, kTopLeft_GrSurfaceOrigin, nullptr, false);
    if (!dstSurf.get()) {
        ERRORF(reporter, "Failed to create destination SkSurface");
        wrappedImage.reset();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (shareSyncs) {
        // Make the destination surface's GPU work wait on the src's signal.
        if (!dstHelper->importAndWaitOnSemaphore(reporter, srcHelper->getFdHandle(), dstSurf)) {
            wrappedImage.reset();
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }
    dstSurf->getCanvas()->drawImage(wrappedImage, 0, 0);

    bool readResult = dstSurf->readPixels(dstBitmapFinal, 0, 0);
    if (!readResult) {
        ERRORF(reporter, "Read Pixels failed");
        wrappedImage.reset();
        dstSurf.reset();
        dstHelper->doClientSync();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    // Final verification: what came out of the dst backend must match the
    // original source bitmap.
    REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapFinal));

    // Drop GPU objects before tearing the helpers down; doClientSync ensures
    // outstanding GPU work finishes first.
    dstSurf.reset();
    wrappedImage.reset();
    dstHelper->doClientSync();
    cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
}
1317
// CPU-written buffer consumed by Vulkan.
DEF_GANESH_TEST(VulkanHardwareBuffer_CPU_Vulkan, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kCPU, DstType::kVulkan, false);
}

// Vulkan-written buffer consumed by Vulkan, using CPU client sync.
DEF_GANESH_TEST(VulkanHardwareBuffer_Vulkan_Vulkan,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, false);
}

// Vulkan -> Vulkan using exported/imported sync-fd semaphores.
DEF_GANESH_TEST(VulkanHardwareBuffer_Vulkan_Vulkan_Syncs,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, true);
}
1335
// The remaining combinations involve an EGL/GLES producer or consumer, so
// they are only built when GL support is compiled in.
#if defined(SK_GL)
// EGL-written buffer consumed by Vulkan, CPU client sync.
DEF_GANESH_TEST(VulkanHardwareBuffer_EGL_Vulkan, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, false);
}

// CPU-written buffer consumed by EGL.
DEF_GANESH_TEST(VulkanHardwareBuffer_CPU_EGL, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kCPU, DstType::kEGL, false);
}

// EGL -> EGL, CPU client sync.
DEF_GANESH_TEST(VulkanHardwareBuffer_EGL_EGL, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, false);
}

// Vulkan-written buffer consumed by EGL, CPU client sync.
DEF_GANESH_TEST(VulkanHardwareBuffer_Vulkan_EGL, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, false);
}

// EGL -> EGL using shared sync-fd semaphores.
DEF_GANESH_TEST(VulkanHardwareBuffer_EGL_EGL_Syncs,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, true);
}

// Vulkan -> EGL using shared sync-fd semaphores.
DEF_GANESH_TEST(VulkanHardwareBuffer_Vulkan_EGL_Syncs,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, true);
}

// EGL -> Vulkan using shared sync-fd semaphores.
DEF_GANESH_TEST(VulkanHardwareBuffer_EGL_Vulkan_Syncs,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, true);
}
#endif
1374
1375 #endif // defined(SK_GANESH) && defined(SK_BUILD_FOR_ANDROID) &&
1376 // __ANDROID_API__ >= 26 && defined(SK_VULKAN)
1377
1378