1 /*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8 // This is a GPU-backend specific test. It relies on static initializers to work
9
10 #include "include/core/SkTypes.h"
11
12 #if defined(SK_GANESH) && defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26 && defined(SK_VULKAN)
13
14 #include "include/core/SkBitmap.h"
15 #include "include/core/SkCanvas.h"
16 #include "include/core/SkColorSpace.h"
17 #include "include/core/SkImage.h"
18 #include "include/core/SkSurface.h"
19 #include "include/gpu/GrBackendSemaphore.h"
20 #include "include/gpu/GrDirectContext.h"
21 #include "include/gpu/vk/GrVkBackendContext.h"
22 #include "include/gpu/vk/VulkanExtensions.h"
23 #include "src/base/SkAutoMalloc.h"
24 #include "src/gpu/ganesh/GrDirectContextPriv.h"
25 #include "src/gpu/ganesh/GrGpu.h"
26 #include "src/gpu/ganesh/GrProxyProvider.h"
27 #include "src/gpu/ganesh/SkGr.h"
28 #include "src/gpu/ganesh/gl/GrGLDefines_impl.h"
29 #include "src/gpu/ganesh/gl/GrGLUtil.h"
30 #include "tests/Test.h"
31 #include "tools/gpu/GrContextFactory.h"
32 #include "tools/gpu/vk/VkTestUtils.h"
33
34 #include <android/hardware_buffer.h>
35 #include <cinttypes>
36
37 #include <EGL/egl.h>
38 #include <EGL/eglext.h>
39 #include <GLES/gl.h>
40 #include <GLES/glext.h>
41
// Pixel dimensions used for every AHardwareBuffer, texture, and surface in these tests.
static const int DEV_W = 16, DEV_H = 16;
43
// Abstract interface shared by the GL (EGL) and Vulkan test helpers below.
// Each backend implements AHardwareBuffer import (as SkImage for reads, as
// SkSurface for writes) plus native-fence/semaphore based synchronization, so
// the test bodies themselves can stay backend-agnostic.
class BaseTestHelper {
public:
    virtual ~BaseTestHelper() {}

    // Creates the backend context and verifies required extensions/procs.
    // Returns false when the device cannot run the test (callers skip).
    virtual bool init(skiatest::Reporter* reporter) = 0;

    // Frees all backend resources created by init() and the import calls.
    virtual void cleanup() = 0;
    // This is used to release a surface back to the external queue in vulkan
    virtual void releaseSurfaceToExternal(SkSurface*) = 0;
    // Destroys the currently imported image/texture (and, for Vulkan, its memory).
    virtual void releaseImage() = 0;

    // Wraps |buffer| in a backend texture and returns it as a read-only
    // SkImage; nullptr on failure.
    virtual sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                       AHardwareBuffer* buffer) = 0;
    // Wraps |buffer| in a backend render target and returns it as a writable
    // SkSurface; nullptr on failure.
    virtual sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                          AHardwareBuffer* buffer) = 0;

    // Blocks the CPU until all previously submitted GPU work has completed.
    virtual void doClientSync() = 0;
    // Flushes the surface's work and exports a native fence fd (stored in
    // fFdHandle, see getFdHandle()) that signals when that work is done.
    virtual bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) = 0;
    // Imports |fdHandle| as a fence/semaphore and makes subsequent GPU work on
    // the surface wait on it.
    virtual bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                          sk_sp<SkSurface>) = 0;

    // Makes this helper's API context current (no-op where not applicable).
    virtual void makeCurrent() = 0;

    virtual GrDirectContext* directContext() = 0;

    // Fence fd exported by the most recent flushSurfaceAndSignalSemaphore().
    int getFdHandle() { return fFdHandle; }

protected:
    BaseTestHelper() {}

    int fFdHandle = 0;
};
76
77 #ifdef SK_GL
// GL/EGL implementation of BaseTestHelper. Imports AHardwareBuffers via
// EGLImageKHR + glEGLImageTargetTexture2DOES and synchronizes using
// EGL_ANDROID_native_fence_sync file descriptors.
class EGLTestHelper : public BaseTestHelper {
public:
    EGLTestHelper(const GrContextOptions& options) : fFactory(options) {}

    ~EGLTestHelper() override {}

    void releaseImage() override {
        this->makeCurrent();
        if (!fGLCtx) {
            return;
        }
        // Destroy the EGLImage and the GL texture it was bound to.
        if (EGL_NO_IMAGE_KHR != fImage) {
            fGLCtx->destroyEGLImage(fImage);
            fImage = EGL_NO_IMAGE_KHR;
        }
        if (fTexID) {
            GR_GL_CALL(fGLCtx->gl(), DeleteTextures(1, &fTexID));
            fTexID = 0;
        }
    }

    // GL has no external-queue concept, so there is nothing to release.
    void releaseSurfaceToExternal(SkSurface*) override {}

    void cleanup() override {
        this->releaseImage();
    }

    bool init(skiatest::Reporter* reporter) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;
    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    void doClientSync() override;
    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    void makeCurrent() override { fGLCtx->makeCurrent(); }

    GrDirectContext* directContext() override { return fDirectContext; }

private:
    // Shared import path: wraps |buffer| in an EGLImage (fImage) and binds it
    // to a freshly generated GL texture (fTexID). Used by both import entry points.
    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer);

    // Extension entry points resolved at init() time via eglGetProcAddress.
    typedef EGLClientBuffer (*EGLGetNativeClientBufferANDROIDProc)(const struct AHardwareBuffer*);
    typedef EGLImageKHR (*EGLCreateImageKHRProc)(EGLDisplay, EGLContext, EGLenum, EGLClientBuffer,
                                                 const EGLint*);
    typedef void (*EGLImageTargetTexture2DOESProc)(EGLenum, void*);
    EGLGetNativeClientBufferANDROIDProc fEGLGetNativeClientBufferANDROID;
    EGLCreateImageKHRProc fEGLCreateImageKHR;
    EGLImageTargetTexture2DOESProc fEGLImageTargetTexture2DOES;

    PFNEGLCREATESYNCKHRPROC fEGLCreateSyncKHR;
    PFNEGLWAITSYNCKHRPROC fEGLWaitSyncKHR;
    PFNEGLGETSYNCATTRIBKHRPROC fEGLGetSyncAttribKHR;
    PFNEGLDUPNATIVEFENCEFDANDROIDPROC fEGLDupNativeFenceFDANDROID;
    PFNEGLDESTROYSYNCKHRPROC fEGLDestroySyncKHR;

    // Currently imported EGLImage and the GL texture bound to it (see releaseImage()).
    EGLImageKHR fImage = EGL_NO_IMAGE_KHR;
    GrGLuint fTexID = 0;

    sk_gpu_test::GrContextFactory fFactory;
    sk_gpu_test::ContextInfo fGLESContextInfo;

    sk_gpu_test::GLTestContext* fGLCtx = nullptr;
    GrDirectContext* fDirectContext = nullptr;
};
147
init(skiatest::Reporter * reporter)148 bool EGLTestHelper::init(skiatest::Reporter* reporter) {
149 fGLESContextInfo = fFactory.getContextInfo(sk_gpu_test::GrContextFactory::kGLES_ContextType);
150 fDirectContext = fGLESContextInfo.directContext();
151 fGLCtx = fGLESContextInfo.glContext();
152 if (!fDirectContext || !fGLCtx) {
153 return false;
154 }
155
156 if (kGLES_GrGLStandard != fGLCtx->gl()->fStandard) {
157 return false;
158 }
159
160 // Confirm we have egl and the needed extensions
161 if (!fGLCtx->gl()->hasExtension("EGL_KHR_image") ||
162 !fGLCtx->gl()->hasExtension("EGL_ANDROID_get_native_client_buffer") ||
163 !fGLCtx->gl()->hasExtension("GL_OES_EGL_image_external") ||
164 !fGLCtx->gl()->hasExtension("GL_OES_EGL_image") ||
165 !fGLCtx->gl()->hasExtension("EGL_KHR_fence_sync") ||
166 !fGLCtx->gl()->hasExtension("EGL_ANDROID_native_fence_sync")) {
167 return false;
168 }
169
170 fEGLGetNativeClientBufferANDROID =
171 (EGLGetNativeClientBufferANDROIDProc) eglGetProcAddress("eglGetNativeClientBufferANDROID");
172 if (!fEGLGetNativeClientBufferANDROID) {
173 ERRORF(reporter, "Failed to get the eglGetNativeClientBufferAndroid proc");
174 return false;
175 }
176
177 fEGLCreateImageKHR = (EGLCreateImageKHRProc) eglGetProcAddress("eglCreateImageKHR");
178 if (!fEGLCreateImageKHR) {
179 ERRORF(reporter, "Failed to get the proc eglCreateImageKHR");
180 return false;
181 }
182
183 fEGLImageTargetTexture2DOES =
184 (EGLImageTargetTexture2DOESProc) eglGetProcAddress("glEGLImageTargetTexture2DOES");
185 if (!fEGLImageTargetTexture2DOES) {
186 ERRORF(reporter, "Failed to get the proc EGLImageTargetTexture2DOES");
187 return false;
188 }
189
190 fEGLCreateSyncKHR = (PFNEGLCREATESYNCKHRPROC) eglGetProcAddress("eglCreateSyncKHR");
191 if (!fEGLCreateSyncKHR) {
192 ERRORF(reporter, "Failed to get the proc eglCreateSyncKHR");
193 return false;
194
195 }
196 fEGLWaitSyncKHR = (PFNEGLWAITSYNCKHRPROC) eglGetProcAddress("eglWaitSyncKHR");
197 if (!fEGLWaitSyncKHR) {
198 ERRORF(reporter, "Failed to get the proc eglWaitSyncKHR");
199 return false;
200
201 }
202 fEGLGetSyncAttribKHR = (PFNEGLGETSYNCATTRIBKHRPROC) eglGetProcAddress("eglGetSyncAttribKHR");
203 if (!fEGLGetSyncAttribKHR) {
204 ERRORF(reporter, "Failed to get the proc eglGetSyncAttribKHR");
205 return false;
206
207 }
208 fEGLDupNativeFenceFDANDROID =
209 (PFNEGLDUPNATIVEFENCEFDANDROIDPROC) eglGetProcAddress("eglDupNativeFenceFDANDROID");
210 if (!fEGLDupNativeFenceFDANDROID) {
211 ERRORF(reporter, "Failed to get the proc eglDupNativeFenceFDANDROID");
212 return false;
213
214 }
215 fEGLDestroySyncKHR = (PFNEGLDESTROYSYNCKHRPROC) eglGetProcAddress("eglDestroySyncKHR");
216 if (!fEGLDestroySyncKHR) {
217 ERRORF(reporter, "Failed to get the proc eglDestroySyncKHR");
218 return false;
219
220 }
221
222 return true;
223 }
224
// Wraps |buffer| in an EGLImage and binds it as the backing store of a new GL
// texture (fTexID). Returns false and reports on any EGL/GL failure; the
// partially-created objects are left for releaseImage() to clean up.
bool EGLTestHelper::importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer) {
    // Drain any pre-existing GL errors so the glGetError checks below are meaningful.
    while (fGLCtx->gl()->fFunctions.fGetError() != GR_GL_NO_ERROR) {}

    EGLClientBuffer eglClientBuffer = fEGLGetNativeClientBufferANDROID(buffer);
    EGLint eglAttribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                            EGL_NONE };
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    fImage = fEGLCreateImageKHR(eglDisplay, EGL_NO_CONTEXT,
                                EGL_NATIVE_BUFFER_ANDROID,
                                eglClientBuffer, eglAttribs);
    if (EGL_NO_IMAGE_KHR == fImage) {
        SkDebugf("Could not create EGL image, err = (%#x)\n", (int) eglGetError() );
        return false;
    }

    GR_GL_CALL(fGLCtx->gl(), GenTextures(1, &fTexID));
    if (!fTexID) {
        ERRORF(reporter, "Failed to create GL Texture");
        return false;
    }
    // Bind without the macro's built-in error check so the failure can be
    // reported through the test reporter instead.
    GR_GL_CALL_NOERRCHECK(fGLCtx->gl(), BindTexture(GR_GL_TEXTURE_2D, fTexID));
    if (fGLCtx->gl()->fFunctions.fGetError() != GR_GL_NO_ERROR) {
        ERRORF(reporter, "Failed to bind GL Texture");
        return false;
    }

    // Attach the EGLImage as the bound texture's backing store.
    fEGLImageTargetTexture2DOES(GL_TEXTURE_2D, fImage);
    if (GrGLenum error = fGLCtx->gl()->fFunctions.fGetError(); error != GR_GL_NO_ERROR) {
        ERRORF(reporter, "EGLImageTargetTexture2DOES failed (%#x)", (int) error);
        return false;
    }

    // We changed the texture binding behind Ganesh's back; invalidate its
    // cached GL state so it re-binds as needed.
    fDirectContext->resetContext(kTextureBinding_GrGLBackendState);
    return true;
}
260
importHardwareBufferForRead(skiatest::Reporter * reporter,AHardwareBuffer * buffer)261 sk_sp<SkImage> EGLTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
262 AHardwareBuffer* buffer) {
263 if (!this->importHardwareBuffer(reporter, buffer)) {
264 return nullptr;
265 }
266 GrGLTextureInfo textureInfo;
267 textureInfo.fTarget = GR_GL_TEXTURE_2D;
268 textureInfo.fID = fTexID;
269 textureInfo.fFormat = GR_GL_RGBA8;
270
271 GrBackendTexture backendTex(DEV_W, DEV_H, GrMipmapped::kNo, textureInfo);
272 REPORTER_ASSERT(reporter, backendTex.isValid());
273
274 sk_sp<SkImage> image = SkImage::MakeFromTexture(fDirectContext,
275 backendTex,
276 kTopLeft_GrSurfaceOrigin,
277 kRGBA_8888_SkColorType,
278 kPremul_SkAlphaType,
279 nullptr);
280
281 if (!image) {
282 ERRORF(reporter, "Failed to make wrapped GL SkImage");
283 return nullptr;
284 }
285
286 return image;
287 }
288
importHardwareBufferForWrite(skiatest::Reporter * reporter,AHardwareBuffer * buffer)289 sk_sp<SkSurface> EGLTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
290 AHardwareBuffer* buffer) {
291 if (!this->importHardwareBuffer(reporter, buffer)) {
292 return nullptr;
293 }
294 GrGLTextureInfo textureInfo;
295 textureInfo.fTarget = GR_GL_TEXTURE_2D;
296 textureInfo.fID = fTexID;
297 textureInfo.fFormat = GR_GL_RGBA8;
298
299 GrBackendTexture backendTex(DEV_W, DEV_H, GrMipmapped::kNo, textureInfo);
300 REPORTER_ASSERT(reporter, backendTex.isValid());
301
302 sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fDirectContext,
303 backendTex,
304 kTopLeft_GrSurfaceOrigin,
305 0,
306 kRGBA_8888_SkColorType,
307 nullptr, nullptr);
308
309 if (!surface) {
310 ERRORF(reporter, "Failed to make wrapped GL SkSurface");
311 return nullptr;
312 }
313
314 return surface;
315 }
316
flushSurfaceAndSignalSemaphore(skiatest::Reporter * reporter,sk_sp<SkSurface> surface)317 bool EGLTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
318 sk_sp<SkSurface> surface) {
319 surface->flushAndSubmit();
320
321 EGLDisplay eglDisplay = eglGetCurrentDisplay();
322 EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr);
323 if (EGL_NO_SYNC_KHR == eglsync) {
324 ERRORF(reporter, "Failed to create EGLSync for EGL_SYNC_NATIVE_FENCE_ANDROID\n");
325 return false;
326 }
327
328 GR_GL_CALL(fGLCtx->gl(), Flush());
329 fFdHandle = fEGLDupNativeFenceFDANDROID(eglDisplay, eglsync);
330
331 EGLint result = fEGLDestroySyncKHR(eglDisplay, eglsync);
332 if (EGL_TRUE != result) {
333 ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
334 return false;
335 }
336
337 return true;
338 }
339
importAndWaitOnSemaphore(skiatest::Reporter * reporter,int fdHandle,sk_sp<SkSurface> surface)340 bool EGLTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
341 sk_sp<SkSurface> surface) {
342 EGLDisplay eglDisplay = eglGetCurrentDisplay();
343 EGLint attr[] = {
344 EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fdHandle,
345 EGL_NONE
346 };
347 EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, attr);
348 if (EGL_NO_SYNC_KHR == eglsync) {
349 ERRORF(reporter,
350 "Failed to create EGLSync when importing EGL_SYNC_NATIVE_FENCE_FD_ANDROID\n");
351 return false;
352 }
353 EGLint result = fEGLWaitSyncKHR(eglDisplay, eglsync, 0);
354 if (EGL_TRUE != result) {
355 ERRORF(reporter, "Failed called to eglWaitSyncKHR, error: %d\n", result);
356 // Don't return false yet, try to delete the sync first
357 }
358 result = fEGLDestroySyncKHR(eglDisplay, eglsync);
359 if (EGL_TRUE != result) {
360 ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
361 return false;
362 }
363 return true;
364 }
365
doClientSync()366 void EGLTestHelper::doClientSync() {
367 this->directContext()->flush();
368 this->directContext()->submit(true);
369 }
370 #endif // SK_GL
371
// Declares a member function pointer fVk<name> for the Vulkan entry point vk<name>.
#define DECLARE_VK_PROC(name) PFN_vk##name fVk##name

// Loads an instance-level Vulkan proc into fVk<name>. Expects `getProc` and
// `reporter` to be in scope at the expansion site; returns false from the
// enclosing function when the proc cannot be acquired.
#define ACQUIRE_INST_VK_PROC(name)                                                        \
    do {                                                                                  \
        fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, fBackendContext.fInstance,\
                                                           VK_NULL_HANDLE));              \
        if (fVk##name == nullptr) {                                                       \
            ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);     \
            return false;                                                                 \
        }                                                                                 \
    } while(false)

// Same as above but for device-level procs (resolved against fDevice).
#define ACQUIRE_DEVICE_VK_PROC(name)                                                      \
    do {                                                                                  \
        fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, VK_NULL_HANDLE, fDevice)); \
        if (fVk##name == nullptr) {                                                       \
            ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);     \
            return false;                                                                 \
        }                                                                                 \
    } while(false)
392
// Vulkan implementation of BaseTestHelper. Creates its own VkInstance/VkDevice
// through the test utilities, imports AHardwareBuffers as dedicated-allocation
// VkImages, and synchronizes with sync-fd external semaphores.
class VulkanTestHelper : public BaseTestHelper {
public:
    VulkanTestHelper() {}

    ~VulkanTestHelper() override {}

    void releaseImage() override {
        // Nothing to release if the device never came up (or was torn down).
        if (VK_NULL_HANDLE == fDevice) {
            return;
        }
        if (fImage != VK_NULL_HANDLE) {
            fVkDestroyImage(fDevice, fImage, nullptr);
            fImage = VK_NULL_HANDLE;
        }

        if (fMemory != VK_NULL_HANDLE) {
            fVkFreeMemory(fDevice, fMemory, nullptr);
            fMemory = VK_NULL_HANDLE;
        }
    }

    // Queues a transition of the surface's image back to the external queue
    // family (layout UNDEFINED) so a foreign queue / other API may use it.
    void releaseSurfaceToExternal(SkSurface* surface) override {
        skgpu::MutableTextureState newState(VK_IMAGE_LAYOUT_UNDEFINED, VK_QUEUE_FAMILY_EXTERNAL);
        surface->flush({}, &newState);
    }

    // Tears down everything init() created, in dependency order: context,
    // image/memory, semaphore, allocator, device, debug callback, instance,
    // then the heap-allocated extension/feature structs.
    void cleanup() override {
        fDirectContext.reset();
        this->releaseImage();
        if (fSignalSemaphore != VK_NULL_HANDLE) {
            fVkDestroySemaphore(fDevice, fSignalSemaphore, nullptr);
            fSignalSemaphore = VK_NULL_HANDLE;
        }
        fBackendContext.fMemoryAllocator.reset();
        if (fDevice != VK_NULL_HANDLE) {
            // Ensure no work is in flight before destroying the device.
            fVkDeviceWaitIdle(fDevice);
            fVkDestroyDevice(fDevice, nullptr);
            fDevice = VK_NULL_HANDLE;
        }
#ifdef SK_ENABLE_VK_LAYERS
        if (fDebugCallback != VK_NULL_HANDLE) {
            fDestroyDebugCallback(fBackendContext.fInstance, fDebugCallback, nullptr);
        }
#endif
        if (fBackendContext.fInstance != VK_NULL_HANDLE) {
            fVkDestroyInstance(fBackendContext.fInstance, nullptr);
            fBackendContext.fInstance = VK_NULL_HANDLE;
        }

        delete fExtensions;

        // Frees the pNext chain hanging off fFeatures before deleting it.
        sk_gpu_test::FreeVulkanFeaturesStructs(fFeatures);
        delete fFeatures;
    }

    bool init(skiatest::Reporter* reporter) override;

    // CPU-blocking sync: submit(true) waits until the GPU finishes.
    void doClientSync() override {
        if (!fDirectContext) {
            return;
        }

        fDirectContext->submit(true);
    }

    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;

    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    // Vulkan has no notion of a current context.
    void makeCurrent() override {}

    GrDirectContext* directContext() override { return fDirectContext.get(); }

private:
    // Verifies the device supports optimally-tiled R8G8B8A8 AHardwareBuffer images.
    bool checkOptimalHardwareBuffer(skiatest::Reporter* reporter);

    // Imports |buffer| as a dedicated-allocation VkImage; fills *outImageInfo
    // for wrapping in a GrBackendTexture. |forWrite| adds color-attachment usage.
    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer, bool forWrite,
                              GrVkImageInfo* outImageInfo);

    // Creates a sync-fd-exportable VkSemaphore wrapped in a GrBackendSemaphore.
    bool setupSemaphoreForSignaling(skiatest::Reporter* reporter, GrBackendSemaphore*);
    // Exports the semaphore's payload as a sync fd (stored in fFdHandle).
    bool exportSemaphore(skiatest::Reporter* reporter, const GrBackendSemaphore&);

    DECLARE_VK_PROC(DestroyInstance);
    DECLARE_VK_PROC(DeviceWaitIdle);
    DECLARE_VK_PROC(DestroyDevice);

    DECLARE_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);
    DECLARE_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    DECLARE_VK_PROC(GetPhysicalDeviceMemoryProperties2);

    DECLARE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);

    DECLARE_VK_PROC(CreateImage);
    DECLARE_VK_PROC(GetImageMemoryRequirements2);
    DECLARE_VK_PROC(DestroyImage);

    DECLARE_VK_PROC(AllocateMemory);
    DECLARE_VK_PROC(BindImageMemory2);
    DECLARE_VK_PROC(FreeMemory);

    DECLARE_VK_PROC(CreateSemaphore);
    DECLARE_VK_PROC(GetSemaphoreFdKHR);
    DECLARE_VK_PROC(ImportSemaphoreFdKHR);
    DECLARE_VK_PROC(DestroySemaphore);

    // Currently imported VkImage and its dedicated allocation.
    VkImage fImage = VK_NULL_HANDLE;
    VkDeviceMemory fMemory = VK_NULL_HANDLE;

    // Heap-owned; deleted in cleanup().
    skgpu::VulkanExtensions* fExtensions = nullptr;
    VkPhysicalDeviceFeatures2* fFeatures = nullptr;
    VkDebugReportCallbackEXT fDebugCallback = VK_NULL_HANDLE;
    PFN_vkDestroyDebugReportCallbackEXT fDestroyDebugCallback = nullptr;

    // We hold on to the semaphore so we can delete once the GPU is done.
    VkSemaphore fSignalSemaphore = VK_NULL_HANDLE;

    VkDevice fDevice = VK_NULL_HANDLE;

    GrVkBackendContext fBackendContext;
    sk_sp<GrDirectContext> fDirectContext;
};
520
// Loads the Vulkan library, builds a backend context, verifies the extensions
// needed for AHardwareBuffer import and sync-fd semaphores, resolves all
// required entry points, and creates a GrDirectContext. Returns false (test
// skipped) when any requirement is unavailable.
bool VulkanTestHelper::init(skiatest::Reporter* reporter) {
    PFN_vkGetInstanceProcAddr instProc;
    if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc)) {
        return false;
    }

    // Heap-allocated because CreateVkBackendContext chains feature structs off
    // fFeatures; both are freed in cleanup().
    fExtensions = new skgpu::VulkanExtensions();
    fFeatures = new VkPhysicalDeviceFeatures2;
    memset(fFeatures, 0, sizeof(VkPhysicalDeviceFeatures2));
    fFeatures->sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    fFeatures->pNext = nullptr;

    fBackendContext.fInstance = VK_NULL_HANDLE;
    fBackendContext.fDevice = VK_NULL_HANDLE;

    if (!sk_gpu_test::CreateVkBackendContext(instProc, &fBackendContext, fExtensions,
                                             fFeatures, &fDebugCallback)) {
        return false;
    }
    fDevice = fBackendContext.fDevice;
    auto getProc = fBackendContext.fGetProc;

    if (fDebugCallback != VK_NULL_HANDLE) {
        fDestroyDebugCallback = (PFN_vkDestroyDebugReportCallbackEXT) instProc(
                fBackendContext.fInstance, "vkDestroyDebugReportCallbackEXT");
    }

    ACQUIRE_INST_VK_PROC(DestroyInstance);
    ACQUIRE_INST_VK_PROC(DeviceWaitIdle);
    ACQUIRE_INST_VK_PROC(DestroyDevice);

    // Extensions needed for AHardwareBuffer import (spec version >= 2) and
    // sync-fd semaphore export/import.
    if (!fExtensions->hasExtension(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
                                   2)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, 1)) {
        return false;
    }
    if (!fExtensions->hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1)) {
        // NOTE(review): the early-out here is commented out, so a missing
        // VK_EXT_queue_family_foreign is tolerated — confirm whether this check
        // should be enforced or the empty branch removed.
        // return false;
    }

    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceMemoryProperties2);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);

    ACQUIRE_DEVICE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);

    ACQUIRE_DEVICE_VK_PROC(CreateImage);
    ACQUIRE_DEVICE_VK_PROC(GetImageMemoryRequirements2);
    ACQUIRE_DEVICE_VK_PROC(DestroyImage);

    ACQUIRE_DEVICE_VK_PROC(AllocateMemory);
    ACQUIRE_DEVICE_VK_PROC(BindImageMemory2);
    ACQUIRE_DEVICE_VK_PROC(FreeMemory);

    ACQUIRE_DEVICE_VK_PROC(CreateSemaphore);
    ACQUIRE_DEVICE_VK_PROC(GetSemaphoreFdKHR);
    ACQUIRE_DEVICE_VK_PROC(ImportSemaphoreFdKHR);
    ACQUIRE_DEVICE_VK_PROC(DestroySemaphore);

    fDirectContext = GrDirectContext::MakeVulkan(fBackendContext);
    REPORTER_ASSERT(reporter, fDirectContext.get());
    if (!fDirectContext) {
        return false;
    }

    return this->checkOptimalHardwareBuffer(reporter);
}
593
checkOptimalHardwareBuffer(skiatest::Reporter * reporter)594 bool VulkanTestHelper::checkOptimalHardwareBuffer(skiatest::Reporter* reporter) {
595 VkResult err;
596
597 VkPhysicalDeviceExternalImageFormatInfo externalImageFormatInfo;
598 externalImageFormatInfo.sType =
599 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
600 externalImageFormatInfo.pNext = nullptr;
601 externalImageFormatInfo.handleType =
602 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
603 //externalImageFormatInfo.handType = 0x80;
604
605 // We will create the hardware buffer with gpu sampled so these usages should all be valid
606 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
607 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
608 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
609 VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
610 imageFormatInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
611 imageFormatInfo.pNext = &externalImageFormatInfo;
612 imageFormatInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
613 imageFormatInfo.type = VK_IMAGE_TYPE_2D;
614 imageFormatInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
615 imageFormatInfo.usage = usageFlags;
616 imageFormatInfo.flags = 0;
617
618 VkAndroidHardwareBufferUsageANDROID hwbUsage;
619 hwbUsage.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
620 hwbUsage.pNext = nullptr;
621
622 VkExternalImageFormatProperties externalImgFormatProps;
623 externalImgFormatProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
624 externalImgFormatProps.pNext = &hwbUsage;
625
626 VkImageFormatProperties2 imgFormProps;
627 imgFormProps.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
628 imgFormProps.pNext = &externalImgFormatProps;
629
630 err = fVkGetPhysicalDeviceImageFormatProperties2(fBackendContext.fPhysicalDevice,
631 &imageFormatInfo, &imgFormProps);
632 if (VK_SUCCESS != err) {
633 ERRORF(reporter, "vkGetPhysicalDeviceImageFormatProperites failed, err: %d", err);
634 return false;
635 }
636
637 const VkImageFormatProperties& imageFormatProperties = imgFormProps.imageFormatProperties;
638 REPORTER_ASSERT(reporter, DEV_W <= imageFormatProperties.maxExtent.width);
639 REPORTER_ASSERT(reporter, DEV_H <= imageFormatProperties.maxExtent.height);
640
641 const VkExternalMemoryProperties& externalImageFormatProps =
642 externalImgFormatProps.externalMemoryProperties;
643 REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT &
644 externalImageFormatProps.externalMemoryFeatures));
645 REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT &
646 externalImageFormatProps.externalMemoryFeatures));
647
648 REPORTER_ASSERT(reporter, SkToBool(AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE &
649 hwbUsage.androidHardwareBufferUsage));
650
651 return true;
652 }
653
// Imports |buffer| as a VkImage with a dedicated memory allocation and fills
// *outImageInfo with everything needed to wrap it in a GrBackendTexture.
// |forWrite| additionally requests color-attachment usage. On failure the
// partially-created fImage/fMemory are left for releaseImage() to clean up.
bool VulkanTestHelper::importHardwareBuffer(skiatest::Reporter* reporter,
                                            AHardwareBuffer* buffer,
                                            bool forWrite,
                                            GrVkImageInfo* outImageInfo) {
    VkResult err;

    // Query the buffer's Vulkan format and memory requirements.
    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = fVkGetAndroidHardwareBufferPropertiesANDROID(fDevice, buffer, &hwbProps);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "GetAndroidHardwareBufferPropertiesAndroid failed, err: %d", err);
        return false;
    }

    // The tests always allocate RGBA8 buffers; verify the driver agrees and
    // supports the features needed for sampling and transfers.
    REPORTER_ASSERT(reporter, VK_FORMAT_R8G8B8A8_UNORM == hwbFormatProps.format);
    REPORTER_ASSERT(reporter,
                    SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
                    SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
                    SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
    if (forWrite) {
        REPORTER_ASSERT(reporter,
                SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT & hwbFormatProps.formatFeatures));

    }

    // An undefined format means the buffer only has a driver-private "external
    // format"; chain it in so image creation still works in that case.
    bool useExternalFormat = VK_FORMAT_UNDEFINED == hwbFormatProps.format;
    const VkExternalFormatANDROID externalFormatInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,             // sType
        nullptr,                                               // pNext
        useExternalFormat ? hwbFormatProps.externalFormat : 0, // externalFormat
    };

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
        &externalFormatInfo,                                                 // pNext
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };

    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    if (forWrite) {
        usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
        &externalMemoryImageInfo,                    // pNext
        0,                                           // VkImageCreateFlags
        VK_IMAGE_TYPE_2D,                            // VkImageType
        hwbFormatProps.format,                       // VkFormat
        { DEV_W, DEV_H, 1 },                         // VkExtent3D
        1,                                           // mipLevels
        1,                                           // arrayLayers
        VK_SAMPLE_COUNT_1_BIT,                       // samples
        VK_IMAGE_TILING_OPTIMAL,                     // VkImageTiling
        usageFlags,                                  // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
        0,                                           // queueFamilyCount
        0,                                           // pQueueFamilyIndices
        VK_IMAGE_LAYOUT_UNDEFINED,                   // initialLayout
    };

    err = fVkCreateImage(fDevice, &imageCreateInfo, nullptr, &fImage);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Create Image failed, err: %d", err);
        return false;
    }

    // Pick a device-local memory type compatible with the buffer's memoryTypeBits.
    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    fVkGetPhysicalDeviceMemoryProperties2(fBackendContext.fPhysicalDevice, &phyDevMemProps);
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                REPORTER_ASSERT(reporter, heapIndex < pdmp.memoryHeapCount);
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        ERRORF(reporter, "Failed to find valid heap for imported memory");
        return false;
    }

    // Dedicated allocation importing the AHardwareBuffer directly, as required
    // by the DEDICATED_ONLY_BIT checked in checkOptimalHardwareBuffer().
    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = buffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = fImage;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,      // sType
        &dedicatedAllocInfo,                         // pNext
        hwbProps.allocationSize,                     // allocationSize
        typeIndex,                                   // memoryTypeIndex
    };

    err = fVkAllocateMemory(fDevice, &allocInfo, nullptr, &fMemory);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "AllocateMemory failed for imported buffer, err: %d", err);
        return false;
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = fImage;
    bindImageInfo.memory = fMemory;
    bindImageInfo.memoryOffset = 0;

    err = fVkBindImageMemory2(fDevice, 1, &bindImageInfo);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "BindImageMemory failed for imported buffer, err: %d", err);
        return false;
    }

    skgpu::VulkanAlloc alloc;
    alloc.fMemory = fMemory;
    alloc.fOffset = 0;
    alloc.fSize = hwbProps.allocationSize;
    alloc.fFlags = 0;

    // fFormat is hard-coded to R8G8B8A8_UNORM; valid here because the REPORTER_ASSERT
    // above requires hwbFormatProps.format to be exactly that format.
    outImageInfo->fImage = fImage;
    outImageInfo->fAlloc = alloc;
    outImageInfo->fImageTiling = VK_IMAGE_TILING_OPTIMAL;
    outImageInfo->fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    outImageInfo->fFormat = VK_FORMAT_R8G8B8A8_UNORM;
    outImageInfo->fImageUsageFlags = usageFlags;
    outImageInfo->fLevelCount = 1;
    outImageInfo->fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    return true;
}
809
importHardwareBufferForRead(skiatest::Reporter * reporter,AHardwareBuffer * buffer)810 sk_sp<SkImage> VulkanTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
811 AHardwareBuffer* buffer) {
812 GrVkImageInfo imageInfo;
813 if (!this->importHardwareBuffer(reporter, buffer, false, &imageInfo)) {
814 return nullptr;
815 }
816
817 GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);
818
819 sk_sp<SkImage> wrappedImage = SkImage::MakeFromTexture(fDirectContext.get(),
820 backendTex,
821 kTopLeft_GrSurfaceOrigin,
822 kRGBA_8888_SkColorType,
823 kPremul_SkAlphaType,
824 nullptr);
825
826 if (!wrappedImage.get()) {
827 ERRORF(reporter, "Failed to create wrapped Vulkan SkImage");
828 return nullptr;
829 }
830
831 return wrappedImage;
832 }
833
// Transitions the surface's image back to the external queue, flushes with a
// signal semaphore attached, and exports that semaphore as a sync fd (stored
// in fFdHandle via exportSemaphore).
bool VulkanTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
                                                      sk_sp<SkSurface> surface) {
    // Queue the layout/queue-family transition before the flush so it is part
    // of the work the semaphore signals completion of.
    this->releaseSurfaceToExternal(surface.get());
    surface.reset();
    GrBackendSemaphore semaphore;
    if (!this->setupSemaphoreForSignaling(reporter, &semaphore)) {
        return false;
    }
    GrFlushInfo info;
    info.fNumSemaphores = 1;
    info.fSignalSemaphores = &semaphore;
    GrSemaphoresSubmitted submitted = fDirectContext->flush(info);
    fDirectContext->submit();
    if (GrSemaphoresSubmitted::kNo == submitted) {
        // NOTE(review): the VkSemaphore created above is not destroyed on this
        // path — presumably tracked via fSignalSemaphore/cleanup(), but that
        // assignment is not visible here; confirm there is no leak.
        ERRORF(reporter, "Failing call to flush on GrDirectContext");
        return false;
    }
    SkASSERT(semaphore.isInitialized());
    if (!this->exportSemaphore(reporter, semaphore)) {
        return false;
    }
    return true;
}
857
// Creates a VkSemaphore whose payload can be exported as a sync fd and wraps
// it in *beSemaphore. First verifies the device actually supports exporting
// and importing SYNC_FD handles.
bool VulkanTestHelper::setupSemaphoreForSignaling(skiatest::Reporter* reporter,
                                                  GrBackendSemaphore* beSemaphore) {
    // Query supported info
    VkPhysicalDeviceExternalSemaphoreInfo exSemInfo;
    exSemInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
    exSemInfo.pNext = nullptr;
    exSemInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkExternalSemaphoreProperties exSemProps;
    exSemProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
    exSemProps.pNext = nullptr;

    fVkGetPhysicalDeviceExternalSemaphoreProperties(fBackendContext.fPhysicalDevice, &exSemInfo,
                                                    &exSemProps);

    // All three capabilities are needed: the fd will be exported here and
    // imported (possibly into a re-imported semaphore) on the other side.
    if (!SkToBool(exSemProps.exportFromImportedHandleTypes &
                  VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as exportFromImportedHandleTypes");
        return false;
    }
    if (!SkToBool(exSemProps.compatibleHandleTypes &
                  VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as compatibleHandleTypes");
        return false;
    }
    if (!SkToBool(exSemProps.externalSemaphoreFeatures &
                  VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT) ||
        !SkToBool(exSemProps.externalSemaphoreFeatures &
                  VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD doesn't support export and import feature");
        return false;
    }

    // Create the semaphore with SYNC_FD export enabled via the pNext chain.
    VkExportSemaphoreCreateInfo exportInfo;
    exportInfo.sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
    exportInfo.pNext = nullptr;
    exportInfo.handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkSemaphoreCreateInfo semaphoreInfo;
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = &exportInfo;
    semaphoreInfo.flags = 0;

    VkSemaphore semaphore;
    VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to create signal semaphore, err: %d", err);
        return false;
    }
    beSemaphore->initVulkan(semaphore);
    return true;
}
910
exportSemaphore(skiatest::Reporter * reporter,const GrBackendSemaphore & beSemaphore)911 bool VulkanTestHelper::exportSemaphore(skiatest::Reporter* reporter,
912 const GrBackendSemaphore& beSemaphore) {
913 VkSemaphore semaphore = beSemaphore.vkSemaphore();
914 if (VK_NULL_HANDLE == semaphore) {
915 ERRORF(reporter, "Invalid vulkan handle in export call");
916 return false;
917 }
918
919 VkSemaphoreGetFdInfoKHR getFdInfo;
920 getFdInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
921 getFdInfo.pNext = nullptr;
922 getFdInfo.semaphore = semaphore;
923 getFdInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
924
925 VkResult err = fVkGetSemaphoreFdKHR(fDevice, &getFdInfo, &fFdHandle);
926 if (VK_SUCCESS != err) {
927 ERRORF(reporter, "Failed to export signal semaphore, err: %d", err);
928 return false;
929 }
930 fSignalSemaphore = semaphore;
931 return true;
932 }
933
importAndWaitOnSemaphore(skiatest::Reporter * reporter,int fdHandle,sk_sp<SkSurface> surface)934 bool VulkanTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
935 sk_sp<SkSurface> surface) {
936 VkSemaphoreCreateInfo semaphoreInfo;
937 semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
938 semaphoreInfo.pNext = nullptr;
939 semaphoreInfo.flags = 0;
940
941 VkSemaphore semaphore;
942 VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
943 if (VK_SUCCESS != err) {
944 ERRORF(reporter, "Failed to create import semaphore, err: %d", err);
945 return false;
946 }
947
948 VkImportSemaphoreFdInfoKHR importInfo;
949 importInfo.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
950 importInfo.pNext = nullptr;
951 importInfo.semaphore = semaphore;
952 importInfo.flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT;
953 importInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
954 importInfo.fd = fdHandle;
955
956 err = fVkImportSemaphoreFdKHR(fDevice, &importInfo);
957 if (VK_SUCCESS != err) {
958 ERRORF(reporter, "Failed to import semaphore, err: %d", err);
959 return false;
960 }
961
962 GrBackendSemaphore beSemaphore;
963 beSemaphore.initVulkan(semaphore);
964 if (!surface->wait(1, &beSemaphore)) {
965 ERRORF(reporter, "Failed to add wait semaphore to surface");
966 fVkDestroySemaphore(fDevice, semaphore, nullptr);
967 return false;
968 }
969 return true;
970 }
971
importHardwareBufferForWrite(skiatest::Reporter * reporter,AHardwareBuffer * buffer)972 sk_sp<SkSurface> VulkanTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
973 AHardwareBuffer* buffer) {
974 GrVkImageInfo imageInfo;
975 if (!this->importHardwareBuffer(reporter, buffer, true, &imageInfo)) {
976 return nullptr;
977 }
978
979 GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);
980
981 sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fDirectContext.get(),
982 backendTex,
983 kTopLeft_GrSurfaceOrigin,
984 0,
985 kRGBA_8888_SkColorType,
986 nullptr, nullptr);
987
988 if (!surface.get()) {
989 ERRORF(reporter, "Failed to create wrapped Vulkan SkSurface");
990 return nullptr;
991 }
992
993 return surface;
994 }
995
// Returns the premultiplied test color for pixel (x, y): r/g encode the pixel
// coordinates and b is a constant, so every pixel in the DEV_W x DEV_H grid
// gets a distinct, position-dependent value for readback comparison.
static SkPMColor get_src_color(int x, int y) {
    SkASSERT(x >= 0 && x < DEV_W);
    SkASSERT(y >= 0 && y < DEV_H);

    U8CPU r = x;
    U8CPU g = y;
    U8CPU b = 0xc;

    U8CPU a = 0xff;
    switch ((x+y) % 5) {
        case 0:
            a = 0xff;
            break;
        case 1:
            a = 0x80;
            break;
        case 2:
            a = 0xCC;
            break;
        case 4:
            a = 0x01;
            break;
        case 3:
            a = 0x00;
            break;
    }
    // NOTE(review): this unconditionally forces full alpha, which makes the
    // switch above dead code. It reads like a deliberate "keep every pixel
    // opaque" override (varying alpha would change the premultiplied values
    // compared after the HWB round trip) -- confirm intent before removing
    // either the switch or this assignment.
    a = 0xff;
    return SkPremultiplyARGBInline(a, r, g, b);
}
1025
make_src_bitmap()1026 static SkBitmap make_src_bitmap() {
1027 static SkBitmap bmp;
1028 if (bmp.isNull()) {
1029 bmp.allocN32Pixels(DEV_W, DEV_H);
1030 intptr_t pixels = reinterpret_cast<intptr_t>(bmp.getPixels());
1031 for (int y = 0; y < DEV_H; ++y) {
1032 for (int x = 0; x < DEV_W; ++x) {
1033 SkPMColor* pixel = reinterpret_cast<SkPMColor*>(
1034 pixels + y * bmp.rowBytes() + x * bmp.bytesPerPixel());
1035 *pixel = get_src_color(x, y);
1036 }
1037 }
1038 }
1039 return bmp;
1040 }
1041
check_read(skiatest::Reporter * reporter,const SkBitmap & srcBitmap,const SkBitmap & dstBitmap)1042 static bool check_read(skiatest::Reporter* reporter, const SkBitmap& srcBitmap,
1043 const SkBitmap& dstBitmap) {
1044 bool result = true;
1045 for (int y = 0; y < DEV_H && result; ++y) {
1046 for (int x = 0; x < DEV_W && result; ++x) {
1047 const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
1048 const uint32_t dstPixel = *dstBitmap.getAddr32(x, y);
1049 if (srcPixel != dstPixel) {
1050 ERRORF(reporter, "Expected readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
1051 x, y, srcPixel, dstPixel);
1052 result = false;
1053 } /*else {
1054 ERRORF(reporter, "Got good readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
1055 x, y, srcPixel, dstPixel);
1056
1057 }*/
1058 }
1059 }
1060 return result;
1061 }
1062
cleanup_resources(BaseTestHelper * srcHelper,BaseTestHelper * dstHelper,AHardwareBuffer * buffer)1063 static void cleanup_resources(BaseTestHelper* srcHelper, BaseTestHelper* dstHelper,
1064 AHardwareBuffer* buffer) {
1065 if (srcHelper) {
1066 srcHelper->cleanup();
1067 }
1068 if (dstHelper) {
1069 dstHelper->cleanup();
1070 }
1071 if (buffer) {
1072 AHardwareBuffer_release(buffer);
1073 }
1074 }
1075
// Where the AHardwareBuffer's contents are produced.
enum class SrcType {
    kCPU,     // written directly via AHardwareBuffer_lock on the CPU
    kEGL,     // rendered through an EGL/GL-backed surface
    kVulkan,  // rendered through a Vulkan-backed surface
};

// Which GPU backend imports the AHardwareBuffer for readback.
enum class DstType {
    kEGL,
    kVulkan,
};
1086
// Core driver for the AHardwareBuffer sharing tests:
//   1) allocate an AHardwareBuffer,
//   2) fill it from |srcType| (direct CPU writes, an EGL surface, or a Vulkan
//      surface),
//   3) import it into |dstType| (EGL or Vulkan) as an SkImage, draw that
//      image into a freshly created surface,
//   4) read the destination surface back and compare against the CPU source
//      bitmap.
// When |shareSyncs| is true the src side signals an exported sync-fd
// semaphore which the dst side imports and waits on, instead of the src side
// doing a full client-side sync.
void run_test(skiatest::Reporter* reporter, const GrContextOptions& options,
              SrcType srcType, DstType dstType, bool shareSyncs) {
    if (SrcType::kCPU == srcType && shareSyncs) {
        // We don't currently test this since we don't do any syncs in this case.
        return;
    }
    std::unique_ptr<BaseTestHelper> srcHelper;
    std::unique_ptr<BaseTestHelper> dstHelper;
    AHardwareBuffer* buffer = nullptr;
    // No srcHelper is created for SrcType::kCPU; that path writes the buffer
    // directly below instead of going through a GPU context.
    if (SrcType::kVulkan == srcType) {
        srcHelper.reset(new VulkanTestHelper());
    } else if (SrcType::kEGL == srcType) {
#ifdef SK_GL
        srcHelper.reset(new EGLTestHelper(options));
#else
        SkASSERTF(false, "SrcType::kEGL used without OpenGL support.");
#endif
    }
    if (srcHelper) {
        if (!srcHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    if (DstType::kVulkan == dstType) {
        dstHelper.reset(new VulkanTestHelper());
    } else {
#ifdef SK_GL
        SkASSERT(DstType::kEGL == dstType);
        dstHelper.reset(new EGLTestHelper(options));
#else
        SkASSERTF(false, "DstType::kEGL used without OpenGL support.");
#endif
    }
    if (dstHelper) {
        if (!dstHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Setup SkBitmaps
    ///////////////////////////////////////////////////////////////////////////

    SkBitmap srcBitmap = make_src_bitmap();
    SkBitmap dstBitmapSurface;
    dstBitmapSurface.allocN32Pixels(DEV_W, DEV_H);
    SkBitmap dstBitmapFinal;
    dstBitmapFinal.allocN32Pixels(DEV_W, DEV_H);

    ///////////////////////////////////////////////////////////////////////////
    // Setup AHardwareBuffer
    ///////////////////////////////////////////////////////////////////////////

    AHardwareBuffer_Desc hwbDesc;
    hwbDesc.width = DEV_W;
    hwbDesc.height = DEV_H;
    hwbDesc.layers = 1;
    // CPU sources need CPU write access; GPU sources render into the buffer
    // and so need GPU color-output usage instead.
    if (SrcType::kCPU == srcType) {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
    } else {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_NEVER |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
                        AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
    }
    hwbDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
    // The following three are not used in the allocate
    hwbDesc.stride = 0;
    hwbDesc.rfu0= 0;
    hwbDesc.rfu1= 0;

    if (int error = AHardwareBuffer_allocate(&hwbDesc, &buffer)) {
        ERRORF(reporter, "Failed to allocated hardware buffer, error: %d", error);
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (SrcType::kCPU == srcType) {
        // Get actual desc for allocated buffer so we know the stride for uploading cpu data.
        AHardwareBuffer_describe(buffer, &hwbDesc);

        uint32_t* bufferAddr;
        if (AHardwareBuffer_lock(buffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, nullptr,
                                 reinterpret_cast<void**>(&bufferAddr))) {
            ERRORF(reporter, "Failed to lock hardware buffer");
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        // Copy the source bitmap into the buffer row by row; the buffer's row
        // stride (in pixels) may be wider than DEV_W.
        int bbp = srcBitmap.bytesPerPixel();
        uint32_t* src = (uint32_t*)srcBitmap.getPixels();
        uint32_t* dst = bufferAddr;
        for (int y = 0; y < DEV_H; ++y) {
            memcpy(dst, src, DEV_W * bbp);
            src += DEV_W;
            dst += hwbDesc.stride;
        }

        // Sanity-check the upload while the buffer is still mapped.
        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
                uint32_t dstPixel = bufferAddr[y * hwbDesc.stride + x];
                if (srcPixel != dstPixel) {
                    ERRORF(reporter, "CPU HWB Expected readpix (%d, %d) value 0x%08x, got 0x%08x.",
                           x, y, srcPixel, dstPixel);
                }
            }
        }

        AHardwareBuffer_unlock(buffer, nullptr);

    } else {
        // GPU source: import the buffer as a writeable surface and draw the
        // source bitmap into it.
        srcHelper->makeCurrent();
        sk_sp<SkSurface> surface = srcHelper->importHardwareBufferForWrite(reporter, buffer);

        if (!surface) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        sk_sp<SkImage> srcBmpImage = SkImage::MakeFromBitmap(srcBitmap);
        surface->getCanvas()->drawImage(srcBmpImage, 0, 0);

        // If we are testing sharing of syncs, don't do a read here since it forces sychronization
        // to occur.
        if (!shareSyncs) {
            bool readResult = surface->readPixels(dstBitmapSurface, 0, 0);
            if (!readResult) {
                ERRORF(reporter, "Read Pixels on surface failed");
                surface.reset();
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
            REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapSurface));
        }

        ///////////////////////////////////////////////////////////////////////////
        // Cleanup GL/EGL and add syncs
        ///////////////////////////////////////////////////////////////////////////

        if (shareSyncs) {
            // Semaphore path: flush and signal; the dst side waits on the
            // exported fd instead of us blocking here.
            if (!srcHelper->flushSurfaceAndSignalSemaphore(reporter, std::move(surface))) {
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
        } else {
            // Client-sync path: block until the src work is done before the
            // dst side touches the buffer.
            srcHelper->releaseSurfaceToExternal(surface.get());
            srcHelper->doClientSync();
            surface.reset();
            srcHelper->releaseImage();
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Import the HWB into backend and draw it to a surface
    ///////////////////////////////////////////////////////////////////////////

    dstHelper->makeCurrent();
    sk_sp<SkImage> wrappedImage = dstHelper->importHardwareBufferForRead(reporter, buffer);

    if (!wrappedImage) {
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    auto direct = dstHelper->directContext();

    // Make SkSurface to render wrapped HWB into.
    SkImageInfo imageInfo = SkImageInfo::Make(DEV_W, DEV_H, kRGBA_8888_SkColorType,
                                              kPremul_SkAlphaType, nullptr);

    sk_sp<SkSurface> dstSurf = SkSurface::MakeRenderTarget(
            direct, skgpu::Budgeted::kNo, imageInfo, 0, kTopLeft_GrSurfaceOrigin, nullptr, false);
    if (!dstSurf.get()) {
        ERRORF(reporter, "Failed to create destination SkSurface");
        wrappedImage.reset();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (shareSyncs) {
        // Wait on the semaphore the src side exported before sampling the HWB.
        if (!dstHelper->importAndWaitOnSemaphore(reporter, srcHelper->getFdHandle(), dstSurf)) {
            wrappedImage.reset();
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }
    dstSurf->getCanvas()->drawImage(wrappedImage, 0, 0);

    bool readResult = dstSurf->readPixels(dstBitmapFinal, 0, 0);
    if (!readResult) {
        ERRORF(reporter, "Read Pixels failed");
        wrappedImage.reset();
        dstSurf.reset();
        dstHelper->doClientSync();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapFinal));

    // Release GPU objects before tearing down the helpers/buffer.
    dstSurf.reset();
    wrappedImage.reset();
    dstHelper->doClientSync();
    cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
}
1298
// Instantiate the src/dst backend matrix for run_test(). Vulkan-only
// combinations run unconditionally; any combination involving EGL requires
// SK_GL. The trailing bool selects semaphore (sync-fd) sharing vs client sync.
DEF_GANESH_TEST(VulkanHardwareBuffer_CPU_Vulkan, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kCPU, DstType::kVulkan, false);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_Vulkan_Vulkan,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, false);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_Vulkan_Vulkan_Syncs,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, true);
}

#if defined(SK_GL)
DEF_GANESH_TEST(VulkanHardwareBuffer_EGL_Vulkan, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, false);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_CPU_EGL, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kCPU, DstType::kEGL, false);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_EGL_EGL, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, false);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_Vulkan_EGL, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, false);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_EGL_EGL_Syncs,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, true);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_Vulkan_EGL_Syncs,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, true);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_EGL_Vulkan_Syncs,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, true);
}
#endif
1355
1356 #endif // defined(SK_GANESH) && defined(SK_BUILD_FOR_ANDROID) &&
1357 // __ANDROID_API__ >= 26 && defined(SK_VULKAN)
1358
1359