1 // Copyright (C) 2018 The Android Open Source Project
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 #include <gtest/gtest.h>
15
16 #include "GoldfishOpenglTestEnv.h"
17 #include "GrallocDispatch.h"
18 #include "GrallocUsageConversion.h"
19 #include "AndroidVulkanDispatch.h"
20
21 #include <vulkan/vulkan.h>
22 #include <vulkan/vulkan_android.h>
23 #include <vulkan/vk_android_native_buffer.h>
24
25 #include "android/base/ArraySize.h"
26 #include "android/base/files/MemStream.h"
27 #include "android/base/files/Stream.h"
28 #include "android/base/files/PathUtils.h"
29 #include "android/base/memory/ScopedPtr.h"
30 #include "android/base/synchronization/ConditionVariable.h"
31 #include "android/base/synchronization/Lock.h"
32 #include "android/base/system/System.h"
33 #include "android/base/threads/FunctorThread.h"
34 #include "android/opengles.h"
35 #include "android/snapshot/interface.h"
36
37 #include "OpenglSystemCommon/HostConnection.h"
38 #include "OpenglSystemCommon/ProcessPipe.h"
39
40 #include <android/hardware_buffer.h>
41 #include <cutils/properties.h>
42
43 #include <atomic>
44 #include <random>
45 #include <memory>
46 #include <vector>
47
48 using android::base::AutoLock;
49 using android::base::ConditionVariable;
50 using android::base::FunctorThread;
51 using android::base::Lock;
52 using android::base::pj;
53 using android::base::System;
54
55 namespace aemu {
56
57 static constexpr int kWindowSize = 256;
58
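// Guest-side Vulkan dispatch table for the library under test; loaded once in
// SetUpTestCase() and shared by every test case below.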
59 static android_vulkan_dispatch* vk = nullptr;
60
61 class VulkanHalTest :
62 public ::testing::Test,
63 public ::testing::WithParamInterface<const char*> {
64 protected:
65
66 static GoldfishOpenglTestEnv* testEnv;
67
68 static void SetUpTestCase() {
69 testEnv = new GoldfishOpenglTestEnv;
70 #ifdef _WIN32
71 const char* libFilename = "vulkan_android.dll";
72 #elif defined(__APPLE__)
73 const char* libFilename = "libvulkan_android.dylib";
74 #else
75 const char* libFilename = "libvulkan_android.so";
76 #endif
77 auto path =
78 pj(System::get()->getProgramDirectory(),
79 "lib64", libFilename);
80 vk = load_android_vulkan_dispatch(path.c_str());
81 }
82
83 static void TearDownTestCase() {
84 // Cancel all host threads as well
85 android_finishOpenglesRenderer();
86
87 delete testEnv;
88 testEnv = nullptr;
89
90 delete vk;
91 }
92
93 static constexpr const char* kGltransportPropName = "ro.boot.qemu.gltransport";
94
95 bool usingAddressSpaceGraphics() {
96 char value[PROPERTY_VALUE_MAX];
97 if (property_get(
98 kGltransportPropName, value, "pipe") > 0) {
99 return !strcmp("asg", value);
100 }
101 return false;
102 }
103
104 void SetUp() override {
105 mProcessPipeRestarted = false;
106
107 property_set(kGltransportPropName, GetParam());
108 printf("%s: using transport: %s\n", __func__, GetParam());
109
110 setupGralloc();
111 setupVulkan();
112 }
113
114 void TearDown() override {
115 teardownVulkan();
116 teardownGralloc();
117 }
118
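// Simulates an unclean guest process exit followed by a restart: the process
// pipe is restarted and the host connection dropped without cleanup, then
// re-established, so the host is expected to reclaim resources owned by the
// old "process" on its own.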
119 void restartProcessPipeAndHostConnection() {
120 processPipeRestart();
121 cutHostConnectionUnclean();
122 refreshHostConnection();
123 }
124
125 void cutHostConnectionUnclean() {
126 HostConnection::exitUnclean();
127 }
128
129 void setupGralloc() {
130 auto grallocPath = pj(System::get()->getProgramDirectory(), "lib64",
131 "gralloc.ranchu" LIBSUFFIX);
132
133 load_gralloc_module(grallocPath.c_str(), &mGralloc);
134 set_global_gralloc_module(&mGralloc);
135
136 EXPECT_NE(nullptr, mGralloc.alloc_dev);
137 EXPECT_NE(nullptr, mGralloc.alloc_module);
138 }
139
140 void teardownGralloc() { unload_gralloc_module(&mGralloc); }
141
142 buffer_handle_t createTestGrallocBuffer(
143 int usage, int format,
144 int width, int height, int* stride_out) {
145 buffer_handle_t buffer;
146 int res;
147
148 res = mGralloc.alloc(width, height, format, usage, &buffer, stride_out);
149 if (res) {
150 fprintf(stderr, "%s:%d res=%d buffer=%p\n", __func__, __LINE__, res, buffer);
151 ::abort();
152 }
153
154 res = mGralloc.registerBuffer(buffer);
155 if (res) {
156 fprintf(stderr, "%s:%d res=%d buffer=%p\n", __func__, __LINE__, res, buffer);
157 ::abort();
158 }
159
160 return buffer;
161 }
162
163 void destroyTestGrallocBuffer(buffer_handle_t buffer) {
164 int res;
165
166 res = mGralloc.unregisterBuffer(buffer);
167 if (res) {
168 fprintf(stderr, "%s:%d res=%d buffer=%p\n", __func__, __LINE__, res, buffer);
169 ::abort();
170 }
171
172 res = mGralloc.free(buffer);
173 if (res) {
174 fprintf(stderr, "%s:%d res=%d buffer=%p\n", __func__, __LINE__, res, buffer);
175 ::abort();
176 }
177 }
178
179 void setupVulkan() {
180 uint32_t extCount = 0;
181 std::vector<VkExtensionProperties> exts;
182 EXPECT_EQ(VK_SUCCESS, vk->vkEnumerateInstanceExtensionProperties(
183 nullptr, &extCount, nullptr));
184 exts.resize(extCount);
185 EXPECT_EQ(VK_SUCCESS, vk->vkEnumerateInstanceExtensionProperties(
186 nullptr, &extCount, exts.data()));
187
188 bool hasGetPhysicalDeviceProperties2 = false;
189 bool hasExternalMemoryCapabilities = false;
190
191 for (const auto& prop : exts) {
192 if (!strcmp("VK_KHR_get_physical_device_properties2", prop.extensionName)) {
193 hasGetPhysicalDeviceProperties2 = true;
194 }
195 if (!strcmp("VK_KHR_external_memory_capabilities", prop.extensionName)) {
196 hasExternalMemoryCapabilities = true;
197 }
198 }
199
200 std::vector<const char*> enabledExtensions;
201
202 if (hasGetPhysicalDeviceProperties2) {
203 enabledExtensions.push_back("VK_KHR_get_physical_device_properties2");
204 mInstanceHasGetPhysicalDeviceProperties2Support = true;
205 }
206
207 if (hasExternalMemoryCapabilities) {
208 enabledExtensions.push_back("VK_KHR_external_memory_capabilities");
209 mInstanceHasExternalMemorySupport = true;
210 }
211
212 const char* const* enabledExtensionNames =
213 enabledExtensions.size() > 0 ? enabledExtensions.data()
214 : nullptr;
215
216 VkApplicationInfo appInfo = {
217 VK_STRUCTURE_TYPE_APPLICATION_INFO, 0,
218 "someAppName", 1,
219 "someEngineName", 1,
220 VK_API_VERSION_1_0,
221 };
222
223 VkInstanceCreateInfo instCi = {
224 VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
225 0, 0, &appInfo,
226 0, nullptr,
227 (uint32_t)enabledExtensions.size(),
228 enabledExtensionNames,
229 };
230
231 EXPECT_EQ(VK_SUCCESS, vk->vkCreateInstance(&instCi, nullptr, &mInstance));
232
233 uint32_t physdevCount = 0;
234 std::vector<VkPhysicalDevice> physdevs;
235 EXPECT_EQ(VK_SUCCESS,
236 vk->vkEnumeratePhysicalDevices(mInstance, &physdevCount, nullptr));
237 physdevs.resize(physdevCount);
238 EXPECT_EQ(VK_SUCCESS,
239 vk->vkEnumeratePhysicalDevices(mInstance, &physdevCount,
240 physdevs.data()));
241 std::vector<VkPhysicalDevice> physdevsSecond(physdevCount);
242 EXPECT_EQ(VK_SUCCESS,
243 vk->vkEnumeratePhysicalDevices(mInstance, &physdevCount,
244 physdevsSecond.data()));
245 // Check that a second call to vkEnumeratePhysicalDevices
246 // retrieves the same physical device handles.
247 EXPECT_EQ(physdevs, physdevsSecond);
248
249 uint32_t bestPhysicalDevice = 0;
250 bool queuesGood = false;
251 bool hasAndroidNativeBuffer = false;
252 bool hasExternalMemorySupport = false;
253
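// Prefer a physical device that exposes a graphics-capable queue family and
// the full extension set needed for
// VK_ANDROID_external_memory_android_hardware_buffer.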
254 for (uint32_t i = 0; i < physdevCount; ++i) {
255
256 queuesGood = false;
257 hasAndroidNativeBuffer = false;
258 hasExternalMemorySupport = false;
259
260 bool hasGetMemoryRequirements2 = false;
261 bool hasDedicatedAllocation = false;
262 bool hasExternalMemoryBaseExtension = false;
263 bool hasExternalMemoryPlatformExtension = false;
264
265 uint32_t queueFamilyCount = 0;
266 std::vector<VkQueueFamilyProperties> queueFamilyProps;
267 vk->vkGetPhysicalDeviceQueueFamilyProperties(
268 physdevs[i], &queueFamilyCount, nullptr);
269 queueFamilyProps.resize(queueFamilyCount);
270 vk->vkGetPhysicalDeviceQueueFamilyProperties(
271 physdevs[i], &queueFamilyCount, queueFamilyProps.data());
272
273 for (uint32_t j = 0; j < queueFamilyCount; ++j) {
274 auto count = queueFamilyProps[j].queueCount;
275 auto flags = queueFamilyProps[j].queueFlags;
276 if (count > 0 && (flags & VK_QUEUE_GRAPHICS_BIT)) {
277 bestPhysicalDevice = i;
278 mGraphicsQueueFamily = j;
279 queuesGood = true;
280 break;
281 }
282 }
283
284 uint32_t devExtCount = 0;
285 std::vector<VkExtensionProperties> availableDeviceExtensions;
286 vk->vkEnumerateDeviceExtensionProperties(physdevs[i], nullptr,
287 &devExtCount, nullptr);
288 availableDeviceExtensions.resize(devExtCount);
289 vk->vkEnumerateDeviceExtensionProperties(
290 physdevs[i], nullptr, &devExtCount, availableDeviceExtensions.data());
291 for (uint32_t j = 0; j < devExtCount; ++j) {
292 if (!strcmp("VK_KHR_swapchain",
293 availableDeviceExtensions[j].extensionName)) {
294 hasAndroidNativeBuffer = true;
295 }
296 if (!strcmp("VK_KHR_get_memory_requirements2",
297 availableDeviceExtensions[j].extensionName)) {
298 hasGetMemoryRequirements2 = true;
299 }
300 if (!strcmp("VK_KHR_dedicated_allocation",
301 availableDeviceExtensions[j].extensionName)) {
302 hasDedicatedAllocation = true;
303 }
304 if (!strcmp("VK_KHR_external_memory",
305 availableDeviceExtensions[j].extensionName)) {
306 hasExternalMemoryBaseExtension = true;
307 }
308 static const char* externalMemoryPlatformExtension =
309 "VK_ANDROID_external_memory_android_hardware_buffer";
310
311 if (!strcmp(externalMemoryPlatformExtension,
312 availableDeviceExtensions[j].extensionName)) {
313 hasExternalMemoryPlatformExtension = true;
314 }
315 }
316
317 hasExternalMemorySupport =
318 (hasGetMemoryRequirements2 &&
319 hasDedicatedAllocation &&
320 hasExternalMemoryBaseExtension &&
321 hasExternalMemoryPlatformExtension);
322
323 if (queuesGood && hasExternalMemorySupport) {
324 bestPhysicalDevice = i;
325 break;
326 }
327 }
328
329 EXPECT_TRUE(queuesGood);
330 EXPECT_TRUE(hasAndroidNativeBuffer);
331
332 mDeviceHasExternalMemorySupport =
333 hasExternalMemorySupport;
334
335 mPhysicalDevice = physdevs[bestPhysicalDevice];
336
337 VkPhysicalDeviceMemoryProperties memProps;
338 vk->vkGetPhysicalDeviceMemoryProperties(mPhysicalDevice, &memProps);
339
340 bool foundHostVisibleMemoryTypeIndex = false;
341 bool foundDeviceLocalMemoryTypeIndex = false;
342
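// Record one host-visible and one device-local memory type index; the memory
// mapping and AHardwareBuffer tests below allocate from these.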
343 for (uint32_t i = 0; i < memProps.memoryTypeCount; ++i) {
344 if (memProps.memoryTypes[i].propertyFlags &
345 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
346 mHostVisibleMemoryTypeIndex = i;
347 foundHostVisibleMemoryTypeIndex = true;
348 }
349
350 if (memProps.memoryTypes[i].propertyFlags &
351 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
352 mDeviceLocalMemoryTypeIndex = i;
353 foundDeviceLocalMemoryTypeIndex = true;
354 }
355
356 if (foundHostVisibleMemoryTypeIndex &&
357 foundDeviceLocalMemoryTypeIndex) {
358 break;
359 }
360 }
361
362 EXPECT_TRUE(
363 foundHostVisibleMemoryTypeIndex &&
364 foundDeviceLocalMemoryTypeIndex);
365
366 EXPECT_TRUE(foundHostVisibleMemoryTypeIndex);
367
368 float priority = 1.0f;
369 VkDeviceQueueCreateInfo dqCi = {
370 VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
371 0, 0,
372 mGraphicsQueueFamily, 1,
373 &priority,
374 };
375
376 VkDeviceCreateInfo dCi = {
377 VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, 0, 0,
378 1, &dqCi,
379 0, nullptr, // no layers
380 0, nullptr, // no extensions
381 nullptr, // no features
382 };
383
384 std::vector<const char*> externalMemoryExtensions = {
385 "VK_KHR_get_memory_requirements2",
386 "VK_KHR_dedicated_allocation",
387 "VK_KHR_external_memory",
388 "VK_ANDROID_external_memory_android_hardware_buffer",
389 };
390
391 // Mostly for MoltenVK or any other driver that doesn't support
392 // external memory.
393 std::vector<const char*> usefulExtensions = {
394 "VK_KHR_get_memory_requirements2",
395 "VK_KHR_dedicated_allocation",
396 };
397
398 if (mDeviceHasExternalMemorySupport) {
399 dCi.enabledExtensionCount =
400 (uint32_t)externalMemoryExtensions.size();
401 dCi.ppEnabledExtensionNames =
402 externalMemoryExtensions.data();
403 } else {
404 dCi.enabledExtensionCount =
405 (uint32_t)usefulExtensions.size();
406 dCi.ppEnabledExtensionNames =
407 usefulExtensions.data();
408 }
409
410 EXPECT_EQ(VK_SUCCESS, vk->vkCreateDevice(physdevs[bestPhysicalDevice], &dCi,
411 nullptr, &mDevice));
412 vk->vkGetDeviceQueue(mDevice, mGraphicsQueueFamily, 0, &mQueue);
413 }
414
415 void teardownVulkan() {
416 vk->vkDestroyDevice(mDevice, nullptr);
417 vk->vkDestroyInstance(mInstance, nullptr);
418 }
419
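// Allocates a gralloc buffer and wraps it in a VkImage by chaining
// VkNativeBufferANDROID into the image create info, mirroring what the
// Android Vulkan swapchain does for window images.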
420 void createAndroidNativeImage(buffer_handle_t* buffer_out, VkImage* image_out) {
421
422 int usage = GRALLOC_USAGE_HW_RENDER;
423 int format = HAL_PIXEL_FORMAT_RGBA_8888;
424 int stride;
425 buffer_handle_t buffer =
426 createTestGrallocBuffer(
427 usage, format, kWindowSize, kWindowSize, &stride);
428
429 uint64_t producerUsage, consumerUsage;
430 android_convertGralloc0To1Usage(usage, &producerUsage, &consumerUsage);
431
432 VkNativeBufferANDROID nativeBufferInfo = {
433 VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID, nullptr,
434 buffer, stride,
435 format,
436 usage,
437 {
438 consumerUsage,
439 producerUsage,
440 },
441 };
442
443 VkImageCreateInfo testImageCi = {
444 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, (const void*)&nativeBufferInfo,
445 0,
446 VK_IMAGE_TYPE_2D,
447 VK_FORMAT_R8G8B8A8_UNORM,
448 { kWindowSize, kWindowSize, 1, },
449 1, 1,
450 VK_SAMPLE_COUNT_1_BIT,
451 VK_IMAGE_TILING_OPTIMAL,
452 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
453 VK_SHARING_MODE_EXCLUSIVE,
454 0, nullptr /* shared queue families */,
455 VK_IMAGE_LAYOUT_UNDEFINED,
456 };
457
458 VkImage testAndroidNativeImage;
459 EXPECT_EQ(VK_SUCCESS, vk->vkCreateImage(mDevice, &testImageCi, nullptr,
460 &testAndroidNativeImage));
461
462 *buffer_out = buffer;
463 *image_out = testAndroidNativeImage;
464 }
465
466 void destroyAndroidNativeImage(buffer_handle_t buffer, VkImage image) {
467 vk->vkDestroyImage(mDevice, image, nullptr);
468 destroyTestGrallocBuffer(buffer);
469 }
470
471 AHardwareBuffer* allocateAndroidHardwareBuffer(
472 int width = kWindowSize,
473 int height = kWindowSize,
474 AHardwareBuffer_Format format =
475 AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM,
476 uint64_t usage =
477 AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
478 AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
479 AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN) {
480
481 AHardwareBuffer_Desc desc = {
482 (uint32_t)width, (uint32_t)height, 1,
483 (uint32_t)format,
484 usage,
485 4, // stride ignored for allocate; don't check this
486 };
487
488 AHardwareBuffer* buf = nullptr;
489 AHardwareBuffer_allocate(&desc, &buf);
490
491 EXPECT_NE(nullptr, buf);
492
493 return buf;
494 }
495
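// Allocates device memory with VkExportMemoryAllocateInfo so the allocation
// can be exported as an AHardwareBuffer via
// vkGetMemoryAndroidHardwareBufferANDROID (only queried when |ahw| is non-null).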
496 void exportAllocateAndroidHardwareBuffer(
497 VkMemoryDedicatedAllocateInfo* dedicated,
498 VkDeviceSize allocSize,
499 uint32_t memoryTypeIndex,
500 VkDeviceMemory* pMemory,
501 AHardwareBuffer** ahw) {
502
503 EXPECT_TRUE(mDeviceHasExternalMemorySupport);
504
505 VkExportMemoryAllocateInfo exportAi = {
506 VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, dedicated,
507 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
508 };
509
510 VkMemoryAllocateInfo allocInfo = {
511 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, &exportAi,
512 allocSize, memoryTypeIndex,
513 };
514
515 VkResult res = vk->vkAllocateMemory(mDevice, &allocInfo, nullptr, pMemory);
516 EXPECT_EQ(VK_SUCCESS, res);
517
518 if (ahw) {
519 VkMemoryGetAndroidHardwareBufferInfoANDROID getAhbInfo = {
520 VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, 0, *pMemory,
521 };
522
523 EXPECT_EQ(VK_SUCCESS,
524 vk->vkGetMemoryAndroidHardwareBufferANDROID(
525 mDevice, &getAhbInfo, ahw));
526 }
527 }
528
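// Imports an existing AHardwareBuffer as VkDeviceMemory via
// VkImportAndroidHardwareBufferInfoANDROID; the tests below rely on the
// allocation size coming from the imported buffer rather than from
// allocInfo.allocationSize.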
529 void importAllocateAndroidHardwareBuffer(
530 VkMemoryDedicatedAllocateInfo* dedicated,
531 VkDeviceSize allocSize,
532 uint32_t memoryTypeIndex,
533 AHardwareBuffer* ahw,
534 VkDeviceMemory* pMemory) {
535
536 VkImportAndroidHardwareBufferInfoANDROID importInfo = {
537 VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
538 dedicated, ahw,
539 };
540
541 VkMemoryAllocateInfo allocInfo = {
542 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, &importInfo,
543 allocSize, memoryTypeIndex,
544 };
545
546 VkDeviceMemory memory;
547 VkResult res = vk->vkAllocateMemory(mDevice, &allocInfo, nullptr, pMemory);
548
549 EXPECT_EQ(VK_SUCCESS, res);
550 }
551
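// Creates a VkImage that can be bound to AHardwareBuffer-backed memory by
// chaining VkExternalMemoryImageCreateInfo into the image create info.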
552 void createExternalImage(
553 VkImage* pImage,
554 uint32_t width = kWindowSize,
555 uint32_t height = kWindowSize,
556 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM,
557 VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL) {
558
559 VkExternalMemoryImageCreateInfo extMemImgCi = {
560 VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, 0,
561 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
562 };
563
564 VkImageCreateInfo testImageCi = {
565 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
566 (const void*)&extMemImgCi, 0,
567 VK_IMAGE_TYPE_2D, format,
568 { width, height, 1, }, 1, 1,
569 VK_SAMPLE_COUNT_1_BIT,
570 tiling,
571 VK_IMAGE_USAGE_SAMPLED_BIT,
572 VK_SHARING_MODE_EXCLUSIVE,
573 0, nullptr /* shared queue families */,
574 VK_IMAGE_LAYOUT_UNDEFINED,
575 };
576
577 EXPECT_EQ(VK_SUCCESS,
578 vk->vkCreateImage(
579 mDevice, &testImageCi, nullptr, pImage));
580 }
581
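// Returns the lowest memory type index set in |image|'s memoryTypeBits; any
// compatible type is sufficient for these tests.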
582 uint32_t getFirstMemoryTypeIndexForImage(VkImage image) {
583 VkMemoryRequirements memReqs;
584 vk->vkGetImageMemoryRequirements(
585 mDevice, image, &memReqs);
586
587 uint32_t memoryTypeIndex = 0;
588 EXPECT_NE(0, memReqs.memoryTypeBits);
589
590 for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
591 if (memReqs.memoryTypeBits & (1 << i)) {
592 memoryTypeIndex = i;
593 break;
594 }
595 }
596 return memoryTypeIndex;
597 }
598
599 VkDeviceSize getNeededMemorySizeForImage(VkImage image) {
600 VkMemoryRequirements memReqs;
601 vk->vkGetImageMemoryRequirements(
602 mDevice, image, &memReqs);
603 return memReqs.size;
604 }
605
606 VkResult allocateTestDescriptorSetsFromExistingPool(
607 uint32_t setsToAllocate,
608 VkDescriptorPool pool,
609 VkDescriptorSetLayout setLayout,
610 VkDescriptorSet* sets_out) {
611
612 std::vector<VkDescriptorSetLayout> setLayouts;
613 for (uint32_t i = 0; i < setsToAllocate; ++i) {
614 setLayouts.push_back(setLayout);
615 }
616
617 VkDescriptorSetAllocateInfo setAi = {
618 VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0,
619 pool, setsToAllocate, setLayouts.data(),
620 };
621
622 return vk->vkAllocateDescriptorSets(
623 mDevice, &setAi, sets_out);
624 }
625
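// Creates a descriptor pool and a single-binding set layout of
// |descriptorType|, then allocates |setsToAllocate| sets from that pool.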
626 VkResult allocateTestDescriptorSets(
627 uint32_t maxSetCount,
628 uint32_t setsToAllocate,
629 VkDescriptorType descriptorType,
630 VkDescriptorPoolCreateFlags poolCreateFlags,
631 VkDescriptorPool* pool_out,
632 VkDescriptorSetLayout* setLayout_out,
633 VkDescriptorSet* sets_out) {
634
635 VkDescriptorPoolSize poolSize = {
636 descriptorType,
637 maxSetCount,
638 };
639
640 VkDescriptorPoolCreateInfo poolCi = {
641 VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, 0,
642 poolCreateFlags,
643 maxSetCount /* maxSets */,
644 1 /* poolSizeCount */,
645 &poolSize,
646 };
647
648 EXPECT_EQ(VK_SUCCESS, vk->vkCreateDescriptorPool(mDevice, &poolCi, nullptr, pool_out));
649
650 VkDescriptorSetLayoutBinding binding = {
651 0,
652 descriptorType,
653 1,
654 VK_SHADER_STAGE_VERTEX_BIT,
655 nullptr,
656 };
657
658 VkDescriptorSetLayoutCreateInfo setLayoutCi = {
659 VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, 0, 0,
660 1,
661 &binding,
662 };
663
664 EXPECT_EQ(VK_SUCCESS, vk->vkCreateDescriptorSetLayout(
665 mDevice, &setLayoutCi, nullptr, setLayout_out));
666
667 return allocateTestDescriptorSetsFromExistingPool(
668 setsToAllocate, *pool_out, *setLayout_out, sets_out);
669 }
670
671 VkResult allocateImmutableSamplerDescriptorSets(
672 uint32_t maxSetCount,
673 uint32_t setsToAllocate,
674 std::vector<bool> bindingImmutabilities,
675 VkSampler* sampler_out,
676 VkDescriptorPool* pool_out,
677 VkDescriptorSetLayout* setLayout_out,
678 VkDescriptorSet* sets_out) {
679
680 VkSamplerCreateInfo samplerCi = {
681 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, 0, 0,
682 VK_FILTER_NEAREST,
683 VK_FILTER_NEAREST,
684 VK_SAMPLER_MIPMAP_MODE_NEAREST,
685 VK_SAMPLER_ADDRESS_MODE_REPEAT,
686 VK_SAMPLER_ADDRESS_MODE_REPEAT,
687 VK_SAMPLER_ADDRESS_MODE_REPEAT,
688 0.0f,
689 VK_FALSE,
690 1.0f,
691 VK_FALSE,
692 VK_COMPARE_OP_NEVER,
693 0.0f,
694 1.0f,
695 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
696 VK_FALSE,
697 };
698
699 EXPECT_EQ(VK_SUCCESS,
700 vk->vkCreateSampler(
701 mDevice, &samplerCi, nullptr, sampler_out));
702
703 VkDescriptorPoolSize poolSize = {
704 VK_DESCRIPTOR_TYPE_SAMPLER,
705 maxSetCount,
706 };
707
708 VkDescriptorPoolCreateInfo poolCi = {
709 VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, 0,
710 VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
711 maxSetCount /* maxSets */,
712 1 /* poolSizeCount */,
713 &poolSize,
714 };
715
716 EXPECT_EQ(VK_SUCCESS, vk->vkCreateDescriptorPool(mDevice, &poolCi, nullptr, pool_out));
717
718 std::vector<VkDescriptorSetLayoutBinding> samplerBindings;
719
720 for (size_t i = 0; i < bindingImmutabilities.size(); ++i) {
721 VkDescriptorSetLayoutBinding samplerBinding = {
722 (uint32_t)i, VK_DESCRIPTOR_TYPE_SAMPLER,
723 1, VK_SHADER_STAGE_FRAGMENT_BIT,
724 bindingImmutabilities[i] ? sampler_out : nullptr,
725 };
726 samplerBindings.push_back(samplerBinding);
727 }
728
729 VkDescriptorSetLayoutCreateInfo setLayoutCi = {
730 VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, 0, 0,
731 (uint32_t)samplerBindings.size(),
732 samplerBindings.data(),
733 };
734
735 EXPECT_EQ(VK_SUCCESS, vk->vkCreateDescriptorSetLayout(
736 mDevice, &setLayoutCi, nullptr, setLayout_out));
737
738 return allocateTestDescriptorSetsFromExistingPool(
739 setsToAllocate, *pool_out, *setLayout_out, sets_out);
740 }
741
742 struct gralloc_implementation mGralloc;
743
744 bool mInstanceHasGetPhysicalDeviceProperties2Support = false;
745 bool mInstanceHasExternalMemorySupport = false;
746 bool mDeviceHasExternalMemorySupport = false;
747 bool mProcessPipeRestarted = false;
748
749 VkInstance mInstance;
750 VkPhysicalDevice mPhysicalDevice;
751 VkDevice mDevice;
752 VkQueue mQueue;
753 uint32_t mHostVisibleMemoryTypeIndex;
754 uint32_t mDeviceLocalMemoryTypeIndex;
755 uint32_t mGraphicsQueueFamily;
756 };
757
758 // static
759 GoldfishOpenglTestEnv* VulkanHalTest::testEnv = nullptr;
760
761 // A basic test of Vulkan HAL:
762 // - Touch the Android loader at global, instance, and device level.
763 TEST_P(VulkanHalTest, Basic) { }
764
765 // Test: Allocate, map, flush, invalidate some host visible memory.
766 TEST_P(VulkanHalTest, MemoryMapping) {
767 static constexpr VkDeviceSize kTestAlloc = 16 * 1024;
768 VkMemoryAllocateInfo allocInfo = {
769 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, 0,
770 kTestAlloc,
771 mHostVisibleMemoryTypeIndex,
772 };
773 VkDeviceMemory mem;
774 EXPECT_EQ(VK_SUCCESS, vk->vkAllocateMemory(mDevice, &allocInfo, nullptr, &mem));
775
776 void* hostPtr;
777 EXPECT_EQ(VK_SUCCESS, vk->vkMapMemory(mDevice, mem, 0, VK_WHOLE_SIZE, 0, &hostPtr));
778
779 memset(hostPtr, 0xff, kTestAlloc);
780
781 VkMappedMemoryRange toFlush = {
782 VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, 0,
783 mem, 0, kTestAlloc,
784 };
785
786 EXPECT_EQ(VK_SUCCESS, vk->vkFlushMappedMemoryRanges(mDevice, 1, &toFlush));
787 EXPECT_EQ(VK_SUCCESS, vk->vkInvalidateMappedMemoryRanges(mDevice, 1, &toFlush));
788
789 for (uint32_t i = 0; i < kTestAlloc; ++i) {
790 EXPECT_EQ(0xff, *((uint8_t*)hostPtr + i));
791 }
792
793 int usage = GRALLOC_USAGE_HW_RENDER;
794 int format = HAL_PIXEL_FORMAT_RGBA_8888;
795 int stride;
796 buffer_handle_t buffer =
797 createTestGrallocBuffer(
798 usage, format, kWindowSize, kWindowSize, &stride);
799
800 uint64_t producerUsage, consumerUsage;
801 android_convertGralloc0To1Usage(usage, &producerUsage, &consumerUsage);
802
803 VkNativeBufferANDROID nativeBufferInfo = {
804 VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID, nullptr,
805 buffer, stride,
806 format,
807 usage,
808 {
809 consumerUsage,
810 producerUsage,
811 },
812 };
813
814 VkImageCreateInfo testImageCi = {
815 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, (const void*)&nativeBufferInfo,
816 0,
817 VK_IMAGE_TYPE_2D,
818 VK_FORMAT_R8G8B8A8_UNORM,
819 { kWindowSize, kWindowSize, 1, },
820 1, 1,
821 VK_SAMPLE_COUNT_1_BIT,
822 VK_IMAGE_TILING_OPTIMAL,
823 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
824 VK_SHARING_MODE_EXCLUSIVE,
825 0, nullptr /* shared queue families */,
826 VK_IMAGE_LAYOUT_UNDEFINED,
827 };
828
829 VkImage testAndroidNativeImage;
830 EXPECT_EQ(VK_SUCCESS, vk->vkCreateImage(mDevice, &testImageCi, nullptr,
831 &testAndroidNativeImage));
832 vk->vkDestroyImage(mDevice, testAndroidNativeImage, nullptr);
833 destroyTestGrallocBuffer(buffer);
834
835 vk->vkUnmapMemory(mDevice, mem);
836 vk->vkFreeMemory(mDevice, mem, nullptr);
837 }
838
839 // Tests creation of VkImages backed by gralloc buffers.
840 TEST_P(VulkanHalTest, AndroidNativeImageCreation) {
841 VkImage image;
842 buffer_handle_t buffer;
843 createAndroidNativeImage(&buffer, &image);
844 destroyAndroidNativeImage(buffer, image);
845 }
846
847 // Tests the path to sync Android native buffers with Gralloc buffers.
848 TEST_P(VulkanHalTest, AndroidNativeImageQueueSignal) {
849 VkImage image;
850 buffer_handle_t buffer;
851 int fenceFd;
852
853 createAndroidNativeImage(&buffer, &image);
854
855 PFN_vkQueueSignalReleaseImageANDROID func =
856 (PFN_vkQueueSignalReleaseImageANDROID)
857 vk->vkGetDeviceProcAddr(mDevice, "vkQueueSignalReleaseImageANDROID");
858
859 if (func) {
860 fprintf(stderr, "%s: qsig\n", __func__);
861 func(mQueue, 0, nullptr, image, &fenceFd);
862 }
863
864 destroyAndroidNativeImage(buffer, image);
865 }
866
867 // Tests VK_KHR_get_physical_device_properties2:
868 // new API: vkGetPhysicalDeviceProperties2KHR
869 TEST_P(VulkanHalTest, GetPhysicalDeviceProperties2) {
870 if (!mInstanceHasGetPhysicalDeviceProperties2Support) {
871 printf("Warning: Not testing VK_KHR_get_physical_device_properties2, not "
872 "supported\n");
873 return;
874 }
875
876 PFN_vkGetPhysicalDeviceProperties2KHR physProps2KHRFunc =
877 (PFN_vkGetPhysicalDeviceProperties2KHR)vk->vkGetInstanceProcAddr(
878 mInstance, "vkGetPhysicalDeviceProperties2KHR");
879
880 EXPECT_NE(nullptr, physProps2KHRFunc);
881
882 VkPhysicalDeviceProperties2KHR props2 = {
883 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, 0,
884 };
885
886 physProps2KHRFunc(mPhysicalDevice, &props2);
887
888 VkPhysicalDeviceProperties props;
889 vk->vkGetPhysicalDeviceProperties(mPhysicalDevice, &props);
890
891 EXPECT_EQ(props.vendorID, props2.properties.vendorID);
892 EXPECT_EQ(props.deviceID, props2.properties.deviceID);
893 }
894
895 // Tests VK_KHR_get_physical_device_properties2:
896 // new API: vkGetPhysicalDeviceFeatures2KHR
897 TEST_P(VulkanHalTest, GetPhysicalDeviceFeatures2KHR) {
898 if (!mInstanceHasGetPhysicalDeviceProperties2Support) {
899 printf("Warning: Not testing VK_KHR_get_physical_device_properties2, not "
900 "supported\n");
901 return;
902 }
903
904 PFN_vkGetPhysicalDeviceFeatures2KHR physDeviceFeatures =
905 (PFN_vkGetPhysicalDeviceFeatures2KHR)vk->vkGetInstanceProcAddr(
906 mInstance, "vkGetPhysicalDeviceFeatures2KHR");
907
908 EXPECT_NE(nullptr, physDeviceFeatures);
909
910 VkPhysicalDeviceFeatures2 features2 = {
911 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, 0,
912 };
913
914 physDeviceFeatures(mPhysicalDevice, &features2);
915 }
916
917 // Tests VK_KHR_get_physical_device_properties2:
918 // new API: vkGetPhysicalDeviceImageFormatProperties2KHR
919 TEST_P(VulkanHalTest, GetPhysicalDeviceImageFormatProperties2KHR) {
920 if (!mInstanceHasGetPhysicalDeviceProperties2Support) {
921 printf("Warning: Not testing VK_KHR_get_physical_device_properties2, not "
922 "supported\n");
923 return;
924 }
925
926 PFN_vkGetPhysicalDeviceImageFormatProperties2KHR
927 physDeviceImageFormatPropertiesFunc =
928 (PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)
929 vk->vkGetInstanceProcAddr(mInstance,
930 "vkGetPhysicalDeviceImageForm"
931 "atProperties2KHR");
932
933 EXPECT_NE(nullptr, physDeviceImageFormatPropertiesFunc);
934
935 VkPhysicalDeviceImageFormatInfo2 imageFormatInfo = {
936 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, 0,
937 VK_FORMAT_R8G8B8A8_UNORM,
938 VK_IMAGE_TYPE_2D,
939 VK_IMAGE_TILING_OPTIMAL,
940 VK_IMAGE_USAGE_SAMPLED_BIT,
941 0,
942 };
943
944 VkImageFormatProperties2 res = {
945 VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, 0,
946 };
947
948 EXPECT_EQ(VK_SUCCESS, physDeviceImageFormatPropertiesFunc(
949 mPhysicalDevice, &imageFormatInfo, &res));
950 }
951
952 // Tests that if we create an instance and the API version is less than 1.1,
953 // we return null for 1.1 core API calls.
954 TEST_P(VulkanHalTest, Hide1_1FunctionPointers) {
955 VkPhysicalDeviceProperties props;
956
957 vk->vkGetPhysicalDeviceProperties(mPhysicalDevice, &props);
958
959 if (props.apiVersion < VK_API_VERSION_1_1) {
960 EXPECT_EQ(nullptr,
961 vk->vkGetDeviceProcAddr(mDevice, "vkTrimCommandPool"));
962 } else {
963 EXPECT_NE(nullptr,
964 vk->vkGetDeviceProcAddr(mDevice, "vkTrimCommandPool"));
965 }
966 }
967
968 // Tests VK_ANDROID_external_memory_android_hardware_buffer's allocation API.
969 // The simplest: export allocate device local memory.
970 // Disabled for now: currently goes down invalid paths in the GL side
971 TEST_P(VulkanHalTest, DISABLED_AndroidHardwareBufferAllocate_ExportDeviceLocal) {
972 if (!mDeviceHasExternalMemorySupport) return;
973
974 VkDeviceMemory memory;
975 AHardwareBuffer* ahw;
976 exportAllocateAndroidHardwareBuffer(
977 nullptr, 4096, mDeviceLocalMemoryTypeIndex,
978 &memory, &ahw);
979
980 vk->vkFreeMemory(mDevice, memory, nullptr);
981 }
982
983 // Test AHB allocation via import.
984 // Disabled for now: currently goes down invalid paths in the GL side
985 TEST_P(VulkanHalTest, DISABLED_AndroidHardwareBufferAllocate_ImportDeviceLocal) {
986 if (!mDeviceHasExternalMemorySupport) return;
987
988 AHardwareBuffer* testBuf = allocateAndroidHardwareBuffer();
989
990 VkDeviceMemory memory;
991
992 importAllocateAndroidHardwareBuffer(
993 nullptr,
994 4096, // also checks that the top-level allocation size is ignored
995 mDeviceLocalMemoryTypeIndex,
996 testBuf,
997 &memory);
998
999 vk->vkFreeMemory(mDevice, memory, nullptr);
1000
1001 AHardwareBuffer_release(testBuf);
1002 }
1003
1004 // Test AHB allocation via export, but with a dedicated allocation (image).
1005 // Disabled for now: currently goes down invalid paths in the GL side
1006 TEST_P(VulkanHalTest, DISABLED_AndroidHardwareBufferAllocate_Dedicated_Export) {
1007 if (!mDeviceHasExternalMemorySupport) return;
1008
1009 VkImage testAhbImage;
1010 createExternalImage(&testAhbImage);
1011
1012 VkMemoryDedicatedAllocateInfo dedicatedAi = {
1013 VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, 0,
1014 testAhbImage, VK_NULL_HANDLE,
1015 };
1016
1017 VkDeviceMemory memory;
1018 AHardwareBuffer* buffer;
1019 exportAllocateAndroidHardwareBuffer(
1020 &dedicatedAi,
1021 4096,
1022 getFirstMemoryTypeIndexForImage(testAhbImage),
1023 &memory, &buffer);
1024
1025 EXPECT_EQ(VK_SUCCESS, vk->vkBindImageMemory(mDevice, testAhbImage, memory, 0));
1026
1027 vk->vkFreeMemory(mDevice, memory, nullptr);
1028 vk->vkDestroyImage(mDevice, testAhbImage, nullptr);
1029 }
1030
1031 // Test AHB allocation via import, but with a dedicated allocation (image).
1032 // Disabled for now: currently goes down invalid paths in the GL side
1033 TEST_P(VulkanHalTest, DISABLED_AndroidHardwareBufferAllocate_Dedicated_Import) {
1034 if (!mDeviceHasExternalMemorySupport) return;
1035
1036 AHardwareBuffer* testBuf =
1037 allocateAndroidHardwareBuffer();
1038
1039 VkImage testAhbImage;
1040 createExternalImage(&testAhbImage);
1041
1042 VkMemoryDedicatedAllocateInfo dedicatedAi = {
1043 VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, 0,
1044 testAhbImage, VK_NULL_HANDLE,
1045 };
1046
1047 VkDeviceMemory memory;
1048 importAllocateAndroidHardwareBuffer(
1049 &dedicatedAi,
1050 4096, // also checks that the top-level allocation size is ignored
1051 getFirstMemoryTypeIndexForImage(testAhbImage),
1052 testBuf,
1053 &memory);
1054
1055 EXPECT_EQ(VK_SUCCESS,
1056 vk->vkBindImageMemory(mDevice, testAhbImage, memory, 0));
1057
1058 vk->vkFreeMemory(mDevice, memory, nullptr);
1059 vk->vkDestroyImage(mDevice, testAhbImage, nullptr);
1060
1061 AHardwareBuffer_release(testBuf);
1062 }
1063
1064 // Test many host visible allocations.
1065 TEST_P(VulkanHalTest, HostVisibleAllocations) {
1066 static constexpr VkDeviceSize kTestAllocSizesSmall[] =
1067 { 4, 5, 6, 16, 32, 37, 64, 255, 256, 267,
1068 1024, 1023, 1025, 4095, 4096,
1069 4097, 16333, };
1070
1071 static constexpr size_t kNumSmallAllocSizes = android::base::arraySize(kTestAllocSizesSmall);
1072 static constexpr size_t kNumTrialsSmall = 1000;
1073
1074 static constexpr VkDeviceSize kTestAllocSizesLarge[] =
1075 { 1048576, 1048577, 1048575 };
1076
1077 static constexpr size_t kNumLargeAllocSizes = android::base::arraySize(kTestAllocSizesLarge);
1078 static constexpr size_t kNumTrialsLarge = 20;
1079
1080 static constexpr float kLargeAllocChance = 0.05;
1081
1082 std::default_random_engine generator;
1083 // Use a consistent seed value to avoid flakes
1084 generator.seed(0);
1085
1086 std::uniform_int_distribution<size_t>
1087 smallAllocIndexDistribution(0, kNumSmallAllocSizes - 1);
1088 std::uniform_int_distribution<size_t>
1089 largeAllocIndexDistribution(0, kNumLargeAllocSizes - 1);
1090 std::bernoulli_distribution largeAllocDistribution(kLargeAllocChance);
1091
1092 size_t smallAllocCount = 0;
1093 size_t largeAllocCount = 0;
1094
1095 VkMemoryAllocateInfo allocInfo = {
1096 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, 0,
1097 0,
1098 mHostVisibleMemoryTypeIndex,
1099 };
1100
1101 std::vector<VkDeviceMemory> allocs;
1102
1103 while (smallAllocCount < kNumTrialsSmall ||
1104 largeAllocCount < kNumTrialsLarge) {
1105
1106 VkDeviceMemory mem = VK_NULL_HANDLE;
1107 void* hostPtr = nullptr;
1108
1109 if (largeAllocDistribution(generator)) {
1110 if (largeAllocCount < kNumTrialsLarge) {
1111 fprintf(stderr, "%s: large alloc\n", __func__);
1112 allocInfo.allocationSize =
1113 kTestAllocSizesLarge[
1114 largeAllocIndexDistribution(generator)];
1115 ++largeAllocCount;
1116 }
1117 } else {
1118 if (smallAllocCount < kNumTrialsSmall) {
1119 allocInfo.allocationSize =
1120 kTestAllocSizesSmall[
1121 smallAllocIndexDistribution(generator)];
1122 ++smallAllocCount;
1123 }
1124 }
1125
1126 EXPECT_EQ(VK_SUCCESS,
1127 vk->vkAllocateMemory(mDevice, &allocInfo, nullptr, &mem));
1128
1129 if (!mem) continue;
1130
1131 allocs.push_back(mem);
1132
1133 EXPECT_EQ(VK_SUCCESS,
1134 vk->vkMapMemory(mDevice, mem, 0, VK_WHOLE_SIZE, 0, &hostPtr));
1135
1136 memset(hostPtr, 0xff, 4);
1137
1138 VkMappedMemoryRange toFlush = {
1139 VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, 0,
1140 mem, 0, 4,
1141 };
1142
1143 EXPECT_EQ(VK_SUCCESS, vk->vkFlushMappedMemoryRanges(mDevice, 1, &toFlush));
1144 EXPECT_EQ(VK_SUCCESS, vk->vkInvalidateMappedMemoryRanges(mDevice, 1, &toFlush));
1145
1146 for (uint32_t i = 0; i < 4; ++i) {
1147 EXPECT_EQ(0xff, *((uint8_t*)hostPtr + i));
1148 }
1149 }
1150
1151 for (auto mem : allocs) {
1152 vk->vkUnmapMemory(mDevice, mem);
1153 vk->vkFreeMemory(mDevice, mem, nullptr);
1154 }
1155 }
1156
1157 TEST_P(VulkanHalTest, BufferCreate) {
1158 VkBufferCreateInfo bufCi = {
1159 VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, 0, 0,
1160 4096,
1161 VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
1162 VK_SHARING_MODE_EXCLUSIVE,
1163 0, nullptr,
1164 };
1165
1166 VkBuffer buffer;
1167 vk->vkCreateBuffer(mDevice, &bufCi, nullptr, &buffer);
1168
1169 VkMemoryRequirements memReqs;
1170 vk->vkGetBufferMemoryRequirements(mDevice, buffer, &memReqs);
1171
1172 vk->vkDestroyBuffer(mDevice, buffer, nullptr);
1173 }
1174
1175 TEST_P(VulkanHalTest, SnapshotSaveLoad) {
1176 // TODO: Skip if using address space graphics
1177 if (usingAddressSpaceGraphics()) {
1178 printf("%s: skipping, ASG does not yet support snapshots\n", __func__);
1179 return;
1180 }
1181 androidSnapshot_save("test_snapshot");
1182 androidSnapshot_load("test_snapshot");
1183 }
1184
1185 TEST_P(VulkanHalTest, SnapshotSaveLoadSimpleNonDispatchable) {
1186 // TODO: Skip if using address space graphics
1187 if (usingAddressSpaceGraphics()) {
1188 printf("%s: skipping, ASG does not yet support snapshots\n", __func__);
1189 return;
1190 }
1191 VkBufferCreateInfo bufCi = {
1192 VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, 0, 0,
1193 4096,
1194 VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
1195 VK_SHARING_MODE_EXCLUSIVE,
1196 0, nullptr,
1197 };
1198
1199 VkFence fence;
1200 VkFenceCreateInfo fenceCi = { VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, 0, };
1201 vk->vkCreateFence(mDevice, &fenceCi, nullptr, &fence);
1202
1203 fprintf(stderr, "%s: guest fence: %p\n", __func__, fence);
1204
1205 VkBuffer buffer;
1206 vk->vkCreateBuffer(mDevice, &bufCi, nullptr, &buffer);
1207
1208 fprintf(stderr, "%s: guest buffer: %p\n", __func__, buffer);
1209
1210 androidSnapshot_save("test_snapshot");
1211 androidSnapshot_load("test_snapshot");
1212
1213 VkMemoryRequirements memReqs;
1214 vk->vkGetBufferMemoryRequirements(mDevice, buffer, &memReqs);
1215 vk->vkDestroyBuffer(mDevice, buffer, nullptr);
1216
1217 vk->vkDestroyFence(mDevice, fence, nullptr);
1218 }
1219
1220 // Tests save/load of host visible memory. This is not yet a viable host-only
1221 // test because, the only way to really test it is to be able to preserve a
1222 // host visible address for the simulated guest while the backing memory under
1223 // it changes due to the new snapshot. In other words, this is arbitrary
1224 // remapping of virtual addrs and is functionality that does not exist on
1225 // Linux/macOS. It would ironically require a hypervisor (or an OS that
1226 // supports freer ways of mapping memory) in order to test properly.
1227 // Disabled for now: currently goes down invalid paths in the GL side
1228 TEST_P(VulkanHalTest, DISABLED_SnapshotSaveLoadHostVisibleMemory) {
1229 // TODO: Skip if using address space graphics
1230 if (usingAddressSpaceGraphics()) {
1231 printf("%s: skipping, ASG does not yet support snapshots\n", __func__);
1232 return;
1233 }
1234
1235 static constexpr VkDeviceSize kTestAlloc = 16 * 1024;
1236 VkMemoryAllocateInfo allocInfo = {
1237 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, 0,
1238 kTestAlloc,
1239 mHostVisibleMemoryTypeIndex,
1240 };
1241 VkDeviceMemory mem;
1242 EXPECT_EQ(VK_SUCCESS, vk->vkAllocateMemory(mDevice, &allocInfo, nullptr, &mem));
1243
1244 void* hostPtr;
1245 EXPECT_EQ(VK_SUCCESS, vk->vkMapMemory(mDevice, mem, 0, VK_WHOLE_SIZE, 0, &hostPtr));
1246 androidSnapshot_save("test_snapshot");
1247 androidSnapshot_load("test_snapshot");
1248
1249
1250 memset(hostPtr, 0xff, kTestAlloc);
1251
1252 VkMappedMemoryRange toFlush = {
1253 VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, 0,
1254 mem, 0, kTestAlloc,
1255 };
1256
1257 EXPECT_EQ(VK_SUCCESS, vk->vkFlushMappedMemoryRanges(mDevice, 1, &toFlush));
1258 EXPECT_EQ(VK_SUCCESS, vk->vkInvalidateMappedMemoryRanges(mDevice, 1, &toFlush));
1259
1260 vk->vkUnmapMemory(mDevice, mem);
1261 vk->vkFreeMemory(mDevice, mem, nullptr);
1262 }
1263
1264 // Tests save/load of a dispatchable handle, such as VkCommandBuffer.
1265 // Note that the internal state of the command buffer is not snapshotted yet.
1266 TEST_P(VulkanHalTest, SnapshotSaveLoadSimpleDispatchable) {
1267 // TODO: Skip if using address space graphics
1268 if (usingAddressSpaceGraphics()) {
1269 printf("%s: skipping, ASG does not yet support snapshots\n", __func__);
1270 return;
1271 }
1272 VkCommandPoolCreateInfo poolCi = {
1273 VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, 0, 0, mGraphicsQueueFamily,
1274 };
1275
1276 VkCommandPool pool;
1277 vk->vkCreateCommandPool(mDevice, &poolCi, nullptr, &pool);
1278
1279 VkCommandBuffer cb;
1280 VkCommandBufferAllocateInfo cbAi = {
1281 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0,
1282 pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1,
1283 };
1284
1285 vk->vkAllocateCommandBuffers(mDevice, &cbAi, &cb);
1286
1287 androidSnapshot_save("test_snapshot");
1288 androidSnapshot_load("test_snapshot");
1289
1290 vk->vkFreeCommandBuffers(mDevice, pool, 1, &cb);
1291 vk->vkDestroyCommandPool(mDevice, pool, nullptr);
1292 }
1293
1294 // Tests that dependencies are respected between different handle types,
1295 // such as VkImage and VkImageView.
1296 TEST_P(VulkanHalTest, SnapshotSaveLoadDependentHandlesImageView) {
1297 // TODO: Skip if using address space graphics
1298 if (usingAddressSpaceGraphics()) {
1299 printf("%s: skipping, ASG does not yet support snapshots\n", __func__);
1300 return;
1301 }
1302 VkImage image;
1303 VkImageCreateInfo imageCi = {
1304 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, 0, 0,
1305 VK_IMAGE_TYPE_2D,
1306 VK_FORMAT_R8G8B8A8_UNORM,
1307 { 1, 1, 1, },
1308 1, 1,
1309 VK_SAMPLE_COUNT_1_BIT,
1310 VK_IMAGE_TILING_LINEAR,
1311 VK_IMAGE_USAGE_TRANSFER_DST_BIT,
1312 VK_SHARING_MODE_EXCLUSIVE,
1313 0, nullptr,
1314 VK_IMAGE_LAYOUT_UNDEFINED,
1315 };
1316
1317 vk->vkCreateImage(mDevice, &imageCi, nullptr, &image);
1318
1319 VkImageView imageView;
1320 VkImageViewCreateInfo imageViewCi = {
1321 VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, 0, 0,
1322 image,
1323 VK_IMAGE_VIEW_TYPE_2D,
1324 VK_FORMAT_R8G8B8A8_UNORM,
1325 {
1326 VK_COMPONENT_SWIZZLE_IDENTITY,
1327 VK_COMPONENT_SWIZZLE_IDENTITY,
1328 VK_COMPONENT_SWIZZLE_IDENTITY,
1329 VK_COMPONENT_SWIZZLE_IDENTITY,
1330 },
1331 { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1, },
1332 };
1333
1334 vk->vkCreateImageView(mDevice, &imageViewCi, nullptr, &imageView);
1335
1336 androidSnapshot_save("test_snapshot");
1337 androidSnapshot_load("test_snapshot");
1338
1339 vk->vkDestroyImageView(mDevice, imageView, nullptr);
1340 vk->vkCreateImageView(mDevice, &imageViewCi, nullptr, &imageView);
1341 vk->vkDestroyImageView(mDevice, imageView, nullptr);
1342
1343 vk->vkDestroyImage(mDevice, image, nullptr);
1344 }
1345
1346 // Tests beginning and ending command buffers from separate threads.
1347 TEST_P(VulkanHalTest, SeparateThreadCommandBufferBeginEnd) {
1348 Lock lock;
1349 ConditionVariable cvSequence;
1350 uint32_t begins = 0;
1351 uint32_t ends = 0;
1352 constexpr uint32_t kTrials = 1000;
1353
1354 VkCommandPoolCreateInfo poolCi = {
1355 VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, 0, 0, mGraphicsQueueFamily,
1356 };
1357
1358 VkCommandPool pool;
1359 vk->vkCreateCommandPool(mDevice, &poolCi, nullptr, &pool);
1360
1361 VkCommandBuffer cb;
1362 VkCommandBufferAllocateInfo cbAi = {
1363 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0,
1364 pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1,
1365 };
1366
1367 vk->vkAllocateCommandBuffers(mDevice, &cbAi, &cb);
1368
1369 auto timeoutDeadline = []() {
1370 return System::get()->getUnixTimeUs() + 5000000; // 5 s
1371 };
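// The two threads alternate strictly: the begin thread waits for
// begins == ends and the end thread waits for begins == ends + 1, using the
// condition variable with a 5-second timeout so a missed signal fails the
// test instead of hanging it.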
1372
1373 FunctorThread beginThread([this, cb, &lock, &cvSequence, &begins, &ends, timeoutDeadline]() {
1374
1375 while (begins < kTrials) {
1376 AutoLock a(lock);
1377
1378 while (ends != begins) {
1379 if (!cvSequence.timedWait(&lock, timeoutDeadline())) {
1380 EXPECT_TRUE(false) << "Error: begin thread timed out!";
1381 return 0;
1382 }
1383 }
1384
1385 VkCommandBufferBeginInfo beginInfo = {
1386 VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, 0,
1387 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, 0,
1388 };
1389
1390 vk->vkBeginCommandBuffer(cb, &beginInfo);
1391
1392 ++begins;
1393 cvSequence.signal();
1394 }
1395
1396 vk->vkDeviceWaitIdle(mDevice);
1397 return 0;
1398 });
1399
1400 FunctorThread endThread([this, cb, &lock, &cvSequence, &begins, &ends, timeoutDeadline]() {
1401
1402 while (ends < kTrials) {
1403 AutoLock a(lock);
1404
1405 while (begins - ends != 1) {
1406 if (!cvSequence.timedWait(&lock, timeoutDeadline())) {
1407 EXPECT_TRUE(false) << "Error: end thread timed out!";
1408 return 0;
1409 }
1410 }
1411
1412 vk->vkEndCommandBuffer(cb);
1413
1414 ++ends;
1415 cvSequence.signal();
1416 }
1417
1418 vk->vkDeviceWaitIdle(mDevice);
1419 return 0;
1420 });
1421
1422 beginThread.start();
1423 endThread.start();
1424 beginThread.wait();
1425 endThread.wait();
1426
1427 vk->vkFreeCommandBuffers(mDevice, pool, 1, &cb);
1428 vk->vkDestroyCommandPool(mDevice, pool, nullptr);
1429 }
1430
1431 // Tests creating a bunch of descriptor sets and freeing them via vkFreeDescriptorSets:
1432 // 1. Via vkFreeDescriptorSets directly
1433 // 2. Via vkResetDescriptorPool
1434 // 3. Via vkDestroyDescriptorPool
1435 // 4. Via vkResetDescriptorPool and double frees in vkFreeDescriptorSets
1436 // 5. Via vkDestroyDescriptorPool and subsequent frees in vkFreeDescriptorSets
1437 // 6. Via vkResetDescriptorPool, creating more, and freeing via vkFreeDescriptorSets
1438 // (because vkFree* APIs are expected to never fail)
1439 // https://github.com/KhronosGroup/Vulkan-Docs/issues/1070
1440 TEST_P(VulkanHalTest, DescriptorSetAllocFreeBasic) {
1441 const uint32_t kSetCount = 4;
1442 VkDescriptorPool pool;
1443 VkDescriptorSetLayout setLayout;
1444 std::vector<VkDescriptorSet> sets(kSetCount);
1445
1446 EXPECT_EQ(VK_SUCCESS, allocateTestDescriptorSets(
1447 kSetCount, kSetCount,
1448 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
1449 VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
1450 &pool, &setLayout, sets.data()));
1451
1452 EXPECT_EQ(VK_SUCCESS, vk->vkFreeDescriptorSets(
1453 mDevice, pool, kSetCount, sets.data()));
1454
1455 // The double free should also work
1456 EXPECT_EQ(VK_SUCCESS, vk->vkFreeDescriptorSets(
1457 mDevice, pool, kSetCount, sets.data()));
1458
1459 // Alloc/free again should also work
1460 EXPECT_EQ(VK_SUCCESS,
1461 allocateTestDescriptorSetsFromExistingPool(
1462 kSetCount, pool, setLayout, sets.data()));
1463
1464 EXPECT_EQ(VK_SUCCESS, vk->vkFreeDescriptorSets(
1465 mDevice, pool, kSetCount, sets.data()));
1466
1467 vk->vkDestroyDescriptorPool(mDevice, pool, nullptr);
1468 }
1469
1470 // Tests creating a bunch of descriptor sets and freeing them via
1471 // vkResetDescriptorPool, and that vkFreeDescriptorSets still succeeds.
1472 TEST_P(VulkanHalTest, DescriptorSetAllocFreeReset) {
1473 const uint32_t kSetCount = 4;
1474 VkDescriptorPool pool;
1475 VkDescriptorSetLayout setLayout;
1476 std::vector<VkDescriptorSet> sets(kSetCount);
1477
1478 EXPECT_EQ(VK_SUCCESS, allocateTestDescriptorSets(
1479 kSetCount, kSetCount,
1480 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
1481 VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
1482 &pool, &setLayout, sets.data()));
1483
1484 EXPECT_EQ(VK_SUCCESS, vk->vkResetDescriptorPool(
1485 mDevice, pool, 0));
1486
1487 // The double free should also work
1488 EXPECT_EQ(VK_SUCCESS, vk->vkFreeDescriptorSets(
1489 mDevice, pool, kSetCount, sets.data()));
1490
1491 // Alloc/reset/free again should also work
1492 EXPECT_EQ(VK_SUCCESS,
1493 allocateTestDescriptorSetsFromExistingPool(
1494 kSetCount, pool, setLayout, sets.data()));
1495
1496 EXPECT_EQ(VK_SUCCESS, vk->vkResetDescriptorPool(
1497 mDevice, pool, 0));
1498
1499 EXPECT_EQ(VK_SUCCESS, vk->vkFreeDescriptorSets(
1500 mDevice, pool, kSetCount, sets.data()));
1501
1502 vk->vkDestroyDescriptorPool(mDevice, pool, nullptr);
1503 }
1504
1505 // Tests creating a bunch of descriptor sets and freeing them via vkDestroyDescriptorPool, and that vkFreeDescriptorSets still succeeds.
1506 TEST_P(VulkanHalTest, DescriptorSetAllocFreeDestroy) {
1507 const uint32_t kSetCount = 4;
1508 VkDescriptorPool pool;
1509 VkDescriptorSetLayout setLayout;
1510 std::vector<VkDescriptorSet> sets(kSetCount);
1511
1512 EXPECT_EQ(VK_SUCCESS, allocateTestDescriptorSets(
1513 kSetCount, kSetCount,
1514 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
1515 VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
1516 &pool, &setLayout, sets.data()));
1517
1518 vk->vkDestroyDescriptorPool(mDevice, pool, nullptr);
1519
1520 EXPECT_EQ(VK_SUCCESS, vk->vkFreeDescriptorSets(
1521 mDevice, pool, kSetCount, sets.data()));
1522 }
1523
1524 // Tests that immutable sampler descriptors properly cause
1525 // the |sampler| field of VkWriteDescriptorSet's descriptor image info
1526 // to be ignored.
1527 TEST_P(VulkanHalTest, ImmutableSamplersSuppressVkWriteDescriptorSetSampler) {
1528 const uint32_t kSetCount = 4;
1529 std::vector<bool> bindingImmutabilities = {
1530 false,
1531 true,
1532 false,
1533 false,
1534 };
1535
1536 VkSampler sampler;
1537 VkDescriptorPool pool;
1538 VkDescriptorSetLayout setLayout;
1539 std::vector<VkDescriptorSet> sets(kSetCount);
1540
1541 EXPECT_EQ(VK_SUCCESS,
1542 allocateImmutableSamplerDescriptorSets(
1543 kSetCount * bindingImmutabilities.size(),
1544 kSetCount,
1545 bindingImmutabilities,
1546 &sampler,
1547 &pool,
1548 &setLayout,
1549 sets.data()));
1550
1551 for (uint32_t i = 0; i < bindingImmutabilities.size(); ++i) {
1552 VkDescriptorImageInfo imageInfo = {
1553 bindingImmutabilities[i] ? (VkSampler)0xdeadbeef : sampler,
1554 0,
1555 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
1556 };
1557
1558 VkWriteDescriptorSet write = {
1559 VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, 0,
1560 sets[0],
1561 1,
1562 0,
1563 1,
1564 VK_DESCRIPTOR_TYPE_SAMPLER,
1565 &imageInfo,
1566 nullptr,
1567 nullptr,
1568 };
1569
1570 vk->vkUpdateDescriptorSets(mDevice, 1, &write, 0, nullptr);
1571 }
1572
1573 vk->vkDestroyDescriptorPool(mDevice, pool, nullptr);
1574 vk->vkDestroyDescriptorSetLayout(mDevice, setLayout, nullptr);
1575 vk->vkDestroySampler(mDevice, sampler, nullptr);
1576 }
1577
1578
1579 // Tests vkGetImageMemoryRequirements2
1580 TEST_P(VulkanHalTest, GetImageMemoryRequirements2) {
1581 VkImageCreateInfo testImageCi = {
1582 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, nullptr,
1583 0,
1584 VK_IMAGE_TYPE_2D,
1585 VK_FORMAT_R8G8B8A8_UNORM,
1586 { kWindowSize, kWindowSize, 1, },
1587 1, 1,
1588 VK_SAMPLE_COUNT_1_BIT,
1589 VK_IMAGE_TILING_OPTIMAL,
1590 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
1591 VK_SHARING_MODE_EXCLUSIVE,
1592 0, nullptr /* shared queue families */,
1593 VK_IMAGE_LAYOUT_UNDEFINED,
1594 };
1595
1596 VkImage testImage;
1597 EXPECT_EQ(VK_SUCCESS, vk->vkCreateImage(mDevice, &testImageCi, nullptr,
1598 &testImage));
1599
1600 VkImageMemoryRequirementsInfo2 info2 = {
1601 VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, 0,
1602 testImage,
1603 };
1604
1605 VkMemoryDedicatedRequirements dedicatedReqs {
1606 VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, 0,
1607 };
1608
1609 VkMemoryRequirements2 reqs2 = {
1610 VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, 0,
1611 };
1612
1613 reqs2.pNext = &dedicatedReqs;
1614
1615 PFN_vkGetImageMemoryRequirements2KHR func =
1616 (PFN_vkGetImageMemoryRequirements2KHR)
1617 vk->vkGetDeviceProcAddr(mDevice, "vkGetImageMemoryRequirements2KHR");
1618
1619 EXPECT_NE(nullptr, func);
1620
1621 func(mDevice, &info2, &reqs2);
1622
1623 vk->vkDestroyImage(mDevice, testImage, nullptr);
1624 }
1625
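// Creates and destroys a batch of images, then simulates a guest process
// restart; the host is expected to tear down all state tied to the old
// process without crashing, after which setupVulkan() rebuilds the
// instance/device for the next trial.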
1626 TEST_P(VulkanHalTest, ProcessCleanup) {
1627 static constexpr uint32_t kNumTrials = 10;
1628 static constexpr uint32_t kNumImagesPerTrial = 100;
1629
1630 for (uint32_t i = 0; i < kNumTrials; ++i) {
1631 VkImage images[kNumImagesPerTrial];
1632 for (uint32_t j = 0; j < kNumImagesPerTrial; ++j) {
1633 VkImageCreateInfo imageCi = {
1634 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, 0, 0,
1635 VK_IMAGE_TYPE_2D,
1636 VK_FORMAT_R8G8B8A8_UNORM,
1637 { 1, 1, 1, },
1638 1, 1,
1639 VK_SAMPLE_COUNT_1_BIT,
1640 VK_IMAGE_TILING_LINEAR,
1641 VK_IMAGE_USAGE_TRANSFER_DST_BIT,
1642 VK_SHARING_MODE_EXCLUSIVE,
1643 0, nullptr,
1644 VK_IMAGE_LAYOUT_UNDEFINED,
1645 };
1646
1647 vk->vkCreateImage(mDevice, &imageCi, nullptr, &images[j]);
1648 }
1649
1650 for (uint32_t j = 0; j < kNumImagesPerTrial; ++j) {
1651 vk->vkDestroyImage(mDevice, images[j], nullptr);
1652 }
1653
1654 restartProcessPipeAndHostConnection();
1655 setupVulkan();
1656 }
1657 }
1658
1659 // Multithreaded benchmarks: Speed of light with simple vkCmd's.
1660 //
1661 // Currently disabled until we land VulkanQueueSubmitWithCommands;
1662 // syncEncodersFor** interferes with the rc
1663 // encoders.
1664 TEST_P(VulkanHalTest, DISABLED_MultithreadedSimpleCommand) {
1665 Lock lock;
1666
1667 constexpr uint32_t kThreadCount = 4;
1668 VkDescriptorPool pool;
1669 VkDescriptorSetLayout setLayout;
1670
1671 std::vector<VkDescriptorSet> sets(kThreadCount);
1672
1673 // Setup descriptor sets
1674 EXPECT_EQ(VK_SUCCESS, allocateTestDescriptorSets(
1675 kThreadCount, kThreadCount,
1676 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
1677 VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
1678 &pool, &setLayout, sets.data()));
1679
1680 VkPipelineLayout pipelineLayout;
1681
1682 VkPipelineLayoutCreateInfo pipelineLayoutCi = {
1683 VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, 0, 0,
1684 1, &setLayout,
1685 0, nullptr,
1686 };
1687 vk->vkCreatePipelineLayout(mDevice, &pipelineLayoutCi, nullptr, &pipelineLayout);
1688
1689 // Setup command buffers
1690 VkCommandPoolCreateInfo poolCi = {
1691 VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, 0, 0, mGraphicsQueueFamily,
1692 };
1693
1694 VkCommandPool commandPool;
1695 vk->vkCreateCommandPool(mDevice, &poolCi, nullptr, &commandPool);
1696
1697 VkCommandBuffer cbs[kThreadCount];
1698 VkCommandBufferAllocateInfo cbAi = {
1699 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0,
1700 commandPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, kThreadCount,
1701 };
1702
1703 vk->vkAllocateCommandBuffers(mDevice, &cbAi, cbs);
1704
1705 std::vector<FunctorThread*> threads;
1706
1707 constexpr uint32_t kRecordsPerThread = 20000;
1708 constexpr uint32_t kRepeatSubmits = 1;
1709 constexpr uint32_t kTotalRecords = kThreadCount * kRecordsPerThread * kRepeatSubmits;
1710
1711 for (uint32_t i = 0; i < kThreadCount; ++i) {
1712 FunctorThread* thread = new FunctorThread([this, &lock, cbs, sets, pipelineLayout, i]() {
1713 VkCommandBufferBeginInfo beginInfo = {
1714 VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, 0,
1715 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, 0,
1716 };
1717
1718 for (uint32_t k = 0; k < kRepeatSubmits; ++k) {
1719
1720 vk->vkBeginCommandBuffer(cbs[i], &beginInfo);
1721 VkRect2D scissor = {
1722 { 0, 0, },
1723 { 256, 256, },
1724 };
1725
1726 for (uint32_t j = 0; j < kRecordsPerThread; ++j) {
1727 vk->vkCmdBindDescriptorSets(
1728 cbs[i],
1729 VK_PIPELINE_BIND_POINT_GRAPHICS,
1730 pipelineLayout, 0, 1, &sets[i], 0, nullptr);
1731 }
1732
1733 vk->vkEndCommandBuffer(cbs[i]);
1734 VkSubmitInfo si = {
1735 VK_STRUCTURE_TYPE_SUBMIT_INFO, 0,
1736 0, 0,
1737 0,
1738 1, &cbs[i],
1739 0, 0,
1740 };
1741
1742 {
1743 AutoLock queueLock(lock);
1744 vk->vkQueueSubmit(mQueue, 1, &si, 0);
1745 }
1746
1747 }
1748 VkPhysicalDeviceMemoryProperties memProps;
1749 vk->vkGetPhysicalDeviceMemoryProperties(mPhysicalDevice, &memProps);
1750 return 0;
1751 });
1752 threads.push_back(thread);
1753 }
1754
1755 auto cpuTimeStart = System::cpuTime();
1756
1757 for (uint32_t i = 0; i < kThreadCount; ++i) {
1758 threads[i]->start();
1759 }
1760
1761 for (uint32_t i = 0; i < kThreadCount; ++i) {
1762 threads[i]->wait();
1763 delete threads[i];
1764 }
1765
1766 vk->vkQueueWaitIdle(mQueue);
1767 vk->vkDeviceWaitIdle(mDevice);
1768
1769 auto cpuTime = System::cpuTime() - cpuTimeStart;
1770
1771 uint64_t duration_us = cpuTime.wall_time_us;
1772 uint64_t duration_cpu_us = cpuTime.usageUs();
1773
1774 float ms = duration_us / 1000.0f;
1775 float sec = duration_us / 1000000.0f;
1776
1777 float submitHz = (float)kTotalRecords / sec;
1778
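// Rate math, by example: 4 threads x 20000 records x 1 submit = 80000 records;
// if that takes 40 ms of wall-clock time, submitHz = 80000 / 0.04 s = 2,000,000 Hz,
// i.e. 500,000 Hz per thread.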
1779 printf("Record %u times in %f ms. Rate: %f Hz (%f Hz per thread)\n", kTotalRecords, ms, submitHz, (float)submitHz / (float)kThreadCount);
1780
1781 vk->vkFreeCommandBuffers(mDevice, commandPool, kThreadCount, cbs); // free all kThreadCount buffers allocated above
1782 vk->vkDestroyCommandPool(mDevice, commandPool, nullptr);
1783
1784 EXPECT_EQ(VK_SUCCESS, vk->vkFreeDescriptorSets(
1785 mDevice, pool, kThreadCount, sets.data()));
1786
1787 vk->vkDestroyDescriptorPool(mDevice, pool, nullptr);
1788 }
1789
1790 // Multithreaded benchmarks: round-trip speed of light.
1791 // Currently disabled until we land VulkanQueueSubmitWithCommands;
1792 // syncEncodersFor** interferes with the rc
1793 // encoders.
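// Each thread issues kRecordsPerThread vkGetPhysicalDeviceMemoryProperties calls;
// each call is treated here as one guest<->host round trip, so submitHz approximates
// round trips per second.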
1794 TEST_P(VulkanHalTest, DISABLED_MultithreadedRoundTrip) {
1795 android::base::enableTracing();
1796
1797 constexpr uint32_t kThreadCount = 6;
1798
1799 std::vector<FunctorThread*> threads;
1800 constexpr uint32_t kRecordsPerThread = 50;
1801 constexpr uint32_t kTotalRecords = kThreadCount * kRecordsPerThread;
1802
1803 for (uint32_t i = 0; i < kThreadCount; ++i) {
1804 FunctorThread* thread = new FunctorThread([this]() {
1805 for (uint32_t j = 0; j < kRecordsPerThread; ++j) {
1806 VkPhysicalDeviceMemoryProperties memProps;
1807 vk->vkGetPhysicalDeviceMemoryProperties(mPhysicalDevice, &memProps);
1808 }
1809 return 0;
1810 });
1811 threads.push_back(thread);
1812 }
1813
1814 auto cpuTimeStart = System::cpuTime();
1815
1816 for (uint32_t i = 0; i < kThreadCount; ++i) {
1817 threads[i]->start();
1818 }
1819
1820 for (uint32_t i = 0; i < kThreadCount; ++i) {
1821 threads[i]->wait();
1822 delete threads[i];
1823 }
1824
1825 vk->vkDeviceWaitIdle(mDevice);
1826
1827 auto cpuTime = System::cpuTime() - cpuTimeStart;
1828
1829 uint64_t duration_us = cpuTime.wall_time_us;
1830 uint64_t duration_cpu_us = cpuTime.usageUs();
1831
1832 float ms = duration_us / 1000.0f;
1833 float sec = duration_us / 1000000.0f;
1834
1835 float submitHz = (float)kTotalRecords / sec;
1836
1837 printf("Round trip %u times in %f ms. Rate: %f Hz (%f Hz per thread)\n", kTotalRecords, ms, submitHz, (float)submitHz / (float)kThreadCount);
1838 android::base::disableTracing();
1839 usleep(1000000);
1840 }
1841
1842 // Secondary command buffers.
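// Each thread records half of its binds into each of two secondary command buffers,
// then replays both from a primary command buffer via vkCmdExecuteCommands and
// submits the primary.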
1843 TEST_P(VulkanHalTest, SecondaryCommandBuffers) {
1844 constexpr uint32_t kThreadCount = 1;
1845 VkDescriptorPool pool;
1846 VkDescriptorSetLayout setLayout;
1847
1848 std::vector<VkDescriptorSet> sets(kThreadCount);
1849
1850 // Setup descriptor sets
1851 EXPECT_EQ(VK_SUCCESS, allocateTestDescriptorSets(
1852 kThreadCount, kThreadCount,
1853 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
1854 VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
1855 &pool, &setLayout, sets.data()));
1856
1857 VkPipelineLayout pipelineLayout;
1858
1859 VkPipelineLayoutCreateInfo pipelineLayoutCi = {
1860 VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, 0, 0,
1861 1, &setLayout,
1862 0, nullptr,
1863 };
1864 vk->vkCreatePipelineLayout(mDevice, &pipelineLayoutCi, nullptr, &pipelineLayout);
1865
1866 // Setup command buffers
1867 VkCommandPoolCreateInfo poolCi = {
1868 VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, 0, 0, mGraphicsQueueFamily,
1869 };
1870
1871 VkCommandPool commandPool;
1872 vk->vkCreateCommandPool(mDevice, &poolCi, nullptr, &commandPool);
1873
1874 VkCommandBuffer cbs[kThreadCount];
1875 VkCommandBuffer cbs2[kThreadCount * 2];
1876
1877 VkCommandBufferAllocateInfo cbAi = {
1878 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0,
1879 commandPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, kThreadCount,
1880 };
1881
1882 vk->vkAllocateCommandBuffers(mDevice, &cbAi, cbs);
1883
1884 VkCommandBufferAllocateInfo cbAi2 = {
1885 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0,
1886 commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY, kThreadCount * 2, // executed via vkCmdExecuteCommands below, so these must be secondary
1887 };
1888
1889 vk->vkAllocateCommandBuffers(mDevice, &cbAi2, cbs2);
1890
1891 std::vector<FunctorThread*> threads;
1892
1893 constexpr uint32_t kRecordsPerThread = 20000;
1894 constexpr uint32_t kRepeatSubmits = 1;
1895 constexpr uint32_t kTotalRecords = kThreadCount * kRecordsPerThread * kRepeatSubmits;
1896
1897 for (uint32_t i = 0; i < kThreadCount; ++i) {
1898 FunctorThread* thread = new FunctorThread([this, cbs, cbs2, sets, pipelineLayout, i]() {
1899 VkCommandBufferInheritanceInfo inheritanceInfo = {
1900 VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, 0,
1901 VK_NULL_HANDLE, 0,
1902 VK_NULL_HANDLE,
1903 VK_FALSE,
1904 0,
1905 0,
1906 };
1907
1908 VkCommandBufferBeginInfo secondaryBeginInfo = {
1909 VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, 0,
1910 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, &inheritanceInfo,
1911 };
1912
1913 VkCommandBufferBeginInfo primaryBeginInfo = {
1914 VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, 0,
1915 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, 0,
1916 };
1917
1918 for (uint32_t k = 0; k < kRepeatSubmits; ++k) {
1919 // Secondaries
1920 {
1921 VkCommandBuffer first = cbs2[2 * i];
1922 VkCommandBuffer second = cbs2[2 * i + 1];
1923
1924 vk->vkBeginCommandBuffer(first, &secondaryBeginInfo);
1925 for (uint32_t j = 0; j < kRecordsPerThread / 2; ++j) {
1926 vk->vkCmdBindDescriptorSets(
1927 first,
1928 VK_PIPELINE_BIND_POINT_GRAPHICS,
1929 pipelineLayout, 0, 1, &sets[i], 0, nullptr);
1930 }
1931 vk->vkEndCommandBuffer(first);
1932
1933 vk->vkBeginCommandBuffer(second, &secondaryBeginInfo);
1934 for (uint32_t j = 0; j < kRecordsPerThread / 2; ++j) {
1935 vk->vkCmdBindDescriptorSets(
1936 second,
1937 VK_PIPELINE_BIND_POINT_GRAPHICS,
1938 pipelineLayout, 0, 1, &sets[i], 0, nullptr);
1939 }
1940 vk->vkEndCommandBuffer(second);
1941 }
1942
1943 vk->vkBeginCommandBuffer(cbs[i], &primaryBeginInfo);
1944 vk->vkCmdExecuteCommands(cbs[i], 2, cbs2 + 2 * i);
1945 vk->vkEndCommandBuffer(cbs[i]);
1946
1947 VkSubmitInfo si = {
1948 VK_STRUCTURE_TYPE_SUBMIT_INFO, 0,
1949 0, 0,
1950 0,
1951 1, &cbs[i],
1952 0, 0,
1953 };
1954
1955 vk->vkQueueSubmit(mQueue, 1, &si, 0);
1956 }
1957 VkPhysicalDeviceMemoryProperties memProps;
1958 vk->vkGetPhysicalDeviceMemoryProperties(mPhysicalDevice, &memProps);
1959 return 0;
1960 });
1961 threads.push_back(thread);
1962 }
1963
1964 auto cpuTimeStart = System::cpuTime();
1965
1966 for (uint32_t i = 0; i < kThreadCount; ++i) {
1967 threads[i]->start();
1968 }
1969
1970 for (uint32_t i = 0; i < kThreadCount; ++i) {
1971 threads[i]->wait();
1972 delete threads[i];
1973 }
1974
1975 vk->vkQueueWaitIdle(mQueue);
1976 vk->vkDeviceWaitIdle(mDevice);
1977
1978 auto cpuTime = System::cpuTime() - cpuTimeStart;
1979
1980 uint64_t duration_us = cpuTime.wall_time_us;
1981 uint64_t duration_cpu_us = cpuTime.usageUs();
1982
1983 float ms = duration_us / 1000.0f;
1984 float sec = duration_us / 1000000.0f;
1985
1986 float submitHz = (float)kTotalRecords / sec;
1987
1988 printf("Record %u times in %f ms. Rate: %f Hz (%f Hz per thread)\n", kTotalRecords, ms, submitHz, (float)submitHz / (float)kThreadCount);
1989
1990 vk->vkFreeCommandBuffers(mDevice, commandPool, kThreadCount * 2, cbs2);
1991 vk->vkFreeCommandBuffers(mDevice, commandPool, kThreadCount, cbs);
1992 vk->vkDestroyCommandPool(mDevice, commandPool, nullptr);
1993
1994 EXPECT_EQ(VK_SUCCESS, vk->vkFreeDescriptorSets(
1995 mDevice, pool, kThreadCount, sets.data()));
1996
1997 vk->vkDestroyDescriptorPool(mDevice, pool, nullptr);
1998 }
1999
2000 // Secondary command buffers, with vkUpdateDescriptorSets (writes and copies) issued between binds.
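// The set layout used here has an immutable sampler at binding 0 and uniform-buffer
// arrays at bindings 1 (6 elements) and 10 (2 elements); before each bind, every set
// is updated with a mix of vkUpdateDescriptorSets writes and same-set copies.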
2001 TEST_P(VulkanHalTest, SecondaryCommandBuffersWithDescriptorSetUpdate) {
2002 constexpr uint32_t kThreadCount = 1;
2003 VkDescriptorPool pool;
2004 VkDescriptorSetLayout setLayout;
2005
2006 // Setup descriptor sets
2007 uint32_t maxSetsTotal = 4;
2008
2009 std::vector<VkDescriptorSet> sets(maxSetsTotal);
2010
2011 VkSampler sampler;
2012 VkBuffer buffers[2];
2013
2014 VkSamplerCreateInfo samplerCi = {
2015 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, 0, 0,
2016 VK_FILTER_NEAREST,
2017 VK_FILTER_NEAREST,
2018 VK_SAMPLER_MIPMAP_MODE_NEAREST,
2019 VK_SAMPLER_ADDRESS_MODE_REPEAT,
2020 VK_SAMPLER_ADDRESS_MODE_REPEAT,
2021 VK_SAMPLER_ADDRESS_MODE_REPEAT,
2022 0.0f,
2023 VK_FALSE,
2024 1.0f,
2025 VK_FALSE,
2026 VK_COMPARE_OP_NEVER,
2027 0.0f,
2028 1.0f,
2029 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
2030 VK_FALSE,
2031 };
2032
2033 VkBufferCreateInfo bufCi = {
2034 VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, 0, 0,
2035 4096,
2036 VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
2037 VK_SHARING_MODE_EXCLUSIVE,
2038 0, nullptr,
2039 };
2040
2041 EXPECT_EQ(VK_SUCCESS,
2042 vk->vkCreateSampler(
2043 mDevice, &samplerCi, nullptr, &sampler));
2044
2045 EXPECT_EQ(VK_SUCCESS,
2046 vk->vkCreateBuffer(mDevice, &bufCi, nullptr, &buffers[0]));
2047
2048 EXPECT_EQ(VK_SUCCESS,
2049 vk->vkCreateBuffer(mDevice, &bufCi, nullptr, &buffers[1]));
2050
2051 VkDescriptorPoolSize poolSizes[] = {
2052 { VK_DESCRIPTOR_TYPE_SAMPLER, 1 * maxSetsTotal, },
2053 { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 8 * maxSetsTotal, },
2054 };
2055
2056 VkDescriptorPoolCreateInfo dpCi = {
2057 VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, 0,
2058 VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
2059 maxSetsTotal /* maxSets */,
2060 2 /* poolSizeCount */,
2061 poolSizes,
2062 };
2063
2064 EXPECT_EQ(VK_SUCCESS, vk->vkCreateDescriptorPool(mDevice, &dpCi, nullptr, &pool));
2065
2066 VkDescriptorSetLayoutBinding bindings[] = {
2067 { 0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_VERTEX_BIT, &sampler, }, /* immutable sampler */
2068 { 1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 6, VK_SHADER_STAGE_VERTEX_BIT, nullptr, },
2069 { 10, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2, VK_SHADER_STAGE_VERTEX_BIT, nullptr, },
2070 };
2071
2072 VkDescriptorSetLayoutCreateInfo setLayoutCi = {
2073 VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, 0, 0,
2074 sizeof(bindings) / sizeof(VkDescriptorSetLayoutBinding),
2075 bindings,
2076 };
2077
2078 EXPECT_EQ(VK_SUCCESS, vk->vkCreateDescriptorSetLayout(
2079 mDevice, &setLayoutCi, nullptr, &setLayout));
2080
2081 std::vector<VkDescriptorSetLayout> setLayouts(maxSetsTotal, setLayout);
2082
2083 VkDescriptorSetAllocateInfo setAi = {
2084 VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0,
2085 pool, maxSetsTotal, setLayouts.data(),
2086 };
2087
2088 EXPECT_EQ(VK_SUCCESS, vk->vkAllocateDescriptorSets(
2089 mDevice, &setAi, sets.data()));
2090
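// vkResetDescriptorPool returns the sets just allocated to the pool; allocating
// again from the same VkDescriptorSetAllocateInfo exercises allocation (and any
// handle reuse in the guest/host encoders) across a pool reset.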
2091 EXPECT_EQ(VK_SUCCESS, vk->vkResetDescriptorPool(
2092 mDevice, pool, 0));
2093
2094 EXPECT_EQ(VK_SUCCESS, vk->vkAllocateDescriptorSets(
2095 mDevice, &setAi, sets.data()));
2096
2097 VkPipelineLayout pipelineLayout;
2098
2099 VkPipelineLayoutCreateInfo pipelineLayoutCi = {
2100 VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, 0, 0,
2101 maxSetsTotal, setLayouts.data(), // one set-layout slot per set bound at once below
2102 0, nullptr,
2103 };
2104 vk->vkCreatePipelineLayout(mDevice, &pipelineLayoutCi, nullptr, &pipelineLayout);
2105
2106 // Setup command buffers
2107 VkCommandPoolCreateInfo poolCi = {
2108 VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, 0, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, mGraphicsQueueFamily, // buffers are re-begun on each repeat submit
2109 };
2110
2111 VkCommandPool commandPool;
2112 vk->vkCreateCommandPool(mDevice, &poolCi, nullptr, &commandPool);
2113
2114 VkCommandBuffer cbs[kThreadCount];
2115 VkCommandBuffer cbs2[kThreadCount * 2];
2116
2117 VkCommandBufferAllocateInfo cbAi = {
2118 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0,
2119 commandPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, kThreadCount,
2120 };
2121
2122 vk->vkAllocateCommandBuffers(mDevice, &cbAi, cbs);
2123
2124 VkCommandBufferAllocateInfo cbAi2 = {
2125 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0,
2126 commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY, kThreadCount * 2, // executed via vkCmdExecuteCommands below, so these must be secondary
2127 };
2128
2129 vk->vkAllocateCommandBuffers(mDevice, &cbAi2, cbs2);
2130
2131 std::vector<FunctorThread*> threads;
2132
2133 constexpr uint32_t kRecordsPerThread = 4;
2134 constexpr uint32_t kRepeatSubmits = 2;
2135 constexpr uint32_t kTotalRecords = kThreadCount * kRecordsPerThread * kRepeatSubmits;
2136
2137 for (uint32_t i = 0; i < kThreadCount; ++i) {
2138 FunctorThread* thread = new FunctorThread([this, maxSetsTotal, buffers, cbs, cbs2, sets, pipelineLayout, i]() {
2139 VkCommandBufferInheritanceInfo inheritanceInfo = {
2140 VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, 0,
2141 VK_NULL_HANDLE, 0,
2142 VK_NULL_HANDLE,
2143 VK_FALSE,
2144 0,
2145 0,
2146 };
2147
2148 VkCommandBufferBeginInfo secondaryBeginInfo = {
2149 VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, 0,
2150 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, &inheritanceInfo,
2151 };
2152
2153 VkCommandBufferBeginInfo primaryBeginInfo = {
2154 VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, 0,
2155 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, 0,
2156 };
2157
2158 for (uint32_t k = 0; k < kRepeatSubmits; ++k) {
2159 // Secondaries
2160 {
2161 VkCommandBuffer first = cbs2[2 * i];
2162 VkCommandBuffer second = cbs2[2 * i + 1];
2163
2164 vk->vkBeginCommandBuffer(first, &secondaryBeginInfo);
2165 for (uint32_t j = 0; j < kRecordsPerThread / 2; ++j) {
2166 for (uint32_t l = 0; l < maxSetsTotal; ++l) {
2167 VkDescriptorImageInfo immutableSamplerImageInfos[] = {
2168 { (VkSampler)0xdeadbeef, VK_NULL_HANDLE, VK_IMAGE_LAYOUT_GENERAL, },
2169 { (VkSampler)0xdeadbeef, VK_NULL_HANDLE, VK_IMAGE_LAYOUT_GENERAL, },
2170 };
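// The bogus 0xdeadbeef handles are never dereferenced: binding 0 uses an
// immutable sampler, so the sampler member of pImageInfo is ignored for that
// write, and the uniform-buffer writes below ignore pImageInfo entirely.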
2171
2172 VkDescriptorBufferInfo bufferInfos[] = {
2173 { buffers[0], 0, 16, },
2174 { buffers[1], 16, 32, },
2175 { buffers[0], 1024, 3072, }, // offset + range must stay within the 4096-byte buffer
2176 { buffers[1], 256, 512, },
2177 { buffers[0], 0, 512, },
2178 { buffers[1], 8, 48, },
2179 };
2180
2181 VkWriteDescriptorSet descWrites[] = {
2182 {
2183 VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, 0, sets[l],
2184 0, 0, 1,
2185 VK_DESCRIPTOR_TYPE_SAMPLER,
2186 immutableSamplerImageInfos,
2187 bufferInfos,
2188 nullptr,
2189 },
2190
2191 {
2192 VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, 0, sets[l],
2193 1, 0, 2,
2194 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
2195 immutableSamplerImageInfos,
2196 bufferInfos,
2197 nullptr,
2198 },
2199
2200 {
2201 VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, 0, sets[l],
2202 1, 4, 2,
2203 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
2204 immutableSamplerImageInfos,
2205 bufferInfos,
2206 nullptr,
2207 },
2208
2209 {
2210 VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, 0, sets[l],
2211 10, 0, 2,
2212 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
2213 immutableSamplerImageInfos,
2214 bufferInfos + 2,
2215 nullptr,
2216 },
2217 };
2218
2219 VkCopyDescriptorSet descCopies[] = {
2220 {
2221 VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, 0,
2222 sets[l], 10, 0,
2223 sets[l], 1, 2,
2224 2,
2225 },
2226 {
2227 VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, 0,
2228 sets[l], 1, 0,
2229 sets[l], 10, 0,
2230 2,
2231 },
2232 };
2233
2234 vk->vkUpdateDescriptorSets(
2235 mDevice,
2236 sizeof(descWrites) / sizeof(VkWriteDescriptorSet), descWrites,
2237 sizeof(descCopies) / sizeof(VkCopyDescriptorSet), descCopies);
2238 }
2239 vk->vkCmdBindDescriptorSets(
2240 first,
2241 VK_PIPELINE_BIND_POINT_GRAPHICS,
2242 pipelineLayout, 0, maxSetsTotal, &sets[0], 0, nullptr);
2243 }
2244 vk->vkEndCommandBuffer(first);
2245
2246 vk->vkBeginCommandBuffer(second, &secondaryBeginInfo);
2247 for (uint32_t j = 0; j < kRecordsPerThread / 2; ++j) {
2248 vk->vkCmdBindDescriptorSets(
2249 second,
2250 VK_PIPELINE_BIND_POINT_GRAPHICS,
2251 pipelineLayout, 0, 1, &sets[i], 0, nullptr);
2252 }
2253 vk->vkEndCommandBuffer(second);
2254 }
2255
2256 vk->vkBeginCommandBuffer(cbs[i], &primaryBeginInfo);
2257 vk->vkCmdExecuteCommands(cbs[i], 2, cbs2 + 2 * i);
2258 vk->vkEndCommandBuffer(cbs[i]);
2259
2260 VkSubmitInfo si = {
2261 VK_STRUCTURE_TYPE_SUBMIT_INFO, 0,
2262 0, 0,
2263 0,
2264 1, &cbs[i],
2265 0, 0,
2266 };
2267
2268 vk->vkQueueSubmit(mQueue, 1, &si, 0);
2269 vk->vkQueueWaitIdle(mQueue);
2270 }
2271 VkPhysicalDeviceMemoryProperties memProps;
2272 vk->vkGetPhysicalDeviceMemoryProperties(mPhysicalDevice, &memProps);
2273 return 0;
2274 });
2275 threads.push_back(thread);
2276 }
2277
2278 auto cpuTimeStart = System::cpuTime();
2279
2280 for (uint32_t i = 0; i < kThreadCount; ++i) {
2281 threads[i]->start();
2282 }
2283
2284 for (uint32_t i = 0; i < kThreadCount; ++i) {
2285 threads[i]->wait();
2286 delete threads[i];
2287 }
2288
2289 vk->vkQueueWaitIdle(mQueue);
2290 vk->vkDeviceWaitIdle(mDevice);
2291
2292 auto cpuTime = System::cpuTime() - cpuTimeStart;
2293
2294 uint64_t duration_us = cpuTime.wall_time_us;
2295 uint64_t duration_cpu_us = cpuTime.usageUs();
2296
2297 float ms = duration_us / 1000.0f;
2298 float sec = duration_us / 1000000.0f;
2299
2300 float submitHz = (float)kTotalRecords / sec;
2301
2302 printf("Record %u times in %f ms. Rate: %f Hz (%f Hz per thread)\n", kTotalRecords, ms, submitHz, (float)submitHz / (float)kThreadCount);
2303
2304 vk->vkFreeCommandBuffers(mDevice, commandPool, kThreadCount * 2, cbs2);
2305 vk->vkFreeCommandBuffers(mDevice, commandPool, kThreadCount, cbs);
2306 vk->vkDestroyCommandPool(mDevice, commandPool, nullptr);
2307
2308 EXPECT_EQ(VK_SUCCESS, vk->vkFreeDescriptorSets(
2309 mDevice, pool, maxSetsTotal, sets.data()));
2310
2311 vk->vkDestroyDescriptorPool(mDevice, pool, nullptr);
2312 }
2313
2314 // Flush: throughput of empty vkQueueSubmit calls (encoder flush and queue submit overhead, no GPU work).
2315 TEST_P(VulkanHalTest, Flush) {
2316 constexpr uint32_t kThreadCount = 1;
2317 VkDescriptorPool pool;
2318 VkDescriptorSetLayout setLayout;
2319
2320 std::vector<VkDescriptorSet> sets(kThreadCount);
2321
2322 // Setup descriptor sets
2323 EXPECT_EQ(VK_SUCCESS, allocateTestDescriptorSets(
2324 kThreadCount, kThreadCount,
2325 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
2326 VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
2327 &pool, &setLayout, sets.data()));
2328
2329 VkPipelineLayout pipelineLayout;
2330
2331 VkPipelineLayoutCreateInfo pipelineLayoutCi = {
2332 VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, 0, 0,
2333 1, &setLayout,
2334 0, nullptr,
2335 };
2336 vk->vkCreatePipelineLayout(mDevice, &pipelineLayoutCi, nullptr, &pipelineLayout);
2337
2338 // Setup command buffers
2339 VkCommandPoolCreateInfo poolCi = {
2340 VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, 0, 0, mGraphicsQueueFamily,
2341 };
2342
2343 VkCommandPool commandPool;
2344 vk->vkCreateCommandPool(mDevice, &poolCi, nullptr, &commandPool);
2345
2346 VkCommandBuffer cbs[kThreadCount];
2347 VkCommandBuffer cbs2[kThreadCount * 2];
2348
2349 VkCommandBufferAllocateInfo cbAi = {
2350 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0,
2351 commandPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, kThreadCount,
2352 };
2353
2354 vk->vkAllocateCommandBuffers(mDevice, &cbAi, cbs);
2355
2356 VkCommandBufferAllocateInfo cbAi2 = {
2357 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0,
2358 commandPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, kThreadCount * 2,
2359 };
2360
2361 vk->vkAllocateCommandBuffers(mDevice, &cbAi2, cbs2);
2362
2363 std::vector<FunctorThread*> threads;
2364
2365 constexpr uint32_t kRecordsPerThread = 800000;
2366 constexpr uint32_t kTotalRecords = kThreadCount * kRecordsPerThread;
2367
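// Each iteration submits a VkSubmitInfo with no command buffers and no semaphores,
// so the measured rate reflects per-submit guest encoder and transport overhead
// rather than any GPU work.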
2368 for (uint32_t i = 0; i < kThreadCount; ++i) {
2369 FunctorThread* thread = new FunctorThread([this, cbs, cbs2, sets, pipelineLayout, i]() {
2370 for (uint32_t k = 0; k < kRecordsPerThread; ++k) {
2371 VkSubmitInfo si = {
2372 VK_STRUCTURE_TYPE_SUBMIT_INFO, 0,
2373 0, 0,
2374 0,
2375 0, nullptr,
2376 0, 0,
2377 };
2378
2379 vk->vkQueueSubmit(mQueue, 1, &si, 0);
2380 }
2381 VkPhysicalDeviceMemoryProperties memProps;
2382 vk->vkGetPhysicalDeviceMemoryProperties(mPhysicalDevice, &memProps);
2383 return 0;
2384 });
2385 threads.push_back(thread);
2386 }
2387
2388 auto cpuTimeStart = System::cpuTime();
2389
2390 for (uint32_t i = 0; i < kThreadCount; ++i) {
2391 threads[i]->start();
2392 }
2393
2394 for (uint32_t i = 0; i < kThreadCount; ++i) {
2395 threads[i]->wait();
2396 delete threads[i];
2397 }
2398
2399 vk->vkQueueWaitIdle(mQueue);
2400 vk->vkDeviceWaitIdle(mDevice);
2401
2402 auto cpuTime = System::cpuTime() - cpuTimeStart;
2403
2404 uint64_t duration_us = cpuTime.wall_time_us;
2405 uint64_t duration_cpu_us = cpuTime.usageUs();
2406
2407 float ms = duration_us / 1000.0f;
2408 float sec = duration_us / 1000000.0f;
2409
2410 float submitHz = (float)kTotalRecords / sec;
2411
2412 printf("Record %u times in %f ms. Rate: %f Hz (%f Hz per thread)\n", kTotalRecords, ms, submitHz, (float)submitHz / (float)kThreadCount);
2413
2414 vk->vkFreeCommandBuffers(mDevice, commandPool, kThreadCount * 2, cbs2);
2415 vk->vkFreeCommandBuffers(mDevice, commandPool, kThreadCount, cbs);
2416 vk->vkDestroyCommandPool(mDevice, commandPool, nullptr);
2417
2418 EXPECT_EQ(VK_SUCCESS, vk->vkFreeDescriptorSets(
2419 mDevice, pool, kThreadCount, sets.data()));
2420
2421 vk->vkDestroyDescriptorPool(mDevice, pool, nullptr);
2422 }
2423
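// Instantiate the parameterized suite once for each graphics transport reported by
// the test environment.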
2424 INSTANTIATE_TEST_SUITE_P(
2425 MultipleTransports,
2426 VulkanHalTest,
2427 testing::ValuesIn(GoldfishOpenglTestEnv::getTransportsToTest()));
2428
2429 } // namespace aemu
2430