// Copyright (C) 2023 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <log/log.h>

#include <atomic>
#include <chrono>
#include <cstring>
#include <mutex>
#include <thread>
#include <vector>

#include "GfxstreamEnd2EndTestUtils.h"
#include "GfxstreamEnd2EndTests.h"

namespace gfxstream {
namespace tests {
namespace {

using namespace std::chrono_literals;
using testing::Eq;
using testing::Ge;
using testing::IsEmpty;
using testing::IsNull;
using testing::Ne;
using testing::Not;
using testing::NotNull;

template <typename DurationType>
constexpr uint64_t AsVkTimeout(DurationType duration) {
    return static_cast<uint64_t>(
        std::chrono::duration_cast<std::chrono::nanoseconds>(duration).count());
}
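// Example: AsVkTimeout(3s) == 3'000'000'000, the nanosecond count that
// vkWaitForFences() expects for its uint64_t timeout parameter.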

class GfxstreamEnd2EndVkTest : public GfxstreamEnd2EndTest {
  protected:
    // Gfxstream uses a vkQueueSubmit() to signal the VkFence and VkSemaphore used
    // in vkAcquireImageANDROID() calls. The guest is not aware of this and may try
    // to vkDestroyFence() and vkDestroySemaphore() (because the VkImage, VkFence,
    // and VkSemaphore may have been unused from the guest point of view) while the
    // host's command buffer is running. Gfxstream needs to ensure that it performs
    // the necessary tracking to not delete the VkFence and VkSemaphore while they
    // are in use on the host.
    void DoAcquireImageAndroidWithSync(bool withFence, bool withSemaphore) {
        auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
            VK_ASSERT(SetUpTypicalVkTestEnvironment());

        const uint32_t width = 32;
        const uint32_t height = 32;
        auto ahb = GL_ASSERT(ScopedAHardwareBuffer::Allocate(
            *mGralloc, width, height, GFXSTREAM_AHB_FORMAT_R8G8B8A8_UNORM));

        const VkNativeBufferANDROID imageNativeBufferInfo = {
            .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
            .handle = mGralloc->getNativeHandle(ahb),
        };

        auto vkAcquireImageANDROID =
            PFN_vkAcquireImageANDROID(device->getProcAddr("vkAcquireImageANDROID"));
        ASSERT_THAT(vkAcquireImageANDROID, NotNull());

        const vkhpp::ImageCreateInfo imageCreateInfo = {
            .pNext = &imageNativeBufferInfo,
            .imageType = vkhpp::ImageType::e2D,
            .extent.width = width,
            .extent.height = height,
            .extent.depth = 1,
            .mipLevels = 1,
            .arrayLayers = 1,
            .format = vkhpp::Format::eR8G8B8A8Unorm,
            .tiling = vkhpp::ImageTiling::eOptimal,
            .initialLayout = vkhpp::ImageLayout::eUndefined,
            .usage = vkhpp::ImageUsageFlagBits::eSampled |
                     vkhpp::ImageUsageFlagBits::eTransferDst |
                     vkhpp::ImageUsageFlagBits::eTransferSrc,
            .sharingMode = vkhpp::SharingMode::eExclusive,
            .samples = vkhpp::SampleCountFlagBits::e1,
        };
        auto image = device->createImageUnique(imageCreateInfo).value;

        vkhpp::MemoryRequirements imageMemoryRequirements{};
        device->getImageMemoryRequirements(*image, &imageMemoryRequirements);

        const uint32_t imageMemoryIndex = utils::getMemoryType(
            physicalDevice, imageMemoryRequirements, vkhpp::MemoryPropertyFlagBits::eDeviceLocal);
        ASSERT_THAT(imageMemoryIndex, Not(Eq(-1)));

        const vkhpp::MemoryAllocateInfo imageMemoryAllocateInfo = {
            .allocationSize = imageMemoryRequirements.size,
            .memoryTypeIndex = imageMemoryIndex,
        };

        auto imageMemory = device->allocateMemoryUnique(imageMemoryAllocateInfo).value;
        ASSERT_THAT(imageMemory, IsValidHandle());
        ASSERT_THAT(device->bindImageMemory(*image, *imageMemory, 0), IsVkSuccess());

        vkhpp::UniqueFence fence;
        if (withFence) {
            fence = device->createFenceUnique(vkhpp::FenceCreateInfo()).value;
        }

        vkhpp::UniqueSemaphore semaphore;
        if (withSemaphore) {
            semaphore = device->createSemaphoreUnique(vkhpp::SemaphoreCreateInfo()).value;
        }
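        // The third argument is the nativeFenceFd; -1 means there is no fence fd
        // to wait on. The semaphore and fence handles may be VK_NULL_HANDLE when
        // the test variant does not request them.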
        auto result = vkAcquireImageANDROID(*device, *image, -1, *semaphore, *fence);
        ASSERT_THAT(result, Eq(VK_SUCCESS));

        if (withFence) {
            fence.reset();
        }
        if (withSemaphore) {
            semaphore.reset();
        }
    }
};

TEST_P(GfxstreamEnd2EndVkTest, Basic) {
    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());
}

TEST_P(GfxstreamEnd2EndVkTest, ImportAHB) {
    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    const uint32_t width = 32;
    const uint32_t height = 32;
    auto ahb = GL_ASSERT(ScopedAHardwareBuffer::Allocate(
        *mGralloc, width, height, GFXSTREAM_AHB_FORMAT_R8G8B8A8_UNORM));

    const VkNativeBufferANDROID imageNativeBufferInfo = {
        .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
        .handle = mGralloc->getNativeHandle(ahb),
    };

    auto vkQueueSignalReleaseImageANDROID = PFN_vkQueueSignalReleaseImageANDROID(
        device->getProcAddr("vkQueueSignalReleaseImageANDROID"));
    ASSERT_THAT(vkQueueSignalReleaseImageANDROID, NotNull());

    const vkhpp::ImageCreateInfo imageCreateInfo = {
        .pNext = &imageNativeBufferInfo,
        .imageType = vkhpp::ImageType::e2D,
        .extent.width = width,
        .extent.height = height,
        .extent.depth = 1,
        .mipLevels = 1,
        .arrayLayers = 1,
        .format = vkhpp::Format::eR8G8B8A8Unorm,
        .tiling = vkhpp::ImageTiling::eOptimal,
        .initialLayout = vkhpp::ImageLayout::eUndefined,
        .usage = vkhpp::ImageUsageFlagBits::eSampled |
                 vkhpp::ImageUsageFlagBits::eTransferDst |
                 vkhpp::ImageUsageFlagBits::eTransferSrc,
        .sharingMode = vkhpp::SharingMode::eExclusive,
        .samples = vkhpp::SampleCountFlagBits::e1,
    };
    auto image = device->createImageUnique(imageCreateInfo).value;

    vkhpp::MemoryRequirements imageMemoryRequirements{};
    device->getImageMemoryRequirements(*image, &imageMemoryRequirements);

    const uint32_t imageMemoryIndex = utils::getMemoryType(
        physicalDevice, imageMemoryRequirements, vkhpp::MemoryPropertyFlagBits::eDeviceLocal);
    ASSERT_THAT(imageMemoryIndex, Not(Eq(-1)));

    const vkhpp::MemoryAllocateInfo imageMemoryAllocateInfo = {
        .allocationSize = imageMemoryRequirements.size,
        .memoryTypeIndex = imageMemoryIndex,
    };

    auto imageMemory = device->allocateMemoryUnique(imageMemoryAllocateInfo).value;
    ASSERT_THAT(imageMemory, IsValidHandle());
    ASSERT_THAT(device->bindImageMemory(*image, *imageMemory, 0), IsVkSuccess());

    const vkhpp::BufferCreateInfo bufferCreateInfo = {
        .size = static_cast<VkDeviceSize>(12 * 1024 * 1024),
        .usage = vkhpp::BufferUsageFlagBits::eTransferDst |
                 vkhpp::BufferUsageFlagBits::eTransferSrc,
        .sharingMode = vkhpp::SharingMode::eExclusive,
    };
    auto stagingBuffer = device->createBufferUnique(bufferCreateInfo).value;
    ASSERT_THAT(stagingBuffer, IsValidHandle());

    vkhpp::MemoryRequirements stagingBufferMemoryRequirements{};
    device->getBufferMemoryRequirements(*stagingBuffer, &stagingBufferMemoryRequirements);

    const auto stagingBufferMemoryType = utils::getMemoryType(
        physicalDevice, stagingBufferMemoryRequirements,
        vkhpp::MemoryPropertyFlagBits::eHostVisible |
            vkhpp::MemoryPropertyFlagBits::eHostCoherent);

    const vkhpp::MemoryAllocateInfo stagingBufferMemoryAllocateInfo = {
        .allocationSize = stagingBufferMemoryRequirements.size,
        .memoryTypeIndex = stagingBufferMemoryType,
    };
    auto stagingBufferMemory = device->allocateMemoryUnique(stagingBufferMemoryAllocateInfo).value;
    ASSERT_THAT(stagingBufferMemory, IsValidHandle());
    ASSERT_THAT(device->bindBufferMemory(*stagingBuffer, *stagingBufferMemory, 0), IsVkSuccess());

    const vkhpp::CommandPoolCreateInfo commandPoolCreateInfo = {
        .queueFamilyIndex = queueFamilyIndex,
    };

    auto commandPool = device->createCommandPoolUnique(commandPoolCreateInfo).value;
    ASSERT_THAT(commandPool, IsValidHandle());

    const vkhpp::CommandBufferAllocateInfo commandBufferAllocateInfo = {
        .level = vkhpp::CommandBufferLevel::ePrimary,
        .commandPool = *commandPool,
        .commandBufferCount = 1,
    };
    auto commandBuffers = device->allocateCommandBuffersUnique(commandBufferAllocateInfo).value;
    ASSERT_THAT(commandBuffers, Not(IsEmpty()));
    auto commandBuffer = std::move(commandBuffers[0]);
    ASSERT_THAT(commandBuffer, IsValidHandle());

    const vkhpp::CommandBufferBeginInfo commandBufferBeginInfo = {
        .flags = vkhpp::CommandBufferUsageFlagBits::eOneTimeSubmit,
    };
    commandBuffer->begin(commandBufferBeginInfo);
    commandBuffer->end();

    std::vector<vkhpp::CommandBuffer> commandBufferHandles;
    commandBufferHandles.push_back(*commandBuffer);

    auto transferFence = device->createFenceUnique(vkhpp::FenceCreateInfo()).value;
    ASSERT_THAT(transferFence, IsValidHandle());

    const vkhpp::SubmitInfo submitInfo = {
        .commandBufferCount = static_cast<uint32_t>(commandBufferHandles.size()),
        .pCommandBuffers = commandBufferHandles.data(),
    };
    queue.submit(submitInfo, *transferFence);

    auto waitResult = device->waitForFences(*transferFence, VK_TRUE, AsVkTimeout(3s));
    ASSERT_THAT(waitResult, IsVkSuccess());

    int fence;

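    // vkQueueSignalReleaseImageANDROID() returns a sync fd for the queued
    // release; -1 would mean no fence was produced, which is rejected below.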
    auto result = vkQueueSignalReleaseImageANDROID(queue, 0, nullptr, *image, &fence);
    ASSERT_THAT(result, Eq(VK_SUCCESS));
    ASSERT_THAT(fence, Not(Eq(-1)));

    ASSERT_THAT(mSync->wait(fence, 3000), Eq(0));
}

TEST_P(GfxstreamEnd2EndVkTest, DeferredImportAHB) {
    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    const uint32_t width = 32;
    const uint32_t height = 32;
    auto ahb = GL_ASSERT(ScopedAHardwareBuffer::Allocate(
        *mGralloc, width, height, GFXSTREAM_AHB_FORMAT_R8G8B8A8_UNORM));

    auto vkQueueSignalReleaseImageANDROID = PFN_vkQueueSignalReleaseImageANDROID(
        device->getProcAddr("vkQueueSignalReleaseImageANDROID"));
    ASSERT_THAT(vkQueueSignalReleaseImageANDROID, NotNull());

    const vkhpp::ImageCreateInfo imageCreateInfo = {
        .pNext = nullptr,
        .imageType = vkhpp::ImageType::e2D,
        .extent.width = width,
        .extent.height = height,
        .extent.depth = 1,
        .mipLevels = 1,
        .arrayLayers = 1,
        .format = vkhpp::Format::eR8G8B8A8Unorm,
        .tiling = vkhpp::ImageTiling::eOptimal,
        .initialLayout = vkhpp::ImageLayout::eUndefined,
        .usage = vkhpp::ImageUsageFlagBits::eSampled |
                 vkhpp::ImageUsageFlagBits::eTransferDst |
                 vkhpp::ImageUsageFlagBits::eTransferSrc,
        .sharingMode = vkhpp::SharingMode::eExclusive,
        .samples = vkhpp::SampleCountFlagBits::e1,
    };
    auto image = device->createImageUnique(imageCreateInfo).value;

    // NOTE: Binding the VkImage to the AHB happens after the VkImage is created.
    const VkNativeBufferANDROID imageNativeBufferInfo = {
        .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
        .handle = mGralloc->getNativeHandle(ahb),
    };

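    // With .memory = VK_NULL_HANDLE, the VkNativeBufferANDROID chained into the
    // bind below supplies the AHB backing for the image instead of a VkDeviceMemory.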
    const vkhpp::BindImageMemoryInfo imageBindMemoryInfo = {
        .pNext = &imageNativeBufferInfo,
        .image = *image,
        .memory = VK_NULL_HANDLE,
        .memoryOffset = 0,
    };
    ASSERT_THAT(device->bindImageMemory2({imageBindMemoryInfo}), IsVkSuccess());

    int fence;

    auto result = vkQueueSignalReleaseImageANDROID(queue, 0, nullptr, *image, &fence);
    ASSERT_THAT(result, Eq(VK_SUCCESS));
    ASSERT_THAT(fence, Not(Eq(-1)));

    ASSERT_THAT(mSync->wait(fence, 3000), Eq(0));
}

TEST_P(GfxstreamEnd2EndVkTest, BlobAHBIsNotMapable) {
    if (GetParam().with_gl) {
        GTEST_SKIP()
            << "Skipping test: data buffers are currently only supported in Vulkan-only mode.";
    }
    if (GetParam().with_features.count("VulkanUseDedicatedAhbMemoryType") == 0) {
        GTEST_SKIP()
            << "Skipping test: AHB test only makes sense with VulkanUseDedicatedAhbMemoryType.";
    }

    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    const uint32_t width = 32;
    const uint32_t height = 1;
    auto ahb = GL_ASSERT(
        ScopedAHardwareBuffer::Allocate(*mGralloc, width, height, GFXSTREAM_AHB_FORMAT_BLOB));

    const vkhpp::ExternalMemoryBufferCreateInfo externalMemoryBufferCreateInfo = {
        .handleTypes = vkhpp::ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID,
    };
    const vkhpp::BufferCreateInfo bufferCreateInfo = {
        .pNext = &externalMemoryBufferCreateInfo,
        .size = width,
        .usage = vkhpp::BufferUsageFlagBits::eTransferDst |
                 vkhpp::BufferUsageFlagBits::eTransferSrc |
                 vkhpp::BufferUsageFlagBits::eVertexBuffer,
        .sharingMode = vkhpp::SharingMode::eExclusive,
    };
    auto buffer = device->createBufferUnique(bufferCreateInfo).value;
    ASSERT_THAT(buffer, IsValidHandle());

    auto vkGetAndroidHardwareBufferPropertiesANDROID =
        reinterpret_cast<PFN_vkGetAndroidHardwareBufferPropertiesANDROID>(
            device->getProcAddr("vkGetAndroidHardwareBufferPropertiesANDROID"));
    ASSERT_THAT(vkGetAndroidHardwareBufferPropertiesANDROID, NotNull());

    VkAndroidHardwareBufferPropertiesANDROID bufferProperties = {
        .sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
        .pNext = nullptr,
    };
    ASSERT_THAT(vkGetAndroidHardwareBufferPropertiesANDROID(*device, ahb, &bufferProperties),
                Eq(VK_SUCCESS));

    const vkhpp::MemoryRequirements bufferMemoryRequirements{
        .size = bufferProperties.allocationSize,
        .alignment = 0,
        .memoryTypeBits = bufferProperties.memoryTypeBits,
    };

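    // Walk every memory type usable for this AHB-backed buffer and verify that
    // none of them is host visible, i.e. the blob AHB must not be mappable.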
    const auto memoryProperties = physicalDevice.getMemoryProperties();
    for (uint32_t i = 0; i < memoryProperties.memoryTypeCount; i++) {
        if (!(bufferMemoryRequirements.memoryTypeBits & (1 << i))) {
            continue;
        }

        const auto memoryPropertyFlags = memoryProperties.memoryTypes[i].propertyFlags;
        EXPECT_THAT(memoryPropertyFlags & vkhpp::MemoryPropertyFlagBits::eHostVisible,
                    Ne(vkhpp::MemoryPropertyFlagBits::eHostVisible));
    }

    const auto bufferMemoryType = utils::getMemoryType(physicalDevice, bufferMemoryRequirements,
                                                       vkhpp::MemoryPropertyFlagBits::eDeviceLocal);
    ASSERT_THAT(bufferMemoryType, Ne(-1));

    const vkhpp::ImportAndroidHardwareBufferInfoANDROID importHardwareBufferInfo = {
        .buffer = ahb,
    };
    const vkhpp::MemoryAllocateInfo bufferMemoryAllocateInfo = {
        .pNext = &importHardwareBufferInfo,
        .allocationSize = bufferMemoryRequirements.size,
        .memoryTypeIndex = bufferMemoryType,
    };
    auto bufferMemory = device->allocateMemoryUnique(bufferMemoryAllocateInfo).value;
    ASSERT_THAT(bufferMemory, IsValidHandle());

    ASSERT_THAT(device->bindBufferMemory(*buffer, *bufferMemory, 0), IsVkSuccess());
}

TEST_P(GfxstreamEnd2EndVkTest, HostMemory) {
    static constexpr const vkhpp::DeviceSize kSize = 16 * 1024;

    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    uint32_t hostMemoryTypeIndex = -1;
    const auto memoryProperties = physicalDevice.getMemoryProperties();
    for (uint32_t i = 0; i < memoryProperties.memoryTypeCount; i++) {
        const vkhpp::MemoryType& memoryType = memoryProperties.memoryTypes[i];
        if (memoryType.propertyFlags & vkhpp::MemoryPropertyFlagBits::eHostVisible) {
            hostMemoryTypeIndex = i;
        }
    }
    if (hostMemoryTypeIndex == -1) {
        GTEST_SKIP() << "Skipping test due to no host visible memory type.";
    }

    const vkhpp::MemoryAllocateInfo memoryAllocateInfo = {
        .allocationSize = kSize,
        .memoryTypeIndex = hostMemoryTypeIndex,
    };
    auto memory = device->allocateMemoryUnique(memoryAllocateInfo).value;
    ASSERT_THAT(memory, IsValidHandle());

    void* mapped = nullptr;

    auto mapResult =
        device->mapMemory(*memory, 0, VK_WHOLE_SIZE, vkhpp::MemoryMapFlags{}, &mapped);
    ASSERT_THAT(mapResult, IsVkSuccess());
    ASSERT_THAT(mapped, NotNull());

    auto* bytes = reinterpret_cast<uint8_t*>(mapped);
    std::memset(bytes, 0xFF, kSize);

    const vkhpp::MappedMemoryRange range = {
        .memory = *memory,
        .offset = 0,
        .size = kSize,
    };
    device->flushMappedMemoryRanges({range});
    device->invalidateMappedMemoryRanges({range});

    for (uint32_t i = 0; i < kSize; ++i) {
        EXPECT_THAT(bytes[i], Eq(0xFF));
    }
}

TEST_P(GfxstreamEnd2EndVkTest, GetPhysicalDeviceProperties2) {
    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    auto props1 = physicalDevice.getProperties();
    auto props2 = physicalDevice.getProperties2();

    EXPECT_THAT(props1.vendorID, Eq(props2.properties.vendorID));
    EXPECT_THAT(props1.deviceID, Eq(props2.properties.deviceID));
}

TEST_P(GfxstreamEnd2EndVkTest, GetPhysicalDeviceFeatures2KHR) {
    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    auto features1 = physicalDevice.getFeatures();
    auto features2 = physicalDevice.getFeatures2();
    EXPECT_THAT(features1.robustBufferAccess, Eq(features2.features.robustBufferAccess));
}

TEST_P(GfxstreamEnd2EndVkTest, GetPhysicalDeviceImageFormatProperties2KHR) {
    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    const vkhpp::PhysicalDeviceImageFormatInfo2 imageFormatInfo = {
        .format = vkhpp::Format::eR8G8B8A8Unorm,
        .type = vkhpp::ImageType::e2D,
        .tiling = vkhpp::ImageTiling::eOptimal,
        .usage = vkhpp::ImageUsageFlagBits::eSampled,
    };
    const auto properties = VK_ASSERT_RV(physicalDevice.getImageFormatProperties2(imageFormatInfo));
    EXPECT_THAT(properties.imageFormatProperties.maxExtent.width, Ge(1));
    EXPECT_THAT(properties.imageFormatProperties.maxExtent.height, Ge(1));
    EXPECT_THAT(properties.imageFormatProperties.maxExtent.depth, Ge(1));
}

template <typename VkhppUniqueHandleType,
          typename VkhppHandleType = typename VkhppUniqueHandleType::element_type>
std::vector<VkhppHandleType> AsHandles(const std::vector<VkhppUniqueHandleType>& elements) {
    std::vector<VkhppHandleType> ret;
    ret.reserve(elements.size());
    for (const auto& e : elements) {
        ret.push_back(*e);
    }
    return ret;
}
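// Example: AsHandles(bundle.descriptorSets) yields the raw
// std::vector<vkhpp::DescriptorSet> handle array that freeDescriptorSets() expects.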

struct DescriptorBundle {
    vkhpp::UniqueDescriptorPool descriptorPool;
    vkhpp::UniqueDescriptorSetLayout descriptorSetLayout;
    std::vector<vkhpp::UniqueDescriptorSet> descriptorSets;
};

vkhpp::Result ReallocateDescriptorBundleSets(vkhpp::Device device, uint32_t count,
                                             DescriptorBundle* bundle) {
    if (!bundle->descriptorSetLayout) {
        ALOGE("Invalid descriptor set layout.");
        return vkhpp::Result::eErrorUnknown;
    }

    const std::vector<vkhpp::DescriptorSetLayout> descriptorSetLayouts(
        count, *bundle->descriptorSetLayout);
    const vkhpp::DescriptorSetAllocateInfo descriptorSetAllocateInfo = {
        .descriptorPool = *bundle->descriptorPool,
        .descriptorSetCount = count,
        .pSetLayouts = descriptorSetLayouts.data(),
    };
    auto descriptorSets = VK_TRY_RV(device.allocateDescriptorSetsUnique(descriptorSetAllocateInfo));
    bundle->descriptorSets = std::move(descriptorSets);
    return vkhpp::Result::eSuccess;
}

VkExpected<DescriptorBundle> AllocateDescriptorBundle(vkhpp::Device device, uint32_t count) {
    const vkhpp::DescriptorPoolSize descriptorPoolSize = {
        .type = vkhpp::DescriptorType::eUniformBuffer,
        .descriptorCount = 1 * count,
    };
    const vkhpp::DescriptorPoolCreateInfo descriptorPoolCreateInfo = {
        .flags = vkhpp::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
        .maxSets = count,
        .poolSizeCount = 1,
        .pPoolSizes = &descriptorPoolSize,
    };
    auto descriptorPool = VK_EXPECT_RV(device.createDescriptorPoolUnique(descriptorPoolCreateInfo));

    const vkhpp::DescriptorSetLayoutBinding descriptorSetBinding = {
        .binding = 0,
        .descriptorType = vkhpp::DescriptorType::eUniformBuffer,
        .descriptorCount = 1,
        .stageFlags = vkhpp::ShaderStageFlagBits::eVertex,
    };
    const vkhpp::DescriptorSetLayoutCreateInfo descriptorSetLayoutInfo = {
        .bindingCount = 1,
        .pBindings = &descriptorSetBinding,
    };
    auto descriptorSetLayout =
        VK_EXPECT_RV(device.createDescriptorSetLayoutUnique(descriptorSetLayoutInfo));

    DescriptorBundle bundle = {
        .descriptorPool = std::move(descriptorPool),
        .descriptorSetLayout = std::move(descriptorSetLayout),
    };
    VK_EXPECT_RESULT(ReallocateDescriptorBundleSets(device, count, &bundle));
    return std::move(bundle);
}

// Tests allocating a batch of descriptor sets and then freeing them:
// 1. Via vkFreeDescriptorSets directly
// 2. Via vkResetDescriptorPool
// 3. Via vkDestroyDescriptorPool
// 4. Via vkResetDescriptorPool followed by double frees in vkFreeDescriptorSets
// 5. Via vkResetDescriptorPool, allocating more, and freeing via vkFreeDescriptorSets
// (because vkFree* APIs are expected to never fail)
// https://github.com/KhronosGroup/Vulkan-Docs/issues/1070
TEST_P(GfxstreamEnd2EndVkTest, DescriptorSetAllocFree) {
    constexpr const uint32_t kNumSets = 4;

    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    auto bundle = VK_ASSERT(AllocateDescriptorBundle(*device, kNumSets));

    auto descriptorSetHandles = AsHandles(bundle.descriptorSets);
    EXPECT_THAT(device->freeDescriptorSets(*bundle.descriptorPool, kNumSets,
                                           descriptorSetHandles.data()),
                IsVkSuccess());

    // The double free should also work
    EXPECT_THAT(device->freeDescriptorSets(*bundle.descriptorPool, kNumSets,
                                           descriptorSetHandles.data()),
                IsVkSuccess());

    // Alloc/free again should also work
    ASSERT_THAT(ReallocateDescriptorBundleSets(*device, kNumSets, &bundle), IsVkSuccess());

    descriptorSetHandles = AsHandles(bundle.descriptorSets);
    EXPECT_THAT(device->freeDescriptorSets(*bundle.descriptorPool, kNumSets,
                                           descriptorSetHandles.data()),
                IsVkSuccess());
}

TEST_P(GfxstreamEnd2EndVkTest, DescriptorSetAllocFreeReset) {
    constexpr const uint32_t kNumSets = 4;

    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    auto bundle = VK_ASSERT(AllocateDescriptorBundle(*device, kNumSets));

    device->resetDescriptorPool(*bundle.descriptorPool);

    // The double free should also work
    auto descriptorSetHandles = AsHandles(bundle.descriptorSets);
    EXPECT_THAT(device->freeDescriptorSets(*bundle.descriptorPool, kNumSets,
                                           descriptorSetHandles.data()),
                IsVkSuccess());

    // Alloc/reset/free again should also work
    ASSERT_THAT(ReallocateDescriptorBundleSets(*device, kNumSets, &bundle), IsVkSuccess());

    device->resetDescriptorPool(*bundle.descriptorPool);

    descriptorSetHandles = AsHandles(bundle.descriptorSets);
    EXPECT_THAT(device->freeDescriptorSets(*bundle.descriptorPool, kNumSets,
                                           descriptorSetHandles.data()),
                IsVkSuccess());
}

TEST_P(GfxstreamEnd2EndVkTest, DISABLED_DescriptorSetAllocFreeDestroy) {
    constexpr const uint32_t kNumSets = 4;

    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    auto bundle = VK_ASSERT(AllocateDescriptorBundle(*device, kNumSets));

    device->destroyDescriptorPool(*bundle.descriptorPool);

    // The double free should also work
    auto descriptorSetHandles = AsHandles(bundle.descriptorSets);
    EXPECT_THAT(device->freeDescriptorSets(*bundle.descriptorPool, kNumSets,
                                           descriptorSetHandles.data()),
                IsVkSuccess());
}

TEST_P(GfxstreamEnd2EndVkTest, MultiThreadedShutdown) {
    constexpr const int kNumIterations = 20;
    for (int i = 0; i < kNumIterations; i++) {
        auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
            VK_ASSERT(SetUpTypicalVkTestEnvironment());

        const vkhpp::BufferCreateInfo bufferCreateInfo = {
            .size = 1024,
            .usage = vkhpp::BufferUsageFlagBits::eTransferSrc,
        };

        // TODO: switch to std::barrier with arrive_and_wait().
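        // A sketch of that TODO under C++20 (assuming <barrier> is available):
        //   std::barrier allThreadsReady(kNumThreads);
        //   ...each thread calls allThreadsReady.arrive_and_wait() instead of
        //   incrementing and spinning on threadsReady below.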
        std::atomic_int threadsReady{0};
        std::vector<std::thread> threads;

        constexpr const int kNumThreads = 5;
        for (int t = 0; t < kNumThreads; t++) {
            threads.emplace_back([&, this]() {
                // Perform some work to ensure the host RenderThread has started.
                auto buffer1 = device->createBufferUnique(bufferCreateInfo).value;

                ++threadsReady;
                while (threadsReady.load() != kNumThreads) {}

                // Sleep a little, which is hopefully enough time to get the
                // corresponding host ASG RenderThreads to go to sleep waiting
                // for a WAKEUP via a GFXSTREAM_CONTEXT_PING.
                std::this_thread::sleep_for(std::chrono::milliseconds(100));

                auto buffer2 = device->createBufferUnique(bufferCreateInfo).value;

                // 2 vkDestroyBuffer() calls happen here with the destruction of `buffer1`
                // and `buffer2`. vkDestroy*() calls are async (return `void`) and the
                // guest thread continues execution without waiting for the command to
                // complete on the host.
                //
                // The guest ASG and corresponding virtio gpu resource will also be
                // destructed here as a part of the thread_local HostConnection being
                // destructed.
                //
                // Note: Vulkan commands are given a sequence number in order to ensure that
                // commands from multi-threaded guest Vulkan apps are executed in order on the
                // host. Gfxstream's host Vulkan decoders will spin loop waiting for their turn to
                // process their next command.
                //
                // With all of the above, a deadlock would previously occur with the following
                // sequence:
                //
                // T1: Host-RenderThread-1: <sleeping waiting for wakeup>
                //
                // T2: Host-RenderThread-2: <sleeping waiting for wakeup>
                //
                // T3: Guest-Thread-1: vkDestroyBuffer() called,
                //                     VkEncoder grabs sequence-number-10,
                //                     writes sequence-number-10 into ASG-1 via resource-1
                //
                // T4: Guest-Thread-2: vkDestroyBuffer() called,
                //                     VkEncoder grabs sequence-number-11,
                //                     writes into ASG-2 via resource-2
                //
                // T5: Guest-Thread-2: ASG-2 sends a VIRTIO_GPU_CMD_SUBMIT_3D with
                //                     GFXSTREAM_CONTEXT_PING on ASG-resource-2
                //
                // T6: Guest-Thread-2: guest thread finishes,
                //                     ASG-2 destructor destroys the virtio-gpu resource used,
                //                     destruction sends VIRTIO_GPU_CMD_RESOURCE_UNREF on
                //                     resource-2
                //
                // T7: Guest-Thread-1: ASG-1 sends VIRTIO_GPU_CMD_SUBMIT_3D with
                //                     GFXSTREAM_CONTEXT_PING on ASG-resource-1
                //
                // T8: Host-Virtio-Gpu-Thread: performs VIRTIO_GPU_CMD_SUBMIT_3D from T5,
                //                             pings ASG-2 which wakes up Host-RenderThread-2
                //
                // T9: Host-RenderThread-2: woken from T8,
                //                          reads sequence-number-11 from ASG-2,
                //                          spin loops waiting for sequence-number-10 to execute
                //
                // T10: Host-Virtio-Gpu-Thread: performs VIRTIO_GPU_CMD_RESOURCE_UNREF for
                //                              resource-2 from T6,
                //                              resource-2 is used by ASG-2 / Host-RenderThread-2,
                //                              waits for Host-RenderThread-2 to finish
                //
                // DEADLOCKED HERE:
                //
                //   * Host-Virtio-Gpu-Thread is waiting for Host-RenderThread-2 to finish
                //     before it can finish destroying resource-2
                //
                //   * Host-RenderThread-2 is waiting for Host-RenderThread-1 to execute
                //     sequence-number-10
                //
                //   * Host-RenderThread-1 is asleep waiting for a GFXSTREAM_CONTEXT_PING
                //     from Host-Virtio-Gpu-Thread
            });
        }

        for (auto& thread : threads) {
            thread.join();
        }
    }
}

TEST_P(GfxstreamEnd2EndVkTest, AcquireImageAndroidWithFence) {
    DoAcquireImageAndroidWithSync(/*withFence=*/true, /*withSemaphore=*/false);
}

TEST_P(GfxstreamEnd2EndVkTest, AcquireImageAndroidWithSemaphore) {
    DoAcquireImageAndroidWithSync(/*withFence=*/false, /*withSemaphore=*/true);
}

TEST_P(GfxstreamEnd2EndVkTest, AcquireImageAndroidWithFenceAndSemaphore) {
    DoAcquireImageAndroidWithSync(/*withFence=*/true, /*withSemaphore=*/true);
}

VKAPI_ATTR void VKAPI_CALL MemoryReportCallback(const VkDeviceMemoryReportCallbackDataEXT*,
                                                void*) {
    // Unused
}

TEST_P(GfxstreamEnd2EndVkTest, DeviceMemoryReport) {
    int userdata = 1;
    vkhpp::DeviceDeviceMemoryReportCreateInfoEXT deviceDeviceMemoryReportInfo = {
        .pfnUserCallback = &MemoryReportCallback,
        .pUserData = &userdata,
    };

    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment({
            .deviceExtensions = {{
                VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME,
            }},
            .deviceCreateInfoPNext = &deviceDeviceMemoryReportInfo,
        }));

    const vkhpp::MemoryAllocateInfo memoryAllocateInfo = {
        .allocationSize = 1024,
        .memoryTypeIndex = 0,
    };
    auto memory = device->allocateMemoryUnique(memoryAllocateInfo).value;
    ASSERT_THAT(memory, IsValidHandle());
}

TEST_P(GfxstreamEnd2EndVkTest, DescriptorUpdateTemplateWithWrapping) {
    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    const vkhpp::BufferCreateInfo bufferCreateInfo = {
        .size = 1024,
        .usage = vkhpp::BufferUsageFlagBits::eUniformBuffer,
    };
    auto buffer = VK_ASSERT_RV(device->createBufferUnique(bufferCreateInfo));

    const std::vector<VkDescriptorBufferInfo> descriptorInfo = {
        VkDescriptorBufferInfo{
            .buffer = *buffer,
            .offset = 0,
            .range = 1024,
        },
        VkDescriptorBufferInfo{
            .buffer = *buffer,
            .offset = 0,
            .range = 1024,
        },
        VkDescriptorBufferInfo{
            .buffer = *buffer,
            .offset = 0,
            .range = 1024,
        },
        VkDescriptorBufferInfo{
            .buffer = *buffer,
            .offset = 0,
            .range = 1024,
        },
    };

    const std::vector<vkhpp::DescriptorPoolSize> descriptorPoolSizes = {
        {
            .type = vkhpp::DescriptorType::eUniformBuffer,
            .descriptorCount = 4,
        },
    };
    const vkhpp::DescriptorPoolCreateInfo descriptorPoolCreateInfo = {
        .flags = vkhpp::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
        .maxSets = 1,
        .poolSizeCount = static_cast<uint32_t>(descriptorPoolSizes.size()),
        .pPoolSizes = descriptorPoolSizes.data(),
    };
    auto descriptorPool =
        VK_ASSERT_RV(device->createDescriptorPoolUnique(descriptorPoolCreateInfo));

    const std::vector<vkhpp::DescriptorSetLayoutBinding> descriptorSetBindings = {
        {
            .binding = 0,
            .descriptorType = vkhpp::DescriptorType::eUniformBuffer,
            .descriptorCount = 1,
            .stageFlags = vkhpp::ShaderStageFlagBits::eVertex,
        },
        {
            .binding = 1,
            .descriptorType = vkhpp::DescriptorType::eUniformBuffer,
            .descriptorCount = 1,
            .stageFlags = vkhpp::ShaderStageFlagBits::eVertex,
        },
        {
            .binding = 2,
            .descriptorType = vkhpp::DescriptorType::eUniformBuffer,
            .descriptorCount = 1,
            .stageFlags = vkhpp::ShaderStageFlagBits::eVertex,
        },
        {
            .binding = 3,
            .descriptorType = vkhpp::DescriptorType::eUniformBuffer,
            .descriptorCount = 1,
            .stageFlags = vkhpp::ShaderStageFlagBits::eVertex,
        },
    };
    const vkhpp::DescriptorSetLayoutCreateInfo descriptorSetLayoutInfo = {
        .bindingCount = static_cast<uint32_t>(descriptorSetBindings.size()),
        .pBindings = descriptorSetBindings.data(),
    };
    auto descriptorSetLayout =
        VK_ASSERT_RV(device->createDescriptorSetLayoutUnique(descriptorSetLayoutInfo));

    const std::vector<vkhpp::DescriptorSetLayout> descriptorSetLayouts = {*descriptorSetLayout};
    const vkhpp::DescriptorSetAllocateInfo descriptorSetAllocateInfo = {
        .descriptorPool = *descriptorPool,
        .descriptorSetCount = static_cast<uint32_t>(descriptorSetLayouts.size()),
        .pSetLayouts = descriptorSetLayouts.data(),
    };
    auto descriptorSets =
        VK_ASSERT_RV(device->allocateDescriptorSetsUnique(descriptorSetAllocateInfo));
    auto descriptorSet = std::move(descriptorSets[0]);

    const vkhpp::PipelineLayoutCreateInfo pipelineLayoutCreateInfo = {
        .setLayoutCount = static_cast<uint32_t>(descriptorSetLayouts.size()),
        .pSetLayouts = descriptorSetLayouts.data(),
    };
    auto pipelineLayout =
        VK_ASSERT_RV(device->createPipelineLayoutUnique(pipelineLayoutCreateInfo));

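    // A single template entry with descriptorCount = 4 intentionally spans
    // bindings 0 through 3: per the Vulkan spec, a descriptor update that
    // exceeds the descriptors remaining in dstBinding wraps into consecutive
    // bindings, which is the "wrapping" behavior this test exercises.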
    const std::vector<vkhpp::DescriptorUpdateTemplateEntry> descriptorUpdateEntries = {
        {
            .dstBinding = 0,
            .dstArrayElement = 0,
            .descriptorCount = 4,
            .descriptorType = vkhpp::DescriptorType::eUniformBuffer,
            .offset = 0,
            .stride = sizeof(VkDescriptorBufferInfo),
        },
    };
    const vkhpp::DescriptorUpdateTemplateCreateInfo descriptorUpdateTemplateCreateInfo = {
        .descriptorUpdateEntryCount = static_cast<uint32_t>(descriptorUpdateEntries.size()),
        .pDescriptorUpdateEntries = descriptorUpdateEntries.data(),
        .descriptorSetLayout = *descriptorSetLayout,
        .pipelineBindPoint = vkhpp::PipelineBindPoint::eGraphics,
        .pipelineLayout = *pipelineLayout,
        .set = 0,
    };
    auto descriptorUpdateTemplate = VK_ASSERT_RV(
        device->createDescriptorUpdateTemplateUnique(descriptorUpdateTemplateCreateInfo));

    device->updateDescriptorSetWithTemplate(*descriptorSet, *descriptorUpdateTemplate,
                                            descriptorInfo.data());
}

TEST_P(GfxstreamEnd2EndVkTest, MultiThreadedVkMapMemory) {
    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    static constexpr const vkhpp::DeviceSize kSize = 1024;
    const vkhpp::BufferCreateInfo bufferCreateInfo = {
        .size = kSize,
        .usage = vkhpp::BufferUsageFlagBits::eTransferSrc,
    };
    auto buffer = device->createBufferUnique(bufferCreateInfo).value;

    vkhpp::MemoryRequirements bufferMemoryRequirements{};
    device->getBufferMemoryRequirements(*buffer, &bufferMemoryRequirements);

    const uint32_t bufferMemoryIndex = utils::getMemoryType(
        physicalDevice, bufferMemoryRequirements,
        vkhpp::MemoryPropertyFlagBits::eHostVisible |
            vkhpp::MemoryPropertyFlagBits::eHostCoherent);
    if (bufferMemoryIndex == -1) {
        GTEST_SKIP() << "Skipping test due to no memory type with HOST_VISIBLE | HOST_COHERENT.";
    }

    std::vector<std::thread> threads;
    std::atomic_int threadsReady{0};

    constexpr const int kNumThreads = 2;
    for (int t = 0; t < kNumThreads; t++) {
        threads.emplace_back([&, this]() {
            // Perform some work to ensure the host RenderThread has started.
            auto buffer2 = device->createBufferUnique(bufferCreateInfo).value;
            ASSERT_THAT(buffer2, IsValidHandle());

            ++threadsReady;
            while (threadsReady.load() != kNumThreads) {}

            constexpr const int kNumIterations = 100;
            for (int i = 0; i < kNumIterations; i++) {
                auto buffer3 = device->createBufferUnique(bufferCreateInfo).value;
                ASSERT_THAT(buffer3, IsValidHandle());

                const vkhpp::MemoryAllocateInfo buffer3MemoryAllocateInfo = {
                    .allocationSize = bufferMemoryRequirements.size,
                    .memoryTypeIndex = bufferMemoryIndex,
                };
                auto buffer3Memory = device->allocateMemoryUnique(buffer3MemoryAllocateInfo).value;
                ASSERT_THAT(buffer3Memory, IsValidHandle());

                ASSERT_THAT(device->bindBufferMemory(*buffer3, *buffer3Memory, 0), IsVkSuccess());

                void* mapped = nullptr;
                ASSERT_THAT(device->mapMemory(*buffer3Memory, 0, VK_WHOLE_SIZE,
                                              vkhpp::MemoryMapFlags{}, &mapped),
                            IsVkSuccess());
                ASSERT_THAT(mapped, NotNull());

                device->unmapMemory(*buffer3Memory);
            }
        });
    }

    for (auto& thread : threads) {
        thread.join();
    }
}

TEST_P(GfxstreamEnd2EndVkTest, MultiThreadedResetCommandBuffer) {
    auto [instance, physicalDevice, device, queue, queueFamilyIndex] =
        VK_ASSERT(SetUpTypicalVkTestEnvironment());

    static constexpr const vkhpp::DeviceSize kSize = 1024;
    const vkhpp::BufferCreateInfo bufferCreateInfo = {
        .size = kSize,
        .usage = vkhpp::BufferUsageFlagBits::eTransferSrc,
    };

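    // vkQueue* calls on a queue shared across threads require external
    // synchronization per the Vulkan spec, so submits are serialized here.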
    static std::mutex queue_mutex;
    std::vector<std::thread> threads;
    std::atomic_int threadsReady{0};

    constexpr const int kNumThreads = 10;
    for (int t = 0; t < kNumThreads; t++) {
        threads.emplace_back([&, this]() {
            // Perform some work to ensure the host RenderThread has started.
            auto buffer2 = device->createBufferUnique(bufferCreateInfo).value;
            ASSERT_THAT(buffer2, IsValidHandle());

            ++threadsReady;
            while (threadsReady.load() != kNumThreads) {}

            const vkhpp::CommandPoolCreateInfo commandPoolCreateInfo = {
                .queueFamilyIndex = queueFamilyIndex,
            };
            auto commandPool = device->createCommandPoolUnique(commandPoolCreateInfo).value;

            const vkhpp::CommandBufferAllocateInfo commandBufferAllocateInfo = {
                .level = vkhpp::CommandBufferLevel::ePrimary,
                .commandPool = *commandPool,
                .commandBufferCount = 1,
            };
            auto commandBuffers =
                device->allocateCommandBuffersUnique(commandBufferAllocateInfo).value;
            ASSERT_THAT(commandBuffers, Not(IsEmpty()));
            auto commandBuffer = std::move(commandBuffers[0]);
            ASSERT_THAT(commandBuffer, IsValidHandle());

            auto transferFence = device->createFenceUnique(vkhpp::FenceCreateInfo()).value;
            ASSERT_THAT(transferFence, IsValidHandle());

            constexpr const int kNumIterations = 1000;
            for (int i = 0; i < kNumIterations; i++) {
                commandBuffer->reset();
                const vkhpp::CommandBufferBeginInfo commandBufferBeginInfo = {
                    .flags = vkhpp::CommandBufferUsageFlagBits::eOneTimeSubmit,
                };
                commandBuffer->begin(commandBufferBeginInfo);
                commandBuffer->end();

                std::vector<vkhpp::CommandBuffer> commandBufferHandles;
                commandBufferHandles.push_back(*commandBuffer);

                const vkhpp::SubmitInfo submitInfo = {
                    .commandBufferCount = static_cast<uint32_t>(commandBufferHandles.size()),
                    .pCommandBuffers = commandBufferHandles.data(),
                };
                {
                    std::lock_guard<std::mutex> qm(queue_mutex);
                    queue.submit(submitInfo, *transferFence);
                }
                auto waitResult = device->waitForFences(*transferFence, VK_TRUE, AsVkTimeout(3s));
                ASSERT_THAT(waitResult, IsVkSuccess());
            }
        });
    }

    for (auto& thread : threads) {
        thread.join();
    }
}

std::vector<TestParams> GenerateTestCases() {
    std::vector<TestParams> cases = {
        TestParams{
            .with_gl = false,
            .with_vk = true,
            .with_transport = GfxstreamTransport::kVirtioGpuAsg,
        },
        TestParams{
            .with_gl = true,
            .with_vk = true,
            .with_transport = GfxstreamTransport::kVirtioGpuAsg,
        },
        TestParams{
            .with_gl = false,
            .with_vk = true,
            .with_transport = GfxstreamTransport::kVirtioGpuPipe,
        },
        TestParams{
            .with_gl = true,
            .with_vk = true,
            .with_transport = GfxstreamTransport::kVirtioGpuPipe,
        },
    };
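    // Assuming WithAndWithoutFeatures() duplicates each case with and without the
    // named feature, the two calls below expand the 4 base cases into 16 variants.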
    cases = WithAndWithoutFeatures(cases, {"VulkanSnapshots"});
    cases = WithAndWithoutFeatures(cases, {"VulkanUseDedicatedAhbMemoryType"});
    return cases;
}

INSTANTIATE_TEST_CASE_P(GfxstreamEnd2EndTests, GfxstreamEnd2EndVkTest,
                        ::testing::ValuesIn(GenerateTestCases()), &GetTestName);

}  // namespace
}  // namespace tests
}  // namespace gfxstream