1 // Copyright 2018 The Android Open Source Project
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 // http://www.apache.org/licenses/LICENSE-2.0
7 //
8 // Unless required by applicable law or agreed to in writing, software
9 // distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 // See the License for the specific language governing permissions and
12 // limitations under the License.
13 #include "VkAndroidNativeBuffer.h"
14
15 #include <string.h>
16
17 #include <future>
18
19 #include "GrallocDefs.h"
20 #include "SyncThread.h"
21 #include "VkCommonOperations.h"
22 #include "VulkanDispatch.h"
23 #include "cereal/common/goldfish_vk_deepcopy.h"
24 #include "cereal/common/goldfish_vk_extension_structs.h"
25 #include "cereal/common/goldfish_vk_private_defs.h"
26 #include "host-common/GfxstreamFatalError.h"
27 #include "stream-servers/FrameBuffer.h"
28 #include "vulkan/vk_enum_string_helper.h"
29
30 namespace gfxstream {
31 namespace vk {
32
// Logs an error with the current function name and line number.
// Wrapped in do/while(0) so the macro expands to exactly one statement:
// the previous definition carried its own trailing ';', which made every
// use expand to two statements (a hazard in unbraced if/else).
#define VK_ANB_ERR(fmt, ...)                                                       \
    do {                                                                           \
        fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);     \
    } while (0)
34
35 #define ENABLE_VK_ANB_DEBUG 0
36
37 #if ENABLE_VK_ANB_DEBUG
38 #define VK_ANB_DEBUG(fmt, ...) \
39 fprintf(stderr, "vk-anb-debug: %s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
40 #define VK_ANB_DEBUG_OBJ(obj, fmt, ...) \
41 fprintf(stderr, "vk-anb-debug: %s:%d:%p " fmt "\n", __func__, __LINE__, obj, ##__VA_ARGS__);
42 #else
43 #define VK_ANB_DEBUG(fmt, ...)
44 #define VK_ANB_DEBUG_OBJ(obj, fmt, ...)
45 #endif
46
47 using android::base::AutoLock;
48 using android::base::Lock;
49 using emugl::ABORT_REASON_OTHER;
50 using emugl::FatalError;
51
// Binds the fence pool to the dispatch table and device that will be used to
// create, reset, wait on, and destroy the pooled VkFences.
AndroidNativeBufferInfo::QsriWaitFencePool::QsriWaitFencePool(VulkanDispatch* vk, VkDevice device)
    : mVk(vk), mDevice(device) {}
54
getFenceFromPool()55 VkFence AndroidNativeBufferInfo::QsriWaitFencePool::getFenceFromPool() {
56 VK_ANB_DEBUG("enter");
57 AutoLock lock(mLock);
58 VkFence fence = VK_NULL_HANDLE;
59 if (mAvailableFences.empty()) {
60 VkFenceCreateInfo fenceCreateInfo = {
61 VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
62 0,
63 0,
64 };
65 mVk->vkCreateFence(mDevice, &fenceCreateInfo, nullptr, &fence);
66 VK_ANB_DEBUG("no fences in pool, created %p", fence);
67 } else {
68 fence = mAvailableFences.back();
69 mAvailableFences.pop_back();
70 VkResult res = mVk->vkResetFences(mDevice, 1, &fence);
71 if (res != VK_SUCCESS) {
72 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
73 << "Fail to reset Qsri VkFence: " << res << "(" << string_VkResult(res) << ").";
74 }
75 VK_ANB_DEBUG("existing fence in pool: %p. also reset the fence", fence);
76 }
77 mUsedFences.emplace(fence);
78 VK_ANB_DEBUG("exit");
79 return fence;
80 }
81
~QsriWaitFencePool()82 AndroidNativeBufferInfo::QsriWaitFencePool::~QsriWaitFencePool() {
83 VK_ANB_DEBUG("enter");
84 // Nothing in the fence pool is unsignaled
85 if (!mUsedFences.empty()) {
86 VK_ANB_ERR("%zu VkFences are still being used when destroying the Qsri fence pool.",
87 mUsedFences.size());
88 }
89 for (auto fence : mAvailableFences) {
90 VK_ANB_DEBUG("destroy fence %p", fence);
91 mVk->vkDestroyFence(mDevice, fence, nullptr);
92 }
93 VK_ANB_DEBUG("exit");
94 }
95
returnFence(VkFence fence)96 void AndroidNativeBufferInfo::QsriWaitFencePool::returnFence(VkFence fence) {
97 AutoLock lock(mLock);
98 if (!mUsedFences.erase(fence)) {
99 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
100 << "Return an unmanaged Qsri VkFence back to the pool.";
101 return;
102 }
103 mAvailableFences.push_back(fence);
104 }
105
parseAndroidNativeBufferInfo(const VkImageCreateInfo * pCreateInfo,AndroidNativeBufferInfo * info_out)106 bool parseAndroidNativeBufferInfo(const VkImageCreateInfo* pCreateInfo,
107 AndroidNativeBufferInfo* info_out) {
108 // Look through the extension chain.
109 const void* curr_pNext = pCreateInfo->pNext;
110 if (!curr_pNext) return false;
111
112 uint32_t structType = goldfish_vk_struct_type(curr_pNext);
113
114 return structType == VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID;
115 }
116
prepareAndroidNativeBufferImage(VulkanDispatch * vk,VkDevice device,android::base::BumpPool & allocator,const VkImageCreateInfo * pCreateInfo,const VkNativeBufferANDROID * nativeBufferANDROID,const VkAllocationCallbacks * pAllocator,const VkPhysicalDeviceMemoryProperties * memProps,AndroidNativeBufferInfo * out)117 VkResult prepareAndroidNativeBufferImage(VulkanDispatch* vk, VkDevice device,
118 android::base::BumpPool& allocator,
119 const VkImageCreateInfo* pCreateInfo,
120 const VkNativeBufferANDROID* nativeBufferANDROID,
121 const VkAllocationCallbacks* pAllocator,
122 const VkPhysicalDeviceMemoryProperties* memProps,
123 AndroidNativeBufferInfo* out) {
124 out->vk = vk;
125 out->device = device;
126 out->vkFormat = pCreateInfo->format;
127 out->extent = pCreateInfo->extent;
128 out->usage = pCreateInfo->usage;
129
130 for (uint32_t i = 0; i < pCreateInfo->queueFamilyIndexCount; ++i) {
131 out->queueFamilyIndices.push_back(pCreateInfo->pQueueFamilyIndices[i]);
132 }
133
134 out->format = nativeBufferANDROID->format;
135 out->stride = nativeBufferANDROID->stride;
136 out->colorBufferHandle = *(nativeBufferANDROID->handle);
137
138 bool externalMemoryCompatible = false;
139
140 auto emu = getGlobalVkEmulation();
141
142 if (emu && emu->live) {
143 externalMemoryCompatible = emu->deviceInfo.supportsExternalMemory;
144 }
145
146 bool colorBufferExportedToGl = false;
147 if (!isColorBufferExportedToGl(out->colorBufferHandle, &colorBufferExportedToGl)) {
148 VK_ANB_ERR("Failed to query if ColorBuffer:%d exported to GL.", out->colorBufferHandle);
149 return VK_ERROR_INITIALIZATION_FAILED;
150 }
151
152 if (externalMemoryCompatible) {
153 releaseColorBufferForGuestUse(out->colorBufferHandle);
154 out->externallyBacked = true;
155 }
156
157 out->useVulkanNativeImage =
158 (emu && emu->live && emu->guestUsesAngle) || colorBufferExportedToGl;
159
160 VkDeviceSize bindOffset = 0;
161 if (out->externallyBacked) {
162 VkImageCreateInfo createImageCi;
163 deepcopy_VkImageCreateInfo(&allocator, VK_STRUCTURE_TYPE_MAX_ENUM, pCreateInfo,
164 &createImageCi);
165 auto* nativeBufferAndroid = vk_find_struct<VkNativeBufferANDROID>(&createImageCi);
166 if (!nativeBufferAndroid) {
167 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
168 << "VkNativeBufferANDROID is required to be included in the pNext chain of the "
169 "VkImageCreateInfo when importing a gralloc buffer.";
170 }
171 vk_struct_chain_remove(nativeBufferAndroid, &createImageCi);
172
173 if (vk_find_struct<VkExternalMemoryImageCreateInfo>(&createImageCi)) {
174 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
175 << "Unhandled VkExternalMemoryImageCreateInfo in the pNext chain.";
176 }
177 // Create the image with extension structure about external backing.
178 VkExternalMemoryImageCreateInfo extImageCi = {
179 VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
180 0,
181 VK_EXT_MEMORY_HANDLE_TYPE_BIT,
182 };
183
184 vk_insert_struct(createImageCi, extImageCi);
185
186 VkResult createResult = vk->vkCreateImage(device, &createImageCi, pAllocator, &out->image);
187
188 if (createResult != VK_SUCCESS) return createResult;
189
190 // Now import the backing memory.
191 const auto& cbInfo = getColorBufferInfo(out->colorBufferHandle);
192 const auto& memInfo = cbInfo.memory;
193
194 vk->vkGetImageMemoryRequirements(device, out->image, &out->memReqs);
195
196 if (out->memReqs.size < memInfo.size) {
197 out->memReqs.size = memInfo.size;
198 }
199
200 if (memInfo.dedicatedAllocation) {
201 if (!importExternalMemoryDedicatedImage(vk, device, &memInfo, out->image,
202 &out->imageMemory)) {
203 VK_ANB_ERR(
204 "VK_ANDROID_native_buffer: Failed to import external memory (dedicated)");
205 return VK_ERROR_INITIALIZATION_FAILED;
206 }
207 } else {
208 if (!importExternalMemory(vk, device, &memInfo, &out->imageMemory)) {
209 VK_ANB_ERR("VK_ANDROID_native_buffer: Failed to import external memory");
210 return VK_ERROR_INITIALIZATION_FAILED;
211 }
212 }
213
214 bindOffset = memInfo.bindOffset;
215 } else {
216 // delete the info struct and pass to vkCreateImage, and also add
217 // transfer src capability to allow us to copy to CPU.
218 VkImageCreateInfo infoNoNative = *pCreateInfo;
219 infoNoNative.pNext = nullptr;
220 infoNoNative.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
221 VkResult createResult = vk->vkCreateImage(device, &infoNoNative, pAllocator, &out->image);
222
223 if (createResult != VK_SUCCESS) return createResult;
224
225 vk->vkGetImageMemoryRequirements(device, out->image, &out->memReqs);
226
227 uint32_t imageMemoryTypeIndex = 0;
228 bool imageMemoryTypeIndexFound = false;
229
230 for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
231 bool supported = out->memReqs.memoryTypeBits & (1 << i);
232 if (supported) {
233 imageMemoryTypeIndex = i;
234 imageMemoryTypeIndexFound = true;
235 break;
236 }
237 }
238
239 if (!imageMemoryTypeIndexFound) {
240 VK_ANB_ERR(
241 "VK_ANDROID_native_buffer: could not obtain "
242 "image memory type index");
243 teardownAndroidNativeBufferImage(vk, out);
244 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
245 }
246
247 out->imageMemoryTypeIndex = imageMemoryTypeIndex;
248
249 VkMemoryAllocateInfo allocInfo = {
250 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
251 0,
252 out->memReqs.size,
253 out->imageMemoryTypeIndex,
254 };
255
256 if (VK_SUCCESS != vk->vkAllocateMemory(device, &allocInfo, nullptr, &out->imageMemory)) {
257 VK_ANB_ERR(
258 "VK_ANDROID_native_buffer: could not allocate "
259 "image memory. requested size: %zu",
260 (size_t)(out->memReqs.size));
261 teardownAndroidNativeBufferImage(vk, out);
262 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
263 }
264 }
265
266 if (VK_SUCCESS != vk->vkBindImageMemory(device, out->image, out->imageMemory, bindOffset)) {
267 VK_ANB_ERR(
268 "VK_ANDROID_native_buffer: could not bind "
269 "image memory.");
270 teardownAndroidNativeBufferImage(vk, out);
271 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
272 }
273
274 // Allocate a staging memory and set up the staging buffer.
275 // TODO: Make this shared as well if we can get that to
276 // work on Windows with NVIDIA.
277 {
278 bool stagingIndexRes =
279 getStagingMemoryTypeIndex(vk, device, memProps, &out->stagingMemoryTypeIndex);
280
281 if (!stagingIndexRes) {
282 VK_ANB_ERR(
283 "VK_ANDROID_native_buffer: could not obtain "
284 "staging memory type index");
285 teardownAndroidNativeBufferImage(vk, out);
286 return VK_ERROR_OUT_OF_HOST_MEMORY;
287 }
288
289 VkMemoryAllocateInfo allocInfo = {
290 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
291 0,
292 out->memReqs.size,
293 out->stagingMemoryTypeIndex,
294 };
295
296 VkResult res = vk->vkAllocateMemory(device, &allocInfo, nullptr, &out->stagingMemory);
297 if (VK_SUCCESS != res) {
298 VK_ANB_ERR(
299 "VK_ANDROID_native_buffer: could not allocate staging memory. "
300 "res = %d. requested size: %zu",
301 (int)res, (size_t)(out->memReqs.size));
302 teardownAndroidNativeBufferImage(vk, out);
303 return VK_ERROR_OUT_OF_HOST_MEMORY;
304 }
305
306 VkBufferCreateInfo stagingBufferCreateInfo = {
307 VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
308 0,
309 0,
310 out->memReqs.size,
311 VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
312 VK_SHARING_MODE_EXCLUSIVE,
313 0,
314 nullptr,
315 };
316 if (out->queueFamilyIndices.size() > 1) {
317 stagingBufferCreateInfo.sharingMode = VK_SHARING_MODE_CONCURRENT;
318 stagingBufferCreateInfo.queueFamilyIndexCount =
319 static_cast<uint32_t>(out->queueFamilyIndices.size());
320 stagingBufferCreateInfo.pQueueFamilyIndices = out->queueFamilyIndices.data();
321 }
322
323 if (VK_SUCCESS !=
324 vk->vkCreateBuffer(device, &stagingBufferCreateInfo, nullptr, &out->stagingBuffer)) {
325 VK_ANB_ERR(
326 "VK_ANDROID_native_buffer: could not create "
327 "staging buffer.");
328 teardownAndroidNativeBufferImage(vk, out);
329 return VK_ERROR_OUT_OF_HOST_MEMORY;
330 }
331
332 if (VK_SUCCESS !=
333 vk->vkBindBufferMemory(device, out->stagingBuffer, out->stagingMemory, 0)) {
334 VK_ANB_ERR(
335 "VK_ANDROID_native_buffer: could not bind "
336 "staging buffer to staging memory.");
337 teardownAndroidNativeBufferImage(vk, out);
338 return VK_ERROR_OUT_OF_HOST_MEMORY;
339 }
340
341 if (VK_SUCCESS != vk->vkMapMemory(device, out->stagingMemory, 0, out->memReqs.size, 0,
342 (void**)&out->mappedStagingPtr)) {
343 VK_ANB_ERR(
344 "VK_ANDROID_native_buffer: could not map "
345 "staging buffer.");
346 teardownAndroidNativeBufferImage(vk, out);
347 return VK_ERROR_OUT_OF_HOST_MEMORY;
348 }
349 }
350
351 out->qsriWaitFencePool =
352 std::make_unique<AndroidNativeBufferInfo::QsriWaitFencePool>(out->vk, out->device);
353 out->qsriTimeline = std::make_unique<VkQsriTimeline>();
354 return VK_SUCCESS;
355 }
356
teardownAndroidNativeBufferImage(VulkanDispatch * vk,AndroidNativeBufferInfo * anbInfo)357 void teardownAndroidNativeBufferImage(VulkanDispatch* vk, AndroidNativeBufferInfo* anbInfo) {
358 auto device = anbInfo->device;
359
360 auto image = anbInfo->image;
361 auto imageMemory = anbInfo->imageMemory;
362
363 auto stagingBuffer = anbInfo->stagingBuffer;
364 auto mappedPtr = anbInfo->mappedStagingPtr;
365 auto stagingMemory = anbInfo->stagingMemory;
366
367 if (image) vk->vkDestroyImage(device, image, nullptr);
368 if (imageMemory) vk->vkFreeMemory(device, imageMemory, nullptr);
369 if (stagingBuffer) vk->vkDestroyBuffer(device, stagingBuffer, nullptr);
370 if (mappedPtr) vk->vkUnmapMemory(device, stagingMemory);
371 if (stagingMemory) vk->vkFreeMemory(device, stagingMemory, nullptr);
372
373 for (auto queueState : anbInfo->queueStates) {
374 queueState.teardown(vk, device);
375 }
376
377 anbInfo->queueStates.clear();
378
379 anbInfo->acquireQueueState.teardown(vk, device);
380
381 anbInfo->vk = nullptr;
382 anbInfo->device = VK_NULL_HANDLE;
383 anbInfo->image = VK_NULL_HANDLE;
384 anbInfo->imageMemory = VK_NULL_HANDLE;
385 anbInfo->stagingBuffer = VK_NULL_HANDLE;
386 anbInfo->mappedStagingPtr = nullptr;
387 anbInfo->stagingMemory = VK_NULL_HANDLE;
388
389 anbInfo->qsriWaitFencePool = nullptr;
390 }
391
getGralloc0Usage(VkFormat format,VkImageUsageFlags imageUsage,int * usage_out)392 void getGralloc0Usage(VkFormat format, VkImageUsageFlags imageUsage, int* usage_out) {
393 // Pick some default flexible values for gralloc usage for now.
394 (void)format;
395 (void)imageUsage;
396 *usage_out = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
397 GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;
398 }
399
400 // Taken from Android GrallocUsageConversion.h
// Translates the default gralloc0 usage into split gralloc1 producer/consumer
// usage flags. The Vulkan format/usage/swapchain inputs are currently ignored;
// the conversion always starts from the same flexible gralloc0 bits.
void getGralloc1Usage(VkFormat format, VkImageUsageFlags imageUsage,
                      VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
                      uint64_t* consumerUsage_out, uint64_t* producerUsage_out) {
    // Pick some default flexible values for gralloc usage for now.
    (void)format;
    (void)imageUsage;
    (void)swapchainImageUsage;

    constexpr int usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
                          GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;

    // Masks of the gralloc1 bits whose values coincide with their gralloc0
    // counterparts; the *_OFTEN bits are excluded and handled separately below.
    constexpr uint64_t PRODUCER_MASK =
        GRALLOC1_PRODUCER_USAGE_CPU_READ |
        /* GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN | */
        GRALLOC1_PRODUCER_USAGE_CPU_WRITE |
        /* GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN | */
        GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET | GRALLOC1_PRODUCER_USAGE_PROTECTED |
        GRALLOC1_PRODUCER_USAGE_CAMERA | GRALLOC1_PRODUCER_USAGE_VIDEO_DECODER |
        GRALLOC1_PRODUCER_USAGE_SENSOR_DIRECT_DATA;
    constexpr uint64_t CONSUMER_MASK =
        GRALLOC1_CONSUMER_USAGE_CPU_READ |
        /* GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN | */
        GRALLOC1_CONSUMER_USAGE_GPU_TEXTURE | GRALLOC1_CONSUMER_USAGE_HWCOMPOSER |
        GRALLOC1_CONSUMER_USAGE_CLIENT_TARGET | GRALLOC1_CONSUMER_USAGE_CURSOR |
        GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER | GRALLOC1_CONSUMER_USAGE_CAMERA |
        GRALLOC1_CONSUMER_USAGE_RENDERSCRIPT | GRALLOC1_CONSUMER_USAGE_GPU_DATA_BUFFER;

    *producerUsage_out = static_cast<uint64_t>(usage) & PRODUCER_MASK;
    *consumerUsage_out = static_cast<uint64_t>(usage) & CONSUMER_MASK;

    // Full mask-equality tests (not mere non-zero checks) are used because
    // the *_OFTEN gralloc0 flags may span multiple bits — TODO confirm.
    if ((static_cast<uint32_t>(usage) & GRALLOC_USAGE_SW_READ_OFTEN) ==
        GRALLOC_USAGE_SW_READ_OFTEN) {
        *producerUsage_out |= GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN;
        *consumerUsage_out |= GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN;
    }

    if ((static_cast<uint32_t>(usage) & GRALLOC_USAGE_SW_WRITE_OFTEN) ==
        GRALLOC_USAGE_SW_WRITE_OFTEN) {
        *producerUsage_out |= GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN;
    }
}
442
// Lazily initializes per-queue-family state: records the queue, its family
// index and its lock, then creates a command pool, two primary command
// buffers (cb is used by syncImageToColorBuffer, cb2 by
// setAndroidNativeImageSemaphoreSignaled), and an unsignaled fence.
// NOTE(review): the VkResults of the create/allocate calls below are
// discarded; a failure would only surface later as null handles — confirm
// this is intentional.
void AndroidNativeBufferInfo::QueueState::setup(VulkanDispatch* vk, VkDevice device,
                                                VkQueue queueIn, uint32_t queueFamilyIndexIn,
                                                android::base::Lock* queueLockIn) {
    queue = queueIn;
    queueFamilyIndex = queueFamilyIndexIn;
    lock = queueLockIn;

    VkCommandPoolCreateInfo poolCreateInfo = {
        VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        0,
        VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
        queueFamilyIndex,
    };

    vk->vkCreateCommandPool(device, &poolCreateInfo, nullptr, &pool);

    VkCommandBufferAllocateInfo cbAllocInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0, pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1,
    };

    // Both command buffers come from the same pool, one allocation each.
    vk->vkAllocateCommandBuffers(device, &cbAllocInfo, &cb);

    vk->vkAllocateCommandBuffers(device, &cbAllocInfo, &cb2);

    VkFenceCreateInfo fenceCreateInfo = {
        VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
        0,
        0,
    };

    vk->vkCreateFence(device, &fenceCreateInfo, nullptr, &fence);
}
475
teardown(VulkanDispatch * vk,VkDevice device)476 void AndroidNativeBufferInfo::QueueState::teardown(VulkanDispatch* vk, VkDevice device) {
477 if (queue) {
478 AutoLock qlock(*lock);
479 vk->vkQueueWaitIdle(queue);
480 }
481 if (cb) vk->vkFreeCommandBuffers(device, pool, 1, &cb);
482 if (pool) vk->vkDestroyCommandPool(device, pool, nullptr);
483 if (fence) vk->vkDestroyFence(device, fence, nullptr);
484
485 lock = nullptr;
486 queue = VK_NULL_HANDLE;
487 pool = VK_NULL_HANDLE;
488 cb = VK_NULL_HANDLE;
489 fence = VK_NULL_HANDLE;
490 queueFamilyIndex = 0;
491 }
492
// Signals |semaphore| and |fence| for the guest's acquire of this native
// buffer image. On first use (never synced/acquired) an empty submit on the
// default queue is enough. Afterwards, when the Vulkan image is used
// natively, a queue-family ownership transfer (EXTERNAL -> last used family)
// is recorded on cb2 and submitted together with the signal; otherwise an
// empty submit on the last used queue signals them.
// Always returns VK_SUCCESS (submit failures abort via VK_CHECK).
VkResult setAndroidNativeImageSemaphoreSignaled(VulkanDispatch* vk, VkDevice device,
                                                VkQueue defaultQueue,
                                                uint32_t defaultQueueFamilyIndex,
                                                Lock* defaultQueueLock, VkSemaphore semaphore,
                                                VkFence fence, AndroidNativeBufferInfo* anbInfo) {
    auto fb = FrameBuffer::getFB();  // NOTE(review): unused in this function

    bool firstTimeSetup = !anbInfo->everSynced && !anbInfo->everAcquired;

    anbInfo->everAcquired = true;

    if (firstTimeSetup) {
        // Nothing has touched the image yet: an empty submit that signals the
        // semaphore/fence on the default queue suffices.
        VkSubmitInfo submitInfo = {
            VK_STRUCTURE_TYPE_SUBMIT_INFO,
            0,
            0,
            nullptr,
            nullptr,
            0,
            nullptr,
            (uint32_t)(semaphore == VK_NULL_HANDLE ? 0 : 1),
            semaphore == VK_NULL_HANDLE ? nullptr : &semaphore,
        };
        AutoLock qlock(*defaultQueueLock);
        VK_CHECK(vk->vkQueueSubmit(defaultQueue, 1, &submitInfo, fence));
    } else {
        const AndroidNativeBufferInfo::QueueState& queueState =
            anbInfo->queueStates[anbInfo->lastUsedQueueFamilyIndex];

        // If we used the Vulkan image without copying it back
        // to the CPU, reset the layout to PRESENT.
        if (anbInfo->useVulkanNativeImage) {
            VkCommandBufferBeginInfo beginInfo = {
                VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
                0,
                VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
                nullptr /* no inheritance info */,
            };

            vk->vkBeginCommandBuffer(queueState.cb2, &beginInfo);

            // Acquire ownership back from the external queue family; layout
            // stays PRESENT_SRC on both sides.
            VkImageMemoryBarrier queueTransferBarrier = {
                .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                .srcAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
                .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
                .oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
                .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
                .srcQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL,
                .dstQueueFamilyIndex = anbInfo->lastUsedQueueFamilyIndex,
                .image = anbInfo->image,
                .subresourceRange =
                    {
                        VK_IMAGE_ASPECT_COLOR_BIT,
                        0,
                        1,
                        0,
                        1,
                    },
            };
            vk->vkCmdPipelineBarrier(queueState.cb2, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                     VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr,
                                     1, &queueTransferBarrier);
            vk->vkEndCommandBuffer(queueState.cb2);

            VkSubmitInfo submitInfo = {
                VK_STRUCTURE_TYPE_SUBMIT_INFO,
                0,
                0,
                nullptr,
                nullptr,
                1,
                &queueState.cb2,
                (uint32_t)(semaphore == VK_NULL_HANDLE ? 0 : 1),
                semaphore == VK_NULL_HANDLE ? nullptr : &semaphore,
            };

            AutoLock qlock(*queueState.lock);
            // TODO(kaiyili): initiate ownership transfer from DisplayVk here
            VK_CHECK(vk->vkQueueSubmit(queueState.queue, 1, &submitInfo, fence));
        } else {
            // Image contents were already copied back to the CPU; only an
            // empty signaling submit is needed.
            // NOTE(review): this declaration shadows the outer |queueState|.
            const AndroidNativeBufferInfo::QueueState& queueState =
                anbInfo->queueStates[anbInfo->lastUsedQueueFamilyIndex];
            VkSubmitInfo submitInfo = {
                VK_STRUCTURE_TYPE_SUBMIT_INFO,
                0,
                0,
                nullptr,
                nullptr,
                0,
                nullptr,
                (uint32_t)(semaphore == VK_NULL_HANDLE ? 0 : 1),
                semaphore == VK_NULL_HANDLE ? nullptr : &semaphore,
            };
            AutoLock qlock(*queueState.lock);
            VK_CHECK(vk->vkQueueSubmit(queueState.queue, 1, &submitInfo, fence));
        }
    }

    return VK_SUCCESS;
}
593
594 static constexpr uint64_t kTimeoutNs = 3ULL * 1000000000ULL;
595
syncImageToColorBuffer(VulkanDispatch * vk,uint32_t queueFamilyIndex,VkQueue queue,Lock * queueLock,uint32_t waitSemaphoreCount,const VkSemaphore * pWaitSemaphores,int * pNativeFenceFd,std::shared_ptr<AndroidNativeBufferInfo> anbInfo)596 VkResult syncImageToColorBuffer(VulkanDispatch* vk, uint32_t queueFamilyIndex, VkQueue queue,
597 Lock* queueLock, uint32_t waitSemaphoreCount,
598 const VkSemaphore* pWaitSemaphores, int* pNativeFenceFd,
599 std::shared_ptr<AndroidNativeBufferInfo> anbInfo) {
600 auto anbInfoPtr = anbInfo.get();
601 auto fb = FrameBuffer::getFB();
602 fb->lock();
603
604 // Implicitly synchronized
605 *pNativeFenceFd = -1;
606
607 anbInfo->everSynced = true;
608 anbInfo->lastUsedQueueFamilyIndex = queueFamilyIndex;
609
610 // Setup queue state for this queue family index.
611 if (queueFamilyIndex >= anbInfo->queueStates.size()) {
612 anbInfo->queueStates.resize(queueFamilyIndex + 1);
613 }
614
615 auto& queueState = anbInfo->queueStates[queueFamilyIndex];
616
617 if (!queueState.queue) {
618 queueState.setup(vk, anbInfo->device, queue, queueFamilyIndex, queueLock);
619 }
620
621 // Record our synchronization commands.
622 VkCommandBufferBeginInfo beginInfo = {
623 VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
624 0,
625 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
626 nullptr /* no inheritance info */,
627 };
628
629 vk->vkBeginCommandBuffer(queueState.cb, &beginInfo);
630
631 // If using the Vulkan image directly (rather than copying it back to
632 // the CPU), change its layout for that use.
633 if (anbInfo->useVulkanNativeImage) {
634 VkImageMemoryBarrier queueTransferBarrier = {
635 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
636 .srcAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
637 .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
638 .oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
639 .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
640 .srcQueueFamilyIndex = queueFamilyIndex,
641 .dstQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL,
642 .image = anbInfo->image,
643 .subresourceRange =
644 {
645 VK_IMAGE_ASPECT_COLOR_BIT,
646 0,
647 1,
648 0,
649 1,
650 },
651 };
652 vk->vkCmdPipelineBarrier(queueState.cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
653 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
654 &queueTransferBarrier);
655
656 } else {
657 // Not a GL texture. Read it back and put it back in present layout.
658
659 // From the spec: If an application does not need the contents of a resource
660 // to remain valid when transferring from one queue family to another, then
661 // the ownership transfer should be skipped.
662 // We definitely need to transition the image to
663 // VK_TRANSFER_SRC_OPTIMAL and back.
664 VkImageMemoryBarrier presentToTransferSrc = {
665 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
666 0,
667 0,
668 VK_ACCESS_TRANSFER_READ_BIT,
669 VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
670 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
671 VK_QUEUE_FAMILY_IGNORED,
672 VK_QUEUE_FAMILY_IGNORED,
673 anbInfo->image,
674 {
675 VK_IMAGE_ASPECT_COLOR_BIT,
676 0,
677 1,
678 0,
679 1,
680 },
681 };
682
683 vk->vkCmdPipelineBarrier(queueState.cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
684 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
685 &presentToTransferSrc);
686
687 VkBufferImageCopy region = {
688 0 /* buffer offset */,
689 anbInfo->extent.width,
690 anbInfo->extent.height,
691 {
692 VK_IMAGE_ASPECT_COLOR_BIT,
693 0,
694 0,
695 1,
696 },
697 {0, 0, 0},
698 anbInfo->extent,
699 };
700
701 vk->vkCmdCopyImageToBuffer(queueState.cb, anbInfo->image,
702 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, anbInfo->stagingBuffer, 1,
703 ®ion);
704
705 // Transfer back to present src.
706 VkImageMemoryBarrier backToPresentSrc = {
707 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
708 0,
709 VK_ACCESS_TRANSFER_READ_BIT,
710 0,
711 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
712 VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
713 VK_QUEUE_FAMILY_IGNORED,
714 VK_QUEUE_FAMILY_IGNORED,
715 anbInfo->image,
716 {
717 VK_IMAGE_ASPECT_COLOR_BIT,
718 0,
719 1,
720 0,
721 1,
722 },
723 };
724
725 vk->vkCmdPipelineBarrier(queueState.cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
726 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
727 &backToPresentSrc);
728 }
729
730 vk->vkEndCommandBuffer(queueState.cb);
731
732 std::vector<VkPipelineStageFlags> pipelineStageFlags;
733 pipelineStageFlags.resize(waitSemaphoreCount, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
734
735 VkSubmitInfo submitInfo = {
736 VK_STRUCTURE_TYPE_SUBMIT_INFO,
737 0,
738 waitSemaphoreCount,
739 pWaitSemaphores,
740 pipelineStageFlags.data(),
741 1,
742 &queueState.cb,
743 0,
744 nullptr,
745 };
746
747 // TODO(kaiyili): initiate ownership transfer to DisplayVk here.
748 VkFence qsriFence = anbInfo->qsriWaitFencePool->getFenceFromPool();
749 AutoLock qLock(*queueLock);
750 VK_CHECK(vk->vkQueueSubmit(queueState.queue, 1, &submitInfo, qsriFence));
751 auto waitForQsriFenceTask = [anbInfoPtr, anbInfo, vk, device = anbInfo->device, qsriFence] {
752 (void)anbInfoPtr;
753 VK_ANB_DEBUG_OBJ(anbInfoPtr, "wait callback: enter");
754 VK_ANB_DEBUG_OBJ(anbInfoPtr, "wait callback: wait for fence %p...", qsriFence);
755 VkResult res = vk->vkWaitForFences(device, 1, &qsriFence, VK_FALSE, kTimeoutNs);
756 switch (res) {
757 case VK_SUCCESS:
758 break;
759 case VK_TIMEOUT:
760 VK_ANB_ERR("Timeout when waiting for the Qsri fence.");
761 break;
762 default:
763 ERR("Failed to wait for QSRI fence: %s\n", string_VkResult(res));
764 VK_CHECK(res);
765 }
766 VK_ANB_DEBUG_OBJ(anbInfoPtr, "wait callback: wait for fence %p...(done)", qsriFence);
767 anbInfo->qsriWaitFencePool->returnFence(qsriFence);
768 };
769 fb->unlock();
770
771 if (anbInfo->useVulkanNativeImage) {
772 VK_ANB_DEBUG_OBJ(anbInfoPtr, "using native image, so use sync thread to wait");
773 // Queue wait to sync thread with completion callback
774 // Pass anbInfo by value to get a ref
775 SyncThread::get()->triggerGeneral(
776 [waitForQsriFenceTask = std::move(waitForQsriFenceTask), anbInfo]() mutable {
777 waitForQsriFenceTask();
778 anbInfo->qsriTimeline->signalNextPresentAndPoll();
779 },
780 "wait for the guest Qsri VkFence signaled");
781 } else {
782 VK_ANB_DEBUG_OBJ(anbInfoPtr, "not using native image, so wait right away");
783 waitForQsriFenceTask();
784
785 VkMappedMemoryRange toInvalidate = {
786 VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, 0, anbInfo->stagingMemory, 0, VK_WHOLE_SIZE,
787 };
788
789 vk->vkInvalidateMappedMemoryRanges(anbInfo->device, 1, &toInvalidate);
790
791 uint32_t colorBufferHandle = anbInfo->colorBufferHandle;
792
793 // Copy to from staging buffer to color buffer
794 uint32_t bpp = 4; /* format always rgba8...not */
795 switch (anbInfo->vkFormat) {
796 case VK_FORMAT_R5G6B5_UNORM_PACK16:
797 bpp = 2;
798 break;
799 case VK_FORMAT_R8G8B8_UNORM:
800 bpp = 3;
801 break;
802 default:
803 case VK_FORMAT_R8G8B8A8_UNORM:
804 case VK_FORMAT_B8G8R8A8_UNORM:
805 bpp = 4;
806 break;
807 }
808
809 FrameBuffer::getFB()->flushColorBufferFromVkBytes(
810 colorBufferHandle, anbInfo->mappedStagingPtr,
811 bpp * anbInfo->extent.width * anbInfo->extent.height);
812 anbInfo->qsriTimeline->signalNextPresentAndPoll();
813 }
814
815 return VK_SUCCESS;
816 }
817
818 } // namespace vk
819 } // namespace gfxstream
820