// Copyright 2018 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "VkAndroidNativeBuffer.h"

#include <string.h>

#include <future>

#include "GrallocDefs.h"
#include "SyncThread.h"
#include "VkCommonOperations.h"
#include "VulkanDispatch.h"
#include "cereal/common/goldfish_vk_deepcopy.h"
#include "cereal/common/goldfish_vk_extension_structs.h"

#include "goldfish_vk_private_defs.h"
#include "host-common/GfxstreamFatalError.h"
#include "host/FrameBuffer.h"
#include "vulkan/vk_enum_string_helper.h"
namespace gfxstream {
namespace vk {

#define VK_ANB_ERR(fmt, ...) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);

#define ENABLE_VK_ANB_DEBUG 0

#if ENABLE_VK_ANB_DEBUG
#define VK_ANB_DEBUG(fmt, ...) \
    fprintf(stderr, "vk-anb-debug: %s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
#define VK_ANB_DEBUG_OBJ(obj, fmt, ...) \
    fprintf(stderr, "vk-anb-debug: %s:%d:%p " fmt "\n", __func__, __LINE__, obj, ##__VA_ARGS__);
#else
#define VK_ANB_DEBUG(fmt, ...)
#define VK_ANB_DEBUG_OBJ(obj, fmt, ...)
#endif

using android::base::AutoLock;
using android::base::Lock;
using emugl::ABORT_REASON_OTHER;
using emugl::FatalError;

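// QsriWaitFencePool recycles the VkFences used to wait on QSRI
// (vkQueueSignalReleaseImageANDROID) submissions, so a fence does not have to
// be created and destroyed for every present.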
AndroidNativeBufferInfo::QsriWaitFencePool::QsriWaitFencePool(VulkanDispatch* vk, VkDevice device)
    : mVk(vk), mDevice(device) {}

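// Returns an unsignaled fence, reusing one from the pool when available and
// creating a new one otherwise. The caller must hand the fence back via
// returnFence() once it has been waited on.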
VkFence AndroidNativeBufferInfo::QsriWaitFencePool::getFenceFromPool() {
    VK_ANB_DEBUG("enter");
    AutoLock lock(mLock);
    VkFence fence = VK_NULL_HANDLE;
    if (mAvailableFences.empty()) {
        VkFenceCreateInfo fenceCreateInfo = {
            VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
            0,
            0,
        };
        mVk->vkCreateFence(mDevice, &fenceCreateInfo, nullptr, &fence);
        VK_ANB_DEBUG("no fences in pool, created %p", fence);
    } else {
        fence = mAvailableFences.back();
        mAvailableFences.pop_back();
        VkResult res = mVk->vkResetFences(mDevice, 1, &fence);
        if (res != VK_SUCCESS) {
            GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                << "Failed to reset Qsri VkFence: " << res << " (" << string_VkResult(res) << ").";
        }
        VK_ANB_DEBUG("existing fence in pool: %p. also reset the fence", fence);
    }
    mUsedFences.emplace(fence);
    VK_ANB_DEBUG("exit");
    return fence;
}

AndroidNativeBufferInfo::QsriWaitFencePool::~QsriWaitFencePool() {
    VK_ANB_DEBUG("enter");
    // Nothing in the fence pool is unsignaled
    if (!mUsedFences.empty()) {
        VK_ANB_ERR("%zu VkFences are still being used when destroying the Qsri fence pool.",
                   mUsedFences.size());
    }
    for (auto fence : mAvailableFences) {
        VK_ANB_DEBUG("destroy fence %p", fence);
        mVk->vkDestroyFence(mDevice, fence, nullptr);
    }
    VK_ANB_DEBUG("exit");
}

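// Puts a fence handed out by getFenceFromPool() back into the pool; aborts if
// the fence is not managed by this pool.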
void AndroidNativeBufferInfo::QsriWaitFencePool::returnFence(VkFence fence) {
    AutoLock lock(mLock);
    if (!mUsedFences.erase(fence)) {
        GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
            << "Attempted to return an unmanaged Qsri VkFence to the pool.";
        return;
    }
    mAvailableFences.push_back(fence);
}

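// Returns true if the image create info carries a VkNativeBufferANDROID
// struct as the first entry of its pNext chain, i.e. the image is backed by
// an Android native buffer (gralloc buffer).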
bool parseAndroidNativeBufferInfo(const VkImageCreateInfo* pCreateInfo,
                                  AndroidNativeBufferInfo* info_out) {
    // Look through the extension chain.
    const void* curr_pNext = pCreateInfo->pNext;
    if (!curr_pNext) return false;

    uint32_t structType = goldfish_vk_struct_type(curr_pNext);

    return structType == VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID;
}

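// Creates the VkImage and memory bindings for an image backed by an Android
// native buffer (VK_ANDROID_native_buffer). When external memory import and
// export are supported, the image is bound directly to the ColorBuffer's
// memory; otherwise a regular device allocation is used and a host-visible
// staging buffer is set up for copying the contents back to the ColorBuffer.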
VkResult prepareAndroidNativeBufferImage(VulkanDispatch* vk, VkDevice device,
                                         android::base::BumpPool& allocator,
                                         const VkImageCreateInfo* pCreateInfo,
                                         const VkNativeBufferANDROID* nativeBufferANDROID,
                                         const VkAllocationCallbacks* pAllocator,
                                         const VkPhysicalDeviceMemoryProperties* memProps,
                                         AndroidNativeBufferInfo* out) {
    out->vk = vk;
    out->device = device;
    out->vkFormat = pCreateInfo->format;
    out->extent = pCreateInfo->extent;
    out->usage = pCreateInfo->usage;

    for (uint32_t i = 0; i < pCreateInfo->queueFamilyIndexCount; ++i) {
        out->queueFamilyIndices.push_back(pCreateInfo->pQueueFamilyIndices[i]);
    }

    out->format = nativeBufferANDROID->format;
    out->stride = nativeBufferANDROID->stride;
    out->colorBufferHandle = *static_cast<const uint32_t*>(nativeBufferANDROID->handle);

    bool externalMemoryCompatible = false;

    auto emu = getGlobalVkEmulation();

    if (emu && emu->live) {
        externalMemoryCompatible = emu->deviceInfo.supportsExternalMemoryImport &&
                                   emu->deviceInfo.supportsExternalMemoryExport;
    }

    bool colorBufferExportedToGl = false;
    if (!isColorBufferExportedToGl(out->colorBufferHandle, &colorBufferExportedToGl)) {
        VK_ANB_ERR("Failed to query if ColorBuffer:%d is exported to GL.", out->colorBufferHandle);
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    if (externalMemoryCompatible) {
        releaseColorBufferForGuestUse(out->colorBufferHandle);
        out->externallyBacked = true;
    }

    out->useVulkanNativeImage =
        (emu && emu->live && emu->guestUsesAngle) || colorBufferExportedToGl;

    VkDeviceSize bindOffset = 0;
    if (out->externallyBacked) {
        VkImageCreateInfo createImageCi;
        deepcopy_VkImageCreateInfo(&allocator, VK_STRUCTURE_TYPE_MAX_ENUM, pCreateInfo,
                                   &createImageCi);
        auto* nativeBufferAndroid = vk_find_struct<VkNativeBufferANDROID>(&createImageCi);
        if (!nativeBufferAndroid) {
            GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                << "VkNativeBufferANDROID is required to be included in the pNext chain of the "
                   "VkImageCreateInfo when importing a gralloc buffer.";
        }
        vk_struct_chain_remove(nativeBufferAndroid, &createImageCi);

        if (vk_find_struct<VkExternalMemoryImageCreateInfo>(&createImageCi)) {
            GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                << "Unhandled VkExternalMemoryImageCreateInfo in the pNext chain.";
        }
        // Create the image with an extension structure describing the external backing.
        VkExternalMemoryImageCreateInfo extImageCi = {
            VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
            0,
            VK_EXT_MEMORY_HANDLE_TYPE_BIT,
        };

        vk_insert_struct(createImageCi, extImageCi);

        VkResult createResult = vk->vkCreateImage(device, &createImageCi, pAllocator, &out->image);

        if (createResult != VK_SUCCESS) return createResult;

        // Now import the backing memory.
        const auto& cbInfo = getColorBufferInfo(out->colorBufferHandle);
        const auto& memInfo = cbInfo.memory;

        vk->vkGetImageMemoryRequirements(device, out->image, &out->memReqs);

        if (out->memReqs.size < memInfo.size) {
            out->memReqs.size = memInfo.size;
        }

        if (memInfo.dedicatedAllocation) {
            if (!importExternalMemoryDedicatedImage(vk, device, &memInfo, out->image,
                                                    &out->imageMemory)) {
                VK_ANB_ERR(
                    "VK_ANDROID_native_buffer: Failed to import external memory (dedicated)");
                return VK_ERROR_INITIALIZATION_FAILED;
            }
        } else {
            if (!importExternalMemory(vk, device, &memInfo, &out->imageMemory)) {
                VK_ANB_ERR("VK_ANDROID_native_buffer: Failed to import external memory");
                return VK_ERROR_INITIALIZATION_FAILED;
            }
        }

        bindOffset = memInfo.bindOffset;
    } else {
        // Remove the VkNativeBufferANDROID info struct before passing the create info
        // to vkCreateImage, and also add transfer src capability to allow us to copy
        // the image contents back to the CPU.
        VkImageCreateInfo infoNoNative = *pCreateInfo;
        infoNoNative.pNext = nullptr;
        infoNoNative.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
        VkResult createResult = vk->vkCreateImage(device, &infoNoNative, pAllocator, &out->image);

        if (createResult != VK_SUCCESS) return createResult;

        vk->vkGetImageMemoryRequirements(device, out->image, &out->memReqs);

        uint32_t imageMemoryTypeIndex = 0;
        bool imageMemoryTypeIndexFound = false;

        for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
            bool supported = out->memReqs.memoryTypeBits & (1 << i);
            if (supported) {
                imageMemoryTypeIndex = i;
                imageMemoryTypeIndexFound = true;
                break;
            }
        }

        if (!imageMemoryTypeIndexFound) {
            VK_ANB_ERR(
                "VK_ANDROID_native_buffer: could not obtain "
                "image memory type index");
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }

        out->imageMemoryTypeIndex = imageMemoryTypeIndex;

        VkMemoryAllocateInfo allocInfo = {
            VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
            0,
            out->memReqs.size,
            out->imageMemoryTypeIndex,
        };

        if (VK_SUCCESS != vk->vkAllocateMemory(device, &allocInfo, nullptr, &out->imageMemory)) {
            VK_ANB_ERR(
                "VK_ANDROID_native_buffer: could not allocate "
                "image memory. requested size: %zu",
                (size_t)(out->memReqs.size));
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    if (VK_SUCCESS != vk->vkBindImageMemory(device, out->image, out->imageMemory, bindOffset)) {
        VK_ANB_ERR(
            "VK_ANDROID_native_buffer: could not bind "
            "image memory.");
        teardownAndroidNativeBufferImage(vk, out);
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    // Allocate a staging memory and set up the staging buffer.
    // TODO: Make this shared as well if we can get that to
    // work on Windows with NVIDIA.
    {
        bool stagingIndexRes =
            getStagingMemoryTypeIndex(vk, device, memProps, &out->stagingMemoryTypeIndex);

        if (!stagingIndexRes) {
            VK_ANB_ERR(
                "VK_ANDROID_native_buffer: could not obtain "
                "staging memory type index");
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        VkMemoryAllocateInfo allocInfo = {
            VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
            0,
            out->memReqs.size,
            out->stagingMemoryTypeIndex,
        };

        VkResult res = vk->vkAllocateMemory(device, &allocInfo, nullptr, &out->stagingMemory);
        if (VK_SUCCESS != res) {
            VK_ANB_ERR(
                "VK_ANDROID_native_buffer: could not allocate staging memory. "
                "res = %d. requested size: %zu",
                (int)res, (size_t)(out->memReqs.size));
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        VkBufferCreateInfo stagingBufferCreateInfo = {
            VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
            0,
            0,
            out->memReqs.size,
            VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
            VK_SHARING_MODE_EXCLUSIVE,
            0,
            nullptr,
        };
        if (out->queueFamilyIndices.size() > 1) {
            stagingBufferCreateInfo.sharingMode = VK_SHARING_MODE_CONCURRENT;
            stagingBufferCreateInfo.queueFamilyIndexCount =
                static_cast<uint32_t>(out->queueFamilyIndices.size());
            stagingBufferCreateInfo.pQueueFamilyIndices = out->queueFamilyIndices.data();
        }

        if (VK_SUCCESS !=
            vk->vkCreateBuffer(device, &stagingBufferCreateInfo, nullptr, &out->stagingBuffer)) {
            VK_ANB_ERR(
                "VK_ANDROID_native_buffer: could not create "
                "staging buffer.");
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        if (VK_SUCCESS !=
            vk->vkBindBufferMemory(device, out->stagingBuffer, out->stagingMemory, 0)) {
            VK_ANB_ERR(
                "VK_ANDROID_native_buffer: could not bind "
                "staging buffer to staging memory.");
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        if (VK_SUCCESS != vk->vkMapMemory(device, out->stagingMemory, 0, out->memReqs.size, 0,
                                          (void**)&out->mappedStagingPtr)) {
            VK_ANB_ERR(
                "VK_ANDROID_native_buffer: could not map "
                "staging buffer.");
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }
    }

    out->qsriWaitFencePool =
        std::make_unique<AndroidNativeBufferInfo::QsriWaitFencePool>(out->vk, out->device);
    out->qsriTimeline = std::make_unique<VkQsriTimeline>();
    return VK_SUCCESS;
}

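// Destroys all Vulkan objects created by prepareAndroidNativeBufferImage and
// resets the AndroidNativeBufferInfo fields so the struct can be reused.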
void teardownAndroidNativeBufferImage(VulkanDispatch* vk, AndroidNativeBufferInfo* anbInfo) {
    auto device = anbInfo->device;

    auto image = anbInfo->image;
    auto imageMemory = anbInfo->imageMemory;

    auto stagingBuffer = anbInfo->stagingBuffer;
    auto mappedPtr = anbInfo->mappedStagingPtr;
    auto stagingMemory = anbInfo->stagingMemory;

    if (image) vk->vkDestroyImage(device, image, nullptr);
    if (imageMemory) vk->vkFreeMemory(device, imageMemory, nullptr);
    if (stagingBuffer) vk->vkDestroyBuffer(device, stagingBuffer, nullptr);
    if (mappedPtr) vk->vkUnmapMemory(device, stagingMemory);
    if (stagingMemory) vk->vkFreeMemory(device, stagingMemory, nullptr);

    for (auto queueState : anbInfo->queueStates) {
        queueState.teardown(vk, device);
    }

    anbInfo->queueStates.clear();

    anbInfo->acquireQueueState.teardown(vk, device);

    anbInfo->vk = nullptr;
    anbInfo->device = VK_NULL_HANDLE;
    anbInfo->image = VK_NULL_HANDLE;
    anbInfo->imageMemory = VK_NULL_HANDLE;
    anbInfo->stagingBuffer = VK_NULL_HANDLE;
    anbInfo->mappedStagingPtr = nullptr;
    anbInfo->stagingMemory = VK_NULL_HANDLE;

    anbInfo->qsriWaitFencePool = nullptr;
}

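// Translates the Vulkan image usage into gralloc0 usage bits. For now this
// ignores the inputs and returns a fixed, flexible set of usage flags.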
void getGralloc0Usage(VkFormat format, VkImageUsageFlags imageUsage, int* usage_out) {
    // Pick some default flexible values for gralloc usage for now.
    (void)format;
    (void)imageUsage;
    *usage_out = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
                 GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;
}

// Taken from Android GrallocUsageConversion.h
void getGralloc1Usage(VkFormat format, VkImageUsageFlags imageUsage,
                      VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
                      uint64_t* consumerUsage_out, uint64_t* producerUsage_out) {
    // Pick some default flexible values for gralloc usage for now.
    (void)format;
    (void)imageUsage;
    (void)swapchainImageUsage;

    constexpr int usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
                          GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;

    constexpr uint64_t PRODUCER_MASK =
        GRALLOC1_PRODUCER_USAGE_CPU_READ |
        /* GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN | */
        GRALLOC1_PRODUCER_USAGE_CPU_WRITE |
        /* GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN | */
        GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET | GRALLOC1_PRODUCER_USAGE_PROTECTED |
        GRALLOC1_PRODUCER_USAGE_CAMERA | GRALLOC1_PRODUCER_USAGE_VIDEO_DECODER |
        GRALLOC1_PRODUCER_USAGE_SENSOR_DIRECT_DATA;
    constexpr uint64_t CONSUMER_MASK =
        GRALLOC1_CONSUMER_USAGE_CPU_READ |
        /* GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN | */
        GRALLOC1_CONSUMER_USAGE_GPU_TEXTURE | GRALLOC1_CONSUMER_USAGE_HWCOMPOSER |
        GRALLOC1_CONSUMER_USAGE_CLIENT_TARGET | GRALLOC1_CONSUMER_USAGE_CURSOR |
        GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER | GRALLOC1_CONSUMER_USAGE_CAMERA |
        GRALLOC1_CONSUMER_USAGE_RENDERSCRIPT | GRALLOC1_CONSUMER_USAGE_GPU_DATA_BUFFER;

    *producerUsage_out = static_cast<uint64_t>(usage) & PRODUCER_MASK;
    *consumerUsage_out = static_cast<uint64_t>(usage) & CONSUMER_MASK;

    if ((static_cast<uint32_t>(usage) & GRALLOC_USAGE_SW_READ_OFTEN) ==
        GRALLOC_USAGE_SW_READ_OFTEN) {
        *producerUsage_out |= GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN;
        *consumerUsage_out |= GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN;
    }

    if ((static_cast<uint32_t>(usage) & GRALLOC_USAGE_SW_WRITE_OFTEN) ==
        GRALLOC_USAGE_SW_WRITE_OFTEN) {
        *producerUsage_out |= GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN;
    }
}

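// Lazily creates the per-queue-family objects (command pool, command buffers,
// fence) used to record the acquire/present synchronization commands.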
void AndroidNativeBufferInfo::QueueState::setup(VulkanDispatch* vk, VkDevice device,
                                                VkQueue queueIn, uint32_t queueFamilyIndexIn,
                                                android::base::Lock* queueLockIn) {
    queue = queueIn;
    queueFamilyIndex = queueFamilyIndexIn;
    lock = queueLockIn;

    VkCommandPoolCreateInfo poolCreateInfo = {
        VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        0,
        VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
        queueFamilyIndex,
    };

    vk->vkCreateCommandPool(device, &poolCreateInfo, nullptr, &pool);

    VkCommandBufferAllocateInfo cbAllocInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0, pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1,
    };

    vk->vkAllocateCommandBuffers(device, &cbAllocInfo, &cb);

    vk->vkAllocateCommandBuffers(device, &cbAllocInfo, &cb2);

    VkFenceCreateInfo fenceCreateInfo = {
        VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
        0,
        0,
    };

    vk->vkCreateFence(device, &fenceCreateInfo, nullptr, &fence);
}

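// Waits for the queue to go idle, then destroys the per-queue-family objects
// and clears the handles.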
void AndroidNativeBufferInfo::QueueState::teardown(VulkanDispatch* vk, VkDevice device) {
    if (queue) {
        AutoLock qlock(*lock);
        vk->vkQueueWaitIdle(queue);
    }
    if (cb) vk->vkFreeCommandBuffers(device, pool, 1, &cb);
    if (pool) vk->vkDestroyCommandPool(device, pool, nullptr);
    if (fence) vk->vkDestroyFence(device, fence, nullptr);

    lock = nullptr;
    queue = VK_NULL_HANDLE;
    pool = VK_NULL_HANDLE;
    cb = VK_NULL_HANDLE;
    fence = VK_NULL_HANDLE;
    queueFamilyIndex = 0;
}

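// Host side of vkAcquireImageANDROID: submits an empty batch that signals the
// acquire semaphore and/or fence. When the Vulkan image is used directly, it
// also records a queue-family ownership transfer back from the external queue.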
VkResult setAndroidNativeImageSemaphoreSignaled(VulkanDispatch* vk, VkDevice device,
                                                VkQueue defaultQueue,
                                                uint32_t defaultQueueFamilyIndex,
                                                Lock* defaultQueueLock, VkSemaphore semaphore,
                                                VkFence fence, AndroidNativeBufferInfo* anbInfo) {
    auto fb = FrameBuffer::getFB();

    bool firstTimeSetup = !anbInfo->everSynced && !anbInfo->everAcquired;

    anbInfo->everAcquired = true;

    if (firstTimeSetup) {
        VkSubmitInfo submitInfo = {
            VK_STRUCTURE_TYPE_SUBMIT_INFO,
            0,
            0,
            nullptr,
            nullptr,
            0,
            nullptr,
            (uint32_t)(semaphore == VK_NULL_HANDLE ? 0 : 1),
            semaphore == VK_NULL_HANDLE ? nullptr : &semaphore,
        };
        AutoLock qlock(*defaultQueueLock);
        VK_CHECK(vk->vkQueueSubmit(defaultQueue, 1, &submitInfo, fence));
    } else {
        // Setup queue state for this queue family index.
        auto queueFamilyIndex = anbInfo->lastUsedQueueFamilyIndex;
        if (queueFamilyIndex >= anbInfo->queueStates.size()) {
            anbInfo->queueStates.resize(queueFamilyIndex + 1);
        }
        AndroidNativeBufferInfo::QueueState& queueState =
            anbInfo->queueStates[queueFamilyIndex];
        if (!queueState.queue) {
            queueState.setup(vk, anbInfo->device, defaultQueue, queueFamilyIndex, defaultQueueLock);
        }

        // If we used the Vulkan image without copying it back
        // to the CPU, reset the layout to PRESENT.
        if (anbInfo->useVulkanNativeImage) {
            VkCommandBufferBeginInfo beginInfo = {
                VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
                0,
                VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
                nullptr /* no inheritance info */,
            };

            vk->vkBeginCommandBuffer(queueState.cb2, &beginInfo);

            VkImageMemoryBarrier queueTransferBarrier = {
                .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                .srcAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
                .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
                .oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
                .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
                .srcQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL,
                .dstQueueFamilyIndex = anbInfo->lastUsedQueueFamilyIndex,
                .image = anbInfo->image,
                .subresourceRange =
                    {
                        VK_IMAGE_ASPECT_COLOR_BIT,
                        0,
                        1,
                        0,
                        1,
                    },
            };
            vk->vkCmdPipelineBarrier(queueState.cb2, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                     VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr,
                                     1, &queueTransferBarrier);
            vk->vkEndCommandBuffer(queueState.cb2);

            VkSubmitInfo submitInfo = {
                VK_STRUCTURE_TYPE_SUBMIT_INFO,
                0,
                0,
                nullptr,
                nullptr,
                1,
                &queueState.cb2,
                (uint32_t)(semaphore == VK_NULL_HANDLE ? 0 : 1),
                semaphore == VK_NULL_HANDLE ? nullptr : &semaphore,
            };

            AutoLock qlock(*queueState.lock);
            // TODO(kaiyili): initiate ownership transfer from DisplayVk here
            VK_CHECK(vk->vkQueueSubmit(queueState.queue, 1, &submitInfo, fence));
        } else {
            const AndroidNativeBufferInfo::QueueState& queueState =
                anbInfo->queueStates[anbInfo->lastUsedQueueFamilyIndex];
            VkSubmitInfo submitInfo = {
                VK_STRUCTURE_TYPE_SUBMIT_INFO,
                0,
                0,
                nullptr,
                nullptr,
                0,
                nullptr,
                (uint32_t)(semaphore == VK_NULL_HANDLE ? 0 : 1),
                semaphore == VK_NULL_HANDLE ? nullptr : &semaphore,
            };
            AutoLock qlock(*queueState.lock);
            VK_CHECK(vk->vkQueueSubmit(queueState.queue, 1, &submitInfo, fence));
        }
    }

    return VK_SUCCESS;
}

static constexpr uint64_t kTimeoutNs = 3ULL * 1000000000ULL;

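// Host side of vkQueueSignalReleaseImageANDROID (QSRI): submits the recorded
// synchronization commands, then either transfers ownership of the native
// Vulkan image to the external queue or copies the image through the staging
// buffer into the ColorBuffer, and signals the QSRI timeline once the
// submission's fence is observed.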
VkResult syncImageToColorBuffer(VulkanDispatch* vk, uint32_t queueFamilyIndex, VkQueue queue,
                                Lock* queueLock, uint32_t waitSemaphoreCount,
                                const VkSemaphore* pWaitSemaphores, int* pNativeFenceFd,
                                std::shared_ptr<AndroidNativeBufferInfo> anbInfo) {
    auto anbInfoPtr = anbInfo.get();
    auto fb = FrameBuffer::getFB();
    fb->lock();

    // Implicitly synchronized
    *pNativeFenceFd = -1;

    anbInfo->everSynced = true;
    anbInfo->lastUsedQueueFamilyIndex = queueFamilyIndex;

    // Setup queue state for this queue family index.
    if (queueFamilyIndex >= anbInfo->queueStates.size()) {
        anbInfo->queueStates.resize(queueFamilyIndex + 1);
    }

    auto& queueState = anbInfo->queueStates[queueFamilyIndex];

    if (!queueState.queue) {
        queueState.setup(vk, anbInfo->device, queue, queueFamilyIndex, queueLock);
    }

    // Record our synchronization commands.
    VkCommandBufferBeginInfo beginInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        0,
        VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
        nullptr /* no inheritance info */,
    };

    vk->vkBeginCommandBuffer(queueState.cb, &beginInfo);

    // If using the Vulkan image directly (rather than copying it back to
    // the CPU), change its layout for that use.
    if (anbInfo->useVulkanNativeImage) {
        VkImageMemoryBarrier queueTransferBarrier = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
            .srcAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
            .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
            .oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
            .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
            .srcQueueFamilyIndex = queueFamilyIndex,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL,
            .image = anbInfo->image,
            .subresourceRange =
                {
                    VK_IMAGE_ASPECT_COLOR_BIT,
                    0,
                    1,
                    0,
                    1,
                },
        };
        vk->vkCmdPipelineBarrier(queueState.cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
                                 &queueTransferBarrier);

    } else {
        // Not a GL texture. Read it back and put it back in present layout.

        // From the spec: If an application does not need the contents of a resource
        // to remain valid when transferring from one queue family to another, then
        // the ownership transfer should be skipped.
        // We definitely need to transition the image to
        // VK_TRANSFER_SRC_OPTIMAL and back.
        VkImageMemoryBarrier presentToTransferSrc = {
            VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
            0,
            0,
            VK_ACCESS_TRANSFER_READ_BIT,
            VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
            VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
            VK_QUEUE_FAMILY_IGNORED,
            VK_QUEUE_FAMILY_IGNORED,
            anbInfo->image,
            {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0,
                1,
                0,
                1,
            },
        };

        vk->vkCmdPipelineBarrier(queueState.cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
                                 &presentToTransferSrc);

        VkBufferImageCopy region = {
            0 /* buffer offset */,
            anbInfo->extent.width,
            anbInfo->extent.height,
            {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0,
                0,
                1,
            },
            {0, 0, 0},
            anbInfo->extent,
        };

        vk->vkCmdCopyImageToBuffer(queueState.cb, anbInfo->image,
                                   VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, anbInfo->stagingBuffer, 1,
                                   &region);

        // Transfer back to present src.
        VkImageMemoryBarrier backToPresentSrc = {
            VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
            0,
            VK_ACCESS_TRANSFER_READ_BIT,
            0,
            VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
            VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
            VK_QUEUE_FAMILY_IGNORED,
            VK_QUEUE_FAMILY_IGNORED,
            anbInfo->image,
            {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0,
                1,
                0,
                1,
            },
        };

        vk->vkCmdPipelineBarrier(queueState.cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
                                 &backToPresentSrc);
    }

    vk->vkEndCommandBuffer(queueState.cb);

    std::vector<VkPipelineStageFlags> pipelineStageFlags;
    pipelineStageFlags.resize(waitSemaphoreCount, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);

    VkSubmitInfo submitInfo = {
        VK_STRUCTURE_TYPE_SUBMIT_INFO,
        0,
        waitSemaphoreCount,
        pWaitSemaphores,
        pipelineStageFlags.data(),
        1,
        &queueState.cb,
        0,
        nullptr,
    };

    // TODO(kaiyili): initiate ownership transfer to DisplayVk here.
    VkFence qsriFence = anbInfo->qsriWaitFencePool->getFenceFromPool();
    AutoLock qLock(*queueLock);
    VK_CHECK(vk->vkQueueSubmit(queueState.queue, 1, &submitInfo, qsriFence));
    auto waitForQsriFenceTask = [anbInfoPtr, anbInfo, vk, device = anbInfo->device, qsriFence] {
        (void)anbInfoPtr;
        VK_ANB_DEBUG_OBJ(anbInfoPtr, "wait callback: enter");
        VK_ANB_DEBUG_OBJ(anbInfoPtr, "wait callback: wait for fence %p...", qsriFence);
        VkResult res = vk->vkWaitForFences(device, 1, &qsriFence, VK_FALSE, kTimeoutNs);
        switch (res) {
            case VK_SUCCESS:
                break;
            case VK_TIMEOUT:
                VK_ANB_ERR("Timed out while waiting for the Qsri fence.");
                break;
            default:
                ERR("Failed to wait for QSRI fence: %s\n", string_VkResult(res));
                VK_CHECK(res);
        }
        VK_ANB_DEBUG_OBJ(anbInfoPtr, "wait callback: wait for fence %p...(done)", qsriFence);
        anbInfo->qsriWaitFencePool->returnFence(qsriFence);
    };
    fb->unlock();

    if (anbInfo->useVulkanNativeImage) {
        VK_ANB_DEBUG_OBJ(anbInfoPtr, "using native image, so use sync thread to wait");
        // Queue the wait on the sync thread with a completion callback.
        // Pass anbInfo by value to keep a reference alive.
        SyncThread::get()->triggerGeneral(
            [waitForQsriFenceTask = std::move(waitForQsriFenceTask), anbInfo]() mutable {
                waitForQsriFenceTask();
                anbInfo->qsriTimeline->signalNextPresentAndPoll();
            },
            "wait for the guest Qsri VkFence signaled");
    } else {
        VK_ANB_DEBUG_OBJ(anbInfoPtr, "not using native image, so wait right away");
        waitForQsriFenceTask();

        VkMappedMemoryRange toInvalidate = {
            VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, 0, anbInfo->stagingMemory, 0, VK_WHOLE_SIZE,
        };

        vk->vkInvalidateMappedMemoryRanges(anbInfo->device, 1, &toInvalidate);

        uint32_t colorBufferHandle = anbInfo->colorBufferHandle;

        // Copy from the staging buffer to the color buffer. The format is not
        // always RGBA8, so pick the bytes per pixel based on the Vulkan format.
        uint32_t bpp = 4;
        switch (anbInfo->vkFormat) {
            case VK_FORMAT_R5G6B5_UNORM_PACK16:
                bpp = 2;
                break;
            case VK_FORMAT_R8G8B8_UNORM:
                bpp = 3;
                break;
            default:
            case VK_FORMAT_R8G8B8A8_UNORM:
            case VK_FORMAT_B8G8R8A8_UNORM:
                bpp = 4;
                break;
        }

        FrameBuffer::getFB()->flushColorBufferFromVkBytes(
            colorBufferHandle, anbInfo->mappedStagingPtr,
            bpp * anbInfo->extent.width * anbInfo->extent.height);
        anbInfo->qsriTimeline->signalNextPresentAndPoll();
    }

    return VK_SUCCESS;
}

} // namespace vk
} // namespace gfxstream