// Copyright 2018 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "VkAndroidNativeBuffer.h"

#include <string.h>

#include <future>

#include "GrallocDefs.h"
#include "SyncThread.h"
#include "VkCommonOperations.h"
#include "VulkanDispatch.h"
#include "cereal/common/goldfish_vk_extension_structs.h"
#include "cereal/common/goldfish_vk_private_defs.h"
#include "host-common/GfxstreamFatalError.h"
#include "stream-servers/FrameBuffer.h"
#include "vulkan/vk_enum_string_helper.h"

#define VK_ANB_ERR(fmt,...) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);

#define ENABLE_VK_ANB_DEBUG 0

#if ENABLE_VK_ANB_DEBUG
#define VK_ANB_DEBUG(fmt,...) fprintf(stderr, "vk-anb-debug: %s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
#define VK_ANB_DEBUG_OBJ(obj, fmt,...) fprintf(stderr, "vk-anb-debug: %s:%d:%p " fmt "\n", __func__, __LINE__, obj, ##__VA_ARGS__);
#else
#define VK_ANB_DEBUG(fmt,...)
#define VK_ANB_DEBUG_OBJ(obj, fmt,...)
#endif

using android::base::AutoLock;
using android::base::Lock;
using emugl::ABORT_REASON_OTHER;
using emugl::FatalError;

namespace goldfish_vk {

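// Pool of VkFences used to wait for vkQueueSignalReleaseImageANDROID (QSRI)
// submissions. Fences are recycled: a fence handed out by getFenceFromPool()
// is tracked as "in use" until the caller gives it back via returnFence().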
AndroidNativeBufferInfo::QsriWaitFencePool::QsriWaitFencePool(VulkanDispatch* vk, VkDevice device)
    : mVk(vk), mDevice(device) {}

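// Returns a fence that is ready to submit: either a freshly created one (when
// the pool is empty) or a recycled fence that has just been reset. The fence
// must be handed back with returnFence() once the wait on it has completed.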
VkFence AndroidNativeBufferInfo::QsriWaitFencePool::getFenceFromPool() {
    VK_ANB_DEBUG("enter");
    AutoLock lock(mLock);
    VkFence fence = VK_NULL_HANDLE;
    if (mAvailableFences.empty()) {
        VkFenceCreateInfo fenceCreateInfo = {
            VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, 0, 0,
        };
        mVk->vkCreateFence(mDevice, &fenceCreateInfo, nullptr, &fence);
        VK_ANB_DEBUG("no fences in pool, created %p", fence);
    } else {
        fence = mAvailableFences.back();
        mAvailableFences.pop_back();
        VkResult res = mVk->vkResetFences(mDevice, 1, &fence);
        if (res != VK_SUCCESS) {
            GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                << "Fail to reset Qsri VkFence: " << res << "(" << string_VkResult(res) << ").";
        }
        VK_ANB_DEBUG("existing fence in pool: %p. also reset the fence", fence);
    }
    mUsedFences.emplace(fence);
    VK_ANB_DEBUG("exit");
    return fence;
}

AndroidNativeBufferInfo::QsriWaitFencePool::~QsriWaitFencePool() {
    VK_ANB_DEBUG("enter");
    // Nothing in the fence pool is unsignaled
    if (!mUsedFences.empty()) {
        VK_ANB_ERR("%zu VkFences are still being used when destroying the Qsri fence pool.",
                   mUsedFences.size());
    }
    for (auto fence : mAvailableFences) {
        VK_ANB_DEBUG("destroy fence %p", fence);
        mVk->vkDestroyFence(mDevice, fence, nullptr);
    }
    VK_ANB_DEBUG("exit");
}

void AndroidNativeBufferInfo::QsriWaitFencePool::returnFence(VkFence fence) {
    AutoLock lock(mLock);
    if (!mUsedFences.erase(fence)) {
        GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
            << "Return an unmanaged Qsri VkFence back to the pool.";
        return;
    }
    mAvailableFences.push_back(fence);
}

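// Returns true if the given VkImageCreateInfo requests an Android native
// buffer backed image, i.e. if the first struct in its pNext chain is a
// VkNativeBufferANDROID. Only the head of the chain is inspected.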
bool parseAndroidNativeBufferInfo(
    const VkImageCreateInfo* pCreateInfo,
    AndroidNativeBufferInfo* info_out) {

    // Look through the extension chain.
    const void* curr_pNext = pCreateInfo->pNext;
    if (!curr_pNext) return false;

    uint32_t structType = goldfish_vk_struct_type(curr_pNext);

    return structType == VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID;
}

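// Creates the host VkImage backing a guest Android native buffer (gralloc)
// allocation. Two paths are supported:
// - If the associated ColorBuffer can be backed by external memory, the image
//   is created with VkExternalMemoryImageCreateInfo and bound to the imported
//   ColorBuffer memory.
// - Otherwise the image gets its own device allocation, and a host-visible
//   staging buffer is set up so contents can be copied back to the
//   ColorBuffer on the CPU.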
VkResult prepareAndroidNativeBufferImage(
    VulkanDispatch* vk,
    VkDevice device,
    const VkImageCreateInfo* pCreateInfo,
    const VkNativeBufferANDROID* nativeBufferANDROID,
    const VkAllocationCallbacks* pAllocator,
    const VkPhysicalDeviceMemoryProperties* memProps,
    AndroidNativeBufferInfo* out) {

    out->vk = vk;
    out->device = device;
    out->vkFormat = pCreateInfo->format;
    out->extent = pCreateInfo->extent;
    out->usage = pCreateInfo->usage;

    for (uint32_t i = 0; i < pCreateInfo->queueFamilyIndexCount; ++i) {
        out->queueFamilyIndices.push_back(
            pCreateInfo->pQueueFamilyIndices[i]);
    }

    out->format = nativeBufferANDROID->format;
    out->stride = nativeBufferANDROID->stride;
    out->colorBufferHandle = *(nativeBufferANDROID->handle);

    bool colorBufferVulkanCompatible =
        isColorBufferVulkanCompatible(out->colorBufferHandle);
    bool externalMemoryCompatible = false;

    auto emu = getGlobalVkEmulation();

    if (emu && emu->live) {
        externalMemoryCompatible =
            emu->deviceInfo.supportsExternalMemory;
    }

    if (colorBufferVulkanCompatible && externalMemoryCompatible &&
        setupVkColorBuffer(out->colorBufferHandle, false /* not Vulkan only */,
                           0u /* memoryProperty */, &out->useVulkanNativeImage)) {
        releaseColorBufferFromHostComposingSync({out->colorBufferHandle});
        out->externallyBacked = true;
    }

    // Strip the Android native buffer struct from the pNext chain before
    // passing the create info to vkCreateImage, and also add transfer src
    // capability to allow us to copy to CPU.
    VkImageCreateInfo infoNoNative = *pCreateInfo;
    infoNoNative.pNext = nullptr;
    infoNoNative.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

    if (out->externallyBacked) {
        // Create the image with extension structure about external backing.
        VkExternalMemoryImageCreateInfo extImageCi = {
            VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, 0,
            VK_EXT_MEMORY_HANDLE_TYPE_BIT,
        };

        infoNoNative.pNext = &extImageCi;

        VkResult createResult =
            vk->vkCreateImage(
                device, &infoNoNative, pAllocator, &out->image);

        if (createResult != VK_SUCCESS) return createResult;

        // Now import the backing memory.
        const auto& cbInfo = getColorBufferInfo(out->colorBufferHandle);
        const auto& memInfo = cbInfo.memory;

        vk->vkGetImageMemoryRequirements(
            device, out->image, &out->memReqs);

        if (out->memReqs.size < memInfo.size) {
            out->memReqs.size = memInfo.size;
        }

        if (!importExternalMemory(vk, device, &memInfo, &out->imageMemory)) {
            fprintf(stderr, "%s: Failed to import external memory\n", __func__);
            return VK_ERROR_INITIALIZATION_FAILED;
        }

    } else {
        VkResult createResult =
            vk->vkCreateImage(
                device, &infoNoNative, pAllocator, &out->image);

        if (createResult != VK_SUCCESS) return createResult;

        vk->vkGetImageMemoryRequirements(
            device, out->image, &out->memReqs);

        uint32_t imageMemoryTypeIndex = 0;
        bool imageMemoryTypeIndexFound = false;

        for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
            bool supported =
                out->memReqs.memoryTypeBits & (1 << i);
            if (supported) {
                imageMemoryTypeIndex = i;
                imageMemoryTypeIndexFound = true;
                break;
            }
        }

        if (!imageMemoryTypeIndexFound) {
            VK_ANB_ERR("VK_ANDROID_native_buffer: could not obtain "
                       "image memory type index");
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }

        out->imageMemoryTypeIndex = imageMemoryTypeIndex;

        VkMemoryAllocateInfo allocInfo = {
            VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, 0,
            out->memReqs.size,
            out->imageMemoryTypeIndex,
        };

        if (VK_SUCCESS !=
            vk->vkAllocateMemory(
                device, &allocInfo, nullptr,
                &out->imageMemory)) {
            VK_ANB_ERR("VK_ANDROID_native_buffer: could not allocate "
                       "image memory. requested size: %zu", (size_t)(out->memReqs.size));
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    if (VK_SUCCESS !=
        vk->vkBindImageMemory(
            device, out->image, out->imageMemory, 0)) {
        VK_ANB_ERR("VK_ANDROID_native_buffer: could not bind "
                   "image memory.");
        teardownAndroidNativeBufferImage(vk, out);
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    // Allocate a staging memory and set up the staging buffer.
    // TODO: Make this shared as well if we can get that to
    // work on Windows with NVIDIA.
    {
        bool stagingIndexRes =
            getStagingMemoryTypeIndex(
                vk, device, memProps, &out->stagingMemoryTypeIndex);

        if (!stagingIndexRes) {
            VK_ANB_ERR(
                "VK_ANDROID_native_buffer: could not obtain "
                "staging memory type index");
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        VkMemoryAllocateInfo allocInfo = {
            VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, 0,
            out->memReqs.size,
            out->stagingMemoryTypeIndex,
        };

        VkResult res = vk->vkAllocateMemory(device, &allocInfo, nullptr,
                                            &out->stagingMemory);
        if (VK_SUCCESS != res) {
            VK_ANB_ERR(
                "VK_ANDROID_native_buffer: could not allocate staging memory. "
                "res = %d. requested size: %zu",
                (int)res, (size_t)(out->memReqs.size));
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        VkBufferCreateInfo stagingBufferCreateInfo = {
            VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
            0,
            0,
            out->memReqs.size,
            VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
            VK_SHARING_MODE_EXCLUSIVE,
            0,
            nullptr,
        };
        if (out->queueFamilyIndices.size() > 1) {
            stagingBufferCreateInfo.sharingMode = VK_SHARING_MODE_CONCURRENT;
            stagingBufferCreateInfo.queueFamilyIndexCount =
                static_cast<uint32_t>(out->queueFamilyIndices.size());
            stagingBufferCreateInfo.pQueueFamilyIndices =
                out->queueFamilyIndices.data();
        }

        if (VK_SUCCESS !=
            vk->vkCreateBuffer(
                device, &stagingBufferCreateInfo, nullptr,
                &out->stagingBuffer)) {
            VK_ANB_ERR("VK_ANDROID_native_buffer: could not create "
                       "staging buffer.");
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        if (VK_SUCCESS !=
            vk->vkBindBufferMemory(
                device, out->stagingBuffer, out->stagingMemory, 0)) {
            VK_ANB_ERR("VK_ANDROID_native_buffer: could not bind "
                       "staging buffer to staging memory.");
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        if (VK_SUCCESS !=
            vk->vkMapMemory(
                device, out->stagingMemory, 0,
                out->memReqs.size, 0,
                (void**)&out->mappedStagingPtr)) {
            VK_ANB_ERR("VK_ANDROID_native_buffer: could not map "
                       "staging buffer.");
            teardownAndroidNativeBufferImage(vk, out);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }
    }

    out->qsriWaitFencePool =
        std::make_unique<AndroidNativeBufferInfo::QsriWaitFencePool>(out->vk, out->device);
    out->qsriTimeline = std::make_unique<VkQsriTimeline>();
    return VK_SUCCESS;
}

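// Releases everything created by prepareAndroidNativeBufferImage: the image,
// its memory, the staging buffer and memory (unmapping it first), and the
// per-queue-family command state, then clears the handles in anbInfo.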
void teardownAndroidNativeBufferImage(
    VulkanDispatch* vk, AndroidNativeBufferInfo* anbInfo) {
    auto device = anbInfo->device;

    auto image = anbInfo->image;
    auto imageMemory = anbInfo->imageMemory;

    auto stagingBuffer = anbInfo->stagingBuffer;
    auto mappedPtr = anbInfo->mappedStagingPtr;
    auto stagingMemory = anbInfo->stagingMemory;

    if (image) vk->vkDestroyImage(device, image, nullptr);
    if (imageMemory) vk->vkFreeMemory(device, imageMemory, nullptr);
    if (stagingBuffer) vk->vkDestroyBuffer(device, stagingBuffer, nullptr);
    if (mappedPtr) vk->vkUnmapMemory(device, stagingMemory);
    if (stagingMemory) vk->vkFreeMemory(device, stagingMemory, nullptr);

    for (auto queueState : anbInfo->queueStates) {
        queueState.teardown(vk, device);
    }

    anbInfo->queueStates.clear();

    anbInfo->acquireQueueState.teardown(vk, device);

    anbInfo->vk = nullptr;
    anbInfo->device = VK_NULL_HANDLE;
    anbInfo->image = VK_NULL_HANDLE;
    anbInfo->imageMemory = VK_NULL_HANDLE;
    anbInfo->stagingBuffer = VK_NULL_HANDLE;
    anbInfo->mappedStagingPtr = nullptr;
    anbInfo->stagingMemory = VK_NULL_HANDLE;

    anbInfo->qsriWaitFencePool = nullptr;
}

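// Translates the Vulkan format/usage of a swapchain image into gralloc0 usage
// bits for the guest. Currently the inputs are ignored and a fixed, flexible
// set of flags is returned.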
void getGralloc0Usage(VkFormat format, VkImageUsageFlags imageUsage,
                      int* usage_out) {
    // Pick some default flexible values for gralloc usage for now.
    (void)format;
    (void)imageUsage;
    *usage_out =
        GRALLOC_USAGE_SW_READ_OFTEN |
        GRALLOC_USAGE_SW_WRITE_OFTEN |
        GRALLOC_USAGE_HW_RENDER |
        GRALLOC_USAGE_HW_TEXTURE;
}

// Taken from Android GrallocUsageConversion.h
void getGralloc1Usage(VkFormat format, VkImageUsageFlags imageUsage,
                      VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
                      uint64_t* consumerUsage_out,
                      uint64_t* producerUsage_out) {
    // Pick some default flexible values for gralloc usage for now.
    (void)format;
    (void)imageUsage;
    (void)swapchainImageUsage;

    constexpr int usage =
        GRALLOC_USAGE_SW_READ_OFTEN |
        GRALLOC_USAGE_SW_WRITE_OFTEN |
        GRALLOC_USAGE_HW_RENDER |
        GRALLOC_USAGE_HW_TEXTURE;

    constexpr uint64_t PRODUCER_MASK =
        GRALLOC1_PRODUCER_USAGE_CPU_READ |
        /* GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN | */
        GRALLOC1_PRODUCER_USAGE_CPU_WRITE |
        /* GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN | */
        GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET |
        GRALLOC1_PRODUCER_USAGE_PROTECTED |
        GRALLOC1_PRODUCER_USAGE_CAMERA |
        GRALLOC1_PRODUCER_USAGE_VIDEO_DECODER |
        GRALLOC1_PRODUCER_USAGE_SENSOR_DIRECT_DATA;
    constexpr uint64_t CONSUMER_MASK =
        GRALLOC1_CONSUMER_USAGE_CPU_READ |
        /* GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN | */
        GRALLOC1_CONSUMER_USAGE_GPU_TEXTURE |
        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER |
        GRALLOC1_CONSUMER_USAGE_CLIENT_TARGET |
        GRALLOC1_CONSUMER_USAGE_CURSOR |
        GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER |
        GRALLOC1_CONSUMER_USAGE_CAMERA |
        GRALLOC1_CONSUMER_USAGE_RENDERSCRIPT |
        GRALLOC1_CONSUMER_USAGE_GPU_DATA_BUFFER;

    *producerUsage_out = static_cast<uint64_t>(usage) & PRODUCER_MASK;
    *consumerUsage_out = static_cast<uint64_t>(usage) & CONSUMER_MASK;

    if ((static_cast<uint32_t>(usage) & GRALLOC_USAGE_SW_READ_OFTEN) ==
        GRALLOC_USAGE_SW_READ_OFTEN) {
        *producerUsage_out |= GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN;
        *consumerUsage_out |= GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN;
    }

    if ((static_cast<uint32_t>(usage) & GRALLOC_USAGE_SW_WRITE_OFTEN) ==
        GRALLOC_USAGE_SW_WRITE_OFTEN) {
        *producerUsage_out |= GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN;
    }
}

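// Lazily initializes the per-queue-family state for this native buffer: a
// command pool, two primary command buffers (cb for the QSRI copy/transition
// work, cb2 for the acquire-side barrier), and a fence.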
void AndroidNativeBufferInfo::QueueState::setup(
    VulkanDispatch* vk,
    VkDevice device,
    VkQueue queueIn,
    uint32_t queueFamilyIndexIn,
    android::base::Lock* queueLockIn) {

    queue = queueIn;
    queueFamilyIndex = queueFamilyIndexIn;
    lock = queueLockIn;

    VkCommandPoolCreateInfo poolCreateInfo = {
        VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, 0,
        VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
        queueFamilyIndex,
    };

    vk->vkCreateCommandPool(
        device,
        &poolCreateInfo,
        nullptr,
        &pool);

    VkCommandBufferAllocateInfo cbAllocInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0,
        pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1,
    };

    vk->vkAllocateCommandBuffers(
        device,
        &cbAllocInfo,
        &cb);

    vk->vkAllocateCommandBuffers(
        device,
        &cbAllocInfo,
        &cb2);

    VkFenceCreateInfo fenceCreateInfo = {
        VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, 0, 0,
    };

    vk->vkCreateFence(
        device,
        &fenceCreateInfo,
        nullptr,
        &fence);
}

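// Waits for the queue to go idle (if one was set up), then destroys the
// command buffer, command pool, and fence, and clears the cached handles.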
void AndroidNativeBufferInfo::QueueState::teardown(
    VulkanDispatch* vk, VkDevice device) {

    if (queue) {
        AutoLock qlock(*lock);
        vk->vkQueueWaitIdle(queue);
    }
    if (cb) vk->vkFreeCommandBuffers(device, pool, 1, &cb);
    if (pool) vk->vkDestroyCommandPool(device, pool, nullptr);
    if (fence) vk->vkDestroyFence(device, fence, nullptr);

    lock = nullptr;
    queue = VK_NULL_HANDLE;
    pool = VK_NULL_HANDLE;
    cb = VK_NULL_HANDLE;
    fence = VK_NULL_HANDLE;
    queueFamilyIndex = 0;
}

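// Signals the given acquire semaphore/fence for an Android native buffer
// image. On the very first acquire this is a plain empty submission on the
// default queue. Afterwards, if the image is used as a Vulkan native image, a
// queue-family acquire barrier (recorded on cb2) is submitted on the
// last-used queue; otherwise an empty submission on that queue signals them.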
VkResult setAndroidNativeImageSemaphoreSignaled(
    VulkanDispatch* vk,
    VkDevice device,
    VkQueue defaultQueue,
    uint32_t defaultQueueFamilyIndex,
    Lock* defaultQueueLock,
    VkSemaphore semaphore,
    VkFence fence,
    AndroidNativeBufferInfo* anbInfo) {

    auto fb = FrameBuffer::getFB();

    bool firstTimeSetup =
        !anbInfo->everSynced &&
        !anbInfo->everAcquired;

    anbInfo->everAcquired = true;

    if (firstTimeSetup) {
        VkSubmitInfo submitInfo = {
            VK_STRUCTURE_TYPE_SUBMIT_INFO, 0,
            0, nullptr, nullptr,
            0, nullptr,
            (uint32_t)(semaphore == VK_NULL_HANDLE ? 0 : 1),
            semaphore == VK_NULL_HANDLE ? nullptr : &semaphore,
        };
        AutoLock qlock(*defaultQueueLock);
        vk->vkQueueSubmit(defaultQueue, 1, &submitInfo, fence);
    } else {

        const AndroidNativeBufferInfo::QueueState& queueState =
            anbInfo->queueStates[anbInfo->lastUsedQueueFamilyIndex];

        // If we used the Vulkan image without copying it back
        // to the CPU, reset the layout to PRESENT.
        if (anbInfo->useVulkanNativeImage) {
            fb->setColorBufferInUse(anbInfo->colorBufferHandle, true);

            VkCommandBufferBeginInfo beginInfo = {
                VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
                0,
                VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
                nullptr /* no inheritance info */,
            };

            vk->vkBeginCommandBuffer(queueState.cb2, &beginInfo);

            VkImageMemoryBarrier queueTransferBarrier = {
                .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                .srcAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
                .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
                .oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
                .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
                .srcQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL,
                .dstQueueFamilyIndex = anbInfo->lastUsedQueueFamilyIndex,
                .image = anbInfo->image,
                .subresourceRange =
                    {
                        VK_IMAGE_ASPECT_COLOR_BIT,
                        0,
                        1,
                        0,
                        1,
                    },
            };
            vk->vkCmdPipelineBarrier(queueState.cb2, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                     VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr,
                                     1, &queueTransferBarrier);
            vk->vkEndCommandBuffer(queueState.cb2);

            VkSubmitInfo submitInfo = {
                VK_STRUCTURE_TYPE_SUBMIT_INFO,
                0,
                0,
                nullptr,
                nullptr,
                1,
                &queueState.cb2,
                (uint32_t)(semaphore == VK_NULL_HANDLE ? 0 : 1),
                semaphore == VK_NULL_HANDLE ? nullptr : &semaphore,
            };

            AutoLock qlock(*queueState.lock);
            // TODO(kaiyili): initiate ownership transfer from DisplayVk here
            vk->vkQueueSubmit(queueState.queue, 1, &submitInfo, fence);
        } else {
            const AndroidNativeBufferInfo::QueueState&
                queueState = anbInfo->queueStates[anbInfo->lastUsedQueueFamilyIndex];
            VkSubmitInfo submitInfo = {
                VK_STRUCTURE_TYPE_SUBMIT_INFO, 0,
                0, nullptr, nullptr,
                0, nullptr,
                (uint32_t)(semaphore == VK_NULL_HANDLE ? 0 : 1),
                semaphore == VK_NULL_HANDLE ? nullptr : &semaphore,
            };
            AutoLock qlock(*queueState.lock);
            vk->vkQueueSubmit(queueState.queue, 1, &submitInfo, fence);
        }
    }

    return VK_SUCCESS;
}

static constexpr uint64_t kTimeoutNs = 3ULL * 1000000000ULL;

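// Handles the QSRI (vkQueueSignalReleaseImageANDROID) path for a native
// buffer image: records the release barrier (Vulkan native image path) or a
// copy into the staging buffer (CPU path), submits it with the guest's wait
// semaphores, and waits on a pooled fence. The wait runs asynchronously on
// the sync thread for the native image path; for the CPU path it runs inline
// and the staging contents are copied back into the ColorBuffer.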
VkResult syncImageToColorBuffer(
    VulkanDispatch* vk,
    uint32_t queueFamilyIndex,
    VkQueue queue,
    Lock* queueLock,
    uint32_t waitSemaphoreCount,
    const VkSemaphore* pWaitSemaphores,
    int* pNativeFenceFd,
    std::shared_ptr<AndroidNativeBufferInfo> anbInfo) {

    auto anbInfoPtr = anbInfo.get();
    auto fb = FrameBuffer::getFB();
    fb->lock();

    // Implicitly synchronized
    *pNativeFenceFd = -1;

    anbInfo->everSynced = true;
    anbInfo->lastUsedQueueFamilyIndex = queueFamilyIndex;

    // Setup queue state for this queue family index.
    if (queueFamilyIndex >= anbInfo->queueStates.size()) {
        anbInfo->queueStates.resize(queueFamilyIndex + 1);
    }

    auto& queueState = anbInfo->queueStates[queueFamilyIndex];

    if (!queueState.queue) {
        queueState.setup(
            vk, anbInfo->device, queue, queueFamilyIndex, queueLock);
    }

    // Record our synchronization commands.
    VkCommandBufferBeginInfo beginInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, 0,
        VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
        nullptr /* no inheritance info */,
    };

    vk->vkBeginCommandBuffer(queueState.cb, &beginInfo);

    // If using the Vulkan image directly (rather than copying it back to
    // the CPU), change its layout for that use.
    if (anbInfo->useVulkanNativeImage) {
        VkImageMemoryBarrier queueTransferBarrier = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
            .srcAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
            .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
            .oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
            .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
            .srcQueueFamilyIndex = queueFamilyIndex,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL,
            .image = anbInfo->image,
            .subresourceRange =
                {
                    VK_IMAGE_ASPECT_COLOR_BIT,
                    0,
                    1,
                    0,
                    1,
                },
        };
        vk->vkCmdPipelineBarrier(queueState.cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
                                 &queueTransferBarrier);

    } else {
        // Not a GL texture. Read it back and put it back in present layout.

        // From the spec: If an application does not need the contents of a resource
        // to remain valid when transferring from one queue family to another, then
        // the ownership transfer should be skipped.
        // We definitely need to transition the image to
        // VK_TRANSFER_SRC_OPTIMAL and back.
        VkImageMemoryBarrier presentToTransferSrc = {
            VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, 0,
            0,
            VK_ACCESS_HOST_READ_BIT,
            VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
            VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
            VK_QUEUE_FAMILY_IGNORED,
            VK_QUEUE_FAMILY_IGNORED,
            anbInfo->image,
            {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0, 1, 0, 1,
            },
        };

        vk->vkCmdPipelineBarrier(
            queueState.cb,
            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
            0,
            0, nullptr,
            0, nullptr,
            1, &presentToTransferSrc);

        VkBufferImageCopy region = {
            0 /* buffer offset */,
            anbInfo->extent.width,
            anbInfo->extent.height,
            {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0, 0, 1,
            },
            { 0, 0, 0 },
            anbInfo->extent,
        };

        vk->vkCmdCopyImageToBuffer(
            queueState.cb,
            anbInfo->image,
            VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
            anbInfo->stagingBuffer,
            1, &region);

        // Transfer back to present src.
        VkImageMemoryBarrier backToPresentSrc = {
            VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, 0,
            VK_ACCESS_HOST_READ_BIT,
            0,
            VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
            VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
            VK_QUEUE_FAMILY_IGNORED,
            VK_QUEUE_FAMILY_IGNORED,
            anbInfo->image,
            {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0, 1, 0, 1,
            },
        };

        vk->vkCmdPipelineBarrier(
            queueState.cb,
            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
            0,
            0, nullptr,
            0, nullptr,
            1, &backToPresentSrc);

    }

    vk->vkEndCommandBuffer(queueState.cb);

    std::vector<VkPipelineStageFlags> pipelineStageFlags;
    pipelineStageFlags.resize(waitSemaphoreCount, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);

    VkSubmitInfo submitInfo = {
        VK_STRUCTURE_TYPE_SUBMIT_INFO, 0,
        waitSemaphoreCount, pWaitSemaphores,
        pipelineStageFlags.data(),
        1, &queueState.cb,
        0, nullptr,
    };

    // TODO(kaiyili): initiate ownership transfer to DisplayVk here.
    VkFence qsriFence = anbInfo->qsriWaitFencePool->getFenceFromPool();
    AutoLock qLock(*queueLock);
    vk->vkQueueSubmit(queueState.queue, 1, &submitInfo, qsriFence);
    auto waitForQsriFenceTask = [anbInfoPtr, anbInfo, vk, device = anbInfo->device, qsriFence] {
        (void)anbInfoPtr;
        VK_ANB_DEBUG_OBJ(anbInfoPtr, "wait callback: enter");
        VK_ANB_DEBUG_OBJ(anbInfoPtr, "wait callback: wait for fence %p...", qsriFence);
        VkResult res = vk->vkWaitForFences(device, 1, &qsriFence, VK_FALSE, kTimeoutNs);
        switch (res) {
            case VK_SUCCESS:
                break;
            case VK_TIMEOUT:
                VK_ANB_ERR("Timeout when waiting for the Qsri fence.");
                break;
            default:
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "Fail to wait for the Qsri VkFence: " << res << "(" << string_VkResult(res)
                    << ").";
        }
        VK_ANB_DEBUG_OBJ(anbInfoPtr, "wait callback: wait for fence %p...(done)", qsriFence);
        anbInfo->qsriWaitFencePool->returnFence(qsriFence);
    };
    fb->unlock();

    if (anbInfo->useVulkanNativeImage) {
        VK_ANB_DEBUG_OBJ(anbInfoPtr, "using native image, so use sync thread to wait");
        fb->setColorBufferInUse(anbInfo->colorBufferHandle, false);
        // Queue wait to sync thread with completion callback
        // Pass anbInfo by value to get a ref
        SyncThread::get()->triggerGeneral(
            [waitForQsriFenceTask = std::move(waitForQsriFenceTask), anbInfo]() mutable {
                waitForQsriFenceTask();
                anbInfo->qsriTimeline->signalNextPresentAndPoll();
            },
            "wait for the guest Qsri VkFence signaled");
    } else {
        VK_ANB_DEBUG_OBJ(anbInfoPtr, "not using native image, so wait right away");
        waitForQsriFenceTask();

        VkMappedMemoryRange toInvalidate = {
            VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, 0,
            anbInfo->stagingMemory,
            0, VK_WHOLE_SIZE,
        };

        vk->vkInvalidateMappedMemoryRanges(
            anbInfo->device, 1, &toInvalidate);

        uint32_t colorBufferHandle = anbInfo->colorBufferHandle;
        // Copy from the staging buffer to the color buffer.
        uint32_t bpp = 4; /* format always rgba8...not */
        switch (anbInfo->vkFormat) {
            case VK_FORMAT_R5G6B5_UNORM_PACK16:
                bpp = 2;
                break;
            case VK_FORMAT_R8G8B8_UNORM:
                bpp = 3;
                break;
            default:
            case VK_FORMAT_R8G8B8A8_UNORM:
            case VK_FORMAT_B8G8R8A8_UNORM:
                bpp = 4;
                break;
        }

        FrameBuffer::getFB()->
            replaceColorBufferContents(
                colorBufferHandle,
                anbInfo->mappedStagingPtr,
                bpp * anbInfo->extent.width * anbInfo->extent.height);
        anbInfo->qsriTimeline->signalNextPresentAndPoll();
    }

    return VK_SUCCESS;
}

} // namespace goldfish_vk