1 /*
2 * Copyright (C) 2021 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "host/libs/graphics_detector/vk.h"
18
19 #include <string>
20 #include <unordered_set>
21 #include <vector>
22
23 #include <android-base/logging.h>
24 #include <android-base/strings.h>
25
26 VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
27
28 namespace cuttlefish {
29 namespace {
30
31 constexpr const bool kEnableValidationLayers = false;
32
VulkanDebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,VkDebugUtilsMessageTypeFlagsEXT,const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData,void *)33 static VKAPI_ATTR VkBool32 VKAPI_CALL VulkanDebugCallback(
34 VkDebugUtilsMessageSeverityFlagBitsEXT severity,
35 VkDebugUtilsMessageTypeFlagsEXT,
36 const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, void*) {
37 if (severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) {
38 LOG(VERBOSE) << pCallbackData->pMessage;
39 } else if (severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) {
40 LOG(INFO) << pCallbackData->pMessage;
41 } else if (severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) {
42 LOG(ERROR) << pCallbackData->pMessage;
43 } else if (severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) {
44 LOG(ERROR) << pCallbackData->pMessage;
45 }
46 return VK_FALSE;
47 }
48
GetMemoryType(const vk::raii::PhysicalDevice & physical_device,uint32_t memory_type_mask,vk::MemoryPropertyFlags memory_properties)49 uint32_t GetMemoryType(const vk::raii::PhysicalDevice& physical_device,
50 uint32_t memory_type_mask,
51 vk::MemoryPropertyFlags memory_properties) {
52 const auto props = physical_device.getMemoryProperties();
53 for (uint32_t i = 0; i < props.memoryTypeCount; i++) {
54 if (!(memory_type_mask & (1 << i))) {
55 continue;
56 }
57 if ((props.memoryTypes[i].propertyFlags & memory_properties) !=
58 memory_properties) {
59 continue;
60 }
61 return i;
62 }
63 return -1;
64 }
65
DoCreateBuffer(const vk::raii::PhysicalDevice & physical_device,const vk::raii::Device & device,vk::DeviceSize buffer_size,vk::BufferUsageFlags buffer_usages,vk::MemoryPropertyFlags buffer_memory_properties)66 VkExpected<Vk::BufferWithMemory> DoCreateBuffer(
67 const vk::raii::PhysicalDevice& physical_device,
68 const vk::raii::Device& device, vk::DeviceSize buffer_size,
69 vk::BufferUsageFlags buffer_usages,
70 vk::MemoryPropertyFlags buffer_memory_properties) {
71 const vk::BufferCreateInfo buffer_create_info = {
72 .size = static_cast<VkDeviceSize>(buffer_size),
73 .usage = buffer_usages,
74 .sharingMode = vk::SharingMode::eExclusive,
75 };
76 auto buffer = VK_EXPECT(vk::raii::Buffer::create(device, buffer_create_info));
77
78 const auto buffer_memory_requirements = buffer.getMemoryRequirements();
79 const auto buffer_memory_type =
80 GetMemoryType(physical_device, buffer_memory_requirements.memoryTypeBits,
81 buffer_memory_properties);
82
83 const vk::MemoryAllocateInfo buffer_memory_allocate_info = {
84 .allocationSize = buffer_memory_requirements.size,
85 .memoryTypeIndex = buffer_memory_type,
86 };
87 auto buffer_memory = VK_EXPECT(
88 vk::raii::DeviceMemory::create(device, buffer_memory_allocate_info));
89
90 buffer.bindMemory(*buffer_memory, 0);
91
92 return Vk::BufferWithMemory{
93 .buffer = std::move(buffer),
94 .buffer_memory = std::move(buffer_memory),
95 };
96 }
97
98 } // namespace
99
100 /*static*/
Load(const std::vector<std::string> & requested_instance_extensions,const std::vector<std::string> & requested_instance_layers,const std::vector<std::string> & requested_device_extensions)101 std::optional<Vk> Vk::Load(
102 const std::vector<std::string>& requested_instance_extensions,
103 const std::vector<std::string>& requested_instance_layers,
104 const std::vector<std::string>& requested_device_extensions) {
105 VkExpected<Vk> vk =
106 LoadImpl(requested_instance_extensions, requested_instance_layers,
107 requested_device_extensions);
108 if (vk.ok()) {
109 return std::move(vk.value());
110 }
111 return std::nullopt;
112 }
113
/*static*/
// Loads the Vulkan library, creates an instance (with the requested layers
// and extensions), selects a physical device, verifies the requested device
// extensions, creates a logical device with a graphics queue, a command pool,
// and the shared staging buffer, and bundles everything into a Vk object.
// Returns the first failing vk::Result on any error.
VkExpected<Vk> Vk::LoadImpl(
    const std::vector<std::string>& requested_instance_extensions,
    const std::vector<std::string>& requested_instance_layers,
    const std::vector<std::string>& requested_device_extensions) {
  // Dynamically load the Vulkan loader and bootstrap the vulkan-hpp default
  // dispatcher from vkGetInstanceProcAddr.
  vk::DynamicLoader loader;
  VULKAN_HPP_DEFAULT_DISPATCHER.init(
      loader.getProcAddress<PFN_vkGetInstanceProcAddr>(
          "vkGetInstanceProcAddr"));

  vk::raii::Context context;

  const auto available_instance_layers =
      context.enumerateInstanceLayerProperties();
  LOG(VERBOSE) << "Available instance layers:";
  for (const vk::LayerProperties& layer : available_instance_layers) {
    LOG(VERBOSE) << layer.layerName;
  }
  LOG(VERBOSE) << "";

  // Build the const char* arrays Vulkan expects. The backing std::strings
  // must outlive instance creation (they do: they are the caller's vectors).
  std::vector<const char*> requested_instance_extensions_chars;
  requested_instance_extensions_chars.reserve(
      requested_instance_extensions.size());
  for (const auto& e : requested_instance_extensions) {
    requested_instance_extensions_chars.push_back(e.c_str());
  }
  if (kEnableValidationLayers) {
    // Debug utils is needed for the validation-layer message callback.
    requested_instance_extensions_chars.push_back(
        VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
  }

  std::vector<const char*> requested_instance_layers_chars;
  requested_instance_layers_chars.reserve(requested_instance_layers.size());
  for (const auto& l : requested_instance_layers) {
    requested_instance_layers_chars.push_back(l.c_str());
  }

  const vk::ApplicationInfo applicationInfo{
      .pApplicationName = "Cuttlefish Graphics Detector",
      .applicationVersion = 1,
      .pEngineName = "Cuttlefish Graphics Detector",
      .engineVersion = 1,
      .apiVersion = VK_API_VERSION_1_2,
  };
  const vk::InstanceCreateInfo instance_create_info{
      .pApplicationInfo = &applicationInfo,
      .enabledLayerCount =
          static_cast<uint32_t>(requested_instance_layers_chars.size()),
      .ppEnabledLayerNames = requested_instance_layers_chars.data(),
      .enabledExtensionCount =
          static_cast<uint32_t>(requested_instance_extensions_chars.size()),
      .ppEnabledExtensionNames = requested_instance_extensions_chars.data(),
  };

  auto instance =
      VK_EXPECT(vk::raii::Instance::create(context, instance_create_info));

  std::optional<vk::raii::DebugUtilsMessengerEXT> debug_messenger;
  if (kEnableValidationLayers) {
    const vk::DebugUtilsMessengerCreateInfoEXT debug_create_info = {
        .messageSeverity = vk::DebugUtilsMessageSeverityFlagBitsEXT::eVerbose |
                           vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
                           vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
        .messageType = vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
                       vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation |
                       vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance,
        .pfnUserCallback = VulkanDebugCallback,
        .pUserData = nullptr,
    };
    debug_messenger = VK_EXPECT(
        vk::raii::DebugUtilsMessengerEXT::create(instance, debug_create_info));
  }

  auto physical_devices =
      VK_EXPECT(vk::raii::PhysicalDevices::create(instance));

  LOG(VERBOSE) << "Available physical devices:";
  for (const auto& physical_device : physical_devices) {
    const auto physical_device_props = physical_device.getProperties();
    LOG(VERBOSE) << physical_device_props.deviceName;
  }
  LOG(VERBOSE) << "";

  // NOTE(review): always selects the first enumerated device; assumes index 0
  // is the device of interest (a detector probing the default GPU).
  vk::raii::PhysicalDevice physical_device = std::move(physical_devices[0]);
  {
    const auto props = physical_device.getProperties();
    LOG(VERBOSE) << "Selected physical device: " << props.deviceName;
    LOG(VERBOSE) << "";
  }

  std::unordered_set<std::string> available_device_extensions;
  {
    const auto exts = physical_device.enumerateDeviceExtensionProperties();
    LOG(VERBOSE) << "Available physical device extensions:";
    for (const auto& ext : exts) {
      LOG(VERBOSE) << ext.extensionName;
      available_device_extensions.emplace(ext.extensionName);
    }
    LOG(VERBOSE) << "";
  }

  const auto features2 =
      physical_device
          .getFeatures2<vk::PhysicalDeviceFeatures2, //
                        vk::PhysicalDeviceSamplerYcbcrConversionFeatures>();

  bool ycbcr_conversion_needed = false;

  std::vector<const char*> requested_device_extensions_chars;
  requested_device_extensions_chars.reserve(requested_device_extensions.size());
  for (const auto& e : requested_device_extensions) {
    if (e == std::string(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
      // The interface of VK_KHR_sampler_ycbcr_conversion was promoted to core
      // in Vulkan 1.1 but the feature/functionality is still optional. Check
      // here:
      const auto& sampler_features =
          features2.get<vk::PhysicalDeviceSamplerYcbcrConversionFeatures>();

      if (sampler_features.samplerYcbcrConversion == VK_FALSE) {
        LOG(VERBOSE) << "Requested device extension " << e
                     << " feature not available.";
        return android::base::unexpected(vk::Result::eErrorExtensionNotPresent);
      }
      // The feature is enabled via PhysicalDeviceVulkan11Features below
      // instead of via the (core-promoted) extension string.
      ycbcr_conversion_needed = true;
    } else {
      if (available_device_extensions.find(e) ==
          available_device_extensions.end()) {
        LOG(VERBOSE) << "Requested device extensions " << e
                     << " not available.";
        return android::base::unexpected(vk::Result::eErrorExtensionNotPresent);
      }
      requested_device_extensions_chars.push_back(e.c_str());
    }
  }

  // Find the first queue family with graphics support. Remains uint32_t(-1)
  // if none exists; device creation would then fail.
  uint32_t queue_family_index = -1;
  {
    const auto props = physical_device.getQueueFamilyProperties();
    for (uint32_t i = 0; i < props.size(); i++) {
      const auto& prop = props[i];
      if (prop.queueFlags & vk::QueueFlagBits::eGraphics) {
        queue_family_index = i;
        break;
      }
    }
  }
  LOG(VERBOSE) << "Graphics queue family index: " << queue_family_index;

  const float queue_priority = 1.0f;
  const vk::DeviceQueueCreateInfo device_queue_create_info = {
      .queueFamilyIndex = queue_family_index,
      .queueCount = 1,
      .pQueuePriorities = &queue_priority,
  };
  const vk::PhysicalDeviceVulkan11Features device_enable_features = {
      .samplerYcbcrConversion = ycbcr_conversion_needed,
  };
  const vk::DeviceCreateInfo device_create_info = {
      .pNext = &device_enable_features,
      .pQueueCreateInfos = &device_queue_create_info,
      .queueCreateInfoCount = 1,
      .enabledLayerCount =
          static_cast<uint32_t>(requested_instance_layers_chars.size()),
      .ppEnabledLayerNames = requested_instance_layers_chars.data(),
      .enabledExtensionCount =
          static_cast<uint32_t>(requested_device_extensions_chars.size()),
      .ppEnabledExtensionNames = requested_device_extensions_chars.data(),
  };
  auto device =
      VK_EXPECT(vk::raii::Device::create(physical_device, device_create_info));
  auto queue = vk::raii::Queue(device, queue_family_index, 0);

  const vk::CommandPoolCreateInfo command_pool_create_info = {
      .queueFamilyIndex = queue_family_index,
  };
  auto command_pool = VK_EXPECT(
      vk::raii::CommandPool::create(device, command_pool_create_info));

  // Shared host-visible staging buffer used by the upload/download helpers.
  auto staging_buffer =
      VK_EXPECT(DoCreateBuffer(physical_device, device, kStagingBufferSize,
                               vk::BufferUsageFlagBits::eTransferDst |
                                   vk::BufferUsageFlagBits::eTransferSrc,
                               vk::MemoryPropertyFlagBits::eHostVisible |
                                   vk::MemoryPropertyFlagBits::eHostCoherent));

  return Vk(std::move(loader), std::move(context), std::move(instance),
            std::move(debug_messenger), std::move(physical_device),
            std::move(device), std::move(queue), queue_family_index,
            std::move(command_pool), std::move(staging_buffer.buffer),
            std::move(staging_buffer.buffer_memory));
}
305
CreateBuffer(vk::DeviceSize buffer_size,vk::BufferUsageFlags buffer_usages,vk::MemoryPropertyFlags buffer_memory_properties)306 VkExpected<Vk::BufferWithMemory> Vk::CreateBuffer(
307 vk::DeviceSize buffer_size, vk::BufferUsageFlags buffer_usages,
308 vk::MemoryPropertyFlags buffer_memory_properties) {
309 return DoCreateBuffer(vk_physical_device, vk_device, buffer_size,
310 buffer_usages, buffer_memory_properties);
311 }
312
CreateBufferWithData(vk::DeviceSize buffer_size,vk::BufferUsageFlags buffer_usages,vk::MemoryPropertyFlags buffer_memory_properties,const uint8_t * buffer_data)313 VkExpected<Vk::BufferWithMemory> Vk::CreateBufferWithData(
314 vk::DeviceSize buffer_size, vk::BufferUsageFlags buffer_usages,
315 vk::MemoryPropertyFlags buffer_memory_properties,
316 const uint8_t* buffer_data) {
317 auto buffer = VK_EXPECT(CreateBuffer(
318 buffer_size, buffer_usages | vk::BufferUsageFlagBits::eTransferDst,
319 buffer_memory_properties));
320
321 void* mapped = vk_staging_buffer_memory_.mapMemory(0, kStagingBufferSize);
322 if (mapped == nullptr) {
323 LOG(FATAL) << "Failed to map staging buffer.";
324 }
325
326 std::memcpy(mapped, buffer_data, buffer_size);
327 vk_staging_buffer_memory_.unmapMemory();
328
329 DoCommandsImmediate([&](vk::raii::CommandBuffer& cmd) {
330 const std::vector<vk::BufferCopy> regions = {
331 vk::BufferCopy{
332 .srcOffset = 0,
333 .dstOffset = 0,
334 .size = buffer_size,
335 },
336 };
337 cmd.copyBuffer(*vk_staging_buffer_, *buffer.buffer, regions);
338 return vk::Result::eSuccess;
339 });
340
341 return std::move(buffer);
342 }
343
// Creates a 2D optimal-tiling image of |width| x |height| with |format| and
// |usages|, allocates and binds backing memory satisfying
// |memory_properties|, creates a color image view, and transitions the image
// from eUndefined to |returned_layout|.
VkExpected<Vk::ImageWithMemory> Vk::CreateImage(
    uint32_t width, uint32_t height, vk::Format format,
    vk::ImageUsageFlags usages, vk::MemoryPropertyFlags memory_properties,
    vk::ImageLayout returned_layout) {
  // NOTE(review): nested designated initializers such as `.extent.width` are
  // a GNU extension rather than standard C++ — confirm all target toolchains
  // accept this.
  const vk::ImageCreateInfo image_create_info = {
      .imageType = vk::ImageType::e2D,
      .extent.width = width,
      .extent.height = height,
      .extent.depth = 1,
      .mipLevels = 1,
      .arrayLayers = 1,
      .format = format,
      .tiling = vk::ImageTiling::eOptimal,
      .initialLayout = vk::ImageLayout::eUndefined,
      .usage = usages,
      .sharingMode = vk::SharingMode::eExclusive,
      .samples = vk::SampleCountFlagBits::e1,
  };
  auto image = VK_EXPECT(vk::raii::Image::create(vk_device, image_create_info));

  vk::MemoryRequirements memory_requirements = image.getMemoryRequirements();
  // GetMemoryType() returns uint32_t(-1) when no matching type exists; the
  // allocation below would then fail and propagate through VK_EXPECT.
  const uint32_t memory_index =
      GetMemoryType(vk_physical_device, memory_requirements.memoryTypeBits,
                    memory_properties);

  const vk::MemoryAllocateInfo image_memory_allocate_info = {
      .allocationSize = memory_requirements.size,
      .memoryTypeIndex = memory_index,
  };
  auto image_memory = VK_EXPECT(
      vk::raii::DeviceMemory::create(vk_device, image_memory_allocate_info));

  image.bindMemory(*image_memory, 0);

  const vk::ImageViewCreateInfo image_view_create_info = {
      .image = *image,
      .viewType = vk::ImageViewType::e2D,
      .format = format,
      .components =
          {
              .r = vk::ComponentSwizzle::eIdentity,
              .g = vk::ComponentSwizzle::eIdentity,
              .b = vk::ComponentSwizzle::eIdentity,
              .a = vk::ComponentSwizzle::eIdentity,
          },
      .subresourceRange =
          {
              .aspectMask = vk::ImageAspectFlagBits::eColor,
              .baseMipLevel = 0,
              .levelCount = 1,
              .baseArrayLayer = 0,
              .layerCount = 1,
          },
  };
  auto image_view =
      VK_EXPECT(vk::raii::ImageView::create(vk_device, image_view_create_info));

  // Transition the whole image into the requested layout using a heavyweight
  // all-commands barrier (fine for a one-shot detection utility).
  VK_ASSERT(DoCommandsImmediate([&](vk::raii::CommandBuffer& command_buffer) {
    const std::vector<vk::ImageMemoryBarrier> image_memory_barriers = {
        vk::ImageMemoryBarrier{
            .oldLayout = vk::ImageLayout::eUndefined,
            .newLayout = returned_layout,
            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .image = *image,
            .subresourceRange =
                {
                    .aspectMask = vk::ImageAspectFlagBits::eColor,
                    .baseMipLevel = 0,
                    .levelCount = 1,
                    .baseArrayLayer = 0,
                    .layerCount = 1,
                },
            .srcAccessMask = {},
            .dstAccessMask = vk::AccessFlagBits::eTransferWrite,
        },
    };
    command_buffer.pipelineBarrier(
        /*srcStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
        /*dstStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
        /*dependencyFlags=*/{},
        /*memoryBarriers=*/{},
        /*bufferMemoryBarriers=*/{},
        /*imageMemoryBarriers=*/image_memory_barriers);

    return vk::Result::eSuccess;
  }));

  return ImageWithMemory{
      .image_memory = std::move(image_memory),
      .image = std::move(image),
      .image_view = std::move(image_view),
  };
}
438
// Reads back the pixels of |image| into |out_pixels| by copying the image to
// the staging buffer on the GPU and then memcpy-ing from the mapped staging
// memory. The image is transitioned from |current_layout| to
// eTransferSrcOptimal for the copy and then to |returned_layout|.
//
// Assumes a 4-bytes-per-pixel format and that width * height * 4 fits in
// kStagingBufferSize — TODO(review): confirm callers guarantee both.
vk::Result Vk::DownloadImage(uint32_t width, uint32_t height,
                             const vk::raii::Image& image,
                             vk::ImageLayout current_layout,
                             vk::ImageLayout returned_layout,
                             std::vector<uint8_t>* out_pixels) {
  VK_RETURN_IF_NOT_SUCCESS(
      DoCommandsImmediate([&](vk::raii::CommandBuffer& command_buffer) {
        // Transition to the transfer-source layout if not already there.
        if (current_layout != vk::ImageLayout::eTransferSrcOptimal) {
          const std::vector<vk::ImageMemoryBarrier> image_memory_barriers = {
              vk::ImageMemoryBarrier{
                  .oldLayout = current_layout,
                  .newLayout = vk::ImageLayout::eTransferSrcOptimal,
                  .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                  .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                  .image = *image,
                  .subresourceRange =
                      {
                          .aspectMask = vk::ImageAspectFlagBits::eColor,
                          .baseMipLevel = 0,
                          .levelCount = 1,
                          .baseArrayLayer = 0,
                          .layerCount = 1,
                      },
                  .srcAccessMask = vk::AccessFlagBits::eMemoryRead |
                                   vk::AccessFlagBits::eMemoryWrite,
                  .dstAccessMask = vk::AccessFlagBits::eTransferRead,
              },
          };
          command_buffer.pipelineBarrier(
              /*srcStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
              /*dstStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
              /*dependencyFlags=*/{},
              /*memoryBarriers=*/{},
              /*bufferMemoryBarriers=*/{},
              /*imageMemoryBarriers=*/image_memory_barriers);
        }

        // bufferRowLength/bufferImageHeight of 0 mean tightly packed rows.
        const std::vector<vk::BufferImageCopy> regions = {
            vk::BufferImageCopy{
                .bufferOffset = 0,
                .bufferRowLength = 0,
                .bufferImageHeight = 0,
                .imageSubresource =
                    {
                        .aspectMask = vk::ImageAspectFlagBits::eColor,
                        .mipLevel = 0,
                        .baseArrayLayer = 0,
                        .layerCount = 1,
                    },
                .imageOffset =
                    {
                        .x = 0,
                        .y = 0,
                        .z = 0,
                    },
                .imageExtent =
                    {
                        .width = width,
                        .height = height,
                        .depth = 1,
                    },
            },
        };
        command_buffer.copyImageToBuffer(*image,
                                         vk::ImageLayout::eTransferSrcOptimal,
                                         *vk_staging_buffer_, regions);

        // Restore the caller's requested layout if it differs.
        if (returned_layout != vk::ImageLayout::eTransferSrcOptimal) {
          const std::vector<vk::ImageMemoryBarrier> image_memory_barriers = {
              vk::ImageMemoryBarrier{
                  .oldLayout = vk::ImageLayout::eTransferSrcOptimal,
                  .newLayout = returned_layout,
                  .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                  .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                  .image = *image,
                  .subresourceRange =
                      {
                          .aspectMask = vk::ImageAspectFlagBits::eColor,
                          .baseMipLevel = 0,
                          .levelCount = 1,
                          .baseArrayLayer = 0,
                          .layerCount = 1,
                      },
                  .srcAccessMask = vk::AccessFlagBits::eTransferRead,
                  .dstAccessMask = vk::AccessFlagBits::eMemoryRead |
                                   vk::AccessFlagBits::eMemoryWrite,
              },
          };
          command_buffer.pipelineBarrier(
              /*srcStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
              /*dstStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
              /*dependencyFlags=*/{},
              /*memoryBarriers=*/{},
              /*bufferMemoryBarriers=*/{},
              /*imageMemoryBarriers=*/image_memory_barriers);
        }

        return vk::Result::eSuccess;
      }));

  // Copy the downloaded bytes out of the (host-coherent) staging memory.
  auto* mapped = reinterpret_cast<uint8_t*>(
      vk_staging_buffer_memory_.mapMemory(0, kStagingBufferSize));
  if (mapped == nullptr) {
    LOG(ERROR) << "Failed to map staging buffer.";
    return vk::Result::eErrorMemoryMapFailed;
  }

  out_pixels->clear();
  out_pixels->resize(width * height * 4);
  std::memcpy(out_pixels->data(), mapped, out_pixels->size());
  vk_staging_buffer_memory_.unmapMemory();

  return vk::Result::eSuccess;
}
553
// Creates a 3-plane YUV 4:2:0 (eG8B8R83Plane420Unorm) image together with
// the sampler-YCbCr-conversion object and a sampler configured for BT.601
// narrow-range conversion, allocates and binds backing memory, creates the
// image view, and transitions the image from eUndefined to |layout|.
//
// Requires samplerYcbcrConversion to have been enabled at device creation
// (see LoadImpl()).
VkExpected<Vk::YuvImageWithMemory> Vk::CreateYuvImage(
    uint32_t width, uint32_t height, vk::ImageUsageFlags usages,
    vk::MemoryPropertyFlags memory_properties, vk::ImageLayout layout) {
  const vk::SamplerYcbcrConversionCreateInfo conversion_create_info = {
      .format = vk::Format::eG8B8R83Plane420Unorm,
      .ycbcrModel = vk::SamplerYcbcrModelConversion::eYcbcr601,
      .ycbcrRange = vk::SamplerYcbcrRange::eItuNarrow,
      .components =
          {
              .r = vk::ComponentSwizzle::eIdentity,
              .g = vk::ComponentSwizzle::eIdentity,
              .b = vk::ComponentSwizzle::eIdentity,
              .a = vk::ComponentSwizzle::eIdentity,
          },
      .xChromaOffset = vk::ChromaLocation::eMidpoint,
      .yChromaOffset = vk::ChromaLocation::eMidpoint,
      .chromaFilter = vk::Filter::eLinear,
      .forceExplicitReconstruction = VK_FALSE,
  };
  auto image_sampler_conversion =
      VK_EXPECT(vk::raii::SamplerYcbcrConversion::create(
          vk_device, conversion_create_info));

  // The conversion object is chained into both the sampler and the image
  // view via this pNext struct.
  const vk::SamplerYcbcrConversionInfo sampler_conversion_info = {
      .conversion = *image_sampler_conversion,
  };
  const vk::SamplerCreateInfo sampler_create_info = {
      .pNext = &sampler_conversion_info,
      .magFilter = vk::Filter::eLinear,
      .minFilter = vk::Filter::eLinear,
      .mipmapMode = vk::SamplerMipmapMode::eNearest,
      .addressModeU = vk::SamplerAddressMode::eClampToEdge,
      .addressModeV = vk::SamplerAddressMode::eClampToEdge,
      .addressModeW = vk::SamplerAddressMode::eClampToEdge,
      .mipLodBias = 0.0f,
      .anisotropyEnable = VK_FALSE,
      .maxAnisotropy = 1.0f,
      .compareEnable = VK_FALSE,
      .compareOp = vk::CompareOp::eLessOrEqual,
      .minLod = 0.0f,
      .maxLod = 0.25f,
      .borderColor = vk::BorderColor::eIntTransparentBlack,
      .unnormalizedCoordinates = VK_FALSE,
  };
  auto image_sampler =
      VK_EXPECT(vk::raii::Sampler::create(vk_device, sampler_create_info));

  // NOTE(review): nested designated initializers such as `.extent.width` are
  // a GNU extension rather than standard C++ — confirm toolchain support.
  const vk::ImageCreateInfo image_create_info = {
      .imageType = vk::ImageType::e2D,
      .extent.width = width,
      .extent.height = height,
      .extent.depth = 1,
      .mipLevels = 1,
      .arrayLayers = 1,
      .format = vk::Format::eG8B8R83Plane420Unorm,
      .tiling = vk::ImageTiling::eOptimal,
      .initialLayout = vk::ImageLayout::eUndefined,
      .usage = usages,
      .sharingMode = vk::SharingMode::eExclusive,
      .samples = vk::SampleCountFlagBits::e1,
  };
  auto image = VK_EXPECT(vk::raii::Image::create(vk_device, image_create_info));

  vk::MemoryRequirements memory_requirements = image.getMemoryRequirements();

  const uint32_t memory_index =
      GetMemoryType(vk_physical_device, memory_requirements.memoryTypeBits,
                    memory_properties);

  const vk::MemoryAllocateInfo image_memory_allocate_info = {
      .allocationSize = memory_requirements.size,
      .memoryTypeIndex = memory_index,
  };
  auto image_memory = VK_EXPECT(
      vk::raii::DeviceMemory::create(vk_device, image_memory_allocate_info));

  image.bindMemory(*image_memory, 0);

  const vk::ImageViewCreateInfo image_view_create_info = {
      .pNext = &sampler_conversion_info,
      .image = *image,
      .viewType = vk::ImageViewType::e2D,
      .format = vk::Format::eG8B8R83Plane420Unorm,
      .components =
          {
              .r = vk::ComponentSwizzle::eIdentity,
              .g = vk::ComponentSwizzle::eIdentity,
              .b = vk::ComponentSwizzle::eIdentity,
              .a = vk::ComponentSwizzle::eIdentity,
          },
      .subresourceRange =
          {
              .aspectMask = vk::ImageAspectFlagBits::eColor,
              .baseMipLevel = 0,
              .levelCount = 1,
              .baseArrayLayer = 0,
              .layerCount = 1,
          },
  };
  auto image_view =
      VK_EXPECT(vk::raii::ImageView::create(vk_device, image_view_create_info));

  // Transition the image to the requested layout.
  VK_ASSERT(DoCommandsImmediate([&](vk::raii::CommandBuffer& command_buffer) {
    const std::vector<vk::ImageMemoryBarrier> image_memory_barriers = {
        vk::ImageMemoryBarrier{
            .oldLayout = vk::ImageLayout::eUndefined,
            .newLayout = layout,
            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .image = *image,
            .subresourceRange =
                {
                    .aspectMask = vk::ImageAspectFlagBits::eColor,
                    .baseMipLevel = 0,
                    .levelCount = 1,
                    .baseArrayLayer = 0,
                    .layerCount = 1,
                },
            .srcAccessMask = {},
            .dstAccessMask = vk::AccessFlagBits::eTransferWrite,
        },
    };
    command_buffer.pipelineBarrier(
        /*srcStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
        /*dstStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
        /*dependencyFlags=*/{},
        /*memoryBarriers=*/{},
        /*bufferMemoryBarriers=*/{},
        /*imageMemoryBarriers=*/image_memory_barriers);
    return vk::Result::eSuccess;
  }));

  return YuvImageWithMemory{
      .image_sampler_conversion = std::move(image_sampler_conversion),
      .image_sampler = std::move(image_sampler),
      .image_memory = std::move(image_memory),
      .image = std::move(image),
      .image_view = std::move(image_view),
  };
}
694
// Uploads planar YUV data into the 3-plane |image|: the Y, U, and V planes
// are packed back-to-back into the staging buffer and copied into the
// ePlane0/ePlane1/ePlane2 aspects respectively. The image is transitioned
// from |current_layout| to eTransferDstOptimal for the copy and then to
// |returned_layout|.
//
// Assumes the three planes together fit in kStagingBufferSize and that the
// U/V planes are 4:2:0 subsampled (width/2 x height/2) — TODO(review):
// confirm callers guarantee both.
vk::Result Vk::LoadYuvImage(const vk::raii::Image& image, uint32_t width,
                            uint32_t height,
                            const std::vector<uint8_t>& image_data_y,
                            const std::vector<uint8_t>& image_data_u,
                            const std::vector<uint8_t>& image_data_v,
                            vk::ImageLayout current_layout,
                            vk::ImageLayout returned_layout) {
  auto* mapped = reinterpret_cast<uint8_t*>(
      vk_staging_buffer_memory_.mapMemory(0, kStagingBufferSize));
  if (mapped == nullptr) {
    LOG(ERROR) << "Failed to map staging buffer.";
    return vk::Result::eErrorMemoryMapFailed;
  }

  // Pack the three planes contiguously: [Y | U | V].
  const VkDeviceSize y_offset = 0;
  const VkDeviceSize u_offset = image_data_y.size();
  const VkDeviceSize v_offset = image_data_y.size() + image_data_u.size();
  std::memcpy(mapped + y_offset, image_data_y.data(), image_data_y.size());
  std::memcpy(mapped + u_offset, image_data_u.data(), image_data_u.size());
  std::memcpy(mapped + v_offset, image_data_v.data(), image_data_v.size());
  vk_staging_buffer_memory_.unmapMemory();

  return DoCommandsImmediate([&](vk::raii::CommandBuffer& command_buffer) {
    // Transition to the transfer-destination layout if not already there.
    if (current_layout != vk::ImageLayout::eTransferDstOptimal) {
      const std::vector<vk::ImageMemoryBarrier> image_memory_barriers = {
          vk::ImageMemoryBarrier{
              .oldLayout = current_layout,
              .newLayout = vk::ImageLayout::eTransferDstOptimal,
              .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
              .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
              .image = *image,
              .subresourceRange =
                  {
                      .aspectMask = vk::ImageAspectFlagBits::eColor,
                      .baseMipLevel = 0,
                      .levelCount = 1,
                      .baseArrayLayer = 0,
                      .layerCount = 1,
                  },
              .srcAccessMask = vk::AccessFlagBits::eMemoryRead |
                               vk::AccessFlagBits::eMemoryWrite,
              .dstAccessMask = vk::AccessFlagBits::eTransferWrite,
          },
      };
      command_buffer.pipelineBarrier(
          /*srcStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
          /*dstStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
          /*dependencyFlags=*/{},
          /*memoryBarriers=*/{},
          /*bufferMemoryBarriers=*/{},
          /*imageMemoryBarriers=*/image_memory_barriers);
    }

    // One copy region per plane; the chroma planes use half resolution.
    const std::vector<vk::BufferImageCopy> image_copy_regions = {
        vk::BufferImageCopy{
            .bufferOffset = y_offset,
            .bufferRowLength = 0,
            .bufferImageHeight = 0,
            .imageSubresource =
                {
                    .aspectMask = vk::ImageAspectFlagBits::ePlane0,
                    .mipLevel = 0,
                    .baseArrayLayer = 0,
                    .layerCount = 1,
                },
            .imageOffset =
                {
                    .x = 0,
                    .y = 0,
                    .z = 0,
                },
            .imageExtent =
                {
                    .width = width,
                    .height = height,
                    .depth = 1,
                },
        },
        vk::BufferImageCopy{
            .bufferOffset = u_offset,
            .bufferRowLength = 0,
            .bufferImageHeight = 0,
            .imageSubresource =
                {
                    .aspectMask = vk::ImageAspectFlagBits::ePlane1,
                    .mipLevel = 0,
                    .baseArrayLayer = 0,
                    .layerCount = 1,
                },
            .imageOffset =
                {
                    .x = 0,
                    .y = 0,
                    .z = 0,
                },
            .imageExtent =
                {
                    .width = width / 2,
                    .height = height / 2,
                    .depth = 1,
                },
        },
        vk::BufferImageCopy{
            .bufferOffset = v_offset,
            .bufferRowLength = 0,
            .bufferImageHeight = 0,
            .imageSubresource =
                {
                    .aspectMask = vk::ImageAspectFlagBits::ePlane2,
                    .mipLevel = 0,
                    .baseArrayLayer = 0,
                    .layerCount = 1,
                },
            .imageOffset =
                {
                    .x = 0,
                    .y = 0,
                    .z = 0,
                },
            .imageExtent =
                {
                    .width = width / 2,
                    .height = height / 2,
                    .depth = 1,
                },
        },
    };
    command_buffer.copyBufferToImage(*vk_staging_buffer_, *image,
                                     vk::ImageLayout::eTransferDstOptimal,
                                     image_copy_regions);

    // Restore the caller's requested layout if it differs.
    if (returned_layout != vk::ImageLayout::eTransferDstOptimal) {
      const std::vector<vk::ImageMemoryBarrier> image_memory_barriers = {
          vk::ImageMemoryBarrier{
              .oldLayout = vk::ImageLayout::eTransferDstOptimal,
              .newLayout = returned_layout,
              .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
              .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
              .image = *image,
              .subresourceRange =
                  {
                      .aspectMask = vk::ImageAspectFlagBits::eColor,
                      .baseMipLevel = 0,
                      .levelCount = 1,
                      .baseArrayLayer = 0,
                      .layerCount = 1,
                  },
              .srcAccessMask = vk::AccessFlagBits::eTransferWrite,
              .dstAccessMask = vk::AccessFlagBits::eMemoryRead |
                               vk::AccessFlagBits::eMemoryWrite,
          },
      };
      command_buffer.pipelineBarrier(
          /*srcStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
          /*dstStageMask=*/vk::PipelineStageFlagBits::eAllCommands,
          /*dependencyFlags=*/{},
          /*memoryBarriers=*/{},
          /*bufferMemoryBarriers=*/{},
          /*imageMemoryBarriers=*/image_memory_barriers);
    }
    return vk::Result::eSuccess;
  });
}
858
CreateFramebuffer(uint32_t width,uint32_t height,vk::Format color_format,vk::Format depth_format)859 VkExpected<Vk::FramebufferWithAttachments> Vk::CreateFramebuffer(
860 uint32_t width, uint32_t height, vk::Format color_format,
861 vk::Format depth_format) {
862 std::optional<Vk::ImageWithMemory> color_attachment;
863 if (color_format != vk::Format::eUndefined) {
864 color_attachment =
865 VK_EXPECT(CreateImage(width, height, color_format,
866 vk::ImageUsageFlagBits::eColorAttachment |
867 vk::ImageUsageFlagBits::eTransferSrc,
868 vk::MemoryPropertyFlagBits::eDeviceLocal,
869 vk::ImageLayout::eColorAttachmentOptimal));
870 }
871
872 std::optional<Vk::ImageWithMemory> depth_attachment;
873 if (depth_format != vk::Format::eUndefined) {
874 depth_attachment =
875 VK_EXPECT(CreateImage(width, height, depth_format,
876 vk::ImageUsageFlagBits::eDepthStencilAttachment |
877 vk::ImageUsageFlagBits::eTransferSrc,
878 vk::MemoryPropertyFlagBits::eDeviceLocal,
879 vk::ImageLayout::eDepthStencilAttachmentOptimal));
880 }
881
882 std::vector<vk::AttachmentDescription> attachments;
883
884 std::optional<vk::AttachmentReference> color_attachment_reference;
885 if (color_format != vk::Format::eUndefined) {
886 attachments.push_back(vk::AttachmentDescription{
887 .format = color_format,
888 .samples = vk::SampleCountFlagBits::e1,
889 .loadOp = vk::AttachmentLoadOp::eClear,
890 .storeOp = vk::AttachmentStoreOp::eStore,
891 .stencilLoadOp = vk::AttachmentLoadOp::eClear,
892 .stencilStoreOp = vk::AttachmentStoreOp::eStore,
893 .initialLayout = vk::ImageLayout::eColorAttachmentOptimal,
894 .finalLayout = vk::ImageLayout::eColorAttachmentOptimal,
895 });
896
897 color_attachment_reference = vk::AttachmentReference{
898 .attachment = static_cast<uint32_t>(attachments.size() - 1),
899 .layout = vk::ImageLayout::eColorAttachmentOptimal,
900 };
901 }
902
903 std::optional<vk::AttachmentReference> depth_attachment_reference;
904 if (depth_format != vk::Format::eUndefined) {
905 attachments.push_back(vk::AttachmentDescription{
906 .format = depth_format,
907 .samples = vk::SampleCountFlagBits::e1,
908 .loadOp = vk::AttachmentLoadOp::eClear,
909 .storeOp = vk::AttachmentStoreOp::eStore,
910 .stencilLoadOp = vk::AttachmentLoadOp::eClear,
911 .stencilStoreOp = vk::AttachmentStoreOp::eStore,
912 .initialLayout = vk::ImageLayout::eColorAttachmentOptimal,
913 .finalLayout = vk::ImageLayout::eColorAttachmentOptimal,
914 });
915
916 depth_attachment_reference = vk::AttachmentReference{
917 .attachment = static_cast<uint32_t>(attachments.size() - 1),
918 .layout = vk::ImageLayout::eDepthStencilAttachmentOptimal,
919 };
920 }
921
922 vk::SubpassDependency dependency = {
923 .srcSubpass = 0,
924 .dstSubpass = 0,
925 .srcStageMask = {},
926 .dstStageMask = vk::PipelineStageFlagBits::eFragmentShader,
927 .srcAccessMask = {},
928 .dstAccessMask = vk::AccessFlagBits::eInputAttachmentRead,
929 .dependencyFlags = vk::DependencyFlagBits::eByRegion,
930 };
931 if (color_format != vk::Format::eUndefined) {
932 dependency.srcStageMask |=
933 vk::PipelineStageFlagBits::eColorAttachmentOutput;
934 dependency.dstStageMask |=
935 vk::PipelineStageFlagBits::eColorAttachmentOutput;
936 dependency.srcAccessMask |= vk::AccessFlagBits::eColorAttachmentWrite;
937 }
938 if (depth_format != vk::Format::eUndefined) {
939 dependency.srcStageMask |=
940 vk::PipelineStageFlagBits::eColorAttachmentOutput;
941 dependency.dstStageMask |=
942 vk::PipelineStageFlagBits::eColorAttachmentOutput;
943 dependency.srcAccessMask |= vk::AccessFlagBits::eColorAttachmentWrite;
944 }
945
946 vk::SubpassDescription subpass = {
947 .pipelineBindPoint = vk::PipelineBindPoint::eGraphics,
948 .inputAttachmentCount = 0,
949 .pInputAttachments = nullptr,
950 .colorAttachmentCount = 0,
951 .pColorAttachments = nullptr,
952 .pResolveAttachments = nullptr,
953 .pDepthStencilAttachment = nullptr,
954 .pPreserveAttachments = nullptr,
955 };
956 if (color_format != vk::Format::eUndefined) {
957 subpass.colorAttachmentCount = 1;
958 subpass.pColorAttachments = &*color_attachment_reference;
959 }
960 if (depth_format != vk::Format::eUndefined) {
961 subpass.pDepthStencilAttachment = &*depth_attachment_reference;
962 }
963
964 const vk::RenderPassCreateInfo renderpass_create_info = {
965 .attachmentCount = static_cast<uint32_t>(attachments.size()),
966 .pAttachments = attachments.data(),
967 .subpassCount = 1,
968 .pSubpasses = &subpass,
969 .dependencyCount = 1,
970 .pDependencies = &dependency,
971 };
972 auto renderpass = VK_EXPECT(
973 vk::raii::RenderPass::create(vk_device, renderpass_create_info));
974
975 std::vector<vk::ImageView> frammebuffer_attachments;
976 if (color_attachment) {
977 frammebuffer_attachments.push_back(*color_attachment->image_view);
978 }
979 if (depth_attachment) {
980 frammebuffer_attachments.push_back(*depth_attachment->image_view);
981 }
982 const vk::FramebufferCreateInfo framebuffer_create_info = {
983 .renderPass = *renderpass,
984 .attachmentCount = static_cast<uint32_t>(frammebuffer_attachments.size()),
985 .pAttachments = frammebuffer_attachments.data(),
986 .width = width,
987 .height = height,
988 .layers = 1,
989 };
990 auto framebuffer = VK_EXPECT(
991 vk::raii::Framebuffer::create(vk_device, framebuffer_create_info));
992
993 return Vk::FramebufferWithAttachments{
994 .color_attachment = std::move(color_attachment),
995 .depth_attachment = std::move(depth_attachment),
996 .renderpass = std::move(renderpass),
997 .framebuffer = std::move(framebuffer),
998 };
999 }
1000
DoCommandsImmediate(const std::function<vk::Result (vk::raii::CommandBuffer &)> & func,const std::vector<vk::raii::Semaphore> & semaphores_wait,const std::vector<vk::raii::Semaphore> & semaphores_signal)1001 vk::Result Vk::DoCommandsImmediate(
1002 const std::function<vk::Result(vk::raii::CommandBuffer&)>& func,
1003 const std::vector<vk::raii::Semaphore>& semaphores_wait,
1004 const std::vector<vk::raii::Semaphore>& semaphores_signal) {
1005 const vk::CommandBufferAllocateInfo command_buffer_allocate_info = {
1006 .level = vk::CommandBufferLevel::ePrimary,
1007 .commandPool = *vk_command_pool_,
1008 .commandBufferCount = 1,
1009 };
1010 auto command_buffers = VK_EXPECT_RESULT(vk::raii::CommandBuffers::create(
1011 vk_device, command_buffer_allocate_info));
1012 auto command_buffer = std::move(command_buffers[0]);
1013
1014 const vk::CommandBufferBeginInfo command_buffer_begin_info = {
1015 .flags = vk::CommandBufferUsageFlagBits::eOneTimeSubmit,
1016 };
1017 command_buffer.begin(command_buffer_begin_info);
1018
1019 VK_RETURN_IF_NOT_SUCCESS(func(command_buffer));
1020
1021 command_buffer.end();
1022
1023 std::vector<vk::CommandBuffer> command_buffer_handles;
1024 command_buffer_handles.push_back(*command_buffer);
1025
1026 std::vector<vk::Semaphore> semaphores_handles_wait;
1027 semaphores_handles_wait.reserve(semaphores_wait.size());
1028 for (const auto& s : semaphores_wait) {
1029 semaphores_handles_wait.emplace_back(*s);
1030 }
1031
1032 std::vector<vk::Semaphore> semaphores_handles_signal;
1033 semaphores_handles_signal.reserve(semaphores_signal.size());
1034 for (const auto& s : semaphores_signal) {
1035 semaphores_handles_signal.emplace_back(*s);
1036 }
1037
1038 vk::SubmitInfo submit_info = {
1039 .commandBufferCount =
1040 static_cast<uint32_t>(command_buffer_handles.size()),
1041 .pCommandBuffers = command_buffer_handles.data(),
1042 };
1043 if (!semaphores_handles_wait.empty()) {
1044 submit_info.waitSemaphoreCount =
1045 static_cast<uint32_t>(semaphores_handles_wait.size());
1046 submit_info.pWaitSemaphores = semaphores_handles_wait.data();
1047 }
1048 if (!semaphores_handles_signal.empty()) {
1049 submit_info.signalSemaphoreCount =
1050 static_cast<uint32_t>(semaphores_handles_signal.size());
1051 submit_info.pSignalSemaphores = semaphores_handles_signal.data();
1052 }
1053 vk_queue.submit(submit_info);
1054 vk_queue.waitIdle();
1055
1056 return vk::Result::eSuccess;
1057 }
1058
1059 } // namespace cuttlefish
1060