1 /*
2  * Copyright 2021 Google LLC
3  * SPDX-License-Identifier: MIT
4  *
5  * based in part on anv and radv which are:
6  * Copyright © 2015 Intel Corporation
7  * Copyright © 2016 Red Hat
8  * Copyright © 2016 Bas Nieuwenhuizen
9  */
10 
11 #include "vn_android.h"
12 
13 #include <dlfcn.h>
14 #include <hardware/gralloc.h>
15 #include <hardware/hwvulkan.h>
16 #include <vndk/hardware_buffer.h>
17 #include <vulkan/vk_icd.h>
18 
19 #include "drm-uapi/drm_fourcc.h"
20 #include "util/libsync.h"
21 #include "util/os_file.h"
22 
23 #include "vn_buffer.h"
24 #include "vn_device.h"
25 #include "vn_device_memory.h"
26 #include "vn_image.h"
27 #include "vn_instance.h"
28 #include "vn_physical_device.h"
29 #include "vn_queue.h"
30 
31 /* perform ops supported by CrOS Gralloc */
32 #define CROS_GRALLOC_DRM_GET_BUFFER_INFO 4
33 #define CROS_GRALLOC_DRM_GET_USAGE 5
34 #define CROS_GRALLOC_DRM_GET_USAGE_FRONT_RENDERING_BIT 0x1
35 
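/* Illustrative sketch of how these CrOS gralloc "perform" ops are invoked
 * (the argument lists follow the calls later in this file; they are a CrOS
 * gralloc convention, not a stable Android API):
 *
 *    uint32_t front_rendering_usage = 0;
 *    gralloc->perform(gralloc, CROS_GRALLOC_DRM_GET_USAGE,
 *                     CROS_GRALLOC_DRM_GET_USAGE_FRONT_RENDERING_BIT,
 *                     &front_rendering_usage);
 *
 *    struct cros_gralloc0_buffer_info info;
 *    gralloc->perform(gralloc, CROS_GRALLOC_DRM_GET_BUFFER_INFO, handle, &info);
 */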
36 struct vn_android_gralloc {
37    const gralloc_module_t *module;
38    uint32_t front_rendering_usage;
39 };
40 
41 static struct vn_android_gralloc _vn_android_gralloc;
42 
43 static int
44 vn_android_gralloc_init()
45 {
46    static const char CROS_GRALLOC_MODULE_NAME[] = "CrOS Gralloc";
47    const gralloc_module_t *gralloc = NULL;
48    uint32_t front_rendering_usage = 0;
49    int ret;
50 
51    /* get gralloc module for gralloc buffer info query */
52    ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID,
53                        (const hw_module_t **)&gralloc);
54    if (ret) {
55       vn_log(NULL, "failed to open gralloc module(ret=%d)", ret);
56       return ret;
57    }
58 
59    if (strcmp(gralloc->common.name, CROS_GRALLOC_MODULE_NAME) != 0) {
60       dlclose(gralloc->common.dso);
61       vn_log(NULL, "unexpected gralloc (name: %s)", gralloc->common.name);
62       return -1;
63    }
64 
65    if (!gralloc->perform) {
66       dlclose(gralloc->common.dso);
67       vn_log(NULL, "missing required gralloc helper: perform");
68       return -1;
69    }
70 
71    if (gralloc->perform(gralloc, CROS_GRALLOC_DRM_GET_USAGE,
72                         CROS_GRALLOC_DRM_GET_USAGE_FRONT_RENDERING_BIT,
73                         &front_rendering_usage) == 0) {
74       assert(front_rendering_usage);
75       _vn_android_gralloc.front_rendering_usage = front_rendering_usage;
76    }
77 
78    _vn_android_gralloc.module = gralloc;
79 
80    return 0;
81 }
82 
83 static inline void
84 vn_android_gralloc_fini()
85 {
86    dlclose(_vn_android_gralloc.module->common.dso);
87 }
88 
89 uint32_t
90 vn_android_gralloc_get_shared_present_usage()
91 {
92    return _vn_android_gralloc.front_rendering_usage;
93 }
94 
95 struct cros_gralloc0_buffer_info {
96    uint32_t drm_fourcc;
97    int num_fds; /* ignored */
98    int fds[4];  /* ignored */
99    uint64_t modifier;
100    uint32_t offset[4];
101    uint32_t stride[4];
102 };
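/* Note: this layout mirrors what CrOS gralloc fills in for
 * CROS_GRALLOC_DRM_GET_BUFFER_INFO. num_fds/fds are ignored here because the
 * dma_buf fd is instead taken from the first fd of the native handle (see
 * vn_android_gralloc_get_dma_buf_fd below).
 */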
103 
104 struct vn_android_gralloc_buffer_properties {
105    uint32_t drm_fourcc;
106    uint64_t modifier;
107 
108    /* plane order matches VkImageDrmFormatModifierExplicitCreateInfoEXT */
109    uint32_t offset[4];
110    uint32_t stride[4];
111 };
112 
113 static bool
114 vn_android_gralloc_get_buffer_properties(
115    buffer_handle_t handle,
116    struct vn_android_gralloc_buffer_properties *out_props)
117 {
118    const gralloc_module_t *gralloc = _vn_android_gralloc.module;
119    struct cros_gralloc0_buffer_info info;
120    if (gralloc->perform(gralloc, CROS_GRALLOC_DRM_GET_BUFFER_INFO, handle,
121                         &info) != 0) {
122       vn_log(NULL, "CROS_GRALLOC_DRM_GET_BUFFER_INFO failed");
123       return false;
124    }
125 
126    if (info.modifier == DRM_FORMAT_MOD_INVALID) {
127       vn_log(NULL, "Unexpected DRM_FORMAT_MOD_INVALID");
128       return false;
129    }
130 
131    out_props->drm_fourcc = info.drm_fourcc;
132    for (uint32_t i = 0; i < 4; i++) {
133       out_props->stride[i] = info.stride[i];
134       out_props->offset[i] = info.offset[i];
135    }
136 
137    /* YVU420 has a chroma order of CrCb. So we must swap the planes for CrCb
138     * to align with VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM. This is to serve
139     * VkImageDrmFormatModifierExplicitCreateInfoEXT explicit plane layouts.
140     */
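   /* Concrete illustration with assumed numbers (not queried from gralloc):
    * for a 256x256 YVU420 buffer, gralloc may report plane offsets
    * {0, 65536, 81920} in {Y, Cr, Cb} order; after the swap below, the
    * explicit Vulkan layout sees plane 1 (Cb) at 81920 and plane 2 (Cr) at
    * 65536, matching VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM plane order.
    */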
141    if (info.drm_fourcc == DRM_FORMAT_YVU420) {
142       out_props->stride[1] = info.stride[2];
143       out_props->offset[1] = info.offset[2];
144       out_props->stride[2] = info.stride[1];
145       out_props->offset[2] = info.offset[1];
146    }
147 
148    out_props->modifier = info.modifier;
149 
150    return true;
151 }
152 
153 static int
154 vn_android_gralloc_get_dma_buf_fd(const native_handle_t *handle)
155 {
156    /* There can be multiple fds wrapped inside a native_handle_t, but we
157     * expect the 1st one to point to the dma_buf. For multi-planar formats,
158     * there should be only one underlying dma_buf. The other fd(s) could be
159     * dups of the same dma_buf or point to the shared memory used to store
160     * gralloc buffer metadata.
161     */
162    assert(handle);
163 
164    if (handle->numFds < 1) {
165       vn_log(NULL, "handle->numFds is %d, expected >= 1", handle->numFds);
166       return -1;
167    }
168 
169    if (handle->data[0] < 0) {
170       vn_log(NULL, "handle->data[0] < 0");
171       return -1;
172    }
173 
174    return handle->data[0];
175 }
176 
177 static int
178 vn_hal_open(const struct hw_module_t *mod,
179             const char *id,
180             struct hw_device_t **dev);
181 
182 static void UNUSED
183 static_asserts(void)
184 {
185    STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC == ICD_LOADER_MAGIC);
186 }
187 
188 PUBLIC struct hwvulkan_module_t HAL_MODULE_INFO_SYM = {
189    .common = {
190       .tag = HARDWARE_MODULE_TAG,
191       .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
192       .hal_api_version = HARDWARE_HAL_API_VERSION,
193       .id = HWVULKAN_HARDWARE_MODULE_ID,
194       .name = "Venus Vulkan HAL",
195       .author = "Google LLC",
196       .methods = &(hw_module_methods_t) {
197          .open = vn_hal_open,
198       },
199    },
200 };
201 
202 static int
203 vn_hal_close(UNUSED struct hw_device_t *dev)
204 {
205    vn_android_gralloc_fini();
206    return 0;
207 }
208 
209 static hwvulkan_device_t vn_hal_dev = {
210   .common = {
211      .tag = HARDWARE_DEVICE_TAG,
212      .version = HWVULKAN_DEVICE_API_VERSION_0_1,
213      .module = &HAL_MODULE_INFO_SYM.common,
214      .close = vn_hal_close,
215   },
216  .EnumerateInstanceExtensionProperties = vn_EnumerateInstanceExtensionProperties,
217  .CreateInstance = vn_CreateInstance,
218  .GetInstanceProcAddr = vn_GetInstanceProcAddr,
219 };
220 
221 static int
222 vn_hal_open(const struct hw_module_t *mod,
223             const char *id,
224             struct hw_device_t **dev)
225 {
226    int ret;
227 
228    assert(mod == &HAL_MODULE_INFO_SYM.common);
229    assert(strcmp(id, HWVULKAN_DEVICE_0) == 0);
230 
231    ret = vn_android_gralloc_init();
232    if (ret)
233       return ret;
234 
235    *dev = &vn_hal_dev.common;
236 
237    return 0;
238 }
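/* Rough flow (standard hwvulkan contract, summarized here for context): the
 * Android Vulkan loader dlopen()s this HAL, resolves HAL_MODULE_INFO_SYM,
 * calls methods->open() with HWVULKAN_DEVICE_0, and then uses the returned
 * hwvulkan_device_t (vn_hal_dev) to reach EnumerateInstanceExtensionProperties,
 * CreateInstance, and GetInstanceProcAddr.
 */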
239 
240 static uint32_t
241 vn_android_ahb_format_from_vk_format(VkFormat format)
242 {
243    /* Only non-external AHB compatible formats are expected at:
244     * - image format query
245     * - memory export allocation
246     */
247    switch (format) {
248    case VK_FORMAT_R8G8B8A8_UNORM:
249       return AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
250    case VK_FORMAT_R8G8B8_UNORM:
251       return AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
252    case VK_FORMAT_R5G6B5_UNORM_PACK16:
253       return AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
254    case VK_FORMAT_R16G16B16A16_SFLOAT:
255       return AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
256    case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
257       return AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
258    default:
259       return 0;
260    }
261 }
262 
263 const VkFormat *
264 vn_android_format_to_view_formats(VkFormat format, uint32_t *out_count)
265 {
266    /* For AHB image prop query and creation, venus overrides the tiling to
267     * VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, which requires chaining a
268     * VkImageFormatListCreateInfo struct in the corresponding pNext when
269     * VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT is set. Those AHB images are assumed
270     * to be mutable only in terms of sRGB-ness, and implementations can fail
271     * for anything beyond that.
272     *
273     * This helper provides the view formats that have sRGB variants for the
274     * image format that venus supports.
275     */
276    static const VkFormat view_formats_r8g8b8a8[] = {
277       VK_FORMAT_R8G8B8A8_UNORM, VK_FORMAT_R8G8B8A8_SRGB
278    };
279    static const VkFormat view_formats_r8g8b8[] = { VK_FORMAT_R8G8B8_UNORM,
280                                                    VK_FORMAT_R8G8B8_SRGB };
281 
282    switch (format) {
283    case VK_FORMAT_R8G8B8A8_UNORM:
284       *out_count = ARRAY_SIZE(view_formats_r8g8b8a8);
285       return view_formats_r8g8b8a8;
286       break;
287    case VK_FORMAT_R8G8B8_UNORM:
288       *out_count = ARRAY_SIZE(view_formats_r8g8b8);
289       return view_formats_r8g8b8;
290       break;
291    default:
292       /* let the caller handle the fallback case */
293       *out_count = 0;
294       return NULL;
295    }
296 }
297 
298 VkFormat
299 vn_android_drm_format_to_vk_format(uint32_t format)
300 {
301    switch (format) {
302    case DRM_FORMAT_ABGR8888:
303    case DRM_FORMAT_XBGR8888:
304       return VK_FORMAT_R8G8B8A8_UNORM;
305    case DRM_FORMAT_BGR888:
306       return VK_FORMAT_R8G8B8_UNORM;
307    case DRM_FORMAT_RGB565:
308       return VK_FORMAT_R5G6B5_UNORM_PACK16;
309    case DRM_FORMAT_ABGR16161616F:
310       return VK_FORMAT_R16G16B16A16_SFLOAT;
311    case DRM_FORMAT_ABGR2101010:
312       return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
313    case DRM_FORMAT_YVU420:
314       return VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
315    case DRM_FORMAT_NV12:
316       return VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
317    default:
318       return VK_FORMAT_UNDEFINED;
319    }
320 }
321 
322 static bool
323 vn_android_drm_format_is_yuv(uint32_t format)
324 {
325    assert(vn_android_drm_format_to_vk_format(format) != VK_FORMAT_UNDEFINED);
326 
327    switch (format) {
328    case DRM_FORMAT_YVU420:
329    case DRM_FORMAT_NV12:
330       return true;
331    default:
332       return false;
333    }
334 }
335 
336 uint64_t
337 vn_android_get_ahb_usage(const VkImageUsageFlags usage,
338                          const VkImageCreateFlags flags)
339 {
340    uint64_t ahb_usage = 0;
341    if (usage &
342        (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
343       ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
344 
345    if (usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
346                 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT))
347       ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;
348 
349    if (flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)
350       ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;
351 
352    if (flags & VK_IMAGE_CREATE_PROTECTED_BIT)
353       ahb_usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
354 
355    /* must include at least one GPU usage flag */
356    if (ahb_usage == 0)
357       ahb_usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
358 
359    return ahb_usage;
360 }
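/* For example (illustrative only): usage = SAMPLED | COLOR_ATTACHMENT with
 * flags = CUBE_COMPATIBLE maps to AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
 * AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER | AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP
 * under the rules above.
 */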
361 
362 VkResult
363 vn_GetSwapchainGrallocUsage2ANDROID(
364    VkDevice device,
365    VkFormat format,
366    VkImageUsageFlags imageUsage,
367    VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
368    uint64_t *grallocConsumerUsage,
369    uint64_t *grallocProducerUsage)
370 {
371    struct vn_device *dev = vn_device_from_handle(device);
372 
373    if (VN_DEBUG(WSI)) {
374       vn_log(dev->instance,
375              "format=%d, imageUsage=0x%x, swapchainImageUsage=0x%x", format,
376              imageUsage, swapchainImageUsage);
377    }
378 
379    *grallocConsumerUsage = 0;
380    *grallocProducerUsage = 0;
381    if (imageUsage & (VK_IMAGE_USAGE_TRANSFER_DST_BIT |
382                      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
383       *grallocProducerUsage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;
384 
385    if (imageUsage &
386        (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT |
387         VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
388       *grallocProducerUsage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
389 
390    if (swapchainImageUsage & VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID)
391       *grallocProducerUsage |= vn_android_gralloc_get_shared_present_usage();
392 
393    return VK_SUCCESS;
394 }
395 
396 static VkResult
397 vn_android_get_modifier_properties(struct vn_device *dev,
398                                    VkFormat format,
399                                    uint64_t modifier,
400                                    const VkAllocationCallbacks *alloc,
401                                    VkDrmFormatModifierPropertiesEXT *out_props)
402 {
403    VkPhysicalDevice physical_device =
404       vn_physical_device_to_handle(dev->physical_device);
405    VkDrmFormatModifierPropertiesListEXT mod_prop_list = {
406       .sType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
407       .pNext = NULL,
408       .drmFormatModifierCount = 0,
409       .pDrmFormatModifierProperties = NULL,
410    };
411    VkFormatProperties2 format_prop = {
412       .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
413       .pNext = &mod_prop_list,
414    };
415    VkDrmFormatModifierPropertiesEXT *mod_props = NULL;
416    bool modifier_found = false;
417 
418    vn_GetPhysicalDeviceFormatProperties2(physical_device, format,
419                                          &format_prop);
420 
421    if (!mod_prop_list.drmFormatModifierCount) {
422       vn_log(dev->instance, "No compatible modifier for VkFormat(%u)",
423              format);
424       return VK_ERROR_INVALID_EXTERNAL_HANDLE;
425    }
426 
427    mod_props = vk_zalloc(
428       alloc, sizeof(*mod_props) * mod_prop_list.drmFormatModifierCount,
429       VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
430    if (!mod_props)
431       return VK_ERROR_OUT_OF_HOST_MEMORY;
432 
433    mod_prop_list.pDrmFormatModifierProperties = mod_props;
434    vn_GetPhysicalDeviceFormatProperties2(physical_device, format,
435                                          &format_prop);
436 
437    for (uint32_t i = 0; i < mod_prop_list.drmFormatModifierCount; i++) {
438       if (mod_props[i].drmFormatModifier == modifier) {
439          *out_props = mod_props[i];
440          modifier_found = true;
441          break;
442       }
443    }
444 
445    vk_free(alloc, mod_props);
446 
447    if (!modifier_found) {
448       vn_log(dev->instance,
449              "No matching modifier(%" PRIu64 ") properties for VkFormat(%u)",
450              modifier, format);
451       return VK_ERROR_INVALID_EXTERNAL_HANDLE;
452    }
453 
454    return VK_SUCCESS;
455 }
456 
457 struct vn_android_image_builder {
458    VkImageCreateInfo create;
459    VkSubresourceLayout layouts[4];
460    VkImageDrmFormatModifierExplicitCreateInfoEXT modifier;
461    VkExternalMemoryImageCreateInfo external;
462    VkImageFormatListCreateInfo list;
463 };
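/* A sketch of the resulting pNext chain (built by vn_android_get_image_builder):
 *
 *    builder.create -> [builder.list] -> builder.external -> builder.modifier
 *                   -> original create_info->pNext
 *
 * The builder provides the backing storage for every struct in that chain, so
 * it must outlive the vn_image_create()/vn_image_init_deferred() call that
 * consumes it.
 */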
464 
465 static VkResult
466 vn_android_get_image_builder(struct vn_device *dev,
467                              const VkImageCreateInfo *create_info,
468                              const native_handle_t *handle,
469                              const VkAllocationCallbacks *alloc,
470                              struct vn_android_image_builder *out_builder)
471 {
472    VkResult result = VK_SUCCESS;
473    struct vn_android_gralloc_buffer_properties buf_props;
474    VkDrmFormatModifierPropertiesEXT mod_props;
475    uint32_t vcount = 0;
476    const VkFormat *vformats = NULL;
477 
478    /* Android image builder is only used by ANB or AHB. For ANB, Android
479     * Vulkan loader will never pass the below structs. For AHB, struct
480     * vn_image_create_deferred_info will never carry them either.
481     */
482    assert(!vk_find_struct_const(
483       create_info->pNext,
484       IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT));
485    assert(!vk_find_struct_const(create_info->pNext,
486                                 EXTERNAL_MEMORY_IMAGE_CREATE_INFO));
487 
488    if (!vn_android_gralloc_get_buffer_properties(handle, &buf_props))
489       return VK_ERROR_INVALID_EXTERNAL_HANDLE;
490 
491    result = vn_android_get_modifier_properties(
492       dev, create_info->format, buf_props.modifier, alloc, &mod_props);
493    if (result != VK_SUCCESS)
494       return result;
495 
496    /* fill VkImageCreateInfo */
497    memset(out_builder, 0, sizeof(*out_builder));
498    out_builder->create = *create_info;
499    out_builder->create.tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;
500 
501    /* fill VkImageDrmFormatModifierExplicitCreateInfoEXT */
502    for (uint32_t i = 0; i < mod_props.drmFormatModifierPlaneCount; i++) {
503       out_builder->layouts[i].offset = buf_props.offset[i];
504       out_builder->layouts[i].rowPitch = buf_props.stride[i];
505    }
506    out_builder->modifier = (VkImageDrmFormatModifierExplicitCreateInfoEXT){
507       .sType =
508          VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
509       .pNext = out_builder->create.pNext,
510       .drmFormatModifier = buf_props.modifier,
511       .drmFormatModifierPlaneCount = mod_props.drmFormatModifierPlaneCount,
512       .pPlaneLayouts = out_builder->layouts,
513    };
514    out_builder->create.pNext = &out_builder->modifier;
515 
516    /* fill VkExternalMemoryImageCreateInfo */
517    out_builder->external = (VkExternalMemoryImageCreateInfo){
518       .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
519       .pNext = out_builder->create.pNext,
520       .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
521    };
522    out_builder->create.pNext = &out_builder->external;
523 
524    /* fill VkImageFormatListCreateInfo if needed
525     *
526     * vn_image::deferred_info only stores VkImageFormatListCreateInfo with a
527     * non-zero viewFormatCount, and that stored struct will be respected.
528     */
529    if ((create_info->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) &&
530        !vk_find_struct_const(create_info->pNext,
531                              IMAGE_FORMAT_LIST_CREATE_INFO)) {
532       /* 12.3. Images
533        *
534        * If tiling is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT and flags
535        * contains VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, then the pNext chain
536        * must include a VkImageFormatListCreateInfo structure with non-zero
537        * viewFormatCount.
538        */
539       vformats =
540          vn_android_format_to_view_formats(create_info->format, &vcount);
541       if (!vformats) {
542          /* image builder struct persists through the image creation call */
543          vformats = &out_builder->create.format;
544          vcount = 1;
545       }
546       out_builder->list = (VkImageFormatListCreateInfo){
547          .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
548          .pNext = out_builder->create.pNext,
549          .viewFormatCount = vcount,
550          .pViewFormats = vformats,
551       };
552       out_builder->create.pNext = &out_builder->list;
553    }
554 
555    return VK_SUCCESS;
556 }
557 
558 VkResult
559 vn_android_image_from_anb(struct vn_device *dev,
560                           const VkImageCreateInfo *create_info,
561                           const VkNativeBufferANDROID *anb_info,
562                           const VkAllocationCallbacks *alloc,
563                           struct vn_image **out_img)
564 {
565    /* If anb_info->handle points to a classic resource created from
566     * virtio_gpu_cmd_resource_create_3d, anb_info->stride is the stride of the
567     * guest shadow storage rather than the host gpu storage.
568     *
569     * We also need to pass the correct stride to vn_CreateImage, which will be
570     * done via VkImageDrmFormatModifierExplicitCreateInfoEXT and will require
571     * VK_EXT_image_drm_format_modifier support in the host driver. The struct
572     * needs host storage info which can be queried from cros gralloc.
573     */
574    VkResult result = VK_SUCCESS;
575    VkDevice device = vn_device_to_handle(dev);
576    VkDeviceMemory memory = VK_NULL_HANDLE;
577    VkImage image = VK_NULL_HANDLE;
578    struct vn_image *img = NULL;
579    uint64_t alloc_size = 0;
580    uint32_t mem_type_bits = 0;
581    int dma_buf_fd = -1;
582    int dup_fd = -1;
583    VkImageCreateInfo local_create_info;
584    struct vn_android_image_builder builder;
585 
586    dma_buf_fd = vn_android_gralloc_get_dma_buf_fd(anb_info->handle);
587    if (dma_buf_fd < 0) {
588       result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
589       goto fail;
590    }
591 
592    assert(!(create_info->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT));
593    assert(!vk_find_struct_const(create_info->pNext,
594                                 IMAGE_FORMAT_LIST_CREATE_INFO));
595    assert(!vk_find_struct_const(create_info->pNext,
596                                 IMAGE_STENCIL_USAGE_CREATE_INFO));
597 
598    /* strip VkNativeBufferANDROID and VkSwapchainImageCreateInfoANDROID */
599    local_create_info = *create_info;
600    local_create_info.pNext = NULL;
601    result = vn_android_get_image_builder(dev, &local_create_info,
602                                          anb_info->handle, alloc, &builder);
603    if (result != VK_SUCCESS)
604       goto fail;
605 
606    /* encoder will strip the Android specific pNext structs */
607    result = vn_image_create(dev, &builder.create, alloc, &img);
608    if (result != VK_SUCCESS) {
609       if (VN_DEBUG(WSI))
610          vn_log(dev->instance, "vn_image_create failed");
611       goto fail;
612    }
613 
614    image = vn_image_to_handle(img);
615 
616    const VkMemoryRequirements *mem_req =
617       &img->requirements[0].memory.memoryRequirements;
618    if (!mem_req->memoryTypeBits) {
619       if (VN_DEBUG(WSI))
620          vn_log(dev->instance, "mem_req->memoryTypeBits cannot be zero");
621       result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
622       goto fail;
623    }
624 
625    result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
626                                              &mem_type_bits);
627    if (result != VK_SUCCESS)
628       goto fail;
629 
630    if (VN_DEBUG(WSI)) {
631       vn_log(dev->instance,
632              "size = img(%" PRIu64 ") fd(%" PRIu64 "), "
633              "memoryTypeBits = img(0x%X) & fd(0x%X)",
634              mem_req->size, alloc_size, mem_req->memoryTypeBits,
635              mem_type_bits);
636    }
637 
638    if (alloc_size < mem_req->size) {
639       if (VN_DEBUG(WSI)) {
640          vn_log(dev->instance,
641                 "alloc_size(%" PRIu64 ") mem_req->size(%" PRIu64 ")",
642                 alloc_size, mem_req->size);
643       }
644       result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
645       goto fail;
646    }
647 
648    mem_type_bits &= mem_req->memoryTypeBits;
649    if (!mem_type_bits) {
650       result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
651       goto fail;
652    }
653 
654    dup_fd = os_dupfd_cloexec(dma_buf_fd);
655    if (dup_fd < 0) {
656       result = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS
657                                  : VK_ERROR_OUT_OF_HOST_MEMORY;
658       goto fail;
659    }
660 
661    const VkImportMemoryFdInfoKHR import_fd_info = {
662       .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
663       .pNext = NULL,
664       .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
665       .fd = dup_fd,
666    };
667    const VkMemoryAllocateInfo memory_info = {
668       .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
669       .pNext = &import_fd_info,
670       .allocationSize = mem_req->size,
671       .memoryTypeIndex = ffs(mem_type_bits) - 1,
672    };
673    result = vn_AllocateMemory(device, &memory_info, alloc, &memory);
674    if (result != VK_SUCCESS) {
675       /* only need to close the dup_fd on import failure */
676       close(dup_fd);
677       goto fail;
678    }
679 
680    const VkBindImageMemoryInfo bind_info = {
681       .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
682       .pNext = NULL,
683       .image = image,
684       .memory = memory,
685       .memoryOffset = 0,
686    };
687    result = vn_BindImageMemory2(device, 1, &bind_info);
688    if (result != VK_SUCCESS)
689       goto fail;
690 
691    img->wsi.is_wsi = true;
692    img->wsi.tiling_override = builder.create.tiling;
693    img->wsi.drm_format_modifier = builder.modifier.drmFormatModifier;
694    /* Android WSI image owns the memory */
695    img->wsi.memory = vn_device_memory_from_handle(memory);
696    img->wsi.memory_owned = true;
697    *out_img = img;
698 
699    return VK_SUCCESS;
700 
701 fail:
702    if (image != VK_NULL_HANDLE)
703       vn_DestroyImage(device, image, alloc);
704    if (memory != VK_NULL_HANDLE)
705       vn_FreeMemory(device, memory, alloc);
706    return vn_error(dev->instance, result);
707 }
708 
709 VkResult
710 vn_AcquireImageANDROID(VkDevice device,
711                        UNUSED VkImage image,
712                        int nativeFenceFd,
713                        VkSemaphore semaphore,
714                        VkFence fence)
715 {
716    VN_TRACE_FUNC();
717    struct vn_device *dev = vn_device_from_handle(device);
718    VkResult result = VK_SUCCESS;
719 
720    if (dev->instance->experimental.globalFencing == VK_FALSE) {
721       /* Fallback when VkVenusExperimentalFeatures100000MESA::globalFencing is
722        * VK_FALSE: the out semaphore and fence are filled with already-signaled
723        * payloads, and the native fence fd is waited on here until signaled.
724        */
725       if (nativeFenceFd >= 0) {
726          int ret = sync_wait(nativeFenceFd, -1);
727          /* Android loader expects the ICD to always close the fd */
728          close(nativeFenceFd);
729          if (ret)
730             return vn_error(dev->instance, VK_ERROR_SURFACE_LOST_KHR);
731       }
732 
733       if (semaphore != VK_NULL_HANDLE)
734          vn_semaphore_signal_wsi(dev, vn_semaphore_from_handle(semaphore));
735 
736       if (fence != VK_NULL_HANDLE)
737          vn_fence_signal_wsi(dev, vn_fence_from_handle(fence));
738 
739       return VK_SUCCESS;
740    }
741 
742    int semaphore_fd = -1;
743    int fence_fd = -1;
744    if (nativeFenceFd >= 0) {
745       if (semaphore != VK_NULL_HANDLE && fence != VK_NULL_HANDLE) {
746          semaphore_fd = nativeFenceFd;
747          fence_fd = os_dupfd_cloexec(nativeFenceFd);
748          if (fence_fd < 0) {
749             result = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS
750                                        : VK_ERROR_OUT_OF_HOST_MEMORY;
751             close(nativeFenceFd);
752             return vn_error(dev->instance, result);
753          }
754       } else if (semaphore != VK_NULL_HANDLE) {
755          semaphore_fd = nativeFenceFd;
756       } else if (fence != VK_NULL_HANDLE) {
757          fence_fd = nativeFenceFd;
758       } else {
759          close(nativeFenceFd);
760       }
761    }
762 
763    if (semaphore != VK_NULL_HANDLE) {
764       const VkImportSemaphoreFdInfoKHR info = {
765          .sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
766          .pNext = NULL,
767          .semaphore = semaphore,
768          .flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
769          .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
770          .fd = semaphore_fd,
771       };
772       result = vn_ImportSemaphoreFdKHR(device, &info);
773       if (result == VK_SUCCESS)
774          semaphore_fd = -1;
775    }
776 
777    if (result == VK_SUCCESS && fence != VK_NULL_HANDLE) {
778       const VkImportFenceFdInfoKHR info = {
779          .sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
780          .pNext = NULL,
781          .fence = fence,
782          .flags = VK_FENCE_IMPORT_TEMPORARY_BIT,
783          .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
784          .fd = fence_fd,
785       };
786       result = vn_ImportFenceFdKHR(device, &info);
787       if (result == VK_SUCCESS)
788          fence_fd = -1;
789    }
790 
791    if (semaphore_fd >= 0)
792       close(semaphore_fd);
793    if (fence_fd >= 0)
794       close(fence_fd);
795 
796    return vn_result(dev->instance, result);
797 }
798 
799 VkResult
800 vn_QueueSignalReleaseImageANDROID(VkQueue queue,
801                                   uint32_t waitSemaphoreCount,
802                                   const VkSemaphore *pWaitSemaphores,
803                                   VkImage image,
804                                   int *pNativeFenceFd)
805 {
806    VN_TRACE_FUNC();
807    struct vn_queue *que = vn_queue_from_handle(queue);
808    struct vn_device *dev = que->device;
809    const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
810    VkDevice device = vn_device_to_handle(dev);
811    VkPipelineStageFlags local_stage_masks[8];
812    VkPipelineStageFlags *stage_masks = local_stage_masks;
813    VkResult result = VK_SUCCESS;
814    int fd = -1;
815 
816    if (waitSemaphoreCount == 0) {
817       *pNativeFenceFd = -1;
818       return VK_SUCCESS;
819    }
820 
821    if (waitSemaphoreCount > ARRAY_SIZE(local_stage_masks)) {
822       stage_masks =
823          vk_alloc(alloc, sizeof(*stage_masks) * waitSemaphoreCount,
824                   VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
825       if (!stage_masks)
826          return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
827    }
828 
829    for (uint32_t i = 0; i < waitSemaphoreCount; i++)
830       stage_masks[i] = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
831 
832    const VkSubmitInfo submit_info = {
833       .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
834       .pNext = NULL,
835       .waitSemaphoreCount = waitSemaphoreCount,
836       .pWaitSemaphores = pWaitSemaphores,
837       .pWaitDstStageMask = stage_masks,
838       .commandBufferCount = 0,
839       .pCommandBuffers = NULL,
840       .signalSemaphoreCount = 0,
841       .pSignalSemaphores = NULL,
842    };
843    /* XXX When globalFencing is supported, our implementation is not able to
844     * reset the fence during vn_GetFenceFdKHR currently. Thus to ensure proper
845     * host driver behavior, we pass VK_NULL_HANDLE here.
846     */
847    result = vn_QueueSubmit(
848       queue, 1, &submit_info,
849       dev->instance->experimental.globalFencing == VK_TRUE ? VK_NULL_HANDLE
850                                                            : que->wait_fence);
851 
852    if (stage_masks != local_stage_masks)
853       vk_free(alloc, stage_masks);
854 
855    if (result != VK_SUCCESS)
856       return vn_error(dev->instance, result);
857 
858    if (dev->instance->experimental.globalFencing == VK_TRUE) {
859       /* With globalFencing, the external queue fence was not passed to the
860        * above vn_QueueSubmit to hint that it should be synchronous. So we must
861        * wait for the ring here before vn_GetFenceFdKHR, which is pure kernel ops.
862        * Skip ring wait if async queue submit is disabled.
863        */
864       if (!VN_PERF(NO_ASYNC_QUEUE_SUBMIT))
865          vn_instance_ring_wait(dev->instance);
866 
867       const VkFenceGetFdInfoKHR fd_info = {
868          .sType = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
869          .pNext = NULL,
870          .fence = que->wait_fence,
871          .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
872       };
873       result = vn_GetFenceFdKHR(device, &fd_info, &fd);
874    } else {
875       result =
876          vn_WaitForFences(device, 1, &que->wait_fence, VK_TRUE, UINT64_MAX);
877       if (result != VK_SUCCESS)
878          return vn_error(dev->instance, result);
879 
880       result = vn_ResetFences(device, 1, &que->wait_fence);
881    }
882 
883    if (result != VK_SUCCESS)
884       return vn_error(dev->instance, result);
885 
886    *pNativeFenceFd = fd;
887 
888    return VK_SUCCESS;
889 }
890 
891 static VkResult
892 vn_android_get_ahb_format_properties(
893    struct vn_device *dev,
894    const struct AHardwareBuffer *ahb,
895    VkAndroidHardwareBufferFormatPropertiesANDROID *out_props)
896 {
897    AHardwareBuffer_Desc desc;
898    VkFormat format;
899    struct vn_android_gralloc_buffer_properties buf_props;
900    VkDrmFormatModifierPropertiesEXT mod_props;
901 
902    AHardwareBuffer_describe(ahb, &desc);
903    if (!(desc.usage & (AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
904                        AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
905                        AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER))) {
906       vn_log(dev->instance,
907              "AHB usage(%" PRIu64 ") must include at least one GPU bit",
908              desc.usage);
909       return VK_ERROR_INVALID_EXTERNAL_HANDLE;
910    }
911 
912    /* Handle the special AHARDWAREBUFFER_FORMAT_BLOB for VkBuffer case. */
913    if (desc.format == AHARDWAREBUFFER_FORMAT_BLOB) {
914       out_props->format = VK_FORMAT_UNDEFINED;
915       return VK_SUCCESS;
916    }
917 
918    if (!vn_android_gralloc_get_buffer_properties(
919           AHardwareBuffer_getNativeHandle(ahb), &buf_props))
920       return VK_ERROR_INVALID_EXTERNAL_HANDLE;
921 
922    /* We implement AHB extension support with EXT_image_drm_format_modifier.
923     * It requires us to have a compatible VkFormat but not DRM formats. So if
924     * the ahb is not intended for backing a VkBuffer, error out early if the
925     * format is VK_FORMAT_UNDEFINED.
926     */
927    format = vn_android_drm_format_to_vk_format(buf_props.drm_fourcc);
928    if (format == VK_FORMAT_UNDEFINED) {
929       vn_log(dev->instance, "Unknown drm_fourcc(%u) from AHB format(0x%X)",
930              buf_props.drm_fourcc, desc.format);
931       return VK_ERROR_INVALID_EXTERNAL_HANDLE;
932    }
933 
934    VkResult result = vn_android_get_modifier_properties(
935       dev, format, buf_props.modifier, &dev->base.base.alloc, &mod_props);
936    if (result != VK_SUCCESS)
937       return result;
938 
939    /* The spec requires that formatFeatures must include at least one of
940     * VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or
941     * VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT.
942     */
943    const VkFormatFeatureFlags format_features =
944       mod_props.drmFormatModifierTilingFeatures |
945       VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT;
946 
947    /* 11.2.7. Android Hardware Buffer External Memory
948     *
949     * Implementations may not always be able to determine the color model,
950     * numerical range, or chroma offsets of the image contents, so the values
951     * in VkAndroidHardwareBufferFormatPropertiesANDROID are only suggestions.
952     * Applications should treat these values as sensible defaults to use in the
953     * absence of more reliable information obtained through some other means.
954     */
955    const bool is_yuv = vn_android_drm_format_is_yuv(buf_props.drm_fourcc);
956    const VkSamplerYcbcrModelConversion model =
957       is_yuv ? VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601
958              : VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
959 
960    /* ANGLE expects VK_FORMAT_UNDEFINED with externalFormat resolved from
961     * AHARDWAREBUFFER_FORMAT_IMPLEMENTATION_DEFINED and any supported planar
962     * AHB formats. Venus supports the explicit ones below:
963     * - AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420 (DRM_FORMAT_NV12)
964     * - AHARDWAREBUFFER_FORMAT_YV12 (DRM_FORMAT_YVU420)
965     */
966    if (desc.format == AHARDWAREBUFFER_FORMAT_IMPLEMENTATION_DEFINED || is_yuv)
967       format = VK_FORMAT_UNDEFINED;
968 
969    *out_props = (VkAndroidHardwareBufferFormatPropertiesANDROID) {
970       .sType = out_props->sType,
971       .pNext = out_props->pNext,
972       .format = format,
973       .externalFormat = buf_props.drm_fourcc,
974       .formatFeatures = format_features,
975       .samplerYcbcrConversionComponents = {
976          .r = VK_COMPONENT_SWIZZLE_IDENTITY,
977          .g = VK_COMPONENT_SWIZZLE_IDENTITY,
978          .b = VK_COMPONENT_SWIZZLE_IDENTITY,
979          .a = VK_COMPONENT_SWIZZLE_IDENTITY,
980       },
981       .suggestedYcbcrModel = model,
982       /* match EGL_YUV_NARROW_RANGE_EXT used in egl platform_android */
983       .suggestedYcbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
984       .suggestedXChromaOffset = VK_CHROMA_LOCATION_MIDPOINT,
985       .suggestedYChromaOffset = VK_CHROMA_LOCATION_MIDPOINT,
986    };
987 
988    return VK_SUCCESS;
989 }
990 
991 VkResult
992 vn_GetAndroidHardwareBufferPropertiesANDROID(
993    VkDevice device,
994    const struct AHardwareBuffer *buffer,
995    VkAndroidHardwareBufferPropertiesANDROID *pProperties)
996 {
997    VN_TRACE_FUNC();
998    struct vn_device *dev = vn_device_from_handle(device);
999    VkResult result = VK_SUCCESS;
1000    int dma_buf_fd = -1;
1001    uint64_t alloc_size = 0;
1002    uint32_t mem_type_bits = 0;
1003 
1004    VkAndroidHardwareBufferFormatPropertiesANDROID *format_props =
1005       vk_find_struct(pProperties->pNext,
1006                      ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID);
1007    if (format_props) {
1008       result =
1009          vn_android_get_ahb_format_properties(dev, buffer, format_props);
1010       if (result != VK_SUCCESS)
1011          return vn_error(dev->instance, result);
1012    }
1013 
1014    dma_buf_fd = vn_android_gralloc_get_dma_buf_fd(
1015       AHardwareBuffer_getNativeHandle(buffer));
1016    if (dma_buf_fd < 0)
1017       return vn_error(dev->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
1018 
1019    result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
1020                                              &mem_type_bits);
1021    if (result != VK_SUCCESS)
1022       return vn_error(dev->instance, result);
1023 
1024    pProperties->allocationSize = alloc_size;
1025    pProperties->memoryTypeBits = mem_type_bits;
1026 
1027    return VK_SUCCESS;
1028 }
1029 
1030 static AHardwareBuffer *
1031 vn_android_ahb_allocate(uint32_t width,
1032                         uint32_t height,
1033                         uint32_t layers,
1034                         uint32_t format,
1035                         uint64_t usage)
1036 {
1037    AHardwareBuffer *ahb = NULL;
1038    AHardwareBuffer_Desc desc;
1039    int ret = 0;
1040 
1041    memset(&desc, 0, sizeof(desc));
1042    desc.width = width;
1043    desc.height = height;
1044    desc.layers = layers;
1045    desc.format = format;
1046    desc.usage = usage;
1047 
1048    ret = AHardwareBuffer_allocate(&desc, &ahb);
1049    if (ret) {
1050       /* We just log the error code here for now since the platform falsely
1051        * maps all gralloc allocation failures to oom.
1052        */
1053       vn_log(NULL, "AHB alloc(w=%u,h=%u,l=%u,f=%u,u=%" PRIu64 ") failed(%d)",
1054              width, height, layers, format, usage, ret);
1055       return NULL;
1056    }
1057 
1058    return ahb;
1059 }
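/* Besides real AHB allocations for exportable memory, this helper also backs
 * two probe paths below: a throwaway 16x16 AHB used to learn the DRM format
 * modifier for image format queries, and a 4096-byte BLOB AHB used to learn
 * the dma_buf memory type bits for AHB-backed buffers.
 */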
1060 
1061 bool
1062 vn_android_get_drm_format_modifier_info(
1063    const VkPhysicalDeviceImageFormatInfo2 *format_info,
1064    VkPhysicalDeviceImageDrmFormatModifierInfoEXT *out_info)
1065 {
1066    /* To properly fill VkPhysicalDeviceImageDrmFormatModifierInfoEXT, we have
1067     * to allocate an ahb to retrieve the drm format modifier. For the image
1068     * sharing mode, we assume VK_SHARING_MODE_EXCLUSIVE for now.
1069     */
1070    AHardwareBuffer *ahb = NULL;
1071    uint32_t format = 0;
1072    uint64_t usage = 0;
1073    struct vn_android_gralloc_buffer_properties buf_props;
1074 
1075    assert(format_info->tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);
1076 
1077    format = vn_android_ahb_format_from_vk_format(format_info->format);
1078    if (!format)
1079       return false;
1080 
1081    usage = vn_android_get_ahb_usage(format_info->usage, format_info->flags);
1082    ahb = vn_android_ahb_allocate(16, 16, 1, format, usage);
1083    if (!ahb)
1084       return false;
1085 
1086    if (!vn_android_gralloc_get_buffer_properties(
1087           AHardwareBuffer_getNativeHandle(ahb), &buf_props)) {
1088       AHardwareBuffer_release(ahb);
1089       return false;
1090    }
1091 
1092    *out_info = (VkPhysicalDeviceImageDrmFormatModifierInfoEXT){
1093       .sType =
1094          VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
1095       .pNext = NULL,
1096       .drmFormatModifier = buf_props.modifier,
1097       .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
1098       .queueFamilyIndexCount = 0,
1099       .pQueueFamilyIndices = NULL,
1100    };
1101 
1102    AHardwareBuffer_release(ahb);
1103    return true;
1104 }
1105 
1106 VkResult
1107 vn_android_image_from_ahb(struct vn_device *dev,
1108                           const VkImageCreateInfo *create_info,
1109                           const VkAllocationCallbacks *alloc,
1110                           struct vn_image **out_img)
1111 {
1112    const VkExternalFormatANDROID *ext_info =
1113       vk_find_struct_const(create_info->pNext, EXTERNAL_FORMAT_ANDROID);
1114 
1115    VkImageCreateInfo local_info;
1116    if (ext_info && ext_info->externalFormat) {
1117       assert(create_info->format == VK_FORMAT_UNDEFINED);
1118       assert(create_info->imageType == VK_IMAGE_TYPE_2D);
1119       assert(create_info->usage == VK_IMAGE_USAGE_SAMPLED_BIT);
1120       assert(create_info->tiling == VK_IMAGE_TILING_OPTIMAL);
1121       assert(!(create_info->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT));
1122 
1123       local_info = *create_info;
1124       local_info.format =
1125          vn_android_drm_format_to_vk_format(ext_info->externalFormat);
1126       create_info = &local_info;
1127    }
1128 
1129    return vn_image_create_deferred(dev, create_info, alloc, out_img);
1130 }
1131 
1132 VkResult
1133 vn_android_device_import_ahb(struct vn_device *dev,
1134                              struct vn_device_memory *mem,
1135                              const VkMemoryAllocateInfo *alloc_info,
1136                              const VkAllocationCallbacks *alloc,
1137                              struct AHardwareBuffer *ahb,
1138                              bool internal_ahb)
1139 {
1140    const VkMemoryDedicatedAllocateInfo *dedicated_info =
1141       vk_find_struct_const(alloc_info->pNext, MEMORY_DEDICATED_ALLOCATE_INFO);
1142    const native_handle_t *handle = NULL;
1143    int dma_buf_fd = -1;
1144    int dup_fd = -1;
1145    uint64_t alloc_size = 0;
1146    uint32_t mem_type_bits = 0;
1147    uint32_t mem_type_index = alloc_info->memoryTypeIndex;
1148    bool force_unmappable = false;
1149    VkResult result = VK_SUCCESS;
1150 
1151    handle = AHardwareBuffer_getNativeHandle(ahb);
1152    dma_buf_fd = vn_android_gralloc_get_dma_buf_fd(handle);
1153    if (dma_buf_fd < 0)
1154       return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1155 
1156    result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
1157                                              &mem_type_bits);
1158    if (result != VK_SUCCESS)
1159       return result;
1160 
1161    /* If ahb is for an image, finish the deferred image creation first */
1162    if (dedicated_info && dedicated_info->image != VK_NULL_HANDLE) {
1163       struct vn_image *img = vn_image_from_handle(dedicated_info->image);
1164       struct vn_android_image_builder builder;
1165 
1166       result = vn_android_get_image_builder(dev, &img->deferred_info->create,
1167                                             handle, alloc, &builder);
1168       if (result != VK_SUCCESS)
1169          return result;
1170 
1171       result = vn_image_init_deferred(dev, &builder.create, img);
1172       if (result != VK_SUCCESS)
1173          return result;
1174 
1175       const VkMemoryRequirements *mem_req =
1176          &img->requirements[0].memory.memoryRequirements;
1177       if (alloc_size < mem_req->size) {
1178          vn_log(dev->instance,
1179                 "alloc_size(%" PRIu64 ") mem_req->size(%" PRIu64 ")",
1180                 alloc_size, mem_req->size);
1181          return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1182       }
1183 
1184       alloc_size = mem_req->size;
1185 
1186       /* XXX Workaround before spec issue #2762 gets resolved. If importing an
1187        * internally allocated AHB from the exportable path, memoryTypeIndex is
1188        * undefined while defaulting to zero, which can be incompatible with
1189        * the queried memoryTypeBits from the combined memory requirement and
1190        * dma_buf fd properties. Thus we override the requested memoryTypeIndex
1191        * to an applicable one if one exists.
1192        */
1193       if (internal_ahb) {
1194          if ((mem_type_bits & mem_req->memoryTypeBits) == 0) {
1195             vn_log(dev->instance, "memoryTypeBits: img(0x%X) fd(0x%X)",
1196                    mem_req->memoryTypeBits, mem_type_bits);
1197             return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1198          }
1199 
1200          mem_type_index = ffs(mem_type_bits & mem_req->memoryTypeBits) - 1;
1201       }
1202 
1203       /* XXX Workaround before we use cross-domain backend in minigbm. The
1204        * blob_mem allocated from virgl backend can have a queried guest
1205        * mappable size smaller than the size returned from image memory
1206        * requirement.
1207        */
1208       force_unmappable = true;
1209    }
1210 
1211    if (dedicated_info && dedicated_info->buffer != VK_NULL_HANDLE) {
1212       struct vn_buffer *buf = vn_buffer_from_handle(dedicated_info->buffer);
1213       const VkMemoryRequirements *mem_req =
1214          &buf->requirements.memory.memoryRequirements;
1215       if (alloc_size < mem_req->size) {
1216          vn_log(dev->instance,
1217                 "alloc_size(%" PRIu64 ") mem_req->size(%" PRIu64 ")",
1218                 alloc_size, mem_req->size);
1219          return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1220       }
1221 
1222       alloc_size = mem_req->size;
1223 
1224       assert((1 << mem_type_index) & mem_req->memoryTypeBits);
1225    }
1226 
1227    assert((1 << mem_type_index) & mem_type_bits);
1228 
1229    errno = 0;
1230    dup_fd = os_dupfd_cloexec(dma_buf_fd);
1231    if (dup_fd < 0)
1232       return (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS
1233                                : VK_ERROR_OUT_OF_HOST_MEMORY;
1234 
1235    /* Spec requires AHB export info to be present, so we must strip it. In
1236     * practice, the AHB import path here only needs the main allocation info
1237     * and the dedicated_info.
1238     */
1239    VkMemoryDedicatedAllocateInfo local_dedicated_info;
1240    /* Override when dedicated_info exists and is not the tail struct. */
1241    if (dedicated_info && dedicated_info->pNext) {
1242       local_dedicated_info = *dedicated_info;
1243       local_dedicated_info.pNext = NULL;
1244       dedicated_info = &local_dedicated_info;
1245    }
1246    const VkMemoryAllocateInfo local_alloc_info = {
1247       .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
1248       .pNext = dedicated_info,
1249       .allocationSize = alloc_size,
1250       .memoryTypeIndex = mem_type_index,
1251    };
1252    result = vn_device_memory_import_dma_buf(dev, mem, &local_alloc_info,
1253                                             force_unmappable, dup_fd);
1254    if (result != VK_SUCCESS) {
1255       close(dup_fd);
1256       return result;
1257    }
1258 
1259    AHardwareBuffer_acquire(ahb);
1260    mem->ahb = ahb;
1261 
1262    return VK_SUCCESS;
1263 }
1264 
1265 VkResult
1266 vn_android_device_allocate_ahb(struct vn_device *dev,
1267                                struct vn_device_memory *mem,
1268                                const VkMemoryAllocateInfo *alloc_info,
1269                                const VkAllocationCallbacks *alloc)
1270 {
1271    const VkMemoryDedicatedAllocateInfo *dedicated_info =
1272       vk_find_struct_const(alloc_info->pNext, MEMORY_DEDICATED_ALLOCATE_INFO);
1273    uint32_t width = 0;
1274    uint32_t height = 1;
1275    uint32_t layers = 1;
1276    uint32_t format = 0;
1277    uint64_t usage = 0;
1278    struct AHardwareBuffer *ahb = NULL;
1279 
1280    if (dedicated_info && dedicated_info->image != VK_NULL_HANDLE) {
1281       const VkImageCreateInfo *image_info =
1282          &vn_image_from_handle(dedicated_info->image)->deferred_info->create;
1283       assert(image_info);
1284       width = image_info->extent.width;
1285       height = image_info->extent.height;
1286       layers = image_info->arrayLayers;
1287       format = vn_android_ahb_format_from_vk_format(image_info->format);
1288       usage = vn_android_get_ahb_usage(image_info->usage, image_info->flags);
1289    } else {
1290       const VkPhysicalDeviceMemoryProperties *mem_props =
1291          &dev->physical_device->memory_properties.memoryProperties;
1292 
1293       assert(alloc_info->memoryTypeIndex < mem_props->memoryTypeCount);
1294 
1295       width = alloc_info->allocationSize;
1296       format = AHARDWAREBUFFER_FORMAT_BLOB;
1297       usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
1298       if (mem_props->memoryTypes[alloc_info->memoryTypeIndex].propertyFlags &
1299           VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
1300          usage |= AHARDWAREBUFFER_USAGE_CPU_READ_RARELY |
1301                   AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY;
1302       }
1303    }
1304 
1305    ahb = vn_android_ahb_allocate(width, height, layers, format, usage);
1306    if (!ahb)
1307       return VK_ERROR_OUT_OF_HOST_MEMORY;
1308 
1309    VkResult result =
1310       vn_android_device_import_ahb(dev, mem, alloc_info, alloc, ahb, true);
1311 
1312    /* The ahb alloc has already acquired a ref and the import will acquire
1313     * another, so release one here to avoid a leak.
1314     */
1315    AHardwareBuffer_release(ahb);
1316 
1317    return result;
1318 }
1319 
1320 void
1321 vn_android_release_ahb(struct AHardwareBuffer *ahb)
1322 {
1323    AHardwareBuffer_release(ahb);
1324 }
1325 
1326 VkResult
1327 vn_GetMemoryAndroidHardwareBufferANDROID(
1328    VkDevice device,
1329    const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
1330    struct AHardwareBuffer **pBuffer)
1331 {
1332    struct vn_device_memory *mem = vn_device_memory_from_handle(pInfo->memory);
1333 
1334    AHardwareBuffer_acquire(mem->ahb);
1335    *pBuffer = mem->ahb;
1336 
1337    return VK_SUCCESS;
1338 }
1339 
1340 struct vn_android_buffer_create_info {
1341    VkBufferCreateInfo create;
1342    VkExternalMemoryBufferCreateInfo external;
1343    VkBufferOpaqueCaptureAddressCreateInfo address;
1344 };
1345 
1346 static const VkBufferCreateInfo *
1347 vn_android_fix_buffer_create_info(
1348    const VkBufferCreateInfo *create_info,
1349    struct vn_android_buffer_create_info *local_info)
1350 {
1351    local_info->create = *create_info;
1352    VkBaseOutStructure *dst = (void *)&local_info->create;
1353 
1354    vk_foreach_struct_const(src, create_info->pNext) {
1355       void *pnext = NULL;
1356       switch (src->sType) {
1357       case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
1358          memcpy(&local_info->external, src, sizeof(local_info->external));
1359          local_info->external.handleTypes =
1360             VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
1361          pnext = &local_info->external;
1362          break;
1363       case VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO:
1364          memcpy(&local_info->address, src, sizeof(local_info->address));
1365          pnext = &local_info->address;
1366          break;
1367       default:
1368          break;
1369       }
1370 
1371       if (pnext) {
1372          dst->pNext = pnext;
1373          dst = pnext;
1374       }
1375    }
1376 
1377    dst->pNext = NULL;
1378 
1379    return &local_info->create;
1380 }
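/* Note: only VkExternalMemoryBufferCreateInfo (with handleTypes forced to
 * DMA_BUF) and VkBufferOpaqueCaptureAddressCreateInfo are copied into the
 * local chain above; any other pNext struct from the app is dropped.
 */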
1381 
1382 VkResult
1383 vn_android_get_ahb_buffer_memory_type_bits(struct vn_device *dev,
1384                                            uint32_t *out_mem_type_bits)
1385 {
1386    const uint32_t format = AHARDWAREBUFFER_FORMAT_BLOB;
1387    /* ensure dma_buf_memory_type_bits covers host visible usage */
1388    const uint64_t usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER |
1389                           AHARDWAREBUFFER_USAGE_CPU_READ_RARELY |
1390                           AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY;
1391    AHardwareBuffer *ahb = NULL;
1392    int dma_buf_fd = -1;
1393    uint64_t alloc_size = 0;
1394    uint32_t mem_type_bits = 0;
1395    VkResult result;
1396 
1397    ahb = vn_android_ahb_allocate(4096, 1, 1, format, usage);
1398    if (!ahb)
1399       return VK_ERROR_OUT_OF_HOST_MEMORY;
1400 
1401    dma_buf_fd =
1402       vn_android_gralloc_get_dma_buf_fd(AHardwareBuffer_getNativeHandle(ahb));
1403    if (dma_buf_fd < 0) {
1404       AHardwareBuffer_release(ahb);
1405       return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1406    }
1407 
1408    result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
1409                                              &mem_type_bits);
1410 
1411    AHardwareBuffer_release(ahb);
1412 
1413    if (result != VK_SUCCESS)
1414       return result;
1415 
1416    *out_mem_type_bits = mem_type_bits;
1417 
1418    return VK_SUCCESS;
1419 }
1420 
1421 VkResult
1422 vn_android_buffer_from_ahb(struct vn_device *dev,
1423                            const VkBufferCreateInfo *create_info,
1424                            const VkAllocationCallbacks *alloc,
1425                            struct vn_buffer **out_buf)
1426 {
1427    struct vn_android_buffer_create_info local_info;
1428    VkResult result;
1429 
1430    create_info = vn_android_fix_buffer_create_info(create_info, &local_info);
1431    result = vn_buffer_create(dev, create_info, alloc, out_buf);
1432    if (result != VK_SUCCESS)
1433       return result;
1434 
1435    /* An AHB-backed buffer layers on top of a dma_buf, so here we must combine
1436     * the queried type bits from both the buffer memory requirement and the
1437     * dma_buf fd properties.
1438     */
1439    (*out_buf)->requirements.memory.memoryRequirements.memoryTypeBits &=
1440       dev->buffer_cache.ahb_mem_type_bits;
1441 
1442    assert((*out_buf)->requirements.memory.memoryRequirements.memoryTypeBits);
1443 
1444    return VK_SUCCESS;
1445 }
1446