/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_image.h"

#include "venus-protocol/vn_protocol_driver_image.h"
#include "venus-protocol/vn_protocol_driver_image_view.h"
#include "venus-protocol/vn_protocol_driver_sampler.h"
#include "venus-protocol/vn_protocol_driver_sampler_ycbcr_conversion.h"

#include "vn_android.h"
#include "vn_device.h"
#include "vn_device_memory.h"
#include "vn_wsi.h"

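/* Query the memory requirements of each plane, with
 * VkMemoryDedicatedRequirements chained, and cache them in the image so that
 * later requirement queries can be answered without a renderer roundtrip.
 */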
static void
vn_image_init_memory_requirements(struct vn_image *img,
                                  struct vn_device *dev,
                                  const VkImageCreateInfo *create_info)
{
   uint32_t plane_count = 1;
   if (create_info->flags & VK_IMAGE_CREATE_DISJOINT_BIT) {
      /* TODO VkDrmFormatModifierPropertiesEXT::drmFormatModifierPlaneCount */
      assert(create_info->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);

      switch (create_info->format) {
      case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
      case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
      case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
      case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
      case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
      case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
         plane_count = 2;
         break;
      case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
      case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
      case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
      case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
      case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
      case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
      case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
      case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
      case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
         plane_count = 3;
         break;
      default:
         plane_count = 1;
         break;
      }
   }
   assert(plane_count <= ARRAY_SIZE(img->requirements));

   /* TODO add a per-device cache for the requirements */
   for (uint32_t i = 0; i < plane_count; i++) {
      img->requirements[i].memory.sType =
         VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
      img->requirements[i].memory.pNext = &img->requirements[i].dedicated;
      img->requirements[i].dedicated.sType =
         VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
      img->requirements[i].dedicated.pNext = NULL;
   }

   VkDevice dev_handle = vn_device_to_handle(dev);
   VkImage img_handle = vn_image_to_handle(img);
   if (plane_count == 1) {
      vn_call_vkGetImageMemoryRequirements2(
         dev->instance, dev_handle,
         &(VkImageMemoryRequirementsInfo2){
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
            .image = img_handle,
         },
         &img->requirements[0].memory);

      /* AHB backed image requires dedicated allocation */
      if (img->deferred_info) {
         img->requirements[0].dedicated.prefersDedicatedAllocation = VK_TRUE;
         img->requirements[0].dedicated.requiresDedicatedAllocation = VK_TRUE;
      }
   } else {
      for (uint32_t i = 0; i < plane_count; i++) {
         vn_call_vkGetImageMemoryRequirements2(
            dev->instance, dev_handle,
            &(VkImageMemoryRequirementsInfo2){
               .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
               .pNext =
                  &(VkImagePlaneMemoryRequirementsInfo){
                     .sType =
                        VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
                     .planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT << i,
                  },
               .image = img_handle,
            },
            &img->requirements[i].memory);
      }
   }
}

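/* Deep-copy the create info (and the pNext structs that must outlive this
 * call) so that the renderer-side image creation can be deferred until
 * vn_image_init_deferred is called, e.g. for AHB-backed images.
 */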
static VkResult
vn_image_deferred_info_init(struct vn_image *img,
                            const VkImageCreateInfo *create_info,
                            const VkAllocationCallbacks *alloc)
{
   struct vn_image_create_deferred_info *info = NULL;
   VkBaseOutStructure *dst = NULL;

   info = vk_zalloc(alloc, sizeof(*info), VN_DEFAULT_ALIGN,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!info)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   info->create = *create_info;
   dst = (void *)&info->create;

   vk_foreach_struct_const(src, create_info->pNext) {
      void *pnext = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO: {
         /* 12.3. Images
          *
          * If viewFormatCount is zero, pViewFormats is ignored and the image
          * is created as if the VkImageFormatListCreateInfo structure were
          * not included in the pNext chain of VkImageCreateInfo.
          */
         if (!((const VkImageFormatListCreateInfo *)src)->viewFormatCount)
            break;

         memcpy(&info->list, src, sizeof(info->list));
         pnext = &info->list;

         /* need a deep copy for view formats array */
         const size_t size = sizeof(VkFormat) * info->list.viewFormatCount;
         VkFormat *view_formats = vk_zalloc(
            alloc, size, VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
         if (!view_formats) {
            vk_free(alloc, info);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
         }

         memcpy(view_formats,
                ((const VkImageFormatListCreateInfo *)src)->pViewFormats,
                size);
         info->list.pViewFormats = view_formats;
      } break;
      case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
         memcpy(&info->stencil, src, sizeof(info->stencil));
         pnext = &info->stencil;
         break;
      case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
         /* we should have translated the external format */
         assert(create_info->format != VK_FORMAT_UNDEFINED);
         info->from_external_format =
            ((const VkExternalFormatANDROID *)src)->externalFormat;
         break;
      default:
         break;
      }

      if (pnext) {
         dst->pNext = pnext;
         dst = pnext;
      }
   }
   dst->pNext = NULL;

   img->deferred_info = info;

   return VK_SUCCESS;
}

static void
vn_image_deferred_info_fini(struct vn_image *img,
                            const VkAllocationCallbacks *alloc)
{
   if (!img->deferred_info)
      return;

   if (img->deferred_info->list.pViewFormats)
      vk_free(alloc, (void *)img->deferred_info->list.pViewFormats);

   vk_free(alloc, img->deferred_info);
}

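/* Create the image in the renderer and cache its memory requirements. */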
static VkResult
vn_image_init(struct vn_device *dev,
              const VkImageCreateInfo *create_info,
              struct vn_image *img)
{
   VkDevice device = vn_device_to_handle(dev);
   VkImage image = vn_image_to_handle(img);
   VkResult result = VK_SUCCESS;

   img->sharing_mode = create_info->sharingMode;

   /* TODO async */
   result =
      vn_call_vkCreateImage(dev->instance, device, create_info, NULL, &image);
   if (result != VK_SUCCESS)
      return result;

   vn_image_init_memory_requirements(img, dev, create_info);

   return VK_SUCCESS;
}

VkResult
vn_image_create(struct vn_device *dev,
                const VkImageCreateInfo *create_info,
                const VkAllocationCallbacks *alloc,
                struct vn_image **out_img)
{
   struct vn_image *img = NULL;
   VkResult result = VK_SUCCESS;

   img = vk_zalloc(alloc, sizeof(*img), VN_DEFAULT_ALIGN,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!img)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   vn_object_base_init(&img->base, VK_OBJECT_TYPE_IMAGE, &dev->base);

   result = vn_image_init(dev, create_info, img);
   if (result != VK_SUCCESS) {
      vn_object_base_fini(&img->base);
      vk_free(alloc, img);
      return result;
   }

   *out_img = img;

   return VK_SUCCESS;
}

VkResult
vn_image_init_deferred(struct vn_device *dev,
                       const VkImageCreateInfo *create_info,
                       struct vn_image *img)
{
   VkResult result = vn_image_init(dev, create_info, img);
   img->deferred_info->initialized = result == VK_SUCCESS;
   return result;
}

VkResult
vn_image_create_deferred(struct vn_device *dev,
                         const VkImageCreateInfo *create_info,
                         const VkAllocationCallbacks *alloc,
                         struct vn_image **out_img)
{
   struct vn_image *img = NULL;
   VkResult result = VK_SUCCESS;

   img = vk_zalloc(alloc, sizeof(*img), VN_DEFAULT_ALIGN,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!img)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   vn_object_base_init(&img->base, VK_OBJECT_TYPE_IMAGE, &dev->base);

   result = vn_image_deferred_info_init(img, create_info, alloc);
   if (result != VK_SUCCESS) {
      vn_object_base_fini(&img->base);
      vk_free(alloc, img);
      return result;
   }

   *out_img = img;

   return VK_SUCCESS;
}

/* image commands */

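/* Route image creation to the WSI, ANB, AHB, or swapchain path depending on
 * the structs found in the pNext chain, falling back to the common path.
 */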
VkResult
vn_CreateImage(VkDevice device,
               const VkImageCreateInfo *pCreateInfo,
               const VkAllocationCallbacks *pAllocator,
               VkImage *pImage)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;
   struct vn_image *img;
   VkResult result;

   const struct wsi_image_create_info *wsi_info =
      vn_wsi_find_wsi_image_create_info(pCreateInfo);
   const VkNativeBufferANDROID *anb_info =
      vn_android_find_native_buffer(pCreateInfo);
   const VkExternalMemoryImageCreateInfo *external_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           EXTERNAL_MEMORY_IMAGE_CREATE_INFO);
   const bool ahb_info =
      external_info &&
      external_info->handleTypes ==
         VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;

#ifdef ANDROID
   /* VkImageSwapchainCreateInfoKHR is not useful at all */
   const VkImageSwapchainCreateInfoKHR *swapchain_info = NULL;
#else
   const VkImageSwapchainCreateInfoKHR *swapchain_info = vk_find_struct_const(
      pCreateInfo->pNext, IMAGE_SWAPCHAIN_CREATE_INFO_KHR);
   if (swapchain_info && !swapchain_info->swapchain)
      swapchain_info = NULL;
#endif

   if (wsi_info) {
      result = vn_wsi_create_image(dev, pCreateInfo, wsi_info, alloc, &img);
   } else if (anb_info) {
      result =
         vn_android_image_from_anb(dev, pCreateInfo, anb_info, alloc, &img);
   } else if (ahb_info) {
      result = vn_android_image_from_ahb(dev, pCreateInfo, alloc, &img);
   } else if (swapchain_info) {
      result = vn_wsi_create_image_from_swapchain(
         dev, pCreateInfo, swapchain_info, alloc, &img);
   } else {
      result = vn_image_create(dev, pCreateInfo, alloc, &img);
   }

   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   *pImage = vn_image_to_handle(img);
   return VK_SUCCESS;
}

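/* Free owned WSI memory, skip the renderer-side destroy for deferred images
 * that were never initialized, and release the deferred info.
 */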
void
vn_DestroyImage(VkDevice device,
                VkImage image,
                const VkAllocationCallbacks *pAllocator)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_image *img = vn_image_from_handle(image);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!img)
      return;

   if (img->wsi.memory && img->wsi.memory_owned) {
      VkDeviceMemory mem_handle = vn_device_memory_to_handle(img->wsi.memory);
      vn_FreeMemory(device, mem_handle, pAllocator);
   }

   /* must not ask renderer to destroy uninitialized deferred image */
   if (!img->deferred_info || img->deferred_info->initialized)
      vn_async_vkDestroyImage(dev->instance, device, image, NULL);

   vn_image_deferred_info_fini(img, alloc);

   vn_object_base_fini(&img->base);
   vk_free(alloc, img);
}

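/* Answer from the requirements cached at image creation: walk the output
 * pNext chain and fill in the structs we know about.
 */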
void
vn_GetImageMemoryRequirements2(VkDevice device,
                               const VkImageMemoryRequirementsInfo2 *pInfo,
                               VkMemoryRequirements2 *pMemoryRequirements)
{
   const struct vn_image *img = vn_image_from_handle(pInfo->image);
   union {
      VkBaseOutStructure *pnext;
      VkMemoryRequirements2 *two;
      VkMemoryDedicatedRequirements *dedicated;
   } u = { .two = pMemoryRequirements };

   uint32_t plane = 0;
   const VkImagePlaneMemoryRequirementsInfo *plane_info =
      vk_find_struct_const(pInfo->pNext,
                           IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO);
   if (plane_info) {
      switch (plane_info->planeAspect) {
      case VK_IMAGE_ASPECT_PLANE_1_BIT:
         plane = 1;
         break;
      case VK_IMAGE_ASPECT_PLANE_2_BIT:
         plane = 2;
         break;
      default:
         plane = 0;
         break;
      }
   }

   while (u.pnext) {
      switch (u.pnext->sType) {
      case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2:
         u.two->memoryRequirements =
            img->requirements[plane].memory.memoryRequirements;
         break;
      case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
         u.dedicated->prefersDedicatedAllocation =
            img->requirements[plane].dedicated.prefersDedicatedAllocation;
         u.dedicated->requiresDedicatedAllocation =
            img->requirements[plane].dedicated.requiresDedicatedAllocation;
         break;
      default:
         break;
      }
      u.pnext = u.pnext->pNext;
   }
}

void
vn_GetImageSparseMemoryRequirements2(
   VkDevice device,
   const VkImageSparseMemoryRequirementsInfo2 *pInfo,
   uint32_t *pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO per-device cache */
   vn_call_vkGetImageSparseMemoryRequirements2(dev->instance, device, pInfo,
                                               pSparseMemoryRequirementCount,
                                               pSparseMemoryRequirements);
}

static void
vn_image_bind_wsi_memory(struct vn_image *img, struct vn_device_memory *mem)
{
   assert(img->wsi.is_wsi && !img->wsi.memory);
   img->wsi.memory = mem;
}

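/* Bind image memory, patching the bind infos when the memory is
 * suballocated or must be looked up from a swapchain image, so that the
 * renderer sees the real memory handle and offset.
 */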
VkResult
vn_BindImageMemory2(VkDevice device,
                    uint32_t bindInfoCount,
                    const VkBindImageMemoryInfo *pBindInfos)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   VkBindImageMemoryInfo *local_infos = NULL;
   for (uint32_t i = 0; i < bindInfoCount; i++) {
      const VkBindImageMemoryInfo *info = &pBindInfos[i];
      struct vn_image *img = vn_image_from_handle(info->image);
      struct vn_device_memory *mem =
         vn_device_memory_from_handle(info->memory);

      /* no bind info fixup needed */
      if (mem && !mem->base_memory) {
         if (img->wsi.is_wsi)
            vn_image_bind_wsi_memory(img, mem);
         continue;
      }

      if (!mem) {
#ifdef ANDROID
         /* TODO handle VkNativeBufferANDROID when we bump up
          * VN_ANDROID_NATIVE_BUFFER_SPEC_VERSION
          */
         unreachable("VkBindImageMemoryInfo with no memory");
#else
         const VkBindImageMemorySwapchainInfoKHR *swapchain_info =
            vk_find_struct_const(info->pNext,
                                 BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR);
         assert(img->wsi.is_wsi && swapchain_info);

         struct vn_image *swapchain_img =
            vn_image_from_handle(wsi_common_get_image(
               swapchain_info->swapchain, swapchain_info->imageIndex));
         mem = swapchain_img->wsi.memory;
#endif
      }

      if (img->wsi.is_wsi)
         vn_image_bind_wsi_memory(img, mem);

      if (!local_infos) {
         const size_t size = sizeof(*local_infos) * bindInfoCount;
         local_infos = vk_alloc(alloc, size, VN_DEFAULT_ALIGN,
                                VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
         if (!local_infos)
            return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

         memcpy(local_infos, pBindInfos, size);
      }

      /* If mem is suballocated, mem->base_memory is non-NULL and we must
       * patch it in.  If VkBindImageMemorySwapchainInfoKHR is given, we've
       * looked mem up above and also need to patch it in.
       */
      local_infos[i].memory = vn_device_memory_to_handle(
         mem->base_memory ? mem->base_memory : mem);
      local_infos[i].memoryOffset += mem->base_offset;
   }
   if (local_infos)
      pBindInfos = local_infos;

   vn_async_vkBindImageMemory2(dev->instance, device, bindInfoCount,
                               pBindInfos);

   vk_free(alloc, local_infos);

   return VK_SUCCESS;
}

VkResult
vn_GetImageDrmFormatModifierPropertiesEXT(
   VkDevice device,
   VkImage image,
   VkImageDrmFormatModifierPropertiesEXT *pProperties)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO local cache */
   return vn_call_vkGetImageDrmFormatModifierPropertiesEXT(
      dev->instance, device, image, pProperties);
}

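/* For WSI/AHB images backed by a DRM format modifier, translate the queried
 * aspect to the corresponding memory plane aspect before forwarding the
 * query to the renderer.
 */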
void
vn_GetImageSubresourceLayout(VkDevice device,
                             VkImage image,
                             const VkImageSubresource *pSubresource,
                             VkSubresourceLayout *pLayout)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_image *img = vn_image_from_handle(image);

   /* override aspect mask for wsi/ahb images with tiling modifier */
   VkImageSubresource local_subresource;
   if ((img->wsi.is_wsi && img->wsi.tiling_override ==
                              VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) ||
       img->deferred_info) {
      VkImageAspectFlags aspect = pSubresource->aspectMask;
      switch (aspect) {
      case VK_IMAGE_ASPECT_COLOR_BIT:
      case VK_IMAGE_ASPECT_DEPTH_BIT:
      case VK_IMAGE_ASPECT_STENCIL_BIT:
      case VK_IMAGE_ASPECT_PLANE_0_BIT:
         aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
         break;
      case VK_IMAGE_ASPECT_PLANE_1_BIT:
         aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT;
         break;
      case VK_IMAGE_ASPECT_PLANE_2_BIT:
         aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
         break;
      default:
         break;
      }

      /* only handle supported aspect override */
      if (aspect != pSubresource->aspectMask) {
         local_subresource = *pSubresource;
         local_subresource.aspectMask = aspect;
         pSubresource = &local_subresource;
      }
   }

   /* TODO local cache */
   vn_call_vkGetImageSubresourceLayout(dev->instance, device, image,
                                       pSubresource, pLayout);
}

/* image view commands */

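/* When the image was created from an Android external format, the view must
 * use the format resolved at image creation rather than VK_FORMAT_UNDEFINED.
 */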
VkResult
vn_CreateImageView(VkDevice device,
                   const VkImageViewCreateInfo *pCreateInfo,
                   const VkAllocationCallbacks *pAllocator,
                   VkImageView *pView)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_image *img = vn_image_from_handle(pCreateInfo->image);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   VkImageViewCreateInfo local_info;
   if (img->deferred_info && img->deferred_info->from_external_format) {
      assert(pCreateInfo->format == VK_FORMAT_UNDEFINED);

      local_info = *pCreateInfo;
      local_info.format = img->deferred_info->create.format;
      pCreateInfo = &local_info;

      assert(pCreateInfo->format != VK_FORMAT_UNDEFINED);
   }

   struct vn_image_view *view =
      vk_zalloc(alloc, sizeof(*view), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!view)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&view->base, VK_OBJECT_TYPE_IMAGE_VIEW, &dev->base);
   view->image = img;

   VkImageView view_handle = vn_image_view_to_handle(view);
   vn_async_vkCreateImageView(dev->instance, device, pCreateInfo, NULL,
                              &view_handle);

   *pView = view_handle;

   return VK_SUCCESS;
}

void
vn_DestroyImageView(VkDevice device,
                    VkImageView imageView,
                    const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_image_view *view = vn_image_view_from_handle(imageView);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!view)
      return;

   vn_async_vkDestroyImageView(dev->instance, device, imageView, NULL);

   vn_object_base_fini(&view->base);
   vk_free(alloc, view);
}

/* sampler commands */

VkResult
vn_CreateSampler(VkDevice device,
                 const VkSamplerCreateInfo *pCreateInfo,
                 const VkAllocationCallbacks *pAllocator,
                 VkSampler *pSampler)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_sampler *sampler =
      vk_zalloc(alloc, sizeof(*sampler), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!sampler)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&sampler->base, VK_OBJECT_TYPE_SAMPLER, &dev->base);

   VkSampler sampler_handle = vn_sampler_to_handle(sampler);
   vn_async_vkCreateSampler(dev->instance, device, pCreateInfo, NULL,
                            &sampler_handle);

   *pSampler = sampler_handle;

   return VK_SUCCESS;
}

void
vn_DestroySampler(VkDevice device,
                  VkSampler _sampler,
                  const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_sampler *sampler = vn_sampler_from_handle(_sampler);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!sampler)
      return;

   vn_async_vkDestroySampler(dev->instance, device, _sampler, NULL);

   vn_object_base_fini(&sampler->base);
   vk_free(alloc, sampler);
}

/* sampler YCbCr conversion commands */

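/* For Android external formats, resolve the format and force identity
 * swizzles before forwarding the conversion to the renderer.
 */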
VkResult
vn_CreateSamplerYcbcrConversion(
   VkDevice device,
   const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkSamplerYcbcrConversion *pYcbcrConversion)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;
   const VkExternalFormatANDROID *ext_info =
      vk_find_struct_const(pCreateInfo->pNext, EXTERNAL_FORMAT_ANDROID);

   VkSamplerYcbcrConversionCreateInfo local_info;
   if (ext_info && ext_info->externalFormat) {
      assert(pCreateInfo->format == VK_FORMAT_UNDEFINED);

      local_info = *pCreateInfo;
      local_info.format =
         vn_android_drm_format_to_vk_format(ext_info->externalFormat);
      local_info.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
      local_info.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
      local_info.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
      local_info.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
      pCreateInfo = &local_info;

      assert(pCreateInfo->format != VK_FORMAT_UNDEFINED);
   }

   struct vn_sampler_ycbcr_conversion *conv =
      vk_zalloc(alloc, sizeof(*conv), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!conv)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&conv->base, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
                       &dev->base);

   VkSamplerYcbcrConversion conv_handle =
      vn_sampler_ycbcr_conversion_to_handle(conv);
   vn_async_vkCreateSamplerYcbcrConversion(dev->instance, device, pCreateInfo,
                                           NULL, &conv_handle);

   *pYcbcrConversion = conv_handle;

   return VK_SUCCESS;
}

void
vn_DestroySamplerYcbcrConversion(VkDevice device,
                                 VkSamplerYcbcrConversion ycbcrConversion,
                                 const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_sampler_ycbcr_conversion *conv =
      vn_sampler_ycbcr_conversion_from_handle(ycbcrConversion);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!conv)
      return;

   vn_async_vkDestroySamplerYcbcrConversion(dev->instance, device,
                                            ycbcrConversion, NULL);

   vn_object_base_fini(&conv->base);
   vk_free(alloc, conv);
}

void
vn_GetDeviceImageMemoryRequirements(
   VkDevice device,
   const VkDeviceImageMemoryRequirements *pInfo,
   VkMemoryRequirements2 *pMemoryRequirements)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO per-device cache */
   vn_call_vkGetDeviceImageMemoryRequirements(dev->instance, device, pInfo,
                                              pMemoryRequirements);
}

void
vn_GetDeviceImageSparseMemoryRequirements(
   VkDevice device,
   const VkDeviceImageMemoryRequirements *pInfo,
   uint32_t *pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO per-device cache */
   vn_call_vkGetDeviceImageSparseMemoryRequirements(
      dev->instance, device, pInfo, pSparseMemoryRequirementCount,
      pSparseMemoryRequirements);
}