• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2019 Google LLC
3  * SPDX-License-Identifier: MIT
4  *
5  * based in part on anv and radv which are:
6  * Copyright © 2015 Intel Corporation
7  * Copyright © 2016 Red Hat.
8  * Copyright © 2016 Bas Nieuwenhuizen
9  */
10 
11 #include "vn_image.h"
12 
13 #include "venus-protocol/vn_protocol_driver_image.h"
14 #include "venus-protocol/vn_protocol_driver_image_view.h"
15 #include "venus-protocol/vn_protocol_driver_sampler.h"
16 #include "venus-protocol/vn_protocol_driver_sampler_ycbcr_conversion.h"
17 #include "vk_format.h"
18 
19 #include "vn_android.h"
20 #include "vn_device.h"
21 #include "vn_device_memory.h"
22 #include "vn_physical_device.h"
23 #include "vn_wsi.h"
24 
25 #define IMAGE_REQS_CACHE_MAX_ENTRIES 500
26 
27 /* image commands */
28 
29 static inline uint32_t
vn_image_get_plane_count(const VkImageCreateInfo * create_info)30 vn_image_get_plane_count(const VkImageCreateInfo *create_info)
31 {
32    if (!(create_info->flags & VK_IMAGE_CREATE_DISJOINT_BIT))
33       return 1;
34 
35    /* TODO VkDrmFormatModifierPropertiesEXT::drmFormatModifierPlaneCount */
36    assert(create_info->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);
37    return vk_format_get_plane_count(create_info->format);
38 }
39 
/* Log the image-requirements cache hit/miss/skip counters accumulated over
 * the device lifetime.  Only called from vn_image_reqs_cache_fini when
 * VN_DEBUG(CACHE) is set.
 */
static void
vn_image_cache_debug_dump(struct vn_image_reqs_cache *cache)
{
   vn_log(NULL, "dumping image reqs cache statistics");
   vn_log(NULL, "  hit %u\n", cache->debug.cache_hit_count);
   vn_log(NULL, "  miss %u\n", cache->debug.cache_miss_count);
   vn_log(NULL, "  skip %u\n", cache->debug.cache_skip_count);
}
48 
49 static bool
vn_image_get_image_reqs_key(struct vn_device * dev,const VkImageCreateInfo * create_info,uint8_t * key)50 vn_image_get_image_reqs_key(struct vn_device *dev,
51                             const VkImageCreateInfo *create_info,
52                             uint8_t *key)
53 {
54    struct mesa_sha1 sha1_ctx;
55 
56    if (!dev->image_reqs_cache.ht)
57       return false;
58 
59    _mesa_sha1_init(&sha1_ctx);
60 
61    /* Hash relevant fields in the pNext chain */
62    vk_foreach_struct_const(src, create_info->pNext) {
63       switch (src->sType) {
64       case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO: {
65          struct VkExternalMemoryImageCreateInfo *ext_mem =
66             (struct VkExternalMemoryImageCreateInfo *)src;
67          _mesa_sha1_update(&sha1_ctx, &ext_mem->handleTypes,
68                            sizeof(VkExternalMemoryHandleTypeFlags));
69          break;
70       }
71       case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO: {
72          struct VkImageFormatListCreateInfo *format_list =
73             (struct VkImageFormatListCreateInfo *)src;
74          _mesa_sha1_update(&sha1_ctx, format_list->pViewFormats,
75                            sizeof(VkFormat) * format_list->viewFormatCount);
76          break;
77       }
78       case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT: {
79          struct VkImageDrmFormatModifierListCreateInfoEXT *format_mod_list =
80             (struct VkImageDrmFormatModifierListCreateInfoEXT *)src;
81          _mesa_sha1_update(
82             &sha1_ctx, format_mod_list->pDrmFormatModifiers,
83             sizeof(uint64_t) * format_mod_list->drmFormatModifierCount);
84          break;
85       }
86       case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT: {
87          struct VkImageDrmFormatModifierExplicitCreateInfoEXT
88             *format_mod_explicit =
89                (struct VkImageDrmFormatModifierExplicitCreateInfoEXT *)src;
90          _mesa_sha1_update(&sha1_ctx, &format_mod_explicit->drmFormatModifier,
91                            sizeof(uint64_t));
92          _mesa_sha1_update(
93             &sha1_ctx, format_mod_explicit->pPlaneLayouts,
94             sizeof(VkSubresourceLayout) *
95                format_mod_explicit->drmFormatModifierPlaneCount);
96          break;
97       }
98       case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO: {
99          struct VkImageStencilUsageCreateInfo *stencil_usage =
100             (struct VkImageStencilUsageCreateInfo *)src;
101          _mesa_sha1_update(&sha1_ctx, &stencil_usage->stencilUsage,
102                            sizeof(VkImageUsageFlags));
103          break;
104       }
105       default:
106          /* Skip cache for unsupported pNext */
107          dev->image_reqs_cache.debug.cache_skip_count++;
108          return false;
109       }
110    }
111 
112    /* Hash contingous block of VkImageCreateInfo starting with
113     * VkImageCreateInfo->flags and ending with VkImageCreateInfo->sharingMode
114     *
115     * There's no padding in involved in this hash block so no concern for C
116     * enum sizes or alignment.
117     */
118    static const size_t create_image_hash_block_size =
119       offsetof(VkImageCreateInfo, queueFamilyIndexCount) -
120       offsetof(VkImageCreateInfo, flags);
121 
122    _mesa_sha1_update(&sha1_ctx, &create_info->flags,
123                      create_image_hash_block_size);
124 
125    /* Follow pointer and hash pQueueFamilyIndices separately.
126     * pQueueFamilyIndices is ignored if sharingMode is not
127     * VK_SHARING_MODE_CONCURRENT
128     */
129    if (create_info->sharingMode == VK_SHARING_MODE_CONCURRENT) {
130       _mesa_sha1_update(
131          &sha1_ctx, create_info->pQueueFamilyIndices,
132          sizeof(uint32_t) * create_info->queueFamilyIndexCount);
133    }
134 
135    _mesa_sha1_update(&sha1_ctx, &create_info->initialLayout,
136                      sizeof(create_info->initialLayout));
137    _mesa_sha1_final(&sha1_ctx, key);
138 
139    return true;
140 }
141 
142 void
vn_image_reqs_cache_init(struct vn_device * dev)143 vn_image_reqs_cache_init(struct vn_device *dev)
144 {
145    struct vn_image_reqs_cache *cache = &dev->image_reqs_cache;
146 
147    if (VN_PERF(NO_ASYNC_IMAGE_CREATE))
148       return;
149 
150    cache->ht = _mesa_hash_table_create(NULL, vn_cache_key_hash_function,
151                                        vn_cache_key_equal_function);
152    if (!cache->ht)
153       return;
154 
155    simple_mtx_init(&cache->mutex, mtx_plain);
156    list_inithead(&dev->image_reqs_cache.lru);
157 }
158 
/* Tear down the image memory-requirements cache: free every entry, destroy
 * the hash table and mutex, and optionally dump statistics.  No locking is
 * taken; this runs at device destruction when no other users remain.
 */
void
vn_image_reqs_cache_fini(struct vn_device *dev)
{
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
   struct vn_image_reqs_cache *cache = &dev->image_reqs_cache;

   /* Cache was never initialized (async image creation disabled or hash
    * table allocation failed in vn_image_reqs_cache_init).
    */
   if (!cache->ht)
      return;

   /* Unlink each entry from the LRU list before freeing so the assert below
    * can verify the hash table and the LRU list stayed in sync.
    */
   hash_table_foreach(cache->ht, hash_entry) {
      struct vn_image_reqs_cache_entry *cache_entry = hash_entry->data;
      list_del(&cache_entry->head);
      vk_free(alloc, cache_entry);
   }
   assert(list_is_empty(&dev->image_reqs_cache.lru));

   _mesa_hash_table_destroy(cache->ht, NULL);

   simple_mtx_destroy(&cache->mutex);

   if (VN_DEBUG(CACHE))
      vn_image_cache_debug_dump(cache);
}
182 
183 static bool
vn_image_init_reqs_from_cache(struct vn_device * dev,struct vn_image * img,uint8_t * key)184 vn_image_init_reqs_from_cache(struct vn_device *dev,
185                               struct vn_image *img,
186                               uint8_t *key)
187 {
188    struct vn_image_reqs_cache *cache = &dev->image_reqs_cache;
189 
190    assert(cache->ht);
191 
192    simple_mtx_lock(&cache->mutex);
193    struct hash_entry *hash_entry = _mesa_hash_table_search(cache->ht, key);
194    if (hash_entry) {
195       struct vn_image_reqs_cache_entry *cache_entry = hash_entry->data;
196       for (uint32_t i = 0; i < cache_entry->plane_count; i++)
197          img->requirements[i] = cache_entry->requirements[i];
198       list_move_to(&cache_entry->head, &dev->image_reqs_cache.lru);
199       p_atomic_inc(&cache->debug.cache_hit_count);
200    } else {
201       p_atomic_inc(&cache->debug.cache_miss_count);
202    }
203    simple_mtx_unlock(&cache->mutex);
204 
205    return !!hash_entry;
206 }
207 
/* Insert the per-plane memory requirements for key into the cache.  Silently
 * does nothing if another thread inserted the same key first or allocation
 * fails; when the cache is full, the least-recently-used entry is recycled.
 */
static void
vn_image_store_reqs_in_cache(struct vn_device *dev,
                             uint8_t *key,
                             uint32_t plane_count,
                             struct vn_image_memory_requirements *requirements)
{
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
   struct vn_image_reqs_cache *cache = &dev->image_reqs_cache;
   struct vn_image_reqs_cache_entry *cache_entry;

   assert(cache->ht);

   simple_mtx_lock(&cache->mutex);

   /* Check if entry was added before lock */
   if (_mesa_hash_table_search(cache->ht, key)) {
      simple_mtx_unlock(&cache->mutex);
      return;
   }

   if (_mesa_hash_table_num_entries(cache->ht) ==
       IMAGE_REQS_CACHE_MAX_ENTRIES) {
      /* Evict/use the last entry in the lru list for this new entry */
      cache_entry =
         list_last_entry(&cache->lru, struct vn_image_reqs_cache_entry, head);

      /* remove under the old key before the entry's key storage is reused */
      _mesa_hash_table_remove_key(cache->ht, cache_entry->key);
      list_del(&cache_entry->head);
   } else {
      cache_entry = vk_zalloc(alloc, sizeof(*cache_entry), VN_DEFAULT_ALIGN,
                              VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (!cache_entry) {
         simple_mtx_unlock(&cache->mutex);
         return;
      }
   }

   for (uint32_t i = 0; i < plane_count; i++)
      cache_entry->requirements[i] = requirements[i];

   /* the key is stored inside the entry, so its lifetime matches the
    * hash-table entry that points at it
    */
   memcpy(cache_entry->key, key, SHA1_DIGEST_LENGTH);
   cache_entry->plane_count = plane_count;

   _mesa_hash_table_insert(dev->image_reqs_cache.ht, cache_entry->key,
                           cache_entry);
   /* new entries are most-recently-used: add at the front of the lru list */
   list_add(&cache_entry->head, &cache->lru);

   simple_mtx_unlock(&cache->mutex);
}
257 
/* Query the renderer for the image's memory requirements and store them in
 * img->requirements, one entry per memory plane.  Each entry chains a
 * VkMemoryDedicatedRequirements so callers can serve both queries locally.
 */
static void
vn_image_init_memory_requirements(struct vn_image *img,
                                  struct vn_device *dev,
                                  uint32_t plane_count)
{
   assert(plane_count <= ARRAY_SIZE(img->requirements));

   /* pre-link memory.pNext -> dedicated so a single renderer call fills
    * both structs
    */
   for (uint32_t i = 0; i < plane_count; i++) {
      img->requirements[i].memory.sType =
         VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
      img->requirements[i].memory.pNext = &img->requirements[i].dedicated;
      img->requirements[i].dedicated.sType =
         VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
      img->requirements[i].dedicated.pNext = NULL;
   }

   VkDevice dev_handle = vn_device_to_handle(dev);
   VkImage img_handle = vn_image_to_handle(img);
   if (plane_count == 1) {
      vn_call_vkGetImageMemoryRequirements2(
         dev->primary_ring, dev_handle,
         &(VkImageMemoryRequirementsInfo2){
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
            .image = img_handle,
         },
         &img->requirements[0].memory);

      /* AHB backed image requires dedicated allocation */
      if (img->deferred_info) {
         img->requirements[0].dedicated.prefersDedicatedAllocation = VK_TRUE;
         img->requirements[0].dedicated.requiresDedicatedAllocation = VK_TRUE;
      }
   } else {
      /* disjoint image: query each memory plane separately via
       * VkImagePlaneMemoryRequirementsInfo
       */
      for (uint32_t i = 0; i < plane_count; i++) {
         vn_call_vkGetImageMemoryRequirements2(
            dev->primary_ring, dev_handle,
            &(VkImageMemoryRequirementsInfo2){
               .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
               .pNext =
                  &(VkImagePlaneMemoryRequirementsInfo){
                     .sType =
                        VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
                     /* plane aspect bits are consecutive flag bits */
                     .planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT << i,
                  },
               .image = img_handle,
            },
            &img->requirements[i].memory);
      }
   }
}
308 
/* Snapshot create_info (and the pNext structs needed later) into
 * img->deferred_info so image creation can be replayed at bind time for
 * AHB-backed images.  Only the structs copied below are kept; all other
 * pNext entries are dropped from the rebuilt chain.
 *
 * Returns VK_ERROR_OUT_OF_HOST_MEMORY on allocation failure.
 */
static VkResult
vn_image_deferred_info_init(struct vn_image *img,
                            const VkImageCreateInfo *create_info,
                            const VkAllocationCallbacks *alloc)
{
   struct vn_image_create_deferred_info *info = NULL;
   VkBaseOutStructure *dst = NULL;

   info = vk_zalloc(alloc, sizeof(*info), VN_DEFAULT_ALIGN,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!info)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   /* shallow copy first; the pNext chain is rebuilt below to point at the
    * embedded copies instead of caller-owned memory
    */
   info->create = *create_info;
   dst = (void *)&info->create;

   vk_foreach_struct_const(src, create_info->pNext) {
      void *pnext = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO: {
         /* 12.3. Images
          *
          * If viewFormatCount is zero, pViewFormats is ignored and the image
          * is created as if the VkImageFormatListCreateInfo structure were
          * not included in the pNext chain of VkImageCreateInfo.
          */
         if (!((const VkImageFormatListCreateInfo *)src)->viewFormatCount)
            break;

         memcpy(&info->list, src, sizeof(info->list));
         pnext = &info->list;

         /* need a deep copy for view formats array */
         const size_t size = sizeof(VkFormat) * info->list.viewFormatCount;
         VkFormat *view_formats = vk_zalloc(
            alloc, size, VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
         if (!view_formats) {
            vk_free(alloc, info);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
         }

         memcpy(view_formats,
                ((const VkImageFormatListCreateInfo *)src)->pViewFormats,
                size);
         info->list.pViewFormats = view_formats;
      } break;
      case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
         memcpy(&info->stencil, src, sizeof(info->stencil));
         pnext = &info->stencil;
         break;
      case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID: {
         /* resolve the Android external format to a VkFormat now; the
          * struct itself is not kept in the rebuilt chain
          */
         const uint32_t drm_format =
            (uint32_t)((const VkExternalFormatANDROID *)src)->externalFormat;
         if (drm_format) {
            info->create.format =
               vn_android_drm_format_to_vk_format(drm_format);
            info->from_external_format = true;
         }
      } break;
      default:
         break;
      }

      if (pnext) {
         dst->pNext = pnext;
         dst = pnext;
      }
   }
   /* terminate the rebuilt chain (also drops any unsupported structs) */
   dst->pNext = NULL;

   img->deferred_info = info;

   return VK_SUCCESS;
}
383 
384 static void
vn_image_deferred_info_fini(struct vn_image * img,const VkAllocationCallbacks * alloc)385 vn_image_deferred_info_fini(struct vn_image *img,
386                             const VkAllocationCallbacks *alloc)
387 {
388    if (!img->deferred_info)
389       return;
390 
391    if (img->deferred_info->list.pViewFormats)
392       vk_free(alloc, (void *)img->deferred_info->list.pViewFormats);
393 
394    vk_free(alloc, img->deferred_info);
395 }
396 
/* Create the renderer-side image and populate img->requirements.
 *
 * When the create info hashes to a cached memory-requirements entry, the
 * renderer call is issued asynchronously and the cached requirements are
 * used; otherwise a synchronous call is made and the fresh requirements are
 * stored back into the cache.
 */
static VkResult
vn_image_init(struct vn_device *dev,
              const VkImageCreateInfo *create_info,
              struct vn_image *img)
{
   VkDevice device = vn_device_to_handle(dev);
   VkImage image = vn_image_to_handle(img);
   VkResult result = VK_SUCCESS;

   img->sharing_mode = create_info->sharingMode;

   /* Check if mem reqs in cache. If found, make async call */
   uint8_t key[SHA1_DIGEST_LENGTH] = { 0 };
   const bool cacheable = vn_image_get_image_reqs_key(dev, create_info, key);

   if (cacheable && vn_image_init_reqs_from_cache(dev, img, key)) {
      vn_async_vkCreateImage(dev->primary_ring, device, create_info, NULL,
                             &image);
      return VK_SUCCESS;
   }

   /* cache miss: must create synchronously before querying requirements */
   result = vn_call_vkCreateImage(dev->primary_ring, device, create_info,
                                  NULL, &image);
   if (result != VK_SUCCESS)
      return result;

   const uint32_t plane_count = vn_image_get_plane_count(create_info);
   vn_image_init_memory_requirements(img, dev, plane_count);

   if (cacheable)
      vn_image_store_reqs_in_cache(dev, key, plane_count, img->requirements);

   return VK_SUCCESS;
}
431 
432 VkResult
vn_image_create(struct vn_device * dev,const VkImageCreateInfo * create_info,const VkAllocationCallbacks * alloc,struct vn_image ** out_img)433 vn_image_create(struct vn_device *dev,
434                 const VkImageCreateInfo *create_info,
435                 const VkAllocationCallbacks *alloc,
436                 struct vn_image **out_img)
437 {
438    struct vn_image *img =
439       vk_image_create(&dev->base.base, create_info, alloc, sizeof(*img));
440    if (!img)
441       return VK_ERROR_OUT_OF_HOST_MEMORY;
442 
443    vn_object_set_id(img, vn_get_next_obj_id(), VK_OBJECT_TYPE_IMAGE);
444 
445    VkResult result = vn_image_init(dev, create_info, img);
446    if (result != VK_SUCCESS) {
447       vk_image_destroy(&dev->base.base, alloc, &img->base.base);
448       return result;
449    }
450 
451    *out_img = img;
452 
453    return VK_SUCCESS;
454 }
455 
456 VkResult
vn_image_init_deferred(struct vn_device * dev,const VkImageCreateInfo * create_info,struct vn_image * img)457 vn_image_init_deferred(struct vn_device *dev,
458                        const VkImageCreateInfo *create_info,
459                        struct vn_image *img)
460 {
461    VkResult result = vn_image_init(dev, create_info, img);
462    img->deferred_info->initialized = result == VK_SUCCESS;
463    return result;
464 }
465 
466 static VkResult
vn_image_create_deferred(struct vn_device * dev,const VkImageCreateInfo * create_info,const VkAllocationCallbacks * alloc,struct vn_image ** out_img)467 vn_image_create_deferred(struct vn_device *dev,
468                          const VkImageCreateInfo *create_info,
469                          const VkAllocationCallbacks *alloc,
470                          struct vn_image **out_img)
471 {
472    struct vn_image *img =
473       vk_image_create(&dev->base.base, create_info, alloc, sizeof(*img));
474    if (!img)
475       return VK_ERROR_OUT_OF_HOST_MEMORY;
476 
477    vn_object_set_id(img, vn_get_next_obj_id(), VK_OBJECT_TYPE_IMAGE);
478 
479    VkResult result = vn_image_deferred_info_init(img, create_info, alloc);
480    if (result != VK_SUCCESS) {
481       vk_image_destroy(&dev->base.base, alloc, &img->base.base);
482       return result;
483    }
484 
485    *out_img = img;
486 
487    return VK_SUCCESS;
488 }
489 
/* Stack scratch space for vn_image_fix_create_info: holds shallow copies of
 * the create info and each supported pNext struct so the chain can be
 * rebuilt with a patched external memory handle type.
 */
struct vn_image_create_info {
   VkImageCreateInfo create;
   VkExternalMemoryImageCreateInfo external;
   VkImageFormatListCreateInfo format_list;
   VkImageStencilUsageCreateInfo stencil;
   VkImageDrmFormatModifierListCreateInfoEXT modifier_list;
   VkImageDrmFormatModifierExplicitCreateInfoEXT modifier_explicit;
};
498 
/* Copy create_info into local_info, rebuilding the pNext chain from the
 * embedded copies and overriding VkExternalMemoryImageCreateInfo::handleTypes
 * with the renderer's handle type.  pNext structs not listed in the switch
 * are dropped from the rebuilt chain.
 *
 * Returns a pointer into local_info, which the caller must keep alive for
 * the duration of the create call.
 */
static const VkImageCreateInfo *
vn_image_fix_create_info(
   const VkImageCreateInfo *create_info,
   const VkExternalMemoryHandleTypeFlagBits renderer_handle_type,
   struct vn_image_create_info *local_info)
{
   local_info->create = *create_info;
   VkBaseOutStructure *cur = (void *)&local_info->create;

   vk_foreach_struct_const(src, create_info->pNext) {
      void *next = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
         memcpy(&local_info->external, src, sizeof(local_info->external));
         /* the actual fixup this function exists for */
         local_info->external.handleTypes = renderer_handle_type;
         next = &local_info->external;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
         memcpy(&local_info->format_list, src,
                sizeof(local_info->format_list));
         next = &local_info->format_list;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
         memcpy(&local_info->stencil, src, sizeof(local_info->stencil));
         next = &local_info->stencil;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT:
         memcpy(&local_info->modifier_list, src,
                sizeof(local_info->modifier_list));
         next = &local_info->modifier_list;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT:
         memcpy(&local_info->modifier_explicit, src,
                sizeof(local_info->modifier_explicit));
         next = &local_info->modifier_explicit;
         break;
      default:
         break;
      }

      if (next) {
         cur->pNext = next;
         cur = next;
      }
   }

   /* terminate the rebuilt chain */
   cur->pNext = NULL;

   return &local_info->create;
}
549 
/* vkCreateImage entry point: dispatch to the wsi, Android native buffer,
 * AHB-deferred, swapchain-alias, or plain creation path based on the pNext
 * chain, fixing up the external memory handle type where required.
 */
VkResult
vn_CreateImage(VkDevice device,
               const VkImageCreateInfo *pCreateInfo,
               const VkAllocationCallbacks *pAllocator,
               VkImage *pImage)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;
   const VkExternalMemoryHandleTypeFlagBits renderer_handle_type =
      dev->physical_device->external_memory.renderer_handle_type;
   struct vn_image *img;
   VkResult result;

   const struct wsi_image_create_info *wsi_info = NULL;
   const VkNativeBufferANDROID *anb_info = NULL;
   const VkImageSwapchainCreateInfoKHR *swapchain_info = NULL;
   const VkExternalMemoryImageCreateInfo *external_info = NULL;
   bool ahb_info = false;

   /* cast sType to uint32_t: some values below are from extensions outside
    * the core VkStructureType enum
    */
   vk_foreach_struct_const(pnext, pCreateInfo->pNext) {
      switch ((uint32_t)pnext->sType) {
      case VK_STRUCTURE_TYPE_WSI_IMAGE_CREATE_INFO_MESA:
         wsi_info = (void *)pnext;
         break;
      case VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID:
         anb_info = (void *)pnext;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
         swapchain_info = (void *)pnext;
         /* a NULL swapchain means the struct is to be ignored */
         if (!swapchain_info->swapchain)
            swapchain_info = NULL;
         break;
      case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
         external_info = (void *)pnext;
         if (!external_info->handleTypes)
            external_info = NULL;
         else if (
            external_info->handleTypes ==
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
            ahb_info = true;
         break;
      default:
         break;
      }
   }

   /* No need to fix external handle type for:
    * - common wsi image: dma_buf is hard-coded in wsi_configure_native_image
    * - common wsi image alias: it aligns with wsi_info on external handle
    * - Android wsi image: VK_ANDROID_native_buffer involves no external info
    * - AHB external image: deferred creation reconstructs external info
    *
    * Must fix the external handle type for:
    * - non-AHB external image requesting handle types different from renderer
    *
    * Will have to fix more when renderer handle type is no longer dma_buf.
    */
   if (wsi_info) {
      /* NOTE(review): this assert dereferences external_info, i.e. it
       * assumes common wsi always chains VkExternalMemoryImageCreateInfo
       * with a non-zero handleTypes — confirm against wsi code.
       */
      assert(external_info->handleTypes == renderer_handle_type);
      result = vn_wsi_create_image(dev, pCreateInfo, wsi_info, alloc, &img);
   } else if (anb_info) {
      result =
         vn_android_image_from_anb(dev, pCreateInfo, anb_info, alloc, &img);
   } else if (ahb_info) {
      result = vn_image_create_deferred(dev, pCreateInfo, alloc, &img);
   } else if (swapchain_info) {
      result = vn_wsi_create_image_from_swapchain(
         dev, pCreateInfo, swapchain_info, alloc, &img);
   } else {
      /* local_info must stay in scope until vn_image_create returns */
      struct vn_image_create_info local_info;
      if (external_info &&
          external_info->handleTypes != renderer_handle_type) {
         pCreateInfo = vn_image_fix_create_info(
            pCreateInfo, renderer_handle_type, &local_info);
      }

      result = vn_image_create(dev, pCreateInfo, alloc, &img);
   }

   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   *pImage = vn_image_to_handle(img);
   return VK_SUCCESS;
}
636 
/* vkDestroyImage entry point: release owned wsi memory, ask the renderer to
 * destroy its image (unless deferred creation never happened), then free
 * the deferred info and the image object itself.
 */
void
vn_DestroyImage(VkDevice device,
                VkImage image,
                const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_image *img = vn_image_from_handle(image);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!img)
      return;

   /* only free memory this image owns; aliased swapchain images share
    * memory they do not own
    */
   if (img->wsi.memory && img->wsi.memory_owned) {
      VkDeviceMemory mem_handle = vn_device_memory_to_handle(img->wsi.memory);
      vn_FreeMemory(device, mem_handle, pAllocator);
   }

   /* must not ask renderer to destroy uninitialized deferred image */
   if (!img->deferred_info || img->deferred_info->initialized)
      vn_async_vkDestroyImage(dev->primary_ring, device, image, NULL);

   vn_image_deferred_info_fini(img, alloc);

   vk_image_destroy(&dev->base.base, alloc, &img->base.base);
}
663 
/* vkGetImageMemoryRequirements2 entry point: serve the query entirely from
 * the requirements cached on the image at creation time — no renderer
 * round-trip.
 */
void
vn_GetImageMemoryRequirements2(VkDevice device,
                               const VkImageMemoryRequirementsInfo2 *pInfo,
                               VkMemoryRequirements2 *pMemoryRequirements)
{
   const struct vn_image *img = vn_image_from_handle(pInfo->image);
   /* union lets the pNext walk below view each chain node as the struct
    * matching its sType without repeated casts
    */
   union {
      VkBaseOutStructure *pnext;
      VkMemoryRequirements2 *two;
      VkMemoryDedicatedRequirements *dedicated;
   } u = { .two = pMemoryRequirements };

   /* select which cached plane to report; plane 0 for non-disjoint images
    * or an unrecognized aspect
    */
   uint32_t plane = 0;
   const VkImagePlaneMemoryRequirementsInfo *plane_info =
      vk_find_struct_const(pInfo->pNext,
                           IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO);
   if (plane_info) {
      switch (plane_info->planeAspect) {
      case VK_IMAGE_ASPECT_PLANE_1_BIT:
         plane = 1;
         break;
      case VK_IMAGE_ASPECT_PLANE_2_BIT:
         plane = 2;
         break;
      default:
         plane = 0;
         break;
      }
   }

   /* fill every struct in the caller's output chain that we understand */
   while (u.pnext) {
      switch (u.pnext->sType) {
      case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2:
         u.two->memoryRequirements =
            img->requirements[plane].memory.memoryRequirements;
         break;
      case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
         u.dedicated->prefersDedicatedAllocation =
            img->requirements[plane].dedicated.prefersDedicatedAllocation;
         u.dedicated->requiresDedicatedAllocation =
            img->requirements[plane].dedicated.requiresDedicatedAllocation;
         break;
      default:
         break;
      }
      u.pnext = u.pnext->pNext;
   }
}
712 
713 void
vn_GetImageSparseMemoryRequirements2(VkDevice device,const VkImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements)714 vn_GetImageSparseMemoryRequirements2(
715    VkDevice device,
716    const VkImageSparseMemoryRequirementsInfo2 *pInfo,
717    uint32_t *pSparseMemoryRequirementCount,
718    VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
719 {
720    struct vn_device *dev = vn_device_from_handle(device);
721 
722    /* see vn_GetPhysicalDeviceSparseImageFormatProperties2 */
723    if (dev->physical_device->sparse_binding_disabled) {
724       *pSparseMemoryRequirementCount = 0;
725       return;
726    }
727 
728    /* TODO local or per-device cache */
729    vn_call_vkGetImageSparseMemoryRequirements2(
730       dev->primary_ring, device, pInfo, pSparseMemoryRequirementCount,
731       pSparseMemoryRequirements);
732 }
733 
734 static void
vn_image_bind_wsi_memory(struct vn_image * img,struct vn_device_memory * mem)735 vn_image_bind_wsi_memory(struct vn_image *img, struct vn_device_memory *mem)
736 {
737    assert(img->wsi.is_wsi && !img->wsi.memory);
738    img->wsi.memory = mem;
739 }
740 
/* vkBindImageMemory2 entry point.  Most binds are forwarded as-is; a local
 * copy of the bind infos is made only when fixups are needed (suballocated
 * memory, or memory resolved from VkBindImageMemorySwapchainInfoKHR).
 */
VkResult
vn_BindImageMemory2(VkDevice device,
                    uint32_t bindInfoCount,
                    const VkBindImageMemoryInfo *pBindInfos)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   /* lazily allocated copy of pBindInfos; NULL while no fixup is needed */
   VkBindImageMemoryInfo *local_infos = NULL;
   for (uint32_t i = 0; i < bindInfoCount; i++) {
      const VkBindImageMemoryInfo *info = &pBindInfos[i];
      struct vn_image *img = vn_image_from_handle(info->image);
      struct vn_device_memory *mem =
         vn_device_memory_from_handle(info->memory);

      /* no bind info fixup needed */
      if (mem && !mem->base_memory) {
         if (img->wsi.is_wsi)
            vn_image_bind_wsi_memory(img, mem);
         continue;
      }

      if (!mem) {
#if DETECT_OS_ANDROID
         /* TODO handle VkNativeBufferANDROID when we bump up
          * VN_ANDROID_NATIVE_BUFFER_SPEC_VERSION
          */
         unreachable("VkBindImageMemoryInfo with no memory");
#else
         /* NULL memory with a swapchain info: bind to the memory of the
          * indicated swapchain image instead
          */
         const VkBindImageMemorySwapchainInfoKHR *swapchain_info =
            vk_find_struct_const(info->pNext,
                                 BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR);
         assert(img->wsi.is_wsi && swapchain_info);

         struct vn_image *swapchain_img =
            vn_image_from_handle(wsi_common_get_image(
               swapchain_info->swapchain, swapchain_info->imageIndex));
         mem = swapchain_img->wsi.memory;
#endif
      }

      if (img->wsi.is_wsi)
         vn_image_bind_wsi_memory(img, mem);

      /* first fixup: copy the whole array so untouched entries stay valid */
      if (!local_infos) {
         const size_t size = sizeof(*local_infos) * bindInfoCount;
         local_infos = vk_alloc(alloc, size, VN_DEFAULT_ALIGN,
                                VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
         if (!local_infos)
            return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

         memcpy(local_infos, pBindInfos, size);
      }

      /* If mem is suballocated, mem->base_memory is non-NULL and we must
       * patch it in.  If VkBindImageMemorySwapchainInfoKHR is given, we've
       * looked mem up above and also need to patch it in.
       */
      local_infos[i].memory = vn_device_memory_to_handle(
         mem->base_memory ? mem->base_memory : mem);
      local_infos[i].memoryOffset += mem->base_offset;
   }
   if (local_infos)
      pBindInfos = local_infos;

   vn_async_vkBindImageMemory2(dev->primary_ring, device, bindInfoCount,
                               pBindInfos);

   vk_free(alloc, local_infos);

   return VK_SUCCESS;
}
813 
814 VkResult
vn_GetImageDrmFormatModifierPropertiesEXT(VkDevice device,VkImage image,VkImageDrmFormatModifierPropertiesEXT * pProperties)815 vn_GetImageDrmFormatModifierPropertiesEXT(
816    VkDevice device,
817    VkImage image,
818    VkImageDrmFormatModifierPropertiesEXT *pProperties)
819 {
820    struct vn_device *dev = vn_device_from_handle(device);
821 
822    /* TODO local cache */
823    return vn_call_vkGetImageDrmFormatModifierPropertiesEXT(
824       dev->primary_ring, device, image, pProperties);
825 }
826 
827 void
vn_GetImageSubresourceLayout(VkDevice device,VkImage image,const VkImageSubresource * pSubresource,VkSubresourceLayout * pLayout)828 vn_GetImageSubresourceLayout(VkDevice device,
829                              VkImage image,
830                              const VkImageSubresource *pSubresource,
831                              VkSubresourceLayout *pLayout)
832 {
833    struct vn_device *dev = vn_device_from_handle(device);
834    struct vn_image *img = vn_image_from_handle(image);
835 
836    /* override aspect mask for wsi/ahb images with tiling modifier */
837    VkImageSubresource local_subresource;
838    if ((img->wsi.is_wsi && img->wsi.tiling_override ==
839                               VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) ||
840        img->deferred_info) {
841       VkImageAspectFlags aspect = pSubresource->aspectMask;
842       switch (aspect) {
843       case VK_IMAGE_ASPECT_COLOR_BIT:
844       case VK_IMAGE_ASPECT_DEPTH_BIT:
845       case VK_IMAGE_ASPECT_STENCIL_BIT:
846       case VK_IMAGE_ASPECT_PLANE_0_BIT:
847          aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
848          break;
849       case VK_IMAGE_ASPECT_PLANE_1_BIT:
850          aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT;
851          break;
852       case VK_IMAGE_ASPECT_PLANE_2_BIT:
853          aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
854          break;
855       default:
856          break;
857       }
858 
859       /* only handle supported aspect override */
860       if (aspect != pSubresource->aspectMask) {
861          local_subresource = *pSubresource;
862          local_subresource.aspectMask = aspect;
863          pSubresource = &local_subresource;
864       }
865    }
866 
867    /* TODO local cache */
868    vn_call_vkGetImageSubresourceLayout(dev->primary_ring, device, image,
869                                        pSubresource, pLayout);
870 }
871 
872 /* image view commands */
873 
874 VkResult
vn_CreateImageView(VkDevice device,const VkImageViewCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImageView * pView)875 vn_CreateImageView(VkDevice device,
876                    const VkImageViewCreateInfo *pCreateInfo,
877                    const VkAllocationCallbacks *pAllocator,
878                    VkImageView *pView)
879 {
880    struct vn_device *dev = vn_device_from_handle(device);
881    struct vn_image *img = vn_image_from_handle(pCreateInfo->image);
882    const VkAllocationCallbacks *alloc =
883       pAllocator ? pAllocator : &dev->base.base.alloc;
884 
885    VkImageViewCreateInfo local_info;
886    if (img->deferred_info && img->deferred_info->from_external_format) {
887       assert(pCreateInfo->format == VK_FORMAT_UNDEFINED);
888 
889       local_info = *pCreateInfo;
890       local_info.format = img->deferred_info->create.format;
891       pCreateInfo = &local_info;
892 
893       assert(pCreateInfo->format != VK_FORMAT_UNDEFINED);
894    }
895 
896    struct vn_image_view *view =
897       vk_zalloc(alloc, sizeof(*view), VN_DEFAULT_ALIGN,
898                 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
899    if (!view)
900       return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
901 
902    vn_object_base_init(&view->base, VK_OBJECT_TYPE_IMAGE_VIEW, &dev->base);
903    view->image = img;
904 
905    VkImageView view_handle = vn_image_view_to_handle(view);
906    vn_async_vkCreateImageView(dev->primary_ring, device, pCreateInfo, NULL,
907                               &view_handle);
908 
909    *pView = view_handle;
910 
911    return VK_SUCCESS;
912 }
913 
914 void
vn_DestroyImageView(VkDevice device,VkImageView imageView,const VkAllocationCallbacks * pAllocator)915 vn_DestroyImageView(VkDevice device,
916                     VkImageView imageView,
917                     const VkAllocationCallbacks *pAllocator)
918 {
919    struct vn_device *dev = vn_device_from_handle(device);
920    struct vn_image_view *view = vn_image_view_from_handle(imageView);
921    const VkAllocationCallbacks *alloc =
922       pAllocator ? pAllocator : &dev->base.base.alloc;
923 
924    if (!view)
925       return;
926 
927    vn_async_vkDestroyImageView(dev->primary_ring, device, imageView, NULL);
928 
929    vn_object_base_fini(&view->base);
930    vk_free(alloc, view);
931 }
932 
933 /* sampler commands */
934 
935 VkResult
vn_CreateSampler(VkDevice device,const VkSamplerCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSampler * pSampler)936 vn_CreateSampler(VkDevice device,
937                  const VkSamplerCreateInfo *pCreateInfo,
938                  const VkAllocationCallbacks *pAllocator,
939                  VkSampler *pSampler)
940 {
941    struct vn_device *dev = vn_device_from_handle(device);
942    const VkAllocationCallbacks *alloc =
943       pAllocator ? pAllocator : &dev->base.base.alloc;
944 
945    struct vn_sampler *sampler =
946       vk_zalloc(alloc, sizeof(*sampler), VN_DEFAULT_ALIGN,
947                 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
948    if (!sampler)
949       return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
950 
951    vn_object_base_init(&sampler->base, VK_OBJECT_TYPE_SAMPLER, &dev->base);
952 
953    VkSampler sampler_handle = vn_sampler_to_handle(sampler);
954    vn_async_vkCreateSampler(dev->primary_ring, device, pCreateInfo, NULL,
955                             &sampler_handle);
956 
957    *pSampler = sampler_handle;
958 
959    return VK_SUCCESS;
960 }
961 
962 void
vn_DestroySampler(VkDevice device,VkSampler _sampler,const VkAllocationCallbacks * pAllocator)963 vn_DestroySampler(VkDevice device,
964                   VkSampler _sampler,
965                   const VkAllocationCallbacks *pAllocator)
966 {
967    struct vn_device *dev = vn_device_from_handle(device);
968    struct vn_sampler *sampler = vn_sampler_from_handle(_sampler);
969    const VkAllocationCallbacks *alloc =
970       pAllocator ? pAllocator : &dev->base.base.alloc;
971 
972    if (!sampler)
973       return;
974 
975    vn_async_vkDestroySampler(dev->primary_ring, device, _sampler, NULL);
976 
977    vn_object_base_fini(&sampler->base);
978    vk_free(alloc, sampler);
979 }
980 
981 /* sampler YCbCr conversion commands */
982 
983 VkResult
vn_CreateSamplerYcbcrConversion(VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)984 vn_CreateSamplerYcbcrConversion(
985    VkDevice device,
986    const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
987    const VkAllocationCallbacks *pAllocator,
988    VkSamplerYcbcrConversion *pYcbcrConversion)
989 {
990    struct vn_device *dev = vn_device_from_handle(device);
991    const VkAllocationCallbacks *alloc =
992       pAllocator ? pAllocator : &dev->base.base.alloc;
993    const VkExternalFormatANDROID *ext_info =
994       vk_find_struct_const(pCreateInfo->pNext, EXTERNAL_FORMAT_ANDROID);
995 
996    VkSamplerYcbcrConversionCreateInfo local_info;
997    if (ext_info && ext_info->externalFormat) {
998       assert(pCreateInfo->format == VK_FORMAT_UNDEFINED);
999 
1000       local_info = *pCreateInfo;
1001       local_info.format =
1002          vn_android_drm_format_to_vk_format(ext_info->externalFormat);
1003       local_info.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
1004       local_info.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
1005       local_info.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
1006       local_info.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
1007       pCreateInfo = &local_info;
1008 
1009       assert(pCreateInfo->format != VK_FORMAT_UNDEFINED);
1010    }
1011 
1012    struct vn_sampler_ycbcr_conversion *conv =
1013       vk_zalloc(alloc, sizeof(*conv), VN_DEFAULT_ALIGN,
1014                 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1015    if (!conv)
1016       return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1017 
1018    vn_object_base_init(&conv->base, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
1019                        &dev->base);
1020 
1021    VkSamplerYcbcrConversion conv_handle =
1022       vn_sampler_ycbcr_conversion_to_handle(conv);
1023    vn_async_vkCreateSamplerYcbcrConversion(dev->primary_ring, device,
1024                                            pCreateInfo, NULL, &conv_handle);
1025 
1026    *pYcbcrConversion = conv_handle;
1027 
1028    return VK_SUCCESS;
1029 }
1030 
1031 void
vn_DestroySamplerYcbcrConversion(VkDevice device,VkSamplerYcbcrConversion ycbcrConversion,const VkAllocationCallbacks * pAllocator)1032 vn_DestroySamplerYcbcrConversion(VkDevice device,
1033                                  VkSamplerYcbcrConversion ycbcrConversion,
1034                                  const VkAllocationCallbacks *pAllocator)
1035 {
1036    struct vn_device *dev = vn_device_from_handle(device);
1037    struct vn_sampler_ycbcr_conversion *conv =
1038       vn_sampler_ycbcr_conversion_from_handle(ycbcrConversion);
1039    const VkAllocationCallbacks *alloc =
1040       pAllocator ? pAllocator : &dev->base.base.alloc;
1041 
1042    if (!conv)
1043       return;
1044 
1045    vn_async_vkDestroySamplerYcbcrConversion(dev->primary_ring, device,
1046                                             ycbcrConversion, NULL);
1047 
1048    vn_object_base_fini(&conv->base);
1049    vk_free(alloc, conv);
1050 }
1051 
1052 void
vn_GetDeviceImageMemoryRequirements(VkDevice device,const VkDeviceImageMemoryRequirements * pInfo,VkMemoryRequirements2 * pMemoryRequirements)1053 vn_GetDeviceImageMemoryRequirements(
1054    VkDevice device,
1055    const VkDeviceImageMemoryRequirements *pInfo,
1056    VkMemoryRequirements2 *pMemoryRequirements)
1057 {
1058    struct vn_device *dev = vn_device_from_handle(device);
1059 
1060    /* TODO integrate image memory requirements cache */
1061    vn_call_vkGetDeviceImageMemoryRequirements(dev->primary_ring, device,
1062                                               pInfo, pMemoryRequirements);
1063 }
1064 
1065 void
vn_GetDeviceImageSparseMemoryRequirements(VkDevice device,const VkDeviceImageMemoryRequirements * pInfo,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements)1066 vn_GetDeviceImageSparseMemoryRequirements(
1067    VkDevice device,
1068    const VkDeviceImageMemoryRequirements *pInfo,
1069    uint32_t *pSparseMemoryRequirementCount,
1070    VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
1071 {
1072    struct vn_device *dev = vn_device_from_handle(device);
1073 
1074    /* see vn_GetPhysicalDeviceSparseImageFormatProperties2 */
1075    if (dev->physical_device->sparse_binding_disabled) {
1076       *pSparseMemoryRequirementCount = 0;
1077       return;
1078    }
1079 
1080    /* TODO per-device cache */
1081    vn_call_vkGetDeviceImageSparseMemoryRequirements(
1082       dev->primary_ring, device, pInfo, pSparseMemoryRequirementCount,
1083       pSparseMemoryRequirements);
1084 }
1085