Lines Matching +full:lru +full:- +full:cache
3 * SPDX-License-Identifier: MIT
13 #include "venus-protocol/vn_protocol_driver_image.h"
14 #include "venus-protocol/vn_protocol_driver_image_view.h"
15 #include "venus-protocol/vn_protocol_driver_sampler.h"
16 #include "venus-protocol/vn_protocol_driver_sampler_ycbcr_conversion.h"
32 if (!(create_info->flags & VK_IMAGE_CREATE_DISJOINT_BIT)) in vn_image_get_plane_count()
36 assert(create_info->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT); in vn_image_get_plane_count()
37 return vk_format_get_plane_count(create_info->format); in vn_image_get_plane_count()
64 switch (u.pnext->sType) { in vn_image_fill_reqs()
66 u.two->memoryRequirements = req->memory.memoryRequirements; in vn_image_fill_reqs()
69 u.dedicated->prefersDedicatedAllocation = in vn_image_fill_reqs()
70 req->dedicated.prefersDedicatedAllocation; in vn_image_fill_reqs()
71 u.dedicated->requiresDedicatedAllocation = in vn_image_fill_reqs()
72 req->dedicated.requiresDedicatedAllocation; in vn_image_fill_reqs()
77 u.pnext = u.pnext->pNext; in vn_image_fill_reqs()
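
The fragments above from vn_image_fill_reqs() walk the caller's pNext chain through a union of typed pointers and fill whichever structs were chained. A minimal standalone sketch of that pattern, assuming plain Vulkan types; fill_reqs_sketch() and its cached_* parameters are hypothetical, while the union member names mirror the listing:

#include <vulkan/vulkan.h>

static void
fill_reqs_sketch(const VkMemoryRequirements *cached_mem,
                 const VkMemoryDedicatedRequirements *cached_dedicated,
                 VkMemoryRequirements2 *out)
{
   /* One pointer, three views: walk as a base struct, write as the
    * concrete struct selected by sType. */
   union {
      VkBaseOutStructure *pnext;
      VkMemoryRequirements2 *two;
      VkMemoryDedicatedRequirements *dedicated;
   } u = { .two = out };

   while (u.pnext) {
      switch (u.pnext->sType) {
      case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2:
         u.two->memoryRequirements = *cached_mem;
         break;
      case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
         u.dedicated->prefersDedicatedAllocation =
            cached_dedicated->prefersDedicatedAllocation;
         u.dedicated->requiresDedicatedAllocation =
            cached_dedicated->requiresDedicatedAllocation;
         break;
      default:
         /* unrecognized chained structs are left untouched */
         break;
      }
      u.pnext = u.pnext->pNext;
   }
}
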
82 vn_image_cache_debug_dump(struct vn_image_reqs_cache *cache) in vn_image_cache_debug_dump() argument
84 vn_log(NULL, "dumping image reqs cache statistics"); in vn_image_cache_debug_dump()
85 vn_log(NULL, " hit %u\n", cache->debug.cache_hit_count); in vn_image_cache_debug_dump()
86 vn_log(NULL, " miss %u\n", cache->debug.cache_miss_count); in vn_image_cache_debug_dump()
87 vn_log(NULL, " skip %u\n", cache->debug.cache_skip_count); in vn_image_cache_debug_dump()
97 if (!dev->image_reqs_cache.ht) in vn_image_get_image_reqs_key()
103 vk_foreach_struct_const(src, create_info->pNext) { in vn_image_get_image_reqs_key()
104 switch (src->sType) { in vn_image_get_image_reqs_key()
108 _mesa_sha1_update(&sha1_ctx, &ext_mem->handleTypes, in vn_image_get_image_reqs_key()
115 _mesa_sha1_update(&sha1_ctx, format_list->pViewFormats, in vn_image_get_image_reqs_key()
116 sizeof(VkFormat) * format_list->viewFormatCount); in vn_image_get_image_reqs_key()
123 &sha1_ctx, format_mod_list->pDrmFormatModifiers, in vn_image_get_image_reqs_key()
124 sizeof(uint64_t) * format_mod_list->drmFormatModifierCount); in vn_image_get_image_reqs_key()
131 _mesa_sha1_update(&sha1_ctx, &format_mod_explicit->drmFormatModifier, in vn_image_get_image_reqs_key()
134 &sha1_ctx, format_mod_explicit->pPlaneLayouts, in vn_image_get_image_reqs_key()
136 format_mod_explicit->drmFormatModifierPlaneCount); in vn_image_get_image_reqs_key()
142 _mesa_sha1_update(&sha1_ctx, &stencil_usage->stencilUsage, in vn_image_get_image_reqs_key()
147 /* Skip cache for unsupported pNext */ in vn_image_get_image_reqs_key()
148 dev->image_reqs_cache.debug.cache_skip_count++; in vn_image_get_image_reqs_key()
154 * VkImageCreateInfo->flags and ending with VkImageCreateInfo->sharingMode in vn_image_get_image_reqs_key()
160 offsetof(VkImageCreateInfo, queueFamilyIndexCount) - in vn_image_get_image_reqs_key()
163 _mesa_sha1_update(&sha1_ctx, &create_info->flags, in vn_image_get_image_reqs_key()
170 if (create_info->sharingMode == VK_SHARING_MODE_CONCURRENT) { in vn_image_get_image_reqs_key()
172 &sha1_ctx, create_info->pQueueFamilyIndices, in vn_image_get_image_reqs_key()
173 sizeof(uint32_t) * create_info->queueFamilyIndexCount); in vn_image_get_image_reqs_key()
176 _mesa_sha1_update(&sha1_ctx, &create_info->initialLayout, in vn_image_get_image_reqs_key()
177 sizeof(create_info->initialLayout)); in vn_image_get_image_reqs_key()
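
vn_image_get_image_reqs_key() derives the cache key by hashing the recognized pNext structs plus the contiguous VkImageCreateInfo fields from flags through sharingMode, sized with offsetof(), then the queue family array (concurrent sharing only) and initialLayout. A hedged sketch of that span-hashing idea, with 64-bit FNV-1a standing in for the driver's SHA-1; hash_span() and image_reqs_key_sketch() are hypothetical helpers and the pNext handling is omitted:

#include <stddef.h>
#include <stdint.h>
#include <vulkan/vulkan.h>

static uint64_t
hash_span(uint64_t h, const void *data, size_t size)
{
   const uint8_t *bytes = data;
   for (size_t i = 0; i < size; i++) {
      h ^= bytes[i];
      h *= 0x100000001b3ull; /* FNV-1a 64-bit prime */
   }
   return h;
}

static uint64_t
image_reqs_key_sketch(const VkImageCreateInfo *info)
{
   uint64_t h = 0xcbf29ce484222325ull; /* FNV-1a 64-bit offset basis */

   /* flags .. sharingMode are laid out contiguously, ending right before
    * queueFamilyIndexCount, so one update covers all of them. */
   const size_t span = offsetof(VkImageCreateInfo, queueFamilyIndexCount) -
                       offsetof(VkImageCreateInfo, flags);
   h = hash_span(h, &info->flags, span);

   /* The queue family list only matters for concurrent sharing. */
   if (info->sharingMode == VK_SHARING_MODE_CONCURRENT) {
      h = hash_span(h, info->pQueueFamilyIndices,
                    sizeof(uint32_t) * info->queueFamilyIndexCount);
   }

   h = hash_span(h, &info->initialLayout, sizeof(info->initialLayout));
   return h;
}
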
186 struct vn_image_reqs_cache *cache = &dev->image_reqs_cache; in vn_image_reqs_cache_init() local
191 cache->ht = _mesa_hash_table_create(NULL, vn_cache_key_hash_function, in vn_image_reqs_cache_init()
193 if (!cache->ht) in vn_image_reqs_cache_init()
196 simple_mtx_init(&cache->mutex, mtx_plain); in vn_image_reqs_cache_init()
197 list_inithead(&dev->image_reqs_cache.lru); in vn_image_reqs_cache_init()
203 const VkAllocationCallbacks *alloc = &dev->base.base.alloc; in vn_image_reqs_cache_fini()
204 struct vn_image_reqs_cache *cache = &dev->image_reqs_cache; in vn_image_reqs_cache_fini() local
206 if (!cache->ht) in vn_image_reqs_cache_fini()
209 hash_table_foreach(cache->ht, hash_entry) { in vn_image_reqs_cache_fini()
210 struct vn_image_reqs_cache_entry *cache_entry = hash_entry->data; in vn_image_reqs_cache_fini()
211 list_del(&cache_entry->head); in vn_image_reqs_cache_fini()
214 assert(list_is_empty(&dev->image_reqs_cache.lru)); in vn_image_reqs_cache_fini()
216 _mesa_hash_table_destroy(cache->ht, NULL); in vn_image_reqs_cache_fini()
218 simple_mtx_destroy(&cache->mutex); in vn_image_reqs_cache_fini()
220 if (VN_DEBUG(CACHE)) in vn_image_reqs_cache_fini()
221 vn_image_cache_debug_dump(cache); in vn_image_reqs_cache_fini()
229 struct vn_image_reqs_cache *cache = &dev->image_reqs_cache; in vn_image_init_reqs_from_cache() local
231 assert(cache->ht); in vn_image_init_reqs_from_cache()
233 simple_mtx_lock(&cache->mutex); in vn_image_init_reqs_from_cache()
234 struct hash_entry *hash_entry = _mesa_hash_table_search(cache->ht, key); in vn_image_init_reqs_from_cache()
236 struct vn_image_reqs_cache_entry *cache_entry = hash_entry->data; in vn_image_init_reqs_from_cache()
237 for (uint32_t i = 0; i < cache_entry->plane_count; i++) in vn_image_init_reqs_from_cache()
238 img->requirements[i] = cache_entry->requirements[i]; in vn_image_init_reqs_from_cache()
239 list_move_to(&cache_entry->head, &dev->image_reqs_cache.lru); in vn_image_init_reqs_from_cache()
240 p_atomic_inc(&cache->debug.cache_hit_count); in vn_image_init_reqs_from_cache()
242 p_atomic_inc(&cache->debug.cache_miss_count); in vn_image_init_reqs_from_cache()
244 simple_mtx_unlock(&cache->mutex); in vn_image_init_reqs_from_cache()
255 struct vn_image_reqs_cache *cache = &dev->image_reqs_cache; in vn_image_get_reqs_from_cache() local
257 assert(cache->ht); in vn_image_get_reqs_from_cache()
259 simple_mtx_lock(&cache->mutex); in vn_image_get_reqs_from_cache()
260 struct hash_entry *hash_entry = _mesa_hash_table_search(cache->ht, key); in vn_image_get_reqs_from_cache()
262 struct vn_image_reqs_cache_entry *cache_entry = hash_entry->data; in vn_image_get_reqs_from_cache()
263 requirements = &cache_entry->requirements[plane]; in vn_image_get_reqs_from_cache()
264 list_move_to(&cache_entry->head, &dev->image_reqs_cache.lru); in vn_image_get_reqs_from_cache()
265 p_atomic_inc(&cache->debug.cache_hit_count); in vn_image_get_reqs_from_cache()
267 p_atomic_inc(&cache->debug.cache_miss_count); in vn_image_get_reqs_from_cache()
269 simple_mtx_unlock(&cache->mutex); in vn_image_get_reqs_from_cache()
280 const VkAllocationCallbacks *alloc = &dev->base.base.alloc; in vn_image_store_reqs_in_cache()
281 struct vn_image_reqs_cache *cache = &dev->image_reqs_cache; in vn_image_store_reqs_in_cache() local
284 assert(cache->ht); in vn_image_store_reqs_in_cache()
286 simple_mtx_lock(&cache->mutex); in vn_image_store_reqs_in_cache()
289 if (_mesa_hash_table_search(cache->ht, key)) { in vn_image_store_reqs_in_cache()
290 simple_mtx_unlock(&cache->mutex); in vn_image_store_reqs_in_cache()
294 if (_mesa_hash_table_num_entries(cache->ht) == in vn_image_store_reqs_in_cache()
296 /* Evict the last entry in the LRU list and reuse it for this new entry */ in vn_image_store_reqs_in_cache()
298 list_last_entry(&cache->lru, struct vn_image_reqs_cache_entry, head); in vn_image_store_reqs_in_cache()
300 _mesa_hash_table_remove_key(cache->ht, cache_entry->key); in vn_image_store_reqs_in_cache()
301 list_del(&cache_entry->head); in vn_image_store_reqs_in_cache()
306 simple_mtx_unlock(&cache->mutex); in vn_image_store_reqs_in_cache()
312 cache_entry->requirements[i] = requirements[i]; in vn_image_store_reqs_in_cache()
314 memcpy(cache_entry->key, key, SHA1_DIGEST_LENGTH); in vn_image_store_reqs_in_cache()
315 cache_entry->plane_count = plane_count; in vn_image_store_reqs_in_cache()
317 _mesa_hash_table_insert(dev->image_reqs_cache.ht, cache_entry->key, in vn_image_store_reqs_in_cache()
319 list_add(&cache_entry->head, &cache->lru); in vn_image_store_reqs_in_cache()
321 simple_mtx_unlock(&cache->mutex); in vn_image_store_reqs_in_cache()
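
Taken together, vn_image_init_reqs_from_cache(), vn_image_get_reqs_from_cache() and vn_image_store_reqs_in_cache() implement a bounded cache as a hash table paired with an LRU list: a hit moves the entry to the front, and a store at capacity evicts and reuses the tail entry. A condensed sketch of that shape, written against Mesa's util helpers as they appear in the listing (util/hash_table.h, util/list.h are assumed header paths); reqs_cache, reqs_entry, KEY_LEN and CACHE_MAX_ENTRIES are hypothetical, and locking, statistics and allocation callbacks are left out:

#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#include "util/hash_table.h"
#include "util/list.h"

#define KEY_LEN 20            /* SHA-1 digest size */
#define CACHE_MAX_ENTRIES 64  /* arbitrary capacity for the sketch */

struct reqs_entry {
   unsigned char key[KEY_LEN];
   struct list_head head;     /* position in the LRU list */
   int value;                 /* stands in for the cached requirements */
};

struct reqs_cache {
   struct hash_table *ht;     /* key -> entry */
   struct list_head lru;      /* most recently used at the front */
};

static uint32_t
key_hash(const void *key)
{
   return _mesa_hash_data(key, KEY_LEN);
}

static bool
key_equal(const void *a, const void *b)
{
   return memcmp(a, b, KEY_LEN) == 0;
}

static bool
cache_init(struct reqs_cache *cache)
{
   cache->ht = _mesa_hash_table_create(NULL, key_hash, key_equal);
   if (!cache->ht)
      return false;
   list_inithead(&cache->lru);
   return true;
}

/* Hit path: copy the cached value out and move the entry to the LRU front. */
static bool
cache_lookup(struct reqs_cache *cache, const unsigned char *key, int *value)
{
   struct hash_entry *he = _mesa_hash_table_search(cache->ht, key);
   if (!he)
      return false;

   struct reqs_entry *entry = he->data;
   *value = entry->value;
   list_move_to(&entry->head, &cache->lru);
   return true;
}

/* Store path: when full, evict the LRU tail entry and reuse its slot. */
static void
cache_store(struct reqs_cache *cache, const unsigned char *key, int value)
{
   if (_mesa_hash_table_search(cache->ht, key))
      return; /* the key is already cached; nothing to do */

   struct reqs_entry *entry;
   if (_mesa_hash_table_num_entries(cache->ht) == CACHE_MAX_ENTRIES) {
      entry = list_last_entry(&cache->lru, struct reqs_entry, head);
      _mesa_hash_table_remove_key(cache->ht, entry->key);
      list_del(&entry->head);
   } else {
      entry = calloc(1, sizeof(*entry));
      if (!entry)
         return; /* caching is best-effort */
   }

   memcpy(entry->key, key, KEY_LEN);
   entry->value = value;
   _mesa_hash_table_insert(cache->ht, entry->key, entry);
   list_add(&entry->head, &cache->lru);
}
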
329 assert(plane_count <= ARRAY_SIZE(img->requirements)); in vn_image_init_memory_requirements()
332 img->requirements[i].memory.sType = in vn_image_init_memory_requirements()
334 img->requirements[i].memory.pNext = &img->requirements[i].dedicated; in vn_image_init_memory_requirements()
335 img->requirements[i].dedicated.sType = in vn_image_init_memory_requirements()
337 img->requirements[i].dedicated.pNext = NULL; in vn_image_init_memory_requirements()
344 dev->primary_ring, dev_handle, in vn_image_init_memory_requirements()
349 &img->requirements[0].memory); in vn_image_init_memory_requirements()
352 if (img->deferred_info) { in vn_image_init_memory_requirements()
353 img->requirements[0].dedicated.prefersDedicatedAllocation = VK_TRUE; in vn_image_init_memory_requirements()
354 img->requirements[0].dedicated.requiresDedicatedAllocation = VK_TRUE; in vn_image_init_memory_requirements()
359 dev->primary_ring, dev_handle, in vn_image_init_memory_requirements()
370 &img->requirements[i].memory); in vn_image_init_memory_requirements()
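
vn_image_init_memory_requirements() initializes each plane's VkMemoryRequirements2 and VkMemoryDedicatedRequirements pair, then queries the renderer once per plane for disjoint images. A hedged sketch of the equivalent core-Vulkan query pattern, using plain vkGetImageMemoryRequirements2 instead of the venus ring calls and omitting the dedicated-requirements chaining; query_plane_reqs_sketch() is hypothetical:

#include <vulkan/vulkan.h>

static void
query_plane_reqs_sketch(VkDevice dev, VkImage img, uint32_t plane_count,
                        VkMemoryRequirements2 *reqs /* [plane_count] */)
{
   static const VkImageAspectFlagBits plane_aspects[3] = {
      VK_IMAGE_ASPECT_PLANE_0_BIT,
      VK_IMAGE_ASPECT_PLANE_1_BIT,
      VK_IMAGE_ASPECT_PLANE_2_BIT,
   };

   for (uint32_t i = 0; i < plane_count; i++) {
      const VkImagePlaneMemoryRequirementsInfo plane_info = {
         .sType = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
         .planeAspect = plane_aspects[i],
      };
      const VkImageMemoryRequirementsInfo2 info = {
         .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
         /* the plane info only applies to disjoint multi-planar images */
         .pNext = plane_count > 1 ? &plane_info : NULL,
         .image = img,
      };

      reqs[i] = (VkMemoryRequirements2){
         .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
      };
      vkGetImageMemoryRequirements2(dev, &info, &reqs[i]);
   }
}
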
388 info->create = *create_info; in vn_image_deferred_info_init()
389 dst = (void *)&info->create; in vn_image_deferred_info_init()
391 vk_foreach_struct_const(src, create_info->pNext) { in vn_image_deferred_info_init()
393 switch (src->sType) { in vn_image_deferred_info_init()
401 if (!((const VkImageFormatListCreateInfo *)src)->viewFormatCount) in vn_image_deferred_info_init()
404 memcpy(&info->list, src, sizeof(info->list)); in vn_image_deferred_info_init()
405 pnext = &info->list; in vn_image_deferred_info_init()
408 const size_t size = sizeof(VkFormat) * info->list.viewFormatCount; in vn_image_deferred_info_init()
417 ((const VkImageFormatListCreateInfo *)src)->pViewFormats, in vn_image_deferred_info_init()
419 info->list.pViewFormats = view_formats; in vn_image_deferred_info_init()
422 memcpy(&info->stencil, src, sizeof(info->stencil)); in vn_image_deferred_info_init()
423 pnext = &info->stencil; in vn_image_deferred_info_init()
427 (uint32_t)((const VkExternalFormatANDROID *)src)->externalFormat; in vn_image_deferred_info_init()
429 info->create.format = in vn_image_deferred_info_init()
431 info->from_external_format = true; in vn_image_deferred_info_init()
435 img->wsi.is_wsi = true; in vn_image_deferred_info_init()
442 dst->pNext = pnext; in vn_image_deferred_info_init()
446 dst->pNext = NULL; in vn_image_deferred_info_init()
448 img->deferred_info = info; in vn_image_deferred_info_init()
457 if (!img->deferred_info) in vn_image_deferred_info_fini()
460 if (img->deferred_info->list.pViewFormats) in vn_image_deferred_info_fini()
461 vk_free(alloc, (void *)img->deferred_info->list.pViewFormats); in vn_image_deferred_info_fini()
463 vk_free(alloc, img->deferred_info); in vn_image_deferred_info_fini()
475 img->sharing_mode = create_info->sharingMode; in vn_image_init()
477 /* Check if the mem reqs are in the cache; if found, make the create call async */ in vn_image_init()
482 vn_async_vkCreateImage(dev->primary_ring, device, create_info, NULL, in vn_image_init()
487 result = vn_call_vkCreateImage(dev->primary_ring, device, create_info, in vn_image_init()
496 vn_image_store_reqs_in_cache(dev, key, plane_count, img->requirements); in vn_image_init()
508 vk_image_create(&dev->base.base, create_info, alloc, sizeof(*img)); in vn_image_create()
516 vk_image_destroy(&dev->base.base, alloc, &img->base.base); in vn_image_create()
531 img->deferred_info->initialized = result == VK_SUCCESS; in vn_image_init_deferred()
542 vk_image_create(&dev->base.base, create_info, alloc, sizeof(*img)); in vn_image_create_deferred()
550 vk_image_destroy(&dev->base.base, alloc, &img->base.base); in vn_image_create_deferred()
574 local_info->create = *create_info; in vn_image_fix_create_info()
575 VkBaseOutStructure *cur = (void *)&local_info->create; in vn_image_fix_create_info()
577 vk_foreach_struct_const(src, create_info->pNext) { in vn_image_fix_create_info()
579 switch (src->sType) { in vn_image_fix_create_info()
581 memcpy(&local_info->external, src, sizeof(local_info->external)); in vn_image_fix_create_info()
582 local_info->external.handleTypes = renderer_handle_type; in vn_image_fix_create_info()
583 next = &local_info->external; in vn_image_fix_create_info()
586 memcpy(&local_info->format_list, src, in vn_image_fix_create_info()
587 sizeof(local_info->format_list)); in vn_image_fix_create_info()
588 next = &local_info->format_list; in vn_image_fix_create_info()
591 memcpy(&local_info->stencil, src, sizeof(local_info->stencil)); in vn_image_fix_create_info()
592 next = &local_info->stencil; in vn_image_fix_create_info()
595 memcpy(&local_info->modifier_list, src, in vn_image_fix_create_info()
596 sizeof(local_info->modifier_list)); in vn_image_fix_create_info()
597 next = &local_info->modifier_list; in vn_image_fix_create_info()
600 memcpy(&local_info->modifier_explicit, src, in vn_image_fix_create_info()
601 sizeof(local_info->modifier_explicit)); in vn_image_fix_create_info()
602 next = &local_info->modifier_explicit; in vn_image_fix_create_info()
609 cur->pNext = next; in vn_image_fix_create_info()
614 cur->pNext = NULL; in vn_image_fix_create_info()
616 return &local_info->create; in vn_image_fix_create_info()
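
vn_image_fix_create_info() shallow-copies the create info and re-chains only the pNext structs the renderer should see, overriding the external memory handle type with the renderer's. A minimal sketch of that filter-and-re-chain idea, assuming a hypothetical fix_info struct and fix_create_info_sketch() helper and handling just two of the structs shown above:

#include <string.h>
#include <vulkan/vulkan.h>

struct fix_info {
   VkImageCreateInfo create;
   VkExternalMemoryImageCreateInfo external;
   VkImageFormatListCreateInfo format_list;
};

static const VkImageCreateInfo *
fix_create_info_sketch(const VkImageCreateInfo *create_info,
                       VkExternalMemoryHandleTypeFlagBits renderer_handle_type,
                       struct fix_info *local)
{
   local->create = *create_info;
   VkBaseOutStructure *cur = (void *)&local->create;

   for (const VkBaseInStructure *src =
           (const VkBaseInStructure *)create_info->pNext;
        src; src = src->pNext) {
      void *next = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
         memcpy(&local->external, src, sizeof(local->external));
         local->external.handleTypes = renderer_handle_type;
         next = &local->external;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
         memcpy(&local->format_list, src, sizeof(local->format_list));
         next = &local->format_list;
         break;
      default:
         /* anything unrecognized is dropped from the chain */
         break;
      }

      if (next) {
         cur->pNext = next;
         cur = next;
      }
   }

   cur->pNext = NULL;
   return &local->create;
}
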
627 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_CreateImage()
629 dev->physical_device->external_memory.renderer_handle_type; in vn_CreateImage()
639 vk_foreach_struct_const(pnext, pCreateInfo->pNext) { in vn_CreateImage()
640 switch ((uint32_t)pnext->sType) { in vn_CreateImage()
649 if (!swapchain_info->swapchain) in vn_CreateImage()
654 if (!external_info->handleTypes) in vn_CreateImage()
657 external_info->handleTypes == in vn_CreateImage()
667 * - common wsi image: dma_buf is hard-coded in wsi_configure_native_image in vn_CreateImage()
668 * - common wsi image alias: it aligns with wsi_info on external handle in vn_CreateImage()
669 * - Android wsi image: VK_ANDROID_native_buffer involves no external info in vn_CreateImage()
670 * - AHB external image: deferred creation reconstructs external info in vn_CreateImage()
673 * - non-AHB external image requesting handle types different from renderer in vn_CreateImage()
678 assert(external_info->handleTypes == renderer_handle_type); in vn_CreateImage()
695 external_info->handleTypes != renderer_handle_type) { in vn_CreateImage()
704 return vn_error(dev->instance, result); in vn_CreateImage()
718 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_DestroyImage()
723 if (img->wsi.memory && img->wsi.memory_owned) { in vn_DestroyImage()
724 VkDeviceMemory mem_handle = vn_device_memory_to_handle(img->wsi.memory); in vn_DestroyImage()
729 if (!img->deferred_info || img->deferred_info->initialized) in vn_DestroyImage()
730 vn_async_vkDestroyImage(dev->primary_ring, device, image, NULL); in vn_DestroyImage()
734 vk_image_destroy(&dev->base.base, alloc, &img->base.base); in vn_DestroyImage()
742 const struct vn_image *img = vn_image_from_handle(pInfo->image); in vn_GetImageMemoryRequirements2()
746 vk_find_struct_const(pInfo->pNext, in vn_GetImageMemoryRequirements2()
749 plane = vn_image_get_plane(plane_info->planeAspect); in vn_GetImageMemoryRequirements2()
751 vn_image_fill_reqs(&img->requirements[plane], pMemoryRequirements); in vn_GetImageMemoryRequirements2()
764 if (dev->physical_device->sparse_binding_disabled) { in vn_GetImageSparseMemoryRequirements2()
769 /* TODO local or per-device cache */ in vn_GetImageSparseMemoryRequirements2()
771 dev->primary_ring, device, pInfo, pSparseMemoryRequirementCount, in vn_GetImageSparseMemoryRequirements2()
785 struct vn_image *img = vn_image_from_handle(info->image); in vn_image_bind_wsi_memory()
787 vn_device_memory_from_handle(info->memory); in vn_image_bind_wsi_memory()
798 vk_find_struct_const(info->pNext, in vn_image_bind_wsi_memory()
800 assert(img->wsi.is_wsi && swapchain_info); in vn_image_bind_wsi_memory()
804 swapchain_info->swapchain, swapchain_info->imageIndex)); in vn_image_bind_wsi_memory()
805 mem = swapchain_img->wsi.memory; in vn_image_bind_wsi_memory()
807 info->memory = vn_device_memory_to_handle(mem); in vn_image_bind_wsi_memory()
809 assert(mem && info->memory != VK_NULL_HANDLE); in vn_image_bind_wsi_memory()
812 assert(img->wsi.memory); in vn_image_bind_wsi_memory()
814 assert(!img->wsi.memory); in vn_image_bind_wsi_memory()
815 img->wsi.memory = mem; in vn_image_bind_wsi_memory()
819 vn_async_vkBindImageMemory2(dev->primary_ring, vn_device_to_handle(dev), in vn_image_bind_wsi_memory()
836 if (img->wsi.is_wsi) in vn_BindImageMemory2()
840 vn_async_vkBindImageMemory2(dev->primary_ring, device, bindInfoCount, in vn_BindImageMemory2()
853 /* TODO local cache */ in vn_GetImageDrmFormatModifierPropertiesEXT()
855 dev->primary_ring, device, image, pProperties); in vn_GetImageDrmFormatModifierPropertiesEXT()
869 if ((img->wsi.is_wsi && img->wsi.tiling_override == in vn_GetImageSubresourceLayout()
871 img->deferred_info) { in vn_GetImageSubresourceLayout()
872 VkImageAspectFlags aspect = pSubresource->aspectMask; in vn_GetImageSubresourceLayout()
891 if (aspect != pSubresource->aspectMask) { in vn_GetImageSubresourceLayout()
898 /* TODO local cache */ in vn_GetImageSubresourceLayout()
899 vn_call_vkGetImageSubresourceLayout(dev->primary_ring, device, image, in vn_GetImageSubresourceLayout()
912 struct vn_image *img = vn_image_from_handle(pCreateInfo->image); in vn_CreateImageView()
914 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_CreateImageView()
917 if (img->deferred_info && img->deferred_info->from_external_format) { in vn_CreateImageView()
918 assert(pCreateInfo->format == VK_FORMAT_UNDEFINED); in vn_CreateImageView()
921 local_info.format = img->deferred_info->create.format; in vn_CreateImageView()
924 assert(pCreateInfo->format != VK_FORMAT_UNDEFINED); in vn_CreateImageView()
931 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY); in vn_CreateImageView()
933 vn_object_base_init(&view->base, VK_OBJECT_TYPE_IMAGE_VIEW, &dev->base); in vn_CreateImageView()
934 view->image = img; in vn_CreateImageView()
937 vn_async_vkCreateImageView(dev->primary_ring, device, pCreateInfo, NULL, in vn_CreateImageView()
953 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_DestroyImageView()
958 vn_async_vkDestroyImageView(dev->primary_ring, device, imageView, NULL); in vn_DestroyImageView()
960 vn_object_base_fini(&view->base); in vn_DestroyImageView()
974 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_CreateSampler()
980 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY); in vn_CreateSampler()
982 vn_object_base_init(&sampler->base, VK_OBJECT_TYPE_SAMPLER, &dev->base); in vn_CreateSampler()
985 vn_async_vkCreateSampler(dev->primary_ring, device, pCreateInfo, NULL, in vn_CreateSampler()
1001 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_DestroySampler()
1006 vn_async_vkDestroySampler(dev->primary_ring, device, _sampler, NULL); in vn_DestroySampler()
1008 vn_object_base_fini(&sampler->base); in vn_DestroySampler()
1023 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_CreateSamplerYcbcrConversion()
1025 vk_find_struct_const(pCreateInfo->pNext, EXTERNAL_FORMAT_ANDROID); in vn_CreateSamplerYcbcrConversion()
1028 if (ext_info && ext_info->externalFormat) { in vn_CreateSamplerYcbcrConversion()
1029 assert(pCreateInfo->format == VK_FORMAT_UNDEFINED); in vn_CreateSamplerYcbcrConversion()
1033 vn_android_drm_format_to_vk_format(ext_info->externalFormat); in vn_CreateSamplerYcbcrConversion()
1040 assert(pCreateInfo->format != VK_FORMAT_UNDEFINED); in vn_CreateSamplerYcbcrConversion()
1047 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY); in vn_CreateSamplerYcbcrConversion()
1049 vn_object_base_init(&conv->base, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, in vn_CreateSamplerYcbcrConversion()
1050 &dev->base); in vn_CreateSamplerYcbcrConversion()
1054 vn_async_vkCreateSamplerYcbcrConversion(dev->primary_ring, device, in vn_CreateSamplerYcbcrConversion()
1071 pAllocator ? pAllocator : &dev->base.base.alloc; in vn_DestroySamplerYcbcrConversion()
1076 vn_async_vkDestroySamplerYcbcrConversion(dev->primary_ring, device, in vn_DestroySamplerYcbcrConversion()
1079 vn_object_base_fini(&conv->base); in vn_DestroySamplerYcbcrConversion()
1093 vn_image_get_image_reqs_key(dev, pInfo->pCreateInfo, key); in vn_GetDeviceImageMemoryRequirements()
1097 if (pInfo->pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) in vn_GetDeviceImageMemoryRequirements()
1098 vn_image_get_plane(pInfo->planeAspect); in vn_GetDeviceImageMemoryRequirements()
1108 vn_image_get_plane_count(pInfo->pCreateInfo); in vn_GetDeviceImageMemoryRequirements()
1112 /* Retrieve reqs for all planes so the cache entry is complete */ in vn_GetDeviceImageMemoryRequirements()
1117 req_info[i].pCreateInfo = pInfo->pCreateInfo; in vn_GetDeviceImageMemoryRequirements()
1127 dev->primary_ring, device, &req_info[i], &reqs[i].memory); in vn_GetDeviceImageMemoryRequirements()
1135 vn_call_vkGetDeviceImageMemoryRequirements(dev->primary_ring, device, in vn_GetDeviceImageMemoryRequirements()
1150 if (dev->physical_device->sparse_binding_disabled) { in vn_GetDeviceImageSparseMemoryRequirements()
1155 /* TODO per-device cache */ in vn_GetDeviceImageSparseMemoryRequirements()
1157 dev->primary_ring, device, pInfo, pSparseMemoryRequirementCount, in vn_GetDeviceImageSparseMemoryRequirements()
1168 /* TODO per-device cache */ in vn_GetDeviceImageSubresourceLayoutKHR()
1170 dev->primary_ring, device, pInfo, pLayout); in vn_GetDeviceImageSubresourceLayoutKHR()
1184 if ((img->wsi.is_wsi && img->wsi.tiling_override == in vn_GetImageSubresourceLayout2KHR()
1186 img->deferred_info) { in vn_GetImageSubresourceLayout2KHR()
1187 VkImageAspectFlags aspect = pSubresource->imageSubresource.aspectMask; in vn_GetImageSubresourceLayout2KHR()
1206 if (aspect != pSubresource->imageSubresource.aspectMask) { in vn_GetImageSubresourceLayout2KHR()
1214 dev->primary_ring, device, image, pSubresource, pLayout); in vn_GetImageSubresourceLayout2KHR()