
Lines Matching +full:cache +full:- +full:base

File header and includes:

   3   * SPDX-License-Identifier: MIT
  13  #include "venus-protocol/vn_protocol_driver_buffer.h"
  14  #include "venus-protocol/vn_protocol_driver_buffer_view.h"
In vn_buffer_get_cache_index():

  25                              struct vn_buffer_reqs_cache *cache)
  27     /* For simplicity, cache only when the conditions below are met:
  28      * - pNext is NULL
  29      * - VK_SHARING_MODE_EXCLUSIVE or VK_SHARING_MODE_CONCURRENT across all
  36     assert(!(create_info->flags & 0x80000000));
  39        create_info->sharingMode == VK_SHARING_MODE_EXCLUSIVE;
  41        create_info->sharingMode == VK_SHARING_MODE_CONCURRENT &&
  42        create_info->queueFamilyIndexCount == cache->queue_family_count;
  43     if (create_info->size <= cache->max_buffer_size &&
  44         create_info->pNext == NULL && (is_exclusive || is_concurrent)) {
  46              (uint64_t)create_info->flags << 32 | create_info->usage;
In vn_buffer_get_max_buffer_size():

  57      * - anv:  1ull << 30
  58      * - radv: UINT32_MAX - 4
  59      * - tu:   UINT32_MAX + 1
  60      * - lvp:  UINT32_MAX
  61      * - mali: UINT32_MAX
  64     return physical_dev->base.base.supported_features.maintenance4
  65               ? physical_dev->properties.vulkan_1_3.maxBufferSize
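
The listing shows the limit selection but elides the fallback constant. A sketch of the visible decision, using 1ull << 30 (the smallest of the per-driver values quoted in the comment) purely as a placeholder for the original's elided value:

   #include <stdbool.h>
   #include <stdint.h>

   /* Prefer the authoritative Vulkan 1.3 maxBufferSize limit when the
    * maintenance4 feature is supported; otherwise fall back to a
    * conservative constant.  The real fallback is elided above. */
   static uint64_t
   get_max_buffer_size(bool has_maintenance4, uint64_t vk13_max_buffer_size)
   {
      return has_maintenance4 ? vk13_max_buffer_size
                              : 1ull << 30; /* placeholder fallback */
   }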
In vn_buffer_reqs_cache_init():

  72     assert(dev->physical_device->queue_family_count);
  74     dev->buffer_reqs_cache.max_buffer_size =
  75        vn_buffer_get_max_buffer_size(dev->physical_device);
  76     dev->buffer_reqs_cache.queue_family_count =
  77        dev->physical_device->queue_family_count;
  79     simple_mtx_init(&dev->buffer_reqs_cache.mutex, mtx_plain);
  80     util_sparse_array_init(&dev->buffer_reqs_cache.entries,
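
The init fragment shows the shape of the cache setup: snapshot the immutable device limits, then set up the mutex and entry storage. A toy stand-in, with a pthread mutex and a fixed array replacing Mesa's simple_mtx and util_sparse_array:

   #include <pthread.h>
   #include <stdint.h>
   #include <string.h>

   struct reqs_cache_entry {
      /* cached VkMemoryRequirements2 etc. would live here */
      int valid;
   };

   struct reqs_cache {
      uint64_t max_buffer_size;
      uint32_t queue_family_count;
      pthread_mutex_t mutex;
      struct reqs_cache_entry entries[1024]; /* toy storage, not sparse */
   };

   /* Simplified init: copy the immutable device limits first, then set up
    * the synchronization and storage. */
   static void
   reqs_cache_init(struct reqs_cache *cache,
                   uint64_t max_buffer_size,
                   uint32_t queue_family_count)
   {
      cache->max_buffer_size = max_buffer_size;
      cache->queue_family_count = queue_family_count;
      pthread_mutex_init(&cache->mutex, NULL);
      memset(cache->entries, 0, sizeof(cache->entries));
   }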
In vn_buffer_reqs_cache_debug_dump():

  85  vn_buffer_reqs_cache_debug_dump(struct vn_buffer_reqs_cache *cache)
  87     vn_log(NULL, "dumping buffer cache statistics");
  88     vn_log(NULL, " cache hit: %d", cache->debug.cache_hit_count);
  89     vn_log(NULL, " cache miss: %d", cache->debug.cache_miss_count);
  90     vn_log(NULL, " cache skip: %d", cache->debug.cache_skip_count);
In vn_buffer_reqs_cache_fini():

  96     util_sparse_array_finish(&dev->buffer_reqs_cache.entries);
  97     simple_mtx_destroy(&dev->buffer_reqs_cache.mutex);
  99     if (VN_DEBUG(CACHE))
 100        vn_buffer_reqs_cache_debug_dump(&dev->buffer_reqs_cache);
In vn_buffer_get_ahb_memory_type_bits():

 106     struct vn_buffer_reqs_cache *cache = &dev->buffer_reqs_cache;
 107     if (unlikely(!cache->ahb_mem_type_bits_valid)) {
 108        simple_mtx_lock(&cache->mutex);
 109        if (!cache->ahb_mem_type_bits_valid) {
 110           cache->ahb_mem_type_bits =
 112           cache->ahb_mem_type_bits_valid = true;
 114        simple_mtx_unlock(&cache->mutex);
 117     return cache->ahb_mem_type_bits;
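
This is the classic double-checked locking idiom: an unlocked fast-path test, then a re-check under the mutex so only one thread performs the query. A self-contained sketch (the query function is a placeholder; C11 atomics make the publish ordering explicit, which the original leaves to its own primitives):

   #include <pthread.h>
   #include <stdatomic.h>
   #include <stdbool.h>
   #include <stdint.h>

   static pthread_mutex_t bits_mutex = PTHREAD_MUTEX_INITIALIZER;
   static atomic_bool bits_valid = false;
   static uint32_t ahb_bits;

   /* Placeholder for the device query behind the elided line above. */
   static uint32_t query_ahb_mem_type_bits(void) { return 0x7; }

   /* Double-checked lazy init: the common path reads the flag without
    * taking the lock; losers of the race re-check under the mutex and
    * find the value already published. */
   static uint32_t
   get_ahb_mem_type_bits(void)
   {
      if (!atomic_load_explicit(&bits_valid, memory_order_acquire)) {
         pthread_mutex_lock(&bits_mutex);
         if (!atomic_load_explicit(&bits_valid, memory_order_relaxed)) {
            ahb_bits = query_ahb_mem_type_bits();
            atomic_store_explicit(&bits_valid, true, memory_order_release);
         }
         pthread_mutex_unlock(&bits_mutex);
      }
      return ahb_bits;
   }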
In vn_buffer_get_aligned_memory_requirement_size():

 129     return align64(size, req->alignment);
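
align64() is assumed to be the usual power-of-two round-up. A sketch with a worked example in the comment:

   #include <assert.h>
   #include <stdint.h>

   /* Assumed behavior of align64(): round size up to the next multiple of
    * a power-of-two alignment.  Example: align_u64(13, 8) == 16. */
   static uint64_t
   align_u64(uint64_t size, uint64_t alignment)
   {
      assert(alignment != 0 && (alignment & (alignment - 1)) == 0);
      return (size + alignment - 1) & ~(alignment - 1);
   }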
In vn_buffer_get_cached_memory_requirements():

 134                                          struct vn_buffer_reqs_cache *cache,
 148     const uint64_t idx = vn_buffer_get_cache_index(create_info, cache);
 151        util_sparse_array_get(&cache->entries, idx);
 153     if (entry->valid) {
 154        *out = entry->requirements;
 156        out->memory.memoryRequirements.size =
 158           create_info->size, &out->memory.memoryRequirements);
 160        p_atomic_inc(&cache->debug.cache_hit_count);
 162        p_atomic_inc(&cache->debug.cache_miss_count);
 168     p_atomic_inc(&cache->debug.cache_skip_count);
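
The lookup maps the key to a slot, and a valid slot is a hit whose cached size is recomputed for the request's own size, so one entry serves every buffer size sharing the same flags/usage. A simplified sketch (plain stand-in types; the toy modular indexing ignores collisions, whereas the real code indexes a sparse array directly by the 64-bit key):

   #include <stdatomic.h>
   #include <stdint.h>

   struct cached_reqs {
      uint64_t size;      /* stand-in for memoryRequirements.size */
      uint64_t alignment; /* stand-in for memoryRequirements.alignment */
   };

   struct cache_entry {
      struct cached_reqs reqs;
      atomic_bool valid;
   };

   struct cache_stats {
      atomic_uint hit, miss, skip;
   };

   /* Hypothetical lookup: key 0 means "not cacheable" (skip).  On a hit
    * the cached requirements are copied out and the size is recomputed
    * from the request's own size. */
   static struct cache_entry *
   cache_lookup(struct cache_entry entries[1024], uint64_t key,
                uint64_t create_size, struct cached_reqs *out,
                struct cache_stats *stats)
   {
      out->size = 0; /* size == 0 tells the caller "no cached data" */

      if (!key) {
         atomic_fetch_add(&stats->skip, 1);
         return NULL;
      }

      struct cache_entry *entry = &entries[key % 1024]; /* toy indexing */
      if (atomic_load(&entry->valid)) {
         *out = entry->reqs;
         out->size = (create_size + out->alignment - 1) & ~(out->alignment - 1);
         atomic_fetch_add(&stats->hit, 1);
      } else {
         atomic_fetch_add(&stats->miss, 1);
      }
      return entry;
   }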
In vn_buffer_reqs_cache_entry_init():

 174  vn_buffer_reqs_cache_entry_init(struct vn_buffer_reqs_cache *cache,
 178     simple_mtx_lock(&cache->mutex);
 183     if (entry->valid)
 186     entry->requirements.memory = *req;
 189        vk_find_struct_const(req->pNext, MEMORY_DEDICATED_REQUIREMENTS);
 191        entry->requirements.dedicated = *dedicated_req;
 193     entry->valid = true;
 196     simple_mtx_unlock(&cache->mutex);
 199     req->memoryRequirements.size =
 201        req->memoryRequirements.size,
 202        &entry->requirements.memory.memoryRequirements);
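
The store path populates the slot once under the mutex; if a concurrent thread won the race, this update is dropped and the winner's data stands. A sketch using Vulkan types directly (the function name is hypothetical; the sketch also nulls the copied pNext so the entry never keeps a pointer into the caller's chain):

   #include <pthread.h>
   #include <stdatomic.h>
   #include <vulkan/vulkan.h>

   struct entry2 {
      VkMemoryRequirements2 memory;
      VkMemoryDedicatedRequirements dedicated;
      atomic_bool valid;
   };

   /* Populate the slot once under the mutex; keep the winner's data. */
   static void
   cache_entry_init(pthread_mutex_t *mutex, struct entry2 *entry,
                    const VkMemoryRequirements2 *req)
   {
      pthread_mutex_lock(mutex);

      if (!atomic_load(&entry->valid)) {
         entry->memory = *req;
         entry->memory.pNext = NULL; /* drop the caller's chain pointer */

         /* open-coded equivalent of vk_find_struct_const(req->pNext, ...) */
         for (const VkBaseInStructure *s = req->pNext; s; s = s->pNext) {
            if (s->sType == VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS) {
               entry->dedicated = *(const VkMemoryDedicatedRequirements *)s;
               break;
            }
         }

         atomic_store(&entry->valid, true);
      }

      pthread_mutex_unlock(mutex);
   }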
In vn_copy_cached_memory_requirements():

 217        switch (u.pnext->sType) {
 219           u.two->memoryRequirements = cached->memory.memoryRequirements;
 222           u.dedicated->prefersDedicatedAllocation =
 223              cached->dedicated.prefersDedicatedAllocation;
 224           u.dedicated->requiresDedicatedAllocation =
 225              cached->dedicated.requiresDedicatedAllocation;
 230        u.pnext = u.pnext->pNext;
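
Copy-out walks the caller's output pNext chain, not the cached one, filling each recognized structure from the cached values; the union gives typed views of the current chain node. A self-contained version of that pattern:

   #include <vulkan/vulkan.h>

   /* Walk the caller-provided output chain and fill each structure we
    * recognize from the cached values. */
   static void
   copy_cached_reqs(const VkMemoryRequirements2 *cached_mem,
                    const VkMemoryDedicatedRequirements *cached_ded,
                    VkMemoryRequirements2 *out)
   {
      union {
         VkBaseOutStructure *pnext;
         VkMemoryRequirements2 *two;
         VkMemoryDedicatedRequirements *dedicated;
      } u = { .two = out };

      while (u.pnext) {
         switch (u.pnext->sType) {
         case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2:
            u.two->memoryRequirements = cached_mem->memoryRequirements;
            break;
         case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
            u.dedicated->prefersDedicatedAllocation =
               cached_ded->prefersDedicatedAllocation;
            u.dedicated->requiresDedicatedAllocation =
               cached_ded->requiresDedicatedAllocation;
            break;
         default:
            break; /* leave unknown structs untouched */
         }
         u.pnext = u.pnext->pNext;
      }
   }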
In vn_buffer_init():

 241     struct vn_buffer_reqs_cache *cache = &dev->buffer_reqs_cache;
 244     /* If cacheable and the mem requirements are already cached, make an async call */
 246     vn_buffer_get_cached_memory_requirements(cache, create_info,
 247                                              &buf->requirements);
 249     /* Check size instead of entry->valid to stay lock-free */
 250     if (buf->requirements.memory.memoryRequirements.size) {
 251        vn_async_vkCreateBuffer(dev->primary_ring, dev_handle, create_info,
 256     /* On a cache miss, or if not cacheable, make a synchronous call */
 257     result = vn_call_vkCreateBuffer(dev->primary_ring, dev_handle, create_info,
 262     buf->requirements.memory.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
 263     buf->requirements.memory.pNext = &buf->requirements.dedicated;
 264     buf->requirements.dedicated.sType =
 266     buf->requirements.dedicated.pNext = NULL;
 269        dev->primary_ring, dev_handle,
 274        &buf->requirements.memory);
 278     vn_buffer_reqs_cache_entry_init(cache, entry,
 279                                     &buf->requirements.memory);
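
The create path splits on the cache result: a hit means the requirements are already known, so vkCreateBuffer needs no reply and can be queued asynchronously; a miss forces a synchronous round trip whose reply then seeds the cache. A skeletal sketch with stubbed transport (all names are hypothetical; the original tests size != 0 as the lock-free hit check):

   #include <stdbool.h>
   #include <stdint.h>

   struct reqs { uint64_t size; };

   /* Hypothetical transport stubs; the real driver queues commands on a
    * ring. */
   static void send_create_async(void) {}
   static int  send_create_sync(struct reqs *out) { out->size = 4096; return 0; }
   static bool try_cache(struct reqs *out) { out->size = 0; return false; }
   static void store_cache(const struct reqs *r) { (void)r; }

   /* Hit: issue the create asynchronously, no reply needed.  Miss: block
    * for the reply, then seed the cache for the next identical create. */
   static int
   create_buffer(struct reqs *reqs)
   {
      if (try_cache(reqs)) {
         send_create_async();
         return 0;
      }
      int ret = send_create_sync(reqs);
      if (ret == 0)
         store_cache(reqs);
      return ret;
   }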
In vn_buffer_create():

 299     vn_object_base_init(&buf->base, VK_OBJECT_TYPE_BUFFER, &dev->base);
 303        vn_object_base_fini(&buf->base);
In vn_buffer_fix_create_info():

 325     local_info->create = *create_info;
 326     VkBaseOutStructure *cur = (void *)&local_info->create;
 328     vk_foreach_struct_const(src, create_info->pNext) {
 330        switch (src->sType) {
 332           memcpy(&local_info->external, src, sizeof(local_info->external));
 333           local_info->external.handleTypes = renderer_handle_type;
 334           next = &local_info->external;
 337           memcpy(&local_info->capture, src, sizeof(local_info->capture));
 338           next = &local_info->capture;
 345        cur->pNext = next;
 350     cur->pNext = NULL;
 352     return &local_info->create;
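
The fixup deep-copies the create info into caller-provided storage, rewrites the external-memory handle types to the renderer-side type, and re-links a chain containing only recognized structs. A sketch handling just the external-memory struct (the original also carries the opaque-capture-address struct, omitted here):

   #include <string.h>
   #include <vulkan/vulkan.h>

   struct local_create_info {
      VkBufferCreateInfo create;
      VkExternalMemoryBufferCreateInfo external;
   };

   /* Copy recognized structs into local storage, override handleTypes
    * with the renderer-side type, drop everything else, and re-link. */
   static const VkBufferCreateInfo *
   fix_create_info(const VkBufferCreateInfo *info,
                   VkExternalMemoryHandleTypeFlagBits renderer_handle_type,
                   struct local_create_info *local)
   {
      local->create = *info;
      VkBaseOutStructure *cur = (VkBaseOutStructure *)&local->create;

      for (const VkBaseInStructure *src =
              (const VkBaseInStructure *)info->pNext;
           src; src = src->pNext) {
         if (src->sType ==
             VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO) {
            memcpy(&local->external, src, sizeof(local->external));
            local->external.handleTypes = renderer_handle_type;
            cur->pNext = (VkBaseOutStructure *)&local->external;
            cur = cur->pNext;
         }
         /* unknown sTypes are dropped from the copied chain */
      }
      cur->pNext = NULL;
      return &local->create;
   }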
In vn_CreateBuffer():

 363        pAllocator ? pAllocator : &dev->base.base.alloc;
 365        dev->physical_device->external_memory.renderer_handle_type;
 369        vk_find_struct_const(pCreateInfo->pNext,
 371     if (external_info && external_info->handleTypes &&
 372         external_info->handleTypes != renderer_handle_type) {
 380        return vn_error(dev->instance, result);
 383        external_info->handleTypes ==
 389        buf->requirements.memory.memoryRequirements.memoryTypeBits &=
 392        assert(buf->requirements.memory.memoryRequirements.memoryTypeBits);
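
For AHardwareBuffer-exported buffers, the reported memoryTypeBits are masked down to the types importable from AHBs; the assert guarantees the mask never empties the set. A one-function sketch:

   #include <assert.h>
   #include <stdint.h>

   /* Restrict a buffer's reported memoryTypeBits to the types that can be
    * imported from an AHardwareBuffer, mirroring the assert above. */
   static void
   mask_ahb_memory_types(uint32_t *memory_type_bits, uint32_t ahb_type_bits)
   {
      *memory_type_bits &= ahb_type_bits;
      assert(*memory_type_bits); /* at least one type must survive */
   }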
In vn_DestroyBuffer():

 408        pAllocator ? pAllocator : &dev->base.base.alloc;
 413     vn_async_vkDestroyBuffer(dev->primary_ring, device, buffer, NULL);
 415     vn_object_base_fini(&buf->base);
In vn_GetBufferDeviceAddress():

 425     return vn_call_vkGetBufferDeviceAddress(dev->primary_ring, device, pInfo);
In vn_GetBufferOpaqueCaptureAddress():

 434     return vn_call_vkGetBufferOpaqueCaptureAddress(dev->primary_ring, device,
In vn_GetBufferMemoryRequirements2():

 443     const struct vn_buffer *buf = vn_buffer_from_handle(pInfo->buffer);
 445     vn_copy_cached_memory_requirements(&buf->requirements,
In vn_BindBufferMemory2():

 455     const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
 461        vn_device_memory_from_handle(info->memory);
 462     if (!mem->base_memory)
 470        return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
 475        local_infos[i].memory = vn_device_memory_to_handle(mem->base_memory);
 476        local_infos[i].memoryOffset += mem->base_offset;
 481     vn_async_vkBindBufferMemory2(dev->primary_ring, device, bindInfoCount,
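
When a memory object is suballocated from a base allocation, the bind infos are cloned and redirected so the host sees the base handle with the suballocation offset folded in. A sketch under that assumption (resolve_memory() is a hypothetical handle-to-object lookup; the caller owns and frees the returned array):

   #include <stdint.h>
   #include <stdlib.h>
   #include <string.h>
   #include <vulkan/vulkan.h>

   /* Toy memory object that may be suballocated from a base allocation. */
   struct memory_obj {
      VkDeviceMemory base_memory; /* VK_NULL_HANDLE if not suballocated */
      VkDeviceSize base_offset;
   };

   /* Hypothetical handle-to-object lookup. */
   extern struct memory_obj *resolve_memory(VkDeviceMemory handle);

   /* Clone the bind infos and redirect suballocated memory to its base
    * allocation; NULL maps to VK_ERROR_OUT_OF_HOST_MEMORY. */
   static VkBindBufferMemoryInfo *
   fix_bind_infos(uint32_t count, const VkBindBufferMemoryInfo *infos)
   {
      VkBindBufferMemoryInfo *local = malloc(sizeof(*local) * count);
      if (!local)
         return NULL;

      memcpy(local, infos, sizeof(*local) * count);
      for (uint32_t i = 0; i < count; i++) {
         const struct memory_obj *mem = resolve_memory(local[i].memory);
         if (mem->base_memory != VK_NULL_HANDLE) {
            local[i].memory = mem->base_memory;
            local[i].memoryOffset += mem->base_offset;
         }
      }
      return local;
   }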
In vn_CreateBufferView():

 499        pAllocator ? pAllocator : &dev->base.base.alloc;
 505        return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
 507     vn_object_base_init(&view->base, VK_OBJECT_TYPE_BUFFER_VIEW, &dev->base);
 510     vn_async_vkCreateBufferView(dev->primary_ring, device, pCreateInfo, NULL,
In vn_DestroyBufferView():

 526        pAllocator ? pAllocator : &dev->base.base.alloc;
 531     vn_async_vkDestroyBufferView(dev->primary_ring, device, bufferView, NULL);
 533     vn_object_base_fini(&view->base);
In vn_GetDeviceBufferMemoryRequirements():

 544     struct vn_buffer_reqs_cache *cache = &dev->buffer_reqs_cache;
 547     /* If cacheable and the mem requirements are already cached, skip the host call */
 549     vn_buffer_get_cached_memory_requirements(cache, pInfo->pCreateInfo,
 552     /* Check size instead of entry->valid to stay lock-free */
 558     /* Make the host call if not found in the cache or not cacheable */
 559     vn_call_vkGetDeviceBufferMemoryRequirements(dev->primary_ring, device,
 564     vn_buffer_reqs_cache_entry_init(cache, entry, pMemoryRequirements);
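
The entry point mirrors vn_buffer_init(): try the cache, and only on a miss make the host call and seed the entry handed back by the lookup. A glue-level sketch (all helpers are hypothetical externs standing in for the driver's internals):

   #include <vulkan/vulkan.h>

   struct cache_slot; /* opaque cache slot */

   extern struct cache_slot *
   try_cached_reqs(const VkBufferCreateInfo *info, VkMemoryRequirements2 *out);
   extern void host_get_reqs(const VkDeviceBufferMemoryRequirements *info,
                             VkMemoryRequirements2 *out);
   extern void seed_cache(struct cache_slot *slot,
                          const VkMemoryRequirements2 *req);

   /* Only a miss pays for the host round trip; testing size != 0 keeps
    * the hit path lock-free, matching the comment above. */
   static void
   get_device_buffer_memory_requirements(
      const VkDeviceBufferMemoryRequirements *info,
      VkMemoryRequirements2 *reqs)
   {
      VkMemoryRequirements2 cached = {
         .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
      };

      struct cache_slot *slot = try_cached_reqs(info->pCreateInfo, &cached);
      if (cached.memoryRequirements.size) {
         /* the real code also fills dedicated reqs via the out pNext chain */
         reqs->memoryRequirements = cached.memoryRequirements;
         return;
      }

      host_get_reqs(info, reqs);
      if (slot)
         seed_cache(slot, reqs);
   }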