Lines Matching full:cache
46 radv_pipeline_cache_lock(struct radv_pipeline_cache *cache) in radv_pipeline_cache_lock() argument
48 if (cache->flags & VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT) in radv_pipeline_cache_lock()
51 mtx_lock(&cache->mutex); in radv_pipeline_cache_lock()
55 radv_pipeline_cache_unlock(struct radv_pipeline_cache *cache) in radv_pipeline_cache_unlock() argument
57 if (cache->flags & VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT) in radv_pipeline_cache_unlock()
60 mtx_unlock(&cache->mutex); in radv_pipeline_cache_unlock()
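The lock/unlock helpers above only show the flag test and the mtx_lock/mtx_unlock calls; the pattern is the usual one for VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT: skip the internal mutex entirely when the application promises to synchronize the cache itself. A minimal standalone sketch of that pattern (the toy_ names and the flag constant are stand-ins, not the real radv types):

#include <threads.h>   /* C11 mutexes, the same mtx_* API family used above */
#include <stdint.h>

#define TOY_EXTERNALLY_SYNCHRONIZED_BIT 0x1   /* stand-in for the VK_EXT flag bit */

struct toy_locked {
   uint32_t flags;
   mtx_t mutex;
};

static void toy_lock(struct toy_locked *c)
{
   /* If the app promised external synchronization at creation time, taking
    * the mutex is pure overhead, so it is skipped. */
   if (c->flags & TOY_EXTERNALLY_SYNCHRONIZED_BIT)
      return;
   mtx_lock(&c->mutex);
}

static void toy_unlock(struct toy_locked *c)
{
   if (c->flags & TOY_EXTERNALLY_SYNCHRONIZED_BIT)
      return;
   mtx_unlock(&c->mutex);
}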
64 radv_pipeline_cache_init(struct radv_pipeline_cache *cache, struct radv_device *device) in radv_pipeline_cache_init() argument
66 vk_object_base_init(&device->vk, &cache->base, VK_OBJECT_TYPE_PIPELINE_CACHE); in radv_pipeline_cache_init()
68 cache->device = device; in radv_pipeline_cache_init()
69 mtx_init(&cache->mutex, mtx_plain); in radv_pipeline_cache_init()
70 cache->flags = 0; in radv_pipeline_cache_init()
72 cache->modified = false; in radv_pipeline_cache_init()
73 cache->kernel_count = 0; in radv_pipeline_cache_init()
74 cache->total_size = 0; in radv_pipeline_cache_init()
75 cache->table_size = 1024; in radv_pipeline_cache_init()
76 const size_t byte_size = cache->table_size * sizeof(cache->hash_table[0]); in radv_pipeline_cache_init()
77 cache->hash_table = malloc(byte_size); in radv_pipeline_cache_init()
80 * cache. Disable caching when we want to keep shader debug info, since in radv_pipeline_cache_init()
82 if (cache->hash_table == NULL || (device->instance->debug_flags & RADV_DEBUG_NO_CACHE)) in radv_pipeline_cache_init()
83 cache->table_size = 0; in radv_pipeline_cache_init()
85 memset(cache->hash_table, 0, byte_size); in radv_pipeline_cache_init()
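The init lines capture the sizing policy: the hash table starts at 1024 slots, and if malloc fails or RADV_DEBUG_NO_CACHE is set, the cache degrades to a zero-sized table instead of reporting an error. A hedged standalone sketch of that degrade-to-empty behaviour, using toy_ stand-in types rather than the real radv_pipeline_cache (the later sketches below build on these same toy types):

#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <stdbool.h>

struct toy_entry;   /* defined with the lookup sketch further down */

struct toy_cache {
   uint32_t table_size;              /* always a power of two, or 0 when caching is off */
   uint32_t kernel_count;            /* number of live entries */
   size_t total_size;                /* sum of entry sizes, for the data-export path */
   bool modified;
   struct toy_entry **hash_table;
};

static void toy_cache_init(struct toy_cache *cache, bool caching_disabled)
{
   cache->modified = false;
   cache->kernel_count = 0;
   cache->total_size = 0;
   cache->table_size = 1024;

   const size_t byte_size = cache->table_size * sizeof(cache->hash_table[0]);
   cache->hash_table = malloc(byte_size);

   /* Allocation failure is deliberately non-fatal: a zero-sized table means
    * every lookup misses and every insert is dropped. The same degraded mode
    * is used when caching is disabled, e.g. to keep shader debug info, which
    * cached entries would not carry. */
   if (cache->hash_table == NULL || caching_disabled)
      cache->table_size = 0;
   else
      memset(cache->hash_table, 0, byte_size);
}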
89 radv_pipeline_cache_finish(struct radv_pipeline_cache *cache) in radv_pipeline_cache_finish() argument
91 for (unsigned i = 0; i < cache->table_size; ++i) in radv_pipeline_cache_finish()
92 if (cache->hash_table[i]) { in radv_pipeline_cache_finish()
94 if (cache->hash_table[i]->variants[j]) in radv_pipeline_cache_finish()
95 radv_shader_variant_destroy(cache->device, cache->hash_table[i]->variants[j]); in radv_pipeline_cache_finish()
97 vk_free(&cache->alloc, cache->hash_table[i]); in radv_pipeline_cache_finish()
99 mtx_destroy(&cache->mutex); in radv_pipeline_cache_finish()
100 free(cache->hash_table); in radv_pipeline_cache_finish()
102 vk_object_base_finish(&cache->base); in radv_pipeline_cache_finish()
182 radv_pipeline_cache_search_unlocked(struct radv_pipeline_cache *cache, const unsigned char *sha1) in radv_pipeline_cache_search_unlocked() argument
184 const uint32_t mask = cache->table_size - 1; in radv_pipeline_cache_search_unlocked()
187 if (cache->table_size == 0) in radv_pipeline_cache_search_unlocked()
190 for (uint32_t i = 0; i < cache->table_size; i++) { in radv_pipeline_cache_search_unlocked()
192 struct cache_entry *entry = cache->hash_table[index]; in radv_pipeline_cache_search_unlocked()
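The search lines show a masked linear probe over the power-of-two table. How the start slot is derived from the SHA-1 and how keys are compared is not in the matched lines, so the sketch below assumes the first SHA-1 dword seeds the probe and that the first empty slot ends it; radv_pipeline_cache_search (the next group of lines) is just this lookup wrapped in the lock helpers. Continuing the toy types from the init sketch:

struct toy_entry {
   unsigned char sha1[20];   /* key: hash of the shaders plus compile state */
   uint32_t code_size;       /* stand-in payload, not the real cache_entry layout */
   char code[];
};

static struct toy_entry *
toy_cache_search(struct toy_cache *cache, const unsigned char *sha1)
{
   if (cache->table_size == 0)
      return NULL;            /* disabled or degraded cache: everything misses */

   const uint32_t mask = cache->table_size - 1;
   uint32_t start;
   memcpy(&start, sha1, sizeof(start));   /* assumption: probe seeded by the first SHA-1 dword */

   for (uint32_t i = 0; i < cache->table_size; i++) {
      const uint32_t index = (start + i) & mask;
      struct toy_entry *entry = cache->hash_table[index];

      if (!entry)
         return NULL;         /* first empty slot ends the probe chain */
      if (memcmp(entry->sha1, sha1, sizeof(entry->sha1)) == 0)
         return entry;
   }
   return NULL;               /* unreachable while the table is kept under half full */
}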
206 radv_pipeline_cache_search(struct radv_pipeline_cache *cache, const unsigned char *sha1) in radv_pipeline_cache_search() argument
210 radv_pipeline_cache_lock(cache); in radv_pipeline_cache_search()
212 entry = radv_pipeline_cache_search_unlocked(cache, sha1); in radv_pipeline_cache_search()
214 radv_pipeline_cache_unlock(cache); in radv_pipeline_cache_search()
220 radv_pipeline_cache_set_entry(struct radv_pipeline_cache *cache, struct cache_entry *entry) in radv_pipeline_cache_set_entry() argument
222 const uint32_t mask = cache->table_size - 1; in radv_pipeline_cache_set_entry()
226 assert(cache->kernel_count < cache->table_size / 2); in radv_pipeline_cache_set_entry()
228 for (uint32_t i = 0; i < cache->table_size; i++) { in radv_pipeline_cache_set_entry()
230 if (!cache->hash_table[index]) { in radv_pipeline_cache_set_entry()
231 cache->hash_table[index] = entry; in radv_pipeline_cache_set_entry()
236 cache->total_size += entry_size(entry); in radv_pipeline_cache_set_entry()
237 cache->kernel_count++; in radv_pipeline_cache_set_entry()
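Insertion probes the same way and takes the first empty bucket, then accounts for the entry in total_size and kernel_count; the assert mirrors the matched line: callers keep the table under half full, so a free slot always exists. Sketch continuing the toy table (toy_entry_size() is a stand-in for the driver's entry_size() helper):

#include <assert.h>

static size_t toy_entry_size(const struct toy_entry *entry)
{
   /* Assumption: the real helper also counts the variable-sized code. */
   return sizeof(*entry) + entry->code_size;
}

static void toy_cache_set_entry(struct toy_cache *cache, struct toy_entry *entry)
{
   const uint32_t mask = cache->table_size - 1;
   uint32_t start;
   memcpy(&start, entry->sha1, sizeof(start));

   /* Callers guarantee room; see toy_cache_add_entry below. */
   assert(cache->kernel_count < cache->table_size / 2);

   for (uint32_t i = 0; i < cache->table_size; i++) {
      const uint32_t index = (start + i) & mask;
      if (!cache->hash_table[index]) {
         cache->hash_table[index] = entry;
         break;
      }
   }

   cache->total_size += toy_entry_size(entry);
   cache->kernel_count++;
}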
241 radv_pipeline_cache_grow(struct radv_pipeline_cache *cache) in radv_pipeline_cache_grow() argument
243 const uint32_t table_size = cache->table_size * 2; in radv_pipeline_cache_grow()
244 const uint32_t old_table_size = cache->table_size; in radv_pipeline_cache_grow()
245 const size_t byte_size = table_size * sizeof(cache->hash_table[0]); in radv_pipeline_cache_grow()
247 struct cache_entry **old_table = cache->hash_table; in radv_pipeline_cache_grow()
251 return vk_error(cache, VK_ERROR_OUT_OF_HOST_MEMORY); in radv_pipeline_cache_grow()
253 cache->hash_table = table; in radv_pipeline_cache_grow()
254 cache->table_size = table_size; in radv_pipeline_cache_grow()
255 cache->kernel_count = 0; in radv_pipeline_cache_grow()
256 cache->total_size = 0; in radv_pipeline_cache_grow()
258 memset(cache->hash_table, 0, byte_size); in radv_pipeline_cache_grow()
264 radv_pipeline_cache_set_entry(cache, entry); in radv_pipeline_cache_grow()
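Growing doubles the table, resets the counters, and re-inserts every old entry so it lands at its new masked position; kernel_count and total_size are zeroed first precisely because set_entry re-accumulates them. Sketch over the same toy table (the driver returns VK_ERROR_OUT_OF_HOST_MEMORY where the toy returns -1):

static int toy_cache_grow(struct toy_cache *cache)
{
   const uint32_t old_table_size = cache->table_size;
   const uint32_t table_size = old_table_size * 2;
   const size_t byte_size = table_size * sizeof(cache->hash_table[0]);
   struct toy_entry **old_table = cache->hash_table;

   struct toy_entry **table = malloc(byte_size);
   if (table == NULL)
      return -1;

   cache->hash_table = table;
   cache->table_size = table_size;
   cache->kernel_count = 0;
   cache->total_size = 0;

   memset(cache->hash_table, 0, byte_size);
   for (uint32_t i = 0; i < old_table_size; i++) {
      if (old_table[i])
         toy_cache_set_entry(cache, old_table[i]);   /* rehash into the larger table */
   }

   free(old_table);
   return 0;
}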
273 radv_pipeline_cache_add_entry(struct radv_pipeline_cache *cache, struct cache_entry *entry) in radv_pipeline_cache_add_entry() argument
275 if (cache->kernel_count == cache->table_size / 2) in radv_pipeline_cache_add_entry()
276 radv_pipeline_cache_grow(cache); in radv_pipeline_cache_add_entry()
281 if (cache->kernel_count < cache->table_size / 2) in radv_pipeline_cache_add_entry()
282 radv_pipeline_cache_set_entry(cache, entry); in radv_pipeline_cache_add_entry()
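add_entry encodes the load-factor policy visible in the matched lines: grow when the table reaches half full, and if growing failed (or the cache is disabled), drop the entry silently rather than fail. Toy version:

static void toy_cache_add_entry(struct toy_cache *cache, struct toy_entry *entry)
{
   if (cache->kernel_count == cache->table_size / 2)
      toy_cache_grow(cache);

   /* A failed grow is not fatal; only insert if there is still room. */
   if (cache->kernel_count < cache->table_size / 2)
      toy_cache_set_entry(cache, entry);
}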
296 struct radv_device *device, struct radv_pipeline_cache *cache, const unsigned char *sha1, in radv_create_shader_variants_from_pipeline_cache() argument
302 if (!cache) { in radv_create_shader_variants_from_pipeline_cache()
303 cache = device->mem_cache; in radv_create_shader_variants_from_pipeline_cache()
307 radv_pipeline_cache_lock(cache); in radv_create_shader_variants_from_pipeline_cache()
309 entry = radv_pipeline_cache_search_unlocked(cache, sha1); in radv_create_shader_variants_from_pipeline_cache()
314 /* Don't cache when we want debug info, since this isn't in radv_create_shader_variants_from_pipeline_cache()
315 * present in the cache. in radv_create_shader_variants_from_pipeline_cache()
318 radv_pipeline_cache_unlock(cache); in radv_create_shader_variants_from_pipeline_cache()
328 radv_pipeline_cache_unlock(cache); in radv_create_shader_variants_from_pipeline_cache()
333 vk_alloc(&cache->alloc, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_CACHE); in radv_create_shader_variants_from_pipeline_cache()
336 radv_pipeline_cache_unlock(cache); in radv_create_shader_variants_from_pipeline_cache()
345 cache != device->mem_cache) in radv_create_shader_variants_from_pipeline_cache()
346 radv_pipeline_cache_add_entry(cache, new_entry); in radv_create_shader_variants_from_pipeline_cache()
374 if (device->instance->debug_flags & RADV_DEBUG_NO_MEMORY_CACHE && cache == device->mem_cache) in radv_create_shader_variants_from_pipeline_cache()
375 vk_free(&cache->alloc, entry); in radv_create_shader_variants_from_pipeline_cache()
382 radv_pipeline_cache_unlock(cache); in radv_create_shader_variants_from_pipeline_cache()
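The matched lines in this function show the shape of the lookup path: fall back to device->mem_cache when no cache was passed, search the in-memory table under the lock, bail out early when shader debug info is wanted (cached entries do not carry it), and on a miss consult a secondary store, copying a hit into cache-owned memory (the vk_alloc at line 333) and adding it to any cache other than device->mem_cache. The secondary (on-disk) lookup itself is not in the listing, so the sketch below models it as a caller-supplied callback returning a malloc'd serialized toy entry; locking is omitted:

typedef void *(*toy_secondary_lookup)(const unsigned char sha1[20], size_t *size);

static struct toy_entry *
toy_cache_get(struct toy_cache *cache, const unsigned char sha1[20],
              toy_secondary_lookup lookup_secondary)
{
   struct toy_entry *entry = toy_cache_search(cache, sha1);
   if (entry)
      return entry;                        /* in-memory hit */

   size_t size = 0;
   void *blob = lookup_secondary ? lookup_secondary(sha1, &size) : NULL;
   if (!blob)
      return NULL;                         /* full miss: the caller has to compile */

   entry = malloc(size);                   /* copy into memory the cache owns */
   if (entry)
      memcpy(entry, blob, size);
   free(blob);
   if (!entry)
      return NULL;

   /* Promote the secondary hit so the next lookup is served from memory.
    * (The toy does not track whether the entry actually landed in the table;
    * the driver handles that ownership itself.) */
   toy_cache_add_entry(cache, entry);
   return entry;
}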
387 radv_pipeline_cache_insert_shaders(struct radv_device *device, struct radv_pipeline_cache *cache, in radv_pipeline_cache_insert_shaders() argument
393 if (!cache) in radv_pipeline_cache_insert_shaders()
394 cache = device->mem_cache; in radv_pipeline_cache_insert_shaders()
396 radv_pipeline_cache_lock(cache); in radv_pipeline_cache_insert_shaders()
397 struct cache_entry *entry = radv_pipeline_cache_search_unlocked(cache, sha1); in radv_pipeline_cache_insert_shaders()
401 radv_shader_variant_destroy(cache->device, variants[i]); in radv_pipeline_cache_insert_shaders()
409 radv_pipeline_cache_unlock(cache); in radv_pipeline_cache_insert_shaders()
413 /* Don't cache when we want debug info, since this isn't in radv_pipeline_cache_insert_shaders()
414 * present in the cache. in radv_pipeline_cache_insert_shaders()
417 radv_pipeline_cache_unlock(cache); in radv_pipeline_cache_insert_shaders()
428 entry = vk_alloc(&cache->alloc, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_CACHE); in radv_pipeline_cache_insert_shaders()
430 radv_pipeline_cache_unlock(cache); in radv_pipeline_cache_insert_shaders()
460 /* Always add cache items to disk. This will allow collection of in radv_pipeline_cache_insert_shaders()
462 * implements its own pipeline cache. in radv_pipeline_cache_insert_shaders()
464 * Make sure to exclude meta shaders because they are stored in a different cache file. in radv_pipeline_cache_insert_shaders()
466 if (device->physical_device->disk_cache && cache != &device->meta_state.cache) { in radv_pipeline_cache_insert_shaders()
474 if (device->instance->debug_flags & RADV_DEBUG_NO_MEMORY_CACHE && cache == device->mem_cache) { in radv_pipeline_cache_insert_shaders()
475 vk_free2(&cache->alloc, NULL, entry); in radv_pipeline_cache_insert_shaders()
476 radv_pipeline_cache_unlock(cache); in radv_pipeline_cache_insert_shaders()
480 /* We delay setting the variant so we have reproducible disk cache in radv_pipeline_cache_insert_shaders()
491 radv_pipeline_cache_add_entry(cache, entry); in radv_pipeline_cache_insert_shaders()
493 cache->modified = true; in radv_pipeline_cache_insert_shaders()
494 radv_pipeline_cache_unlock(cache); in radv_pipeline_cache_insert_shaders()
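insert_shaders shows the write path: search first and, if an identical entry already exists, destroy the freshly compiled variants and reuse the cached ones; otherwise allocate a new entry, always hand it to the on-disk cache too (except meta shaders, which live in a separate cache file), honour RADV_DEBUG_NO_MEMORY_CACHE for the built-in mem_cache, and flag the cache as modified. A simplified toy of the dedupe-on-insert part (disk cache and shader-variant handling omitted):

static struct toy_entry *
toy_cache_insert(struct toy_cache *cache, const unsigned char sha1[20],
                 const void *code, uint32_t code_size)
{
   struct toy_entry *entry = toy_cache_search(cache, sha1);
   if (entry)
      return entry;                        /* already cached: caller discards its copy */

   entry = calloc(1, sizeof(*entry) + code_size);
   if (!entry)
      return NULL;                         /* allocation failure just means "not cached" */

   memcpy(entry->sha1, sha1, sizeof(entry->sha1));
   entry->code_size = code_size;
   memcpy(entry->code, code, code_size);

   toy_cache_add_entry(cache, entry);      /* may silently drop it if the table is disabled */
   cache->modified = true;                 /* record that the cache contents changed */
   return entry;
}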
499 radv_pipeline_cache_load(struct radv_pipeline_cache *cache, const void *data, size_t size) in radv_pipeline_cache_load() argument
501 struct radv_device *device = cache->device; in radv_pipeline_cache_load()
528 dest_entry = vk_alloc(&cache->alloc, size_of_entry, 8, VK_SYSTEM_ALLOCATION_SCOPE_CACHE); in radv_pipeline_cache_load()
533 radv_pipeline_cache_add_entry(cache, dest_entry); in radv_pipeline_cache_load()
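Loading (used for vkCreatePipelineCache initial data) copies each serialized entry into freshly allocated, cache-owned memory and adds it to the table. The blob layout below, toy entries packed back to back with no header, is an assumption for the sketch; the real data begins with the standard Vulkan pipeline-cache header, which the driver validates first:

static void toy_cache_load(struct toy_cache *cache, const void *data, size_t size)
{
   const char *p = data;
   const char *end = p + size;

   while (p + sizeof(struct toy_entry) <= end) {
      struct toy_entry header;
      memcpy(&header, p, sizeof(header));          /* avoid unaligned reads from the blob */
      const size_t size_of_entry = sizeof(header) + header.code_size;
      if ((size_t)(end - p) < size_of_entry)
         break;                                    /* truncated input: stop, don't trust it */

      struct toy_entry *dest_entry = malloc(size_of_entry);
      if (!dest_entry)
         break;                                    /* allocation failure: keep what we have */
      memcpy(dest_entry, p, size_of_entry);
      toy_cache_add_entry(cache, dest_entry);

      p += size_of_entry;
   }
}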
546 struct radv_pipeline_cache *cache; in radv_CreatePipelineCache() local
550 cache = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*cache), 8, in radv_CreatePipelineCache()
552 if (cache == NULL) in radv_CreatePipelineCache()
556 cache->alloc = *pAllocator; in radv_CreatePipelineCache()
558 cache->alloc = device->vk.alloc; in radv_CreatePipelineCache()
560 radv_pipeline_cache_init(cache, device); in radv_CreatePipelineCache()
561 cache->flags = pCreateInfo->flags; in radv_CreatePipelineCache()
564 radv_pipeline_cache_load(cache, pCreateInfo->pInitialData, pCreateInfo->initialDataSize); in radv_CreatePipelineCache()
567 *pPipelineCache = radv_pipeline_cache_to_handle(cache); in radv_CreatePipelineCache()
577 RADV_FROM_HANDLE(radv_pipeline_cache, cache, _cache); in radv_DestroyPipelineCache()
579 if (!cache) in radv_DestroyPipelineCache()
582 radv_pipeline_cache_finish(cache); in radv_DestroyPipelineCache()
583 vk_free2(&device->vk.alloc, pAllocator, cache); in radv_DestroyPipelineCache()
590 RADV_FROM_HANDLE(radv_pipeline_cache, cache, _cache); in radv_GetPipelineCacheData()
594 radv_pipeline_cache_lock(cache); in radv_GetPipelineCacheData()
596 const size_t size = sizeof(*header) + cache->total_size; in radv_GetPipelineCacheData()
598 radv_pipeline_cache_unlock(cache); in radv_GetPipelineCacheData()
603 radv_pipeline_cache_unlock(cache); in radv_GetPipelineCacheData()
617 for (uint32_t i = 0; i < cache->table_size; i++) { in radv_GetPipelineCacheData()
618 if (!cache->hash_table[i]) in radv_GetPipelineCacheData()
620 entry = cache->hash_table[i]; in radv_GetPipelineCacheData()
634 radv_pipeline_cache_unlock(cache); in radv_GetPipelineCacheData()
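vkGetPipelineCacheData follows the standard Vulkan two-call idiom, which is what the two early unlock-and-return lines are about: called with pData == NULL it only reports the required size (header plus total_size); otherwise it writes the header followed by every live entry in the table. A simplified toy version with a stand-in header instead of the real Vulkan pipeline-cache header, without the locking shown above, and returning -1 where the driver would return VK_INCOMPLETE:

struct toy_cache_header {
   uint32_t entry_count;
};

static int toy_cache_get_data(struct toy_cache *cache, size_t *data_size, void *data)
{
   const size_t size = sizeof(struct toy_cache_header) + cache->total_size;

   if (data == NULL) {
      *data_size = size;       /* first call: just report how much space is needed */
      return 0;
   }
   if (*data_size < size) {
      *data_size = 0;          /* toy simplification: require the full size up front */
      return -1;
   }

   char *p = data;
   struct toy_cache_header header = { .entry_count = cache->kernel_count };
   memcpy(p, &header, sizeof(header));
   p += sizeof(header);

   for (uint32_t i = 0; i < cache->table_size; i++) {
      struct toy_entry *entry = cache->hash_table[i];
      if (!entry)
         continue;
      const size_t entry_bytes = sizeof(*entry) + entry->code_size;
      memcpy(p, entry, entry_bytes);
      p += entry_bytes;
   }

   *data_size = size;
   return 0;
}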