Searched refs:slab_cache (Results 1 – 16 of 16) sorted by relevance
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/

i915_active.c
      27  struct kmem_cache *slab_cache;  member
     178  kmem_cache_free(global.slab_cache, it);  in __active_retire()
     307  prealloc = kmem_cache_alloc(global.slab_cache, GFP_KERNEL);  in active_instance()
     321  kmem_cache_free(global.slab_cache, prealloc);  in active_instance()
     797  kmem_cache_free(global.slab_cache, ref->cache);  in i915_active_fini()
     917  node = kmem_cache_alloc(global.slab_cache, GFP_KERNEL);  in i915_active_acquire_preallocate_barrier()
     965  kmem_cache_free(global.slab_cache, node);  in i915_active_acquire_preallocate_barrier()
    1188  kmem_cache_shrink(global.slab_cache);  in i915_global_active_shrink()
    1193  kmem_cache_destroy(global.slab_cache);  in i915_global_active_exit()
    1203  global.slab_cache = KMEM_CACHE(active_node, SLAB_HWCACHE_ALIGN);  in i915_global_active_init()
    [all …]
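The i915_active.c hits trace the full lifecycle of a driver-private slab cache: created once with KMEM_CACHE(), used for node allocations at runtime, shrunk under memory pressure, destroyed at module exit. A minimal sketch of that pattern, assuming kernel context; the my_node type and my_* helpers are illustrative, not the i915 code:

    #include <linux/slab.h>
    #include <linux/list.h>
    #include <linux/errno.h>

    struct my_node {                        /* stand-in for i915's active_node */
            struct list_head link;
            u64 payload;
    };

    static struct kmem_cache *my_slab_cache;

    static int my_cache_init(void)
    {
            /* KMEM_CACHE() names and sizes the cache after the struct. */
            my_slab_cache = KMEM_CACHE(my_node, SLAB_HWCACHE_ALIGN);
            return my_slab_cache ? 0 : -ENOMEM;
    }

    static struct my_node *my_node_alloc(void)
    {
            return kmem_cache_alloc(my_slab_cache, GFP_KERNEL);
    }

    static void my_node_free(struct my_node *node)
    {
            kmem_cache_free(my_slab_cache, node);
    }

    static void my_cache_shrink(void)
    {
            /* Hand unused slabs back to the page allocator under pressure. */
            kmem_cache_shrink(my_slab_cache);
    }

    static void my_cache_exit(void)
    {
            /* All objects must have been freed before the cache is destroyed. */
            kmem_cache_destroy(my_slab_cache);
    }

Compared with plain kmalloc(), a dedicated cache gives equally sized, cache-line-aligned nodes and a kmem_cache_shrink() hook for shrinker callbacks, which is what the i915_global_active_shrink() hit corresponds to.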
/kernel/linux/linux-5.10/mm/kasan/

tags_report.c
      52  cache = page->slab_cache;  in get_bug_type()

common.c
     516  return __kasan_kmalloc(page->slab_cache, object, size,  in kasan_krealloc()
     533  __kasan_slab_free(page->slab_cache, ptr, ip, false);  in kasan_poison_kfree()

quarantine.c
     130  return virt_to_head_page(qlink)->slab_cache;  in qlink_to_cache()

generic.c
     338  cache = page->slab_cache;  in kasan_record_aux_stack()

report.c
     389  struct kmem_cache *cache = page->slab_cache;  in print_address_description()
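All of these mm/kasan hits perform the same lookup: starting from nothing but a pointer, find the kmem_cache that owns the object by reading slab_cache from the object's head page, then feed the cache's metadata into the report. A rough sketch of that lookup, assuming SLUB field names; describe_slab_object() is an illustrative helper, not a real KASAN function:

    #include <linux/mm.h>
    #include <linux/slab.h>
    #include <linux/printk.h>

    static void describe_slab_object(const void *addr)
    {
            struct page *page = virt_to_head_page(addr);

            if (!PageSlab(page)) {
                    pr_info("%px is not a slab object\n", addr);
                    return;
            }

            /* page->slab_cache identifies the owning cache. */
            pr_info("%px belongs to cache %s of size %u\n",
                    addr, page->slab_cache->name,
                    page->slab_cache->object_size);
    }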
/kernel/linux/linux-5.10/drivers/md/

dm-bufio.c
      95  struct kmem_cache *slab_cache;  member
     404  if (unlikely(c->slab_cache != NULL)) {  in alloc_buffer_data()
     406  return kmem_cache_alloc(c->slab_cache, gfp_mask);  in alloc_buffer_data()
     446  kmem_cache_free(c->slab_cache, data);  in free_buffer_data()
    1775  c->slab_cache = kmem_cache_create(slab_name, block_size, align,  in dm_bufio_client_create()
    1777  if (!c->slab_cache) {  in dm_bufio_client_create()
    1829  kmem_cache_destroy(c->slab_cache);  in dm_bufio_client_create()
    1878  kmem_cache_destroy(c->slab_cache);  in dm_bufio_client_destroy()
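dm-bufio keeps one kmem_cache per client, created in dm_bufio_client_create() with a per-client name, the client's block size and alignment, and left NULL when plain kmalloc() serves better; buffer data is then routed to whichever allocator the client ended up with. A simplified sketch of that shape; my_client and the my_* helpers are made-up names, and the decision of when to use a slab is collapsed into a flag:

    #include <linux/slab.h>
    #include <linux/kernel.h>
    #include <linux/errno.h>

    struct my_client {
            unsigned int block_size;
            char slab_name[32];
            struct kmem_cache *slab_cache;  /* NULL => fall back to kmalloc() */
    };

    static int my_client_create(struct my_client *c, unsigned int block_size,
                                unsigned int align, bool use_slab)
    {
            c->block_size = block_size;
            c->slab_cache = NULL;

            if (use_slab) {
                    snprintf(c->slab_name, sizeof(c->slab_name),
                             "my_bufio_cache-%u", block_size);
                    c->slab_cache = kmem_cache_create(c->slab_name, block_size,
                                                      align, SLAB_RECLAIM_ACCOUNT,
                                                      NULL);
                    if (!c->slab_cache)
                            return -ENOMEM;
            }
            return 0;
    }

    static void *my_alloc_buffer_data(struct my_client *c, gfp_t gfp_mask)
    {
            if (c->slab_cache)
                    return kmem_cache_alloc(c->slab_cache, gfp_mask);
            return kmalloc(c->block_size, gfp_mask);
    }

    static void my_free_buffer_data(struct my_client *c, void *data)
    {
            if (c->slab_cache)
                    kmem_cache_free(c->slab_cache, data);
            else
                    kfree(data);
    }

    static void my_client_destroy(struct my_client *c)
    {
            kmem_cache_destroy(c->slab_cache);      /* tolerates NULL */
    }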
raid5.h
     625  struct kmem_cache *slab_cache;  /* for allocating stripes */  member

raid5.c
    2337  sh = alloc_stripe(conf->slab_cache, gfp, conf->pool_size, conf);  in grow_one_stripe()
    2343  free_stripe(conf->slab_cache, sh);  in grow_one_stripe()
    2376  conf->slab_cache = sc;  in grow_stripes()
    2551  free_stripe(conf->slab_cache, osh);  in resize_stripes()
    2559  kmem_cache_destroy(conf->slab_cache);  in resize_stripes()
    2589  conf->slab_cache = sc;  in resize_stripes()
    2647  free_stripe(conf->slab_cache, sh);  in drop_one_stripe()
    2659  kmem_cache_destroy(conf->slab_cache);  in shrink_stripes()
    2660  conf->slab_cache = NULL;  in shrink_stripes()
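In raid5 the object size of conf->slab_cache depends on array geometry (each stripe embeds one per-device structure), so reshaping means building a new cache for the new size, migrating the stripes, and destroying the old cache, as the resize_stripes() and shrink_stripes() hits show. A compressed sketch of that swap, with illustrative my_conf/my_stripe types and without raid5's locking or the stripe migration itself:

    #include <linux/slab.h>
    #include <linux/types.h>
    #include <linux/errno.h>

    struct my_dev {
            sector_t sector;
    };

    struct my_stripe {
            int ndevs;
            struct my_dev dev[];            /* sized per cache object */
    };

    struct my_conf {
            struct kmem_cache *slab_cache;  /* for allocating stripes */
            int pool_size;                  /* devices per stripe right now */
    };

    static int my_resize_stripe_cache(struct my_conf *conf, int newsize)
    {
            struct kmem_cache *sc;

            sc = kmem_cache_create("my_stripe",
                                   sizeof(struct my_stripe) +
                                           newsize * sizeof(struct my_dev),
                                   0, 0, NULL);
            if (!sc)
                    return -ENOMEM;

            /*
             * The real resize_stripes() re-allocates every live stripe from
             * the new cache and frees the old ones before this point; that
             * migration is omitted here.
             */
            kmem_cache_destroy(conf->slab_cache);
            conf->slab_cache = sc;
            conf->pool_size = newsize;
            return 0;
    }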
/kernel/linux/linux-5.10/mm/

slab.h
     372  s = page->slab_cache;  in memcg_slab_free_hook()
     438  return page->slab_cache;  in virt_to_cache()

slub.c
    1189  if (unlikely(s != page->slab_cache)) {  in free_consistency_checks()
    1193  } else if (!page->slab_cache) {  in free_consistency_checks()
    1782  page->slab_cache = s;  in allocate_slab()
    1859  __free_slab(page->slab_cache, page);  in rcu_free_slab()
    3224  df->s = page->slab_cache;  in build_detached_freelist()
    4048  s = page->slab_cache;  in __check_heap_object()
    4103  return slab_ksize(page->slab_cache);  in __ksize()
    4128  slab_free(page->slab_cache, page, object, NULL, 1, _RET_IP_);  in kfree()
    4352  p->slab_cache = s;  in bootstrap()
    4356  p->slab_cache = s;  in bootstrap()
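The slub.c hits show both directions of the field inside the allocator itself: allocate_slab() stamps a freshly allocated page with the cache that owns it, and kfree(), __ksize() and the free-path consistency checks later read page->slab_cache to get from a bare object pointer back to that cache. The producer side is a one-liner; a hedged sketch, with my_mark_slab_page() as an illustrative stand-in rather than the real SLUB helper:

    #include <linux/mm.h>
    #include <linux/slab.h>

    /* What allocate_slab() (and slab.c's slab_map_pages()) does with a new
     * slab page: record the owner so later frees can be routed to the right
     * cache without any per-object metadata. */
    static void my_mark_slab_page(struct page *page, struct kmem_cache *s)
    {
            __SetPageSlab(page);
            page->slab_cache = s;
    }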
slab.c
    1416  cachep = page->slab_cache;  in kmem_rcu_free()
    2555  page->slab_cache = cache;  in slab_map_pages()
    4148  cachep = page->slab_cache;  in __check_heap_object()

memcontrol.c
    2982  off = obj_to_index(page->slab_cache, page, p);  in mem_cgroup_from_obj()
/kernel/linux/linux-5.10/tools/cgroup/

memcg_slabinfo.py
     199  cache = page.slab_cache
/kernel/linux/linux-5.10/include/linux/

mm_types.h
     120  struct kmem_cache *slab_cache;  /* not slob */  member
/kernel/linux/linux-5.10/Documentation/vm/

split_page_table_lock.rst
      62  allocation: slab uses page->slab_cache for its pages.