
Searched refs:cache (Results 1 – 25 of 106) sorted by relevance


/fs/fscache/
cache.c
26 struct fscache_cache *cache; in fscache_alloc_cache() local
28 cache = kzalloc(sizeof(*cache), GFP_KERNEL); in fscache_alloc_cache()
29 if (cache) { in fscache_alloc_cache()
31 cache->name = kstrdup(name, GFP_KERNEL); in fscache_alloc_cache()
32 if (!cache->name) { in fscache_alloc_cache()
33 kfree(cache); in fscache_alloc_cache()
37 refcount_set(&cache->ref, 1); in fscache_alloc_cache()
38 INIT_LIST_HEAD(&cache->cache_link); in fscache_alloc_cache()
39 cache->debug_id = atomic_inc_return(&fscache_cache_debug_id); in fscache_alloc_cache()
41 return cache; in fscache_alloc_cache()
[all …]
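
The fscache_alloc_cache() hits above trace the usual allocate-and-initialise pattern: zero-allocate the cache struct, duplicate its name, seed the refcount at one, initialise the list link and hand out a debug id. A minimal userspace sketch of that pattern follows; the demo_* names are hypothetical, and calloc()/strdup()/a plain counter stand in for kzalloc(), kstrdup() and the kernel's refcount/atomic helpers.

#include <stdlib.h>
#include <string.h>

struct demo_cache {
        char *name;
        unsigned int ref;
        unsigned int debug_id;
};

static unsigned int demo_cache_debug_id;

/* zero-allocate, duplicate the name, start the refcount at one and
 * assign a debug id -- the shape of fscache_alloc_cache() above */
static struct demo_cache *demo_alloc_cache(const char *name)
{
        struct demo_cache *cache = calloc(1, sizeof(*cache));  /* kzalloc() */

        if (!cache)
                return NULL;
        cache->name = strdup(name);  /* kstrdup() */
        if (!cache->name) {
                free(cache);
                return NULL;
        }
        cache->ref = 1;  /* refcount_set(&cache->ref, 1) */
        cache->debug_id = ++demo_cache_debug_id;  /* atomic_inc_return() in the kernel */
        return cache;
}
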
volume.c
87 if (!fscache_cache_is_live(volume->cache)) in fscache_begin_volume_access()
90 if (!fscache_cache_is_live(volume->cache)) { in fscache_begin_volume_access()
128 a->cache != b->cache || in fscache_volume_same()
205 struct fscache_cache *cache; in fscache_alloc_volume() local
216 cache = fscache_lookup_cache(cache_name, false); in fscache_alloc_volume()
217 if (IS_ERR(cache)) in fscache_alloc_volume()
225 volume->cache = cache; in fscache_alloc_volume()
249 atomic_inc(&cache->n_volumes); in fscache_alloc_volume()
260 fscache_put_cache(cache, fscache_cache_put_alloc_volume); in fscache_alloc_volume()
277 ops = volume->cache->ops; in fscache_create_volume_work()
[all …]
/fs/cachefiles/
daemon.c
61 int (*handler)(struct cachefiles_cache *cache, char *args);
90 struct cachefiles_cache *cache; in cachefiles_daemon_open() local
103 cache = kzalloc(sizeof(struct cachefiles_cache), GFP_KERNEL); in cachefiles_daemon_open()
104 if (!cache) { in cachefiles_daemon_open()
109 mutex_init(&cache->daemon_mutex); in cachefiles_daemon_open()
110 init_waitqueue_head(&cache->daemon_pollwq); in cachefiles_daemon_open()
111 INIT_LIST_HEAD(&cache->volumes); in cachefiles_daemon_open()
112 INIT_LIST_HEAD(&cache->object_list); in cachefiles_daemon_open()
113 spin_lock_init(&cache->object_list_lock); in cachefiles_daemon_open()
114 refcount_set(&cache->unbind_pincount, 1); in cachefiles_daemon_open()
[all …]
cache.c
16 int cachefiles_add_cache(struct cachefiles_cache *cache) in cachefiles_add_cache() argument
27 cache_cookie = fscache_acquire_cache(cache->tag); in cachefiles_add_cache()
32 ret = cachefiles_get_security_ID(cache); in cachefiles_add_cache()
36 cachefiles_begin_secure(cache, &saved_cred); in cachefiles_add_cache()
39 ret = kern_path(cache->rootdirname, LOOKUP_DIRECTORY, &path); in cachefiles_add_cache()
43 cache->mnt = path.mnt; in cachefiles_add_cache()
77 ret = cachefiles_determine_cache_security(cache, root, &saved_cred); in cachefiles_add_cache()
94 cache->bsize = stats.f_bsize; in cachefiles_add_cache()
95 cache->bshift = ilog2(stats.f_bsize); in cachefiles_add_cache()
98 cache->bsize, cache->bshift); in cachefiles_add_cache()
[all …]
ondemand.c
11 struct cachefiles_cache *cache = object->volume->cache; in cachefiles_ondemand_fd_release() local
14 XA_STATE(xas, &cache->reqs, 0); in cachefiles_ondemand_fd_release()
16 xa_lock(&cache->reqs); in cachefiles_ondemand_fd_release()
31 xa_unlock(&cache->reqs); in cachefiles_ondemand_fd_release()
33 xa_erase(&cache->ondemand_ids, object_id); in cachefiles_ondemand_fd_release()
36 cachefiles_put_unbind_pincount(cache); in cachefiles_ondemand_fd_release()
44 struct cachefiles_cache *cache = object->volume->cache; in cachefiles_ondemand_fd_write_iter() local
54 cachefiles_begin_secure(cache, &saved_cred); in cachefiles_ondemand_fd_write_iter()
56 cachefiles_end_secure(cache, saved_cred); in cachefiles_ondemand_fd_write_iter()
84 struct cachefiles_cache *cache = object->volume->cache; in cachefiles_ondemand_fd_ioctl() local
[all …]
namei.c
69 struct cachefiles_cache *cache = object->volume->cache; in cachefiles_unmark_inode_in_use() local
75 atomic_long_add(inode->i_blocks, &cache->b_released); in cachefiles_unmark_inode_in_use()
76 if (atomic_inc_return(&cache->f_released)) in cachefiles_unmark_inode_in_use()
77 cachefiles_state_changed(cache); in cachefiles_unmark_inode_in_use()
84 struct dentry *cachefiles_get_directory(struct cachefiles_cache *cache, in cachefiles_get_directory() argument
119 ret = cachefiles_has_space(cache, 1, 0, in cachefiles_get_directory()
126 path.mnt = cache->mnt; in cachefiles_get_directory()
228 static int cachefiles_unlink(struct cachefiles_cache *cache, in cachefiles_unlink() argument
234 .mnt = cache->mnt, in cachefiles_unlink()
242 cachefiles_io_error(cache, "Unlink security error"); in cachefiles_unlink()
[all …]
interface.c
42 fscache_count_object(vcookie->cache); in cachefiles_alloc_object()
79 struct fscache_cache *cache; in cachefiles_put_object() local
92 cache = object->volume->cache->cache; in cachefiles_put_object()
96 fscache_uncount_object(cache); in cachefiles_put_object()
175 struct cachefiles_cache *cache = cookie->volume->cache->cache_priv; in cachefiles_lookup_cookie() local
190 cachefiles_begin_secure(cache, &saved_cred); in cachefiles_lookup_cookie()
198 spin_lock(&cache->object_list_lock); in cachefiles_lookup_cookie()
199 list_add(&object->cache_link, &cache->object_list); in cachefiles_lookup_cookie()
200 spin_unlock(&cache->object_list_lock); in cachefiles_lookup_cookie()
203 cachefiles_end_secure(cache, saved_cred); in cachefiles_lookup_cookie()
[all …]
volume.c
20 struct cachefiles_cache *cache = vcookie->cache->cache_priv; in cachefiles_acquire_volume() local
34 volume->cache = cache; in cachefiles_acquire_volume()
37 cachefiles_begin_secure(cache, &saved_cred); in cachefiles_acquire_volume()
48 vdentry = cachefiles_get_directory(cache, cache->store, name, &is_new); in cachefiles_acquire_volume()
61 inode_lock_nested(d_inode(cache->store), I_MUTEX_PARENT); in cachefiles_acquire_volume()
62 cachefiles_bury_object(cache, NULL, cache->store, vdentry, in cachefiles_acquire_volume()
72 fan = cachefiles_get_directory(cache, vdentry, name, NULL); in cachefiles_acquire_volume()
78 cachefiles_end_secure(cache, saved_cred); in cachefiles_acquire_volume()
86 spin_lock(&cache->object_list_lock); in cachefiles_acquire_volume()
87 list_add(&volume->cache_link, &volume->cache->volumes); in cachefiles_acquire_volume()
[all …]
internal.h
40 struct cachefiles_cache *cache; member
74 struct fscache_cache *cache; /* Cache cookie */ member
119 static inline bool cachefiles_in_ondemand_mode(struct cachefiles_cache *cache) in cachefiles_in_ondemand_mode() argument
122 test_bit(CACHEFILES_ONDEMAND_MODE, &cache->flags); in cachefiles_in_ondemand_mode()
151 static inline void cachefiles_state_changed(struct cachefiles_cache *cache) in cachefiles_state_changed() argument
153 set_bit(CACHEFILES_STATE_CHANGED, &cache->flags); in cachefiles_state_changed()
154 wake_up_all(&cache->daemon_pollwq); in cachefiles_state_changed()
160 extern int cachefiles_add_cache(struct cachefiles_cache *cache);
161 extern void cachefiles_withdraw_cache(struct cachefiles_cache *cache);
168 extern int cachefiles_has_space(struct cachefiles_cache *cache,
[all …]
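
Among the internal.h hits, cachefiles_state_changed() is the daemon signalling helper: set a state-changed flag bit, then wake everything sleeping on the cache's daemon poll waitqueue. A rough userspace analogue is sketched below, with hypothetical demo_* names and a pthread condition variable standing in for the kernel waitqueue.

#include <pthread.h>
#include <stdatomic.h>

#define DEMO_STATE_CHANGED 0x1u

struct demo_daemon_cache {
        atomic_uint flags;
        pthread_mutex_t lock;
        pthread_cond_t daemon_pollwq;
};

/* set_bit() + wake_up_all() in the kernel version */
static void demo_state_changed(struct demo_daemon_cache *cache)
{
        atomic_fetch_or(&cache->flags, DEMO_STATE_CHANGED);
        pthread_mutex_lock(&cache->lock);
        pthread_cond_broadcast(&cache->daemon_pollwq);
        pthread_mutex_unlock(&cache->lock);
}
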
security.c
16 int cachefiles_get_security_ID(struct cachefiles_cache *cache) in cachefiles_get_security_ID() argument
21 _enter("{%s}", cache->secctx); in cachefiles_get_security_ID()
29 if (cache->secctx) { in cachefiles_get_security_ID()
30 ret = set_security_override_from_ctx(new, cache->secctx); in cachefiles_get_security_ID()
39 cache->cache_cred = new; in cachefiles_get_security_ID()
49 static int cachefiles_check_cache_dir(struct cachefiles_cache *cache, in cachefiles_check_cache_dir() argument
75 int cachefiles_determine_cache_security(struct cachefiles_cache *cache, in cachefiles_determine_cache_security() argument
90 cachefiles_end_secure(cache, *_saved_cred); in cachefiles_determine_cache_security()
97 cachefiles_begin_secure(cache, _saved_cred); in cachefiles_determine_cache_security()
102 put_cred(cache->cache_cred); in cachefiles_determine_cache_security()
[all …]
io.c
213 granularity = max_t(size_t, object->volume->cache->bsize, granularity); in cachefiles_query_occupancy()
270 atomic_long_sub(ki->b_writing, &object->volume->cache->b_writing); in cachefiles_write_complete()
287 struct cachefiles_cache *cache; in __cachefiles_write() local
295 cache = object->volume->cache; in __cachefiles_write()
319 ki->b_writing = (len + (1 << cache->bshift) - 1) >> cache->bshift; in __cachefiles_write()
323 atomic_long_add(ki->b_writing, &cache->b_writing); in __cachefiles_write()
399 struct cachefiles_cache *cache; in cachefiles_prepare_read() local
435 cache = object->volume->cache; in cachefiles_prepare_read()
436 cachefiles_begin_secure(cache, &saved_cred); in cachefiles_prepare_read()
458 off = round_up(off, cache->bsize); in cachefiles_prepare_read()
[all …]
/fs/btrfs/tests/
free-space-tests.c
20 static int test_extents(struct btrfs_block_group *cache) in test_extents() argument
27 ret = btrfs_add_free_space(cache, 0, SZ_4M); in test_extents()
33 ret = btrfs_remove_free_space(cache, 0, SZ_4M); in test_extents()
39 if (test_check_exists(cache, 0, SZ_4M)) { in test_extents()
45 ret = btrfs_add_free_space(cache, 0, SZ_4M); in test_extents()
51 ret = btrfs_remove_free_space(cache, 3 * SZ_1M, SZ_1M); in test_extents()
57 ret = btrfs_remove_free_space(cache, 0, SZ_1M); in test_extents()
63 ret = btrfs_remove_free_space(cache, SZ_2M, 4096); in test_extents()
69 if (test_check_exists(cache, 0, SZ_1M)) { in test_extents()
74 if (test_check_exists(cache, SZ_2M, 4096)) { in test_extents()
[all …]
free-space-tree-tests.c
21 struct btrfs_block_group *cache, in __check_free_space_extents() argument
34 info = search_free_space_info(trans, cache, path, 0); in __check_free_space_extents()
51 end = cache->start + cache->length; in __check_free_space_extents()
59 bit = free_space_test_bit(cache, path, offset); in __check_free_space_extents()
108 struct btrfs_block_group *cache, in check_free_space_extents() argument
117 info = search_free_space_info(trans, cache, path, 0); in check_free_space_extents()
126 ret = __check_free_space_extents(trans, fs_info, cache, path, extents, in check_free_space_extents()
133 ret = convert_free_space_to_extents(trans, cache, path); in check_free_space_extents()
139 ret = convert_free_space_to_bitmaps(trans, cache, path); in check_free_space_extents()
145 return __check_free_space_extents(trans, fs_info, cache, path, extents, in check_free_space_extents()
[all …]
btrfs-tests.c
216 struct btrfs_block_group *cache; in btrfs_alloc_dummy_block_group() local
218 cache = kzalloc(sizeof(*cache), GFP_KERNEL); in btrfs_alloc_dummy_block_group()
219 if (!cache) in btrfs_alloc_dummy_block_group()
221 cache->free_space_ctl = kzalloc(sizeof(*cache->free_space_ctl), in btrfs_alloc_dummy_block_group()
223 if (!cache->free_space_ctl) { in btrfs_alloc_dummy_block_group()
224 kfree(cache); in btrfs_alloc_dummy_block_group()
228 cache->start = 0; in btrfs_alloc_dummy_block_group()
229 cache->length = length; in btrfs_alloc_dummy_block_group()
230 cache->full_stripe_len = fs_info->sectorsize; in btrfs_alloc_dummy_block_group()
231 cache->fs_info = fs_info; in btrfs_alloc_dummy_block_group()
[all …]
/fs/
mbcache.c
47 static unsigned long mb_cache_shrink(struct mb_cache *cache,
50 static inline struct hlist_bl_head *mb_cache_entry_head(struct mb_cache *cache, in mb_cache_entry_head() argument
53 return &cache->c_hash[hash_32(key, cache->c_bucket_bits)]; in mb_cache_entry_head()
74 int mb_cache_entry_create(struct mb_cache *cache, gfp_t mask, u32 key, in mb_cache_entry_create() argument
82 if (cache->c_entry_count >= cache->c_max_entries) in mb_cache_entry_create()
83 schedule_work(&cache->c_shrink_work); in mb_cache_entry_create()
85 if (cache->c_entry_count >= 2*cache->c_max_entries) in mb_cache_entry_create()
86 mb_cache_shrink(cache, SYNC_SHRINK_BATCH); in mb_cache_entry_create()
106 head = mb_cache_entry_head(cache, key); in mb_cache_entry_create()
117 spin_lock(&cache->c_list_lock); in mb_cache_entry_create()
[all …]
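
The mbcache hits centre on mb_cache_entry_head(), which picks a hash bucket by feeding the key through hash_32(key, c_bucket_bits). A self-contained sketch of that bucket lookup is below; the demo_* types are placeholders for the real hlist buckets, and the multiplier is assumed to match the GOLDEN_RATIO_32 constant in <linux/hash.h>.

#include <stdint.h>

/* multiplicative hash of a 32-bit key down to 2^bits buckets */
static inline uint32_t demo_hash_32(uint32_t val, unsigned int bits)
{
        return (val * 0x61C88647u) >> (32 - bits);
}

struct demo_bucket {
        void *first;  /* stand-in for the kernel's hlist_bl_head */
};

static inline struct demo_bucket *
demo_cache_entry_head(struct demo_bucket *c_hash, unsigned int c_bucket_bits,
                      uint32_t key)
{
        return &c_hash[demo_hash_32(key, c_bucket_bits)];
}
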
/fs/squashfs/
cache.c
53 struct squashfs_cache *cache, u64 block, int length) in squashfs_cache_get() argument
58 spin_lock(&cache->lock); in squashfs_cache_get()
61 for (i = cache->curr_blk, n = 0; n < cache->entries; n++) { in squashfs_cache_get()
62 if (cache->entry[i].block == block) { in squashfs_cache_get()
63 cache->curr_blk = i; in squashfs_cache_get()
66 i = (i + 1) % cache->entries; in squashfs_cache_get()
69 if (n == cache->entries) { in squashfs_cache_get()
74 if (cache->unused == 0) { in squashfs_cache_get()
75 cache->num_waiters++; in squashfs_cache_get()
76 spin_unlock(&cache->lock); in squashfs_cache_get()
[all …]
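
The squashfs_cache_get() excerpt shows the lookup half of the cache: scan the entry array circularly, starting from the slot of the most recent hit, and remember where the match was found. A small userspace sketch of that loop follows; the demo_* names are hypothetical, and the locking and miss handling of the real function are omitted.

#include <stdint.h>

struct demo_entry {
        uint64_t block;
};

struct demo_blk_cache {
        int curr_blk;   /* slot of the most recent hit */
        int entries;    /* number of slots in entry[] */
        struct demo_entry *entry;
};

/* circular scan from the last hit, as in squashfs_cache_get() */
static int demo_cache_lookup(struct demo_blk_cache *cache, uint64_t block)
{
        int i = cache->curr_blk;

        for (int n = 0; n < cache->entries; n++) {
                if (cache->entry[i].block == block) {
                        cache->curr_blk = i;  /* start here next time */
                        return i;
                }
                i = (i + 1) % cache->entries;
        }
        return -1;  /* miss: the real code then claims an unused slot */
}
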
/fs/nfs/
nfs42xattr.c
64 struct nfs4_xattr_cache *cache; member
106 nfs4_xattr_hash_init(struct nfs4_xattr_cache *cache) in nfs4_xattr_hash_init() argument
111 INIT_HLIST_HEAD(&cache->buckets[i].hlist); in nfs4_xattr_hash_init()
112 spin_lock_init(&cache->buckets[i].lock); in nfs4_xattr_hash_init()
113 cache->buckets[i].cache = cache; in nfs4_xattr_hash_init()
114 cache->buckets[i].draining = false; in nfs4_xattr_hash_init()
270 struct nfs4_xattr_cache *cache; in nfs4_xattr_free_cache_cb() local
273 cache = container_of(kref, struct nfs4_xattr_cache, ref); in nfs4_xattr_free_cache_cb()
276 if (WARN_ON(!hlist_empty(&cache->buckets[i].hlist))) in nfs4_xattr_free_cache_cb()
278 cache->buckets[i].draining = false; in nfs4_xattr_free_cache_cb()
[all …]
/fs/btrfs/
block-group.c
125 void btrfs_get_block_group(struct btrfs_block_group *cache) in btrfs_get_block_group() argument
127 refcount_inc(&cache->refs); in btrfs_get_block_group()
130 void btrfs_put_block_group(struct btrfs_block_group *cache) in btrfs_put_block_group() argument
132 if (refcount_dec_and_test(&cache->refs)) { in btrfs_put_block_group()
133 WARN_ON(cache->pinned > 0); in btrfs_put_block_group()
141 if (!(cache->flags & BTRFS_BLOCK_GROUP_METADATA) || in btrfs_put_block_group()
142 !BTRFS_FS_LOG_CLEANUP_ERROR(cache->fs_info)) in btrfs_put_block_group()
143 WARN_ON(cache->reserved > 0); in btrfs_put_block_group()
150 if (WARN_ON(!list_empty(&cache->discard_list))) in btrfs_put_block_group()
151 btrfs_discard_cancel_work(&cache->fs_info->discard_ctl, in btrfs_put_block_group()
[all …]
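
btrfs_get_block_group() and btrfs_put_block_group() are a classic refcount get/put pair: increment on get, and release the object when the last put drops the count to zero. Below is a userspace sketch of the same shape, using C11 atomics in place of the kernel's refcount_t; the demo_* names are placeholders, and the real put path does far more teardown than a single free().

#include <stdatomic.h>
#include <stdlib.h>

struct demo_block_group {
        atomic_int refs;
        /* ... free-space state, lists, etc. in the real struct ... */
};

static void demo_get_block_group(struct demo_block_group *cache)
{
        atomic_fetch_add(&cache->refs, 1);  /* refcount_inc() */
}

static void demo_put_block_group(struct demo_block_group *cache)
{
        /* refcount_dec_and_test(): true once the count reaches zero */
        if (atomic_fetch_sub(&cache->refs, 1) == 1)
                free(cache);
}
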
backref.c
1551 static bool lookup_backref_shared_cache(struct btrfs_backref_shared_cache *cache, in lookup_backref_shared_cache() argument
1557 if (!cache->use_cache) in lookup_backref_shared_cache()
1571 entry = &cache->entries[level]; in lookup_backref_shared_cache()
1604 cache->entries[i].is_shared = true; in lookup_backref_shared_cache()
1605 cache->entries[i].gen = entry->gen; in lookup_backref_shared_cache()
1617 static void store_backref_shared_cache(struct btrfs_backref_shared_cache *cache, in store_backref_shared_cache() argument
1624 if (!cache->use_cache) in store_backref_shared_cache()
1643 entry = &cache->entries[level]; in store_backref_shared_cache()
1657 entry = &cache->entries[i]; in store_backref_shared_cache()
1690 struct btrfs_backref_shared_cache *cache) in btrfs_is_data_extent_shared() argument
[all …]
zoned.c
1202 static int calculate_alloc_pointer(struct btrfs_block_group *cache, in calculate_alloc_pointer() argument
1205 struct btrfs_fs_info *fs_info = cache->fs_info; in calculate_alloc_pointer()
1232 key.objectid = cache->start + cache->length; in calculate_alloc_pointer()
1244 ret = btrfs_previous_extent_item(root, path, cache->start); in calculate_alloc_pointer()
1260 if (!(found_key.objectid >= cache->start && in calculate_alloc_pointer()
1261 found_key.objectid + length <= cache->start + cache->length)) { in calculate_alloc_pointer()
1265 *offset_ret = found_key.objectid + length - cache->start; in calculate_alloc_pointer()
1273 int btrfs_load_block_group_zone_info(struct btrfs_block_group *cache, bool new) in btrfs_load_block_group_zone_info() argument
1275 struct btrfs_fs_info *fs_info = cache->fs_info; in btrfs_load_block_group_zone_info()
1280 u64 logical = cache->start; in btrfs_load_block_group_zone_info()
[all …]
block-group.h
280 struct btrfs_block_group *cache);
281 void btrfs_get_block_group(struct btrfs_block_group *cache);
282 void btrfs_put_block_group(struct btrfs_block_group *cache);
290 void btrfs_wait_block_group_cache_progress(struct btrfs_block_group *cache,
292 int btrfs_cache_block_group(struct btrfs_block_group *cache, bool wait);
295 struct btrfs_block_group *cache);
313 int btrfs_inc_block_group_ro(struct btrfs_block_group *cache,
315 void btrfs_dec_block_group_ro(struct btrfs_block_group *cache);
321 int btrfs_add_reserved_bytes(struct btrfs_block_group *cache,
323 void btrfs_free_reserved_bytes(struct btrfs_block_group *cache,
[all …]
/fs/exfat/
cache.c
40 struct exfat_cache *cache = (struct exfat_cache *)c; in exfat_cache_init_once() local
42 INIT_LIST_HEAD(&cache->cache_list); in exfat_cache_init_once()
68 static inline void exfat_cache_free(struct exfat_cache *cache) in exfat_cache_free() argument
70 WARN_ON(!list_empty(&cache->cache_list)); in exfat_cache_free()
71 kmem_cache_free(exfat_cachep, cache); in exfat_cache_free()
75 struct exfat_cache *cache) in exfat_cache_update_lru() argument
79 if (ei->cache_lru.next != &cache->cache_list) in exfat_cache_update_lru()
80 list_move(&cache->cache_list, &ei->cache_lru); in exfat_cache_update_lru()
141 struct exfat_cache *cache, *tmp; in exfat_cache_add() local
151 cache = exfat_cache_merge(inode, new); in exfat_cache_add()
[all …]
/fs/overlayfs/
readdir.c
55 struct ovl_dir_cache *cache; member
230 struct ovl_dir_cache *cache = ovl_dir_cache(inode); in ovl_dir_cache_free() local
232 if (cache) { in ovl_dir_cache_free()
233 ovl_cache_free(&cache->entries); in ovl_dir_cache_free()
234 kfree(cache); in ovl_dir_cache_free()
240 struct ovl_dir_cache *cache = od->cache; in ovl_cache_put() local
242 WARN_ON(cache->refcount <= 0); in ovl_cache_put()
243 cache->refcount--; in ovl_cache_put()
244 if (!cache->refcount) { in ovl_cache_put()
245 if (ovl_dir_cache(d_inode(dentry)) == cache) in ovl_cache_put()
[all …]
/fs/fat/
cache.c
41 struct fat_cache *cache = (struct fat_cache *)foo; in init_once() local
43 INIT_LIST_HEAD(&cache->cache_list); in init_once()
67 static inline void fat_cache_free(struct fat_cache *cache) in fat_cache_free() argument
69 BUG_ON(!list_empty(&cache->cache_list)); in fat_cache_free()
70 kmem_cache_free(fat_cache_cachep, cache); in fat_cache_free()
74 struct fat_cache *cache) in fat_cache_update_lru() argument
76 if (MSDOS_I(inode)->cache_lru.next != &cache->cache_list) in fat_cache_update_lru()
77 list_move(&cache->cache_list, &MSDOS_I(inode)->cache_lru); in fat_cache_update_lru()
136 struct fat_cache *cache, *tmp; in fat_cache_add() local
146 cache = fat_cache_merge(inode, new); in fat_cache_add()
[all …]
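
Both the exfat and fat cache.c hits include a *_cache_update_lru() helper with the same shape: if the entry is not already at the head of the per-inode LRU list, move it there. Below is a compact userspace sketch of that move-to-front step, assuming a circular doubly linked list with a head sentinel like the kernel's list_head; the demo_* names are hypothetical.

/* minimal circular doubly linked list, the head acting as a sentinel */
struct demo_node {
        struct demo_node *prev, *next;
};

static void demo_list_del(struct demo_node *n)
{
        n->prev->next = n->next;
        n->next->prev = n->prev;
}

static void demo_list_add(struct demo_node *n, struct demo_node *head)
{
        n->next = head->next;
        n->prev = head;
        head->next->prev = n;
        head->next = n;
}

/* bump an entry to most-recently-used, as fat_cache_update_lru() and
 * exfat_cache_update_lru() do with list_move() */
static void demo_cache_update_lru(struct demo_node *lru_head,
                                  struct demo_node *entry)
{
        if (lru_head->next != entry) {
                demo_list_del(entry);
                demo_list_add(entry, lru_head);
        }
}
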
/fs/nilfs2/
alloc.c
264 struct nilfs_palloc_cache *cache = NILFS_MDT(inode)->mi_palloc_cache; in nilfs_palloc_get_desc_block() local
269 bhp, &cache->prev_desc, &cache->lock); in nilfs_palloc_get_desc_block()
283 struct nilfs_palloc_cache *cache = NILFS_MDT(inode)->mi_palloc_cache; in nilfs_palloc_get_bitmap_block() local
288 &cache->prev_bitmap, &cache->lock); in nilfs_palloc_get_bitmap_block()
299 struct nilfs_palloc_cache *cache = NILFS_MDT(inode)->mi_palloc_cache; in nilfs_palloc_delete_bitmap_block() local
304 &cache->prev_bitmap, &cache->lock); in nilfs_palloc_delete_bitmap_block()
317 struct nilfs_palloc_cache *cache = NILFS_MDT(inode)->mi_palloc_cache; in nilfs_palloc_get_entry_block() local
322 &cache->prev_entry, &cache->lock); in nilfs_palloc_get_entry_block()
332 struct nilfs_palloc_cache *cache = NILFS_MDT(inode)->mi_palloc_cache; in nilfs_palloc_delete_entry_block() local
336 &cache->prev_entry, &cache->lock); in nilfs_palloc_delete_entry_block()
[all …]
