Lines matching defs:c (definitions of the identifier c in the bcache btree code, flagged as a macro argument, a function parameter, or a local variable, one source line per entry):
99 #define PTR_HASH(c, k) \ argument
128 void bkey_put(struct cache_set *c, struct bkey *k) in bkey_put()
519 #define mca_reserve(c) (((!IS_ERR_OR_NULL(c->root) && c->root->level) \ argument
521 #define mca_can_free(c) \ argument
562 static struct btree *mca_bucket_alloc(struct cache_set *c, in mca_bucket_alloc()
649 struct cache_set *c = container_of(shrink, struct cache_set, shrink); in bch_mca_scan() local
716 struct cache_set *c = container_of(shrink, struct cache_set, shrink); in bch_mca_count() local
727 void bch_btree_cache_free(struct cache_set *c) in bch_btree_cache_free()
775 int bch_btree_cache_alloc(struct cache_set *c) in bch_btree_cache_alloc()
824 static struct hlist_head *mca_hash(struct cache_set *c, struct bkey *k) in mca_hash()
829 static struct btree *mca_find(struct cache_set *c, struct bkey *k) in mca_find()
843 static int mca_cannibalize_lock(struct cache_set *c, struct btree_op *op) in mca_cannibalize_lock()
860 static struct btree *mca_cannibalize(struct cache_set *c, struct btree_op *op, in mca_cannibalize()
888 void bch_cannibalize_unlock(struct cache_set *c) in bch_cannibalize_unlock()
898 static struct btree *mca_alloc(struct cache_set *c, struct btree_op *op, in mca_alloc()
981 struct btree *bch_btree_node_get(struct cache_set *c, struct btree_op *op, in bch_btree_node_get()
1095 struct btree *__bch_btree_node_alloc(struct cache_set *c, struct btree_op *op, in __bch_btree_node_alloc()
1138 static struct btree *bch_btree_node_alloc(struct cache_set *c, in bch_btree_node_alloc()
1181 struct cache_set *c = b->c; in btree_check_reserve() local
1202 static uint8_t __bch_btree_mark_key(struct cache_set *c, int level, in __bch_btree_mark_key()
1256 void bch_initial_mark_key(struct cache_set *c, int level, struct bkey *k) in bch_initial_mark_key()
1276 void bch_update_bucket_in_use(struct cache_set *c, struct gc_stat *stats) in bch_update_bucket_in_use()
1556 static size_t btree_gc_min_nodes(struct cache_set *c) in btree_gc_min_nodes()
1707 static void btree_gc_start(struct cache_set *c) in btree_gc_start()
1732 static void bch_btree_gc_finish(struct cache_set *c) in bch_btree_gc_finish()
1797 static void bch_btree_gc(struct cache_set *c) in bch_btree_gc()
1841 static bool gc_should_run(struct cache_set *c) in gc_should_run()
1856 struct cache_set *c = arg; in bch_gc_thread() local
1876 int bch_gc_thread_start(struct cache_set *c) in bch_gc_thread_start()
1926 struct cache_set *c = check_state->c; in bch_btree_check_thread() local
2023 int bch_btree_check(struct cache_set *c) in bch_btree_check()
2096 void bch_initial_gc_finish(struct cache_set *c) in bch_initial_gc_finish()
2456 int bch_btree_insert(struct cache_set *c, struct keylist *keys, in bch_btree_insert()
2545 int __bch_btree_map_nodes(struct btree_op *op, struct cache_set *c, in __bch_btree_map_nodes()
2579 int bch_btree_map_keys(struct btree_op *op, struct cache_set *c, in bch_btree_map_keys()
2655 void bch_refill_keybuf(struct cache_set *c, struct keybuf *buf, in bch_refill_keybuf()
2755 struct keybuf_key *bch_keybuf_next_rescan(struct cache_set *c, in bch_keybuf_next_rescan()
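Nearly every entry above binds c to a struct cache_set pointer. The bch_mca_scan() and bch_mca_count() entries (source lines 649 and 716) recover that pointer with container_of() from the shrinker embedded in the cache set. Below is a minimal, self-contained userspace sketch of that pattern; the struct layouts are simplified stand-ins for the real bcache and kernel types, not the actual definitions.

/*
 * Sketch of the container_of() pattern used at lines 649/716 above:
 * the shrinker callback is handed a pointer to the shrinker member
 * embedded inside struct cache_set, and container_of() walks back to
 * the enclosing structure. Types here are reduced stand-ins.
 */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct shrinker {               /* stand-in for the kernel's struct shrinker */
	int seeks;
};

struct cache_set {              /* heavily reduced stand-in for bcache's cache_set */
	unsigned long btree_cache_used;
	struct shrinker shrink; /* embedded member, as in the real struct */
};

/* Mimics how bch_mca_scan() reaches its cache_set from the shrinker pointer. */
static unsigned long mca_scan_sketch(struct shrinker *shrink)
{
	struct cache_set *c = container_of(shrink, struct cache_set, shrink);

	return c->btree_cache_used;
}

int main(void)
{
	struct cache_set cs = { .btree_cache_used = 42 };

	/* The callback only ever sees &cs.shrink, yet reaches the whole cache_set. */
	printf("btree_cache_used = %lu\n", mca_scan_sketch(&cs.shrink));
	return 0;
}

In the kernel proper these callbacks also receive a struct shrink_control and report object counts back to the shrinker core; only the container_of() step visible in the listing is reproduced here.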