
Searched refs:NM_I (Results 1 – 8 of 8) sorted by relevance

/fs/f2fs/
node.h     135  return NM_I(sbi)->nat_cnt[DIRTY_NAT] >= NM_I(sbi)->max_nid * in excess_dirty_nats()
136 NM_I(sbi)->dirty_nats_ratio / 100; in excess_dirty_nats()
141 return NM_I(sbi)->nat_cnt[TOTAL_NAT] >= DEF_NAT_CACHE_THRESHOLD; in excess_cached_nats()
171 struct f2fs_nm_info *nm_i = NM_I(sbi); in next_free_nid()
189 struct f2fs_nm_info *nm_i = NM_I(sbi); in get_nat_bitmap()
201 struct f2fs_nm_info *nm_i = NM_I(sbi); in current_nat_addr()
225 struct f2fs_nm_info *nm_i = NM_I(sbi); in next_nat_addr()
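The node.h hits at lines 135–141 above are the NAT cache pressure checks. A minimal sketch of those two helpers, reconstructed from the fragments shown (the signatures and bracing are assumed, only the return expressions appear in the hits; both depend on f2fs-internal types from f2fs.h):

    /* Sketch reconstructed from the node.h hits above; signatures assumed. */
    static inline bool excess_dirty_nats(struct f2fs_sb_info *sbi)
    {
    	/* Dirty NAT entries beyond dirty_nats_ratio percent of max_nid. */
    	return NM_I(sbi)->nat_cnt[DIRTY_NAT] >= NM_I(sbi)->max_nid *
    				NM_I(sbi)->dirty_nats_ratio / 100;
    }

    static inline bool excess_cached_nats(struct f2fs_sb_info *sbi)
    {
    	/* Total cached NAT entries beyond the fixed cache threshold. */
    	return NM_I(sbi)->nat_cnt[TOTAL_NAT] >= DEF_NAT_CACHE_THRESHOLD;
    }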
node.c      35  if (unlikely(nid < F2FS_ROOT_INO(sbi) || nid >= NM_I(sbi)->max_nid)) { in f2fs_check_nid_range()
46 struct f2fs_nm_info *nm_i = NM_I(sbi); in f2fs_available_free_memory()
144 struct f2fs_nm_info *nm_i = NM_I(sbi); in get_next_nat_page()
381 struct f2fs_nm_info *nm_i = NM_I(sbi); in f2fs_need_dentry_mark()
398 struct f2fs_nm_info *nm_i = NM_I(sbi); in f2fs_is_checkpointed_node()
412 struct f2fs_nm_info *nm_i = NM_I(sbi); in f2fs_need_inode_block_update()
430 struct f2fs_nm_info *nm_i = NM_I(sbi); in cache_nat_entry()
458 struct f2fs_nm_info *nm_i = NM_I(sbi); in set_node_addr()
516 struct f2fs_nm_info *nm_i = NM_I(sbi); in f2fs_try_to_free_nats()
548 struct f2fs_nm_info *nm_i = NM_I(sbi); in f2fs_get_node_info()
[all …]
debug.c    161  si->nats = NM_I(sbi)->nat_cnt[TOTAL_NAT]; in update_general_status()
162 si->dirty_nats = NM_I(sbi)->nat_cnt[DIRTY_NAT]; in update_general_status()
165 si->free_nids = NM_I(sbi)->nid_cnt[FREE_NID]; in update_general_status()
166 si->avail_nids = NM_I(sbi)->available_nids; in update_general_status()
167 si->alloc_nids = NM_I(sbi)->nid_cnt[PREALLOC_NID]; in update_general_status()
269 si->base_mem += (NM_I(sbi)->nat_bits_blocks << F2FS_BLKSIZE_BITS); in update_mem_info()
270 si->base_mem += NM_I(sbi)->nat_blocks * in update_mem_info()
272 si->base_mem += NM_I(sbi)->nat_blocks / 8; in update_mem_info()
273 si->base_mem += NM_I(sbi)->nat_blocks * sizeof(unsigned short); in update_mem_info()
292 si->cache_mem += (NM_I(sbi)->nid_cnt[FREE_NID] + in update_mem_info()
[all …]
shrinker.c  21  return NM_I(sbi)->nat_cnt[RECLAIMABLE_NAT]; in __count_nat_entries()
26 long count = NM_I(sbi)->nid_cnt[FREE_NID] - MAX_FREE_NIDS; in __count_free_nids()
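The two shrinker.c hits count objects the memory shrinker may reclaim. A sketch of both helpers, where only the lines shown in the hits are taken as given and the non-negative clamp in __count_free_nids is assumed:

    static unsigned long __count_nat_entries(struct f2fs_sb_info *sbi)
    {
    	/* Clean, reclaimable NAT cache entries (hit at line 21). */
    	return NM_I(sbi)->nat_cnt[RECLAIMABLE_NAT];
    }

    static unsigned long __count_free_nids(struct f2fs_sb_info *sbi)
    {
    	/* Free nids cached beyond MAX_FREE_NIDS are reclaimable (line 26);
    	 * clamping the result to zero is assumed here. */
    	long count = NM_I(sbi)->nid_cnt[FREE_NID] - MAX_FREE_NIDS;

    	return count > 0 ? count : 0;
    }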
checkpoint.c 255  NAT_BLOCK_OFFSET(NM_I(sbi)->max_nid))) in f2fs_ra_meta_pages()
1176 struct f2fs_nm_info *nm_i = NM_I(sbi); in __prepare_cp_block()
1338 NM_I(sbi)->nat_bits_blocks > sbi->blocks_per_seg) { in update_ckpt_flags()
1464 struct f2fs_nm_info *nm_i = NM_I(sbi); in do_checkpoint()
1676 if (NM_I(sbi)->nat_cnt[DIRTY_NAT] == 0 && in f2fs_write_checkpoint()
recovery.c  58  if (NM_I(sbi)->max_rf_node_blocks && in f2fs_space_for_roll_forward()
60 NM_I(sbi)->max_rf_node_blocks) in f2fs_space_for_roll_forward()
sysfs.c     76  return (unsigned char *)NM_I(sbi); in __struct_ptr()
f2fs.h    2023  static inline struct f2fs_nm_info *NM_I(struct f2fs_sb_info *sbi) in NM_I() function
3950 else if (blkaddr < NM_I(sbi)->nat_blkaddr) \
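The f2fs.h hit at line 2023 is the definition every other hit goes through: NM_I() is the accessor from the superblock info to the node-manager info. A minimal sketch of the inline function, where only the signature comes from the hit and the body (including the nm_info field name) is assumed:

    static inline struct f2fs_nm_info *NM_I(struct f2fs_sb_info *sbi)
    {
    	/* Return the node-manager state hung off the superblock info;
    	 * the nm_info member name is an assumption, not shown in the hit. */
    	return (struct f2fs_nm_info *)(sbi->nm_info);
    }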