/fs/ntfs/ |
D | runlist.c |
    60  static inline runlist_element *ntfs_rl_realloc(runlist_element *rl,   in ntfs_rl_realloc() argument
    65  old_size = PAGE_ALIGN(old_size * sizeof(*rl));   in ntfs_rl_realloc()
    66  new_size = PAGE_ALIGN(new_size * sizeof(*rl));   in ntfs_rl_realloc()
    68  return rl;   in ntfs_rl_realloc()
    74  if (likely(rl != NULL)) {   in ntfs_rl_realloc()
    77  memcpy(new_rl, rl, old_size);   in ntfs_rl_realloc()
    78  ntfs_free(rl);   in ntfs_rl_realloc()
   106  static inline runlist_element *ntfs_rl_realloc_nofail(runlist_element *rl,   in ntfs_rl_realloc_nofail() argument
   111  old_size = PAGE_ALIGN(old_size * sizeof(*rl));   in ntfs_rl_realloc_nofail()
   112  new_size = PAGE_ALIGN(new_size * sizeof(*rl));   in ntfs_rl_realloc_nofail()
    [all …]
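The runlist.c hits show the reallocation pattern: buffer sizes are rounded up to whole pages, the reallocation is skipped when the rounded old and new sizes match, and otherwise a new buffer is allocated, the old contents copied over, and the old buffer freed. Below is a minimal user-space sketch of that idea, with a stand-in PAGE_ALIGN and plain malloc/free instead of the kernel allocators; the element type is a simplified stand-in, not the kernel's definition.

    #include <stdlib.h>
    #include <string.h>

    #define PAGE_SIZE 4096UL
    #define PAGE_ALIGN(x) (((x) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1))

    /* Simplified stand-in for a runlist element; not the kernel layout. */
    struct rl_elem { long long vcn, lcn, length; };

    /*
     * Grow or shrink a runlist buffer.  Sizes are given in elements and
     * rounded up to whole pages, so small changes reuse the same buffer.
     */
    static struct rl_elem *rl_realloc(struct rl_elem *rl, int old_elems, int new_elems)
    {
        size_t old_size = PAGE_ALIGN(old_elems * sizeof(*rl));
        size_t new_size = PAGE_ALIGN(new_elems * sizeof(*rl));
        struct rl_elem *new_rl;

        if (old_size == new_size)
            return rl;          /* still fits in the same pages */

        new_rl = malloc(new_size);
        if (!new_rl)
            return NULL;        /* caller keeps the old buffer */

        if (rl) {
            memcpy(new_rl, rl, old_size < new_size ? old_size : new_size);
            free(rl);
        }
        return new_rl;
    }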
|
D | lcnalloc.c |
    37  const runlist_element *rl)   in ntfs_cluster_free_from_rl_nolock() argument
    43  if (!rl)   in ntfs_cluster_free_from_rl_nolock()
    45  for (; rl->length; rl++) {   in ntfs_cluster_free_from_rl_nolock()
    48  if (rl->lcn < 0)   in ntfs_cluster_free_from_rl_nolock()
    50  err = ntfs_bitmap_clear_run(lcnbmp_vi, rl->lcn, rl->length);   in ntfs_cluster_free_from_rl_nolock()
   142  runlist_element *rl = NULL;   in ntfs_cluster_alloc() local
   320  if ((rlpos + 2) * sizeof(*rl) > rlsize) {   in ntfs_cluster_alloc()
   324  if (!rl)   in ntfs_cluster_alloc()
   336  memcpy(rl2, rl, rlsize);   in ntfs_cluster_alloc()
   337  ntfs_free(rl);   in ntfs_cluster_alloc()
    [all …]
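The lcnalloc.c hits outline the free path: walk the runlist until the terminating element (length == 0), skip elements whose LCN is negative (holes and unmapped runs own no clusters on disk), and clear the corresponding run of bits in the cluster bitmap. A compact, self-contained illustration with a stand-in bitmap-clear callback; the names here are illustrative, not the kernel API.

    /* Simplified stand-in element: a negative lcn means hole/unmapped. */
    struct rl_elem { long long vcn, lcn, length; };

    typedef int (*clear_run_fn)(long long start_bit, long long nr_bits);

    /*
     * Free every real (non-hole) run described by the runlist by clearing
     * its bits in the cluster allocation bitmap.  Stops at the terminator
     * element, which has length == 0.
     */
    static int free_clusters_from_rl(const struct rl_elem *rl, clear_run_fn clear_run)
    {
        int err;

        if (!rl)
            return 0;
        for (; rl->length; rl++) {
            if (rl->lcn < 0)    /* hole or not mapped: nothing on disk */
                continue;
            err = clear_run(rl->lcn, rl->length);
            if (err)
                return err;
        }
        return 0;
    }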
|
D | attrib.c |
    77  runlist_element *rl;   in ntfs_map_runlist_nolock() local
   173  rl = ntfs_mapping_pairs_decompress(ni->vol, a, ni->runlist.rl);   in ntfs_map_runlist_nolock()
   174  if (IS_ERR(rl))   in ntfs_map_runlist_nolock()
   175  err = PTR_ERR(rl);   in ntfs_map_runlist_nolock()
   177  ni->runlist.rl = rl;   in ntfs_map_runlist_nolock()
   290  if (likely(ntfs_rl_vcn_to_lcn(ni->runlist.rl, vcn) <=   in ntfs_map_runlist()
   340  if (!ni->runlist.rl) {   in ntfs_attr_vcn_to_lcn_nolock()
   350  lcn = ntfs_rl_vcn_to_lcn(ni->runlist.rl, vcn);   in ntfs_attr_vcn_to_lcn_nolock()
   364  if (unlikely(ntfs_rl_vcn_to_lcn(ni->runlist.rl, vcn) !=   in ntfs_attr_vcn_to_lcn_nolock()
   454  runlist_element *rl;   in ntfs_attr_find_vcn_nolock() local
    [all …]
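The attrib.c hits show the error-pointer discipline around ntfs_mapping_pairs_decompress(): the decompressed runlist comes back either as a valid pointer or as an encoded errno, tested with IS_ERR() and extracted with PTR_ERR(), and only on success is it stored back into ni->runlist.rl. A minimal user-space sketch of that discipline, with local stand-ins for the kernel's error-pointer helpers; the decode function is passed in as a hypothetical callback.

    #include <errno.h>

    /* User-space stand-ins for the kernel's error-pointer helpers. */
    #define MAX_ERRNO 4095
    static inline void *ERR_PTR(long err) { return (void *)err; }
    static inline long PTR_ERR(const void *p) { return (long)p; }
    static inline int IS_ERR(const void *p)
    {
        return (unsigned long)p >= (unsigned long)-MAX_ERRNO;
    }

    struct rl_elem { long long vcn, lcn, length; };

    /* Hypothetical decoder: returns a runlist or an encoded -errno pointer. */
    typedef struct rl_elem *(*decode_fn)(const void *attr, struct rl_elem *old_rl);

    /* Decode a mapping-pairs array and, on success, publish the new runlist. */
    static int map_runlist(struct rl_elem **cached_rl, const void *attr, decode_fn decode)
    {
        struct rl_elem *rl = decode(attr, *cached_rl);

        if (IS_ERR(rl))
            return (int)PTR_ERR(rl);    /* e.g. -ENOMEM, -EIO */
        *cached_rl = rl;                /* success: replace the cached runlist */
        return 0;
    }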
|
D | debug.c |
   120  void ntfs_debug_dump_runlist(const runlist_element *rl)   in ntfs_debug_dump_runlist() argument
   129  if (!rl) {   in ntfs_debug_dump_runlist()
   135  LCN lcn = (rl + i)->lcn;   in ntfs_debug_dump_runlist()
   143  (long long)(rl + i)->vcn, lcn_str[index],   in ntfs_debug_dump_runlist()
   144  (long long)(rl + i)->length,   in ntfs_debug_dump_runlist()
   145  (rl + i)->length ? "" :   in ntfs_debug_dump_runlist()
   149  (long long)(rl + i)->vcn,   in ntfs_debug_dump_runlist()
   150  (long long)(rl + i)->lcn,   in ntfs_debug_dump_runlist()
   151  (long long)(rl + i)->length,   in ntfs_debug_dump_runlist()
   152  (rl + i)->length ? "" :   in ntfs_debug_dump_runlist()
    [all …]
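The debug.c hits are the runlist dumper: one line per element with VCN, LCN and length, a symbolic name instead of the raw value when the LCN is one of the negative special codes, and a marker on the terminating element (length == 0). A user-space sketch of the same dump loop; the special-code names below are illustrative, not the kernel's table.

    #include <stdio.h>

    struct rl_elem { long long vcn, lcn, length; };

    /* Illustrative names for negative "special" LCN codes. */
    static const char *lcn_name(long long lcn)
    {
        switch (lcn) {
        case -1: return "HOLE";
        case -2: return "NOT_MAPPED";
        case -3: return "ENOENT";
        default: return NULL;       /* a real, on-disk LCN */
        }
    }

    static void dump_runlist(const struct rl_elem *rl)
    {
        int i;

        if (!rl) {
            printf("(empty runlist)\n");
            return;
        }
        printf("%-16s %-16s %-16s\n", "VCN", "LCN", "length");
        for (i = 0; ; i++) {
            const char *name = lcn_name(rl[i].lcn);

            if (name)
                printf("%-16lld %-16s %-16lld%s\n", rl[i].vcn, name,
                       rl[i].length, rl[i].length ? "" : " (terminator)");
            else
                printf("%-16lld %-16lld %-16lld%s\n", rl[i].vcn, rl[i].lcn,
                       rl[i].length, rl[i].length ? "" : " (terminator)");
            if (!rl[i].length)
                break;              /* terminator element ends the list */
        }
    }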
|
D | runlist.h |
    41  runlist_element *rl;   member
    45  static inline void ntfs_init_runlist(runlist *rl)   in ntfs_init_runlist() argument
    47  rl->rl = NULL;   in ntfs_init_runlist()
    48  init_rwsem(&rl->lock);   in ntfs_init_runlist()
    65  extern LCN ntfs_rl_vcn_to_lcn(const runlist_element *rl, const VCN vcn);
    69  extern runlist_element *ntfs_rl_find_vcn_nolock(runlist_element *rl,
    73  const runlist_element *rl, const VCN first_vcn,
    77  const int dst_len, const runlist_element *rl,
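runlist.h pins down the data structure the other NTFS files consume: a runlist is a pointer to an array of runlist_element plus an rw_semaphore, the array is terminated by an element whose length is 0, and a negative LCN encodes a hole or a not-yet-mapped run. A simplified stand-in for the types and the init step, with a pthreads rwlock in place of the kernel's rw_semaphore.

    #include <pthread.h>
    #include <stddef.h>

    typedef long long VCN;  /* virtual cluster number inside the attribute */
    typedef long long LCN;  /* logical cluster number on the volume; < 0 is special */

    /* One contiguous run: "length" clusters starting at vcn map to lcn. */
    struct rl_elem {
        VCN vcn;
        LCN lcn;            /* negative: hole, not mapped, ... */
        long long length;   /* 0 terminates the array */
    };

    /* The cached, lock-protected runlist hanging off an in-memory inode. */
    struct runlist {
        struct rl_elem *rl;     /* NULL until first mapped */
        pthread_rwlock_t lock;  /* stand-in for the kernel rw_semaphore */
    };

    static void init_runlist(struct runlist *runlist)
    {
        runlist->rl = NULL;
        pthread_rwlock_init(&runlist->lock, NULL);
    }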
|
D | mft.c |
   463  runlist_element *rl;   in ntfs_sync_mft_mirror() local
   511  rl = NULL;   in ntfs_sync_mft_mirror()
   535  if (!rl) {   in ntfs_sync_mft_mirror()
   538  rl = NTFS_I(vol->mftmirr_ino)->runlist.rl;   in ntfs_sync_mft_mirror()
   543  BUG_ON(!rl);   in ntfs_sync_mft_mirror()
   546  while (rl->length && rl[1].vcn <= vcn)   in ntfs_sync_mft_mirror()
   547  rl++;   in ntfs_sync_mft_mirror()
   548  lcn = ntfs_rl_vcn_to_lcn(rl, vcn);   in ntfs_sync_mft_mirror()
   573  if (unlikely(rl))   in ntfs_sync_mft_mirror()
   669  runlist_element *rl;   in write_mft_record_nolock() local
    [all …]
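The mft.c hits, and the logfile.c, aops.c, file.c and compress.c rows that follow, all repeat the same lookup idiom: take the cached runlist under its lock, step forward while the next element still starts at or before the wanted VCN, then translate the VCN inside the element that is left. A standalone sketch of that walk plus the translation, using the same simplified element type as the sketches above and not the kernel's ntfs_rl_vcn_to_lcn() itself.

    struct rl_elem { long long vcn, lcn, length; };

    /*
     * Advance to the element that covers @vcn: as long as the current element
     * is not the terminator and the *next* element already starts at or below
     * @vcn, the wanted run must lie further on.
     */
    static const struct rl_elem *rl_find(const struct rl_elem *rl, long long vcn)
    {
        while (rl->length && rl[1].vcn <= vcn)
            rl++;
        return rl;
    }

    /* Translate @vcn to an on-disk LCN, or a negative code for holes/unmapped. */
    static long long rl_vcn_to_lcn(const struct rl_elem *rl, long long vcn)
    {
        if (!rl)
            return -2;                      /* runlist not mapped at all */
        rl = rl_find(rl, vcn);
        if (!rl->length || vcn < rl->vcn)
            return -2;                      /* not covered by this fragment */
        if (rl->lcn < 0)
            return rl->lcn;                 /* hole or other special code */
        return rl->lcn + (vcn - rl->vcn);   /* offset inside the run */
    }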
|
D | logfile.c |
   718  runlist_element *rl;   in ntfs_empty_logfile() local
   745  rl = log_ni->runlist.rl;   in ntfs_empty_logfile()
   746  if (unlikely(!rl || vcn < rl->vcn || !rl->length)) {   in ntfs_empty_logfile()
   754  rl = log_ni->runlist.rl;   in ntfs_empty_logfile()
   755  BUG_ON(!rl || vcn < rl->vcn || !rl->length);   in ntfs_empty_logfile()
   758  while (rl->length && vcn >= rl[1].vcn)   in ntfs_empty_logfile()
   759  rl++;   in ntfs_empty_logfile()
   769  lcn = rl->lcn;   in ntfs_empty_logfile()
   771  vcn = rl->vcn;   in ntfs_empty_logfile()
   775  if (unlikely(!rl->length || lcn < LCN_HOLE))   in ntfs_empty_logfile()
    [all …]
|
D | aops.c |
   173  runlist_element *rl;   in ntfs_read_block() local
   186  BUG_ON(!ni->runlist.rl && !ni->mft_no && !NInoAttr(ni));   in ntfs_read_block()
   225  rl = NULL;   in ntfs_read_block()
   246  if (!rl) {   in ntfs_read_block()
   249  rl = ni->runlist.rl;   in ntfs_read_block()
   251  if (likely(rl != NULL)) {   in ntfs_read_block()
   253  while (rl->length && rl[1].vcn <= vcn)   in ntfs_read_block()
   254  rl++;   in ntfs_read_block()
   255  lcn = ntfs_rl_vcn_to_lcn(rl, vcn);   in ntfs_read_block()
   286  rl = NULL;   in ntfs_read_block()
    [all …]
|
D | file.c |
   574  runlist_element *rl, *rl2;   in ntfs_prepare_pages_for_non_resident_write() local
   619  rl = NULL;   in ntfs_prepare_pages_for_non_resident_write()
   822  if (!rl) {   in ntfs_prepare_pages_for_non_resident_write()
   825  rl = ni->runlist.rl;   in ntfs_prepare_pages_for_non_resident_write()
   827  if (likely(rl != NULL)) {   in ntfs_prepare_pages_for_non_resident_write()
   829  while (rl->length && rl[1].vcn <= bh_cpos)   in ntfs_prepare_pages_for_non_resident_write()
   830  rl++;   in ntfs_prepare_pages_for_non_resident_write()
   831  lcn = ntfs_rl_vcn_to_lcn(rl, bh_cpos);   in ntfs_prepare_pages_for_non_resident_write()
   839  vcn_len = rl[1].vcn - vcn;   in ntfs_prepare_pages_for_non_resident_write()
   856  rl = NULL;   in ntfs_prepare_pages_for_non_resident_write()
    [all …]
|
D | compress.c |
   470  runlist_element *rl;   in ntfs_read_compressed_block() local
   595  rl = NULL;   in ntfs_read_compressed_block()
   600  if (!rl) {   in ntfs_read_compressed_block()
   603  rl = ni->runlist.rl;   in ntfs_read_compressed_block()
   605  if (likely(rl != NULL)) {   in ntfs_read_compressed_block()
   607  while (rl->length && rl[1].vcn <= vcn)   in ntfs_read_compressed_block()
   608  rl++;   in ntfs_read_compressed_block()
   609  lcn = ntfs_rl_vcn_to_lcn(rl, vcn);   in ntfs_read_compressed_block()
   646  if (rl)   in ntfs_read_compressed_block()
|
D | lcnalloc.h |
   100  const runlist_element *rl);
   119  const runlist_element *rl)   in ntfs_cluster_free_from_rl() argument
   124  ret = ntfs_cluster_free_from_rl_nolock(vol, rl);   in ntfs_cluster_free_from_rl()
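lcnalloc.h shows the common locked/_nolock split: the inline ntfs_cluster_free_from_rl() is a thin wrapper that takes the lock, calls the _nolock worker from lcnalloc.c, and releases the lock again. The shape of that wrapper, sketched with a pthread mutex standing in for the kernel lock and a stub worker.

    #include <pthread.h>

    struct rl_elem { long long vcn, lcn, length; };

    static pthread_mutex_t lcnbmp_lock = PTHREAD_MUTEX_INITIALIZER;

    /* Worker: the caller must already hold lcnbmp_lock. */
    static int free_from_rl_nolock(const struct rl_elem *rl)
    {
        /* ... clear bitmap runs as in the lcnalloc.c sketch above ... */
        (void)rl;
        return 0;
    }

    /* Public wrapper: serialise against other bitmap users, then delegate. */
    static inline int free_from_rl(const struct rl_elem *rl)
    {
        int ret;

        pthread_mutex_lock(&lcnbmp_lock);
        ret = free_from_rl_nolock(rl);
        pthread_mutex_unlock(&lcnbmp_lock);
        return ret;
    }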
|
D | debug.h |
    33  extern void ntfs_debug_dump_runlist(const runlist_element *rl);
    43  #define ntfs_debug_dump_runlist(rl) do {} while (0)   argument
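debug.h shows the usual debug on/off split: with debugging enabled there is a real ntfs_debug_dump_runlist() function, otherwise the name is defined away as a do {} while (0) statement so call sites compile to nothing yet still parse as a single statement. A tiny, self-contained illustration of the idiom; build with -DDEBUG_RUNLIST to get the real dumper.

    #include <stdio.h>

    #ifdef DEBUG_RUNLIST
    static void debug_dump_runlist(void *rl)
    {
        /* Real dumper: walk the elements and print them. */
        printf("dumping runlist at %p\n", rl);
    }
    #else
    /*
     * do {} while (0) keeps the macro a single statement, so
     * "if (x) debug_dump_runlist(rl); else ..." still parses correctly.
     */
    #define debug_dump_runlist(rl) do {} while (0)
    #endif

    int main(void)
    {
        int verbose = 1;

        if (verbose)
            debug_dump_runlist(NULL);   /* compiles away when disabled */
        else
            puts("quiet");
        return 0;
    }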
|
D | inode.c |
   721  ni->attr_list_rl.rl = ntfs_mapping_pairs_decompress(vol,   in ntfs_read_locked_inode()
   723  if (IS_ERR(ni->attr_list_rl.rl)) {   in ntfs_read_locked_inode()
   724  err = PTR_ERR(ni->attr_list_rl.rl);   in ntfs_read_locked_inode()
   725  ni->attr_list_rl.rl = NULL;   in ntfs_read_locked_inode()
  1910  ni->attr_list_rl.rl = ntfs_mapping_pairs_decompress(vol,   in ntfs_read_inode_mount()
  1912  if (IS_ERR(ni->attr_list_rl.rl)) {   in ntfs_read_inode_mount()
  1913  err = PTR_ERR(ni->attr_list_rl.rl);   in ntfs_read_inode_mount()
  1914  ni->attr_list_rl.rl = NULL;   in ntfs_read_inode_mount()
  2041  nrl = ntfs_mapping_pairs_decompress(vol, a, ni->runlist.rl);   in ntfs_read_inode_mount()
  2048  ni->runlist.rl = nrl;   in ntfs_read_inode_mount()
    [all …]
|
D | attrib.h |
    64  extern int load_attribute_list(ntfs_volume *vol, runlist *rl, u8 *al_start,
|
D | super.c |
  1079  runlist_element *rl, rl2[2];   in check_mft_mirror() local
  1179  rl = mirr_ni->runlist.rl;   in check_mft_mirror()
  1183  if (rl2[i].vcn != rl[i].vcn || rl2[i].lcn != rl[i].lcn ||   in check_mft_mirror()
  1184  rl2[i].length != rl[i].length) {   in check_mft_mirror()
|
/fs/dlm/ |
D | rcom.c |
   411  struct rcom_lock *rl)   in pack_rcom_lock() argument
   413  memset(rl, 0, sizeof(*rl));   in pack_rcom_lock()
   415  rl->rl_ownpid = cpu_to_le32(lkb->lkb_ownpid);   in pack_rcom_lock()
   416  rl->rl_lkid = cpu_to_le32(lkb->lkb_id);   in pack_rcom_lock()
   417  rl->rl_exflags = cpu_to_le32(lkb->lkb_exflags);   in pack_rcom_lock()
   418  rl->rl_flags = cpu_to_le32(lkb->lkb_flags);   in pack_rcom_lock()
   419  rl->rl_lvbseq = cpu_to_le32(lkb->lkb_lvbseq);   in pack_rcom_lock()
   420  rl->rl_rqmode = lkb->lkb_rqmode;   in pack_rcom_lock()
   421  rl->rl_grmode = lkb->lkb_grmode;   in pack_rcom_lock()
   422  rl->rl_status = lkb->lkb_status;   in pack_rcom_lock()
    [all …]
|
D | lock.c |
  5558  struct rcom_lock *rl = (struct rcom_lock *) rc->rc_buf;   in receive_rcom_lock_args() local
  5561  lkb->lkb_ownpid = le32_to_cpu(rl->rl_ownpid);   in receive_rcom_lock_args()
  5562  lkb->lkb_remid = le32_to_cpu(rl->rl_lkid);   in receive_rcom_lock_args()
  5563  lkb->lkb_exflags = le32_to_cpu(rl->rl_exflags);   in receive_rcom_lock_args()
  5564  lkb->lkb_flags = le32_to_cpu(rl->rl_flags) & 0x0000FFFF;   in receive_rcom_lock_args()
  5566  lkb->lkb_lvbseq = le32_to_cpu(rl->rl_lvbseq);   in receive_rcom_lock_args()
  5567  lkb->lkb_rqmode = rl->rl_rqmode;   in receive_rcom_lock_args()
  5568  lkb->lkb_grmode = rl->rl_grmode;   in receive_rcom_lock_args()
  5571  lkb->lkb_bastfn = (rl->rl_asts & DLM_CB_BAST) ? &fake_bastfn : NULL;   in receive_rcom_lock_args()
  5572  lkb->lkb_astfn = (rl->rl_asts & DLM_CB_CAST) ? &fake_astfn : NULL;   in receive_rcom_lock_args()
    [all …]
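The rcom.c and lock.c rows are the two halves of the same wire format: pack_rcom_lock() copies lock state into an rcom_lock buffer with the 32-bit fields converted to little-endian (cpu_to_le32), and receive_rcom_lock_args() converts them back (le32_to_cpu) on the node that receives the recovery message, masking the flags to their low 16 bits. A small user-space sketch of such a pack/unpack pair; the struct layouts and field names are invented, and glibc's <endian.h> helpers stand in for the kernel's cpu_to_le32/le32_to_cpu.

    #include <endian.h>     /* htole32()/le32toh(): glibc endian helpers */
    #include <stdint.h>
    #include <string.h>

    /* Invented in-memory lock state (not the real dlm_lkb layout). */
    struct lock_state {
        uint32_t ownpid, id, exflags, flags, lvbseq;
        uint8_t rqmode, grmode, status;
    };

    /* Invented wire format: every multi-byte field is little-endian. */
    struct wire_lock {
        uint32_t w_ownpid, w_lkid, w_exflags, w_flags, w_lvbseq;
        uint8_t  w_rqmode, w_grmode, w_status;
    };

    static void pack_lock(const struct lock_state *lkb, struct wire_lock *rl)
    {
        memset(rl, 0, sizeof(*rl));     /* no uninitialised padding on the wire */
        rl->w_ownpid  = htole32(lkb->ownpid);
        rl->w_lkid    = htole32(lkb->id);
        rl->w_exflags = htole32(lkb->exflags);
        rl->w_flags   = htole32(lkb->flags);
        rl->w_lvbseq  = htole32(lkb->lvbseq);
        rl->w_rqmode  = lkb->rqmode;    /* single bytes need no swapping */
        rl->w_grmode  = lkb->grmode;
        rl->w_status  = lkb->status;
    }

    static void unpack_lock(const struct wire_lock *rl, struct lock_state *lkb)
    {
        lkb->ownpid  = le32toh(rl->w_ownpid);
        lkb->id      = le32toh(rl->w_lkid);
        lkb->exflags = le32toh(rl->w_exflags);
        lkb->flags   = le32toh(rl->w_flags) & 0x0000FFFF;  /* keep low flag bits only */
        lkb->lvbseq  = le32toh(rl->w_lvbseq);
        lkb->rqmode  = rl->w_rqmode;
        lkb->grmode  = rl->w_grmode;
        lkb->status  = rl->w_status;
    }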
|
/fs/ocfs2/ |
D | refcounttree.c |
  1241  struct ocfs2_refcount_list *rl = &rb->rf_records;   in ocfs2_change_refcount_rec() local
  1242  struct ocfs2_refcount_rec *rec = &rl->rl_recs[index];   in ocfs2_change_refcount_rec()
  1257  if (index != le16_to_cpu(rl->rl_used) - 1) {   in ocfs2_change_refcount_rec()
  1259  (le16_to_cpu(rl->rl_used) - index - 1) *   in ocfs2_change_refcount_rec()
  1261  memset(&rl->rl_recs[le16_to_cpu(rl->rl_used) - 1],   in ocfs2_change_refcount_rec()
  1265  le16_add_cpu(&rl->rl_used, -1);   in ocfs2_change_refcount_rec()
  1411  static int ocfs2_find_refcount_split_pos(struct ocfs2_refcount_list *rl,   in ocfs2_find_refcount_split_pos() argument
  1414  int num_used = le16_to_cpu(rl->rl_used);   in ocfs2_find_refcount_split_pos()
  1420  &rl->rl_recs[middle - delta - 1],   in ocfs2_find_refcount_split_pos()
  1421  &rl->rl_recs[middle - delta])) {   in ocfs2_find_refcount_split_pos()
    [all …]
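The refcounttree.c hits around lines 1257-1265 show how a record is dropped from the middle of the packed refcount list: the tail of the array is shifted down, the vacated slot is zeroed, and the used counter is decremented in place (le16_add_cpu on the on-disk __le16). A host-endian sketch of that delete-from-packed-array step, with a plain uint16_t counter and a simplified record type instead of the on-disk little-endian structures.

    #include <stdint.h>
    #include <string.h>

    /* Simplified record and list; the real on-disk structures are little-endian. */
    struct ref_rec { uint64_t cpos, clusters; uint32_t refcount; };

    struct ref_list {
        uint16_t used;              /* number of valid records */
        struct ref_rec recs[16];    /* fixed-size array inside the block */
    };

    /* Drop record @index, keeping the remaining records packed at the front. */
    static void ref_list_remove(struct ref_list *rl, unsigned int index)
    {
        if (index >= rl->used)
            return;
        if (index != rl->used - 1u)     /* not the last one: close the gap */
            memmove(&rl->recs[index], &rl->recs[index + 1],
                    (size_t)(rl->used - index - 1u) * sizeof(rl->recs[0]));
        /* Zero the vacated last slot so stale data never reaches the disk. */
        memset(&rl->recs[rl->used - 1u], 0, sizeof(rl->recs[0]));
        rl->used--;
    }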
|
/fs/qnx4/ |
D | inode.c |
   155  int rd, rl;   in qnx4_checkroot() local
   162  rl = le32_to_cpu(s->RootDir.di_first_xtnt.xtnt_size);   in qnx4_checkroot()
   163  for (j = 0; j < rl; j++) {   in qnx4_checkroot()
|
/fs/incfs/ |
D | data_mgmt.c |
    30  struct read_log *rl = container_of(dw, struct read_log, ml_wakeup_work);   in log_wake_up_all() local
    31  wake_up_all(&rl->ml_notif_wq);   in log_wake_up_all()
   490  static void log_read_one_record(struct read_log *rl, struct read_log_state *rs)   in log_read_one_record() argument
   493  (union log_record *)((u8 *)rl->rl_ring_buf + rs->next_offset);   in log_read_one_record()
   543  if (rs->next_offset > rl->rl_size - sizeof(*record)) {   in log_read_one_record()
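The data_mgmt.c hits outline the read-log ring buffer: a per-reader state carries next_offset into rl_ring_buf, the record at that offset is read out, and when the offset gets too close to the end of the buffer to hold another whole record it wraps back to 0. A minimal single-reader sketch of that wrap logic; the field names are illustrative, and the kernel's variable-size records and the wakeup machinery from lines 30-31 are left out.

    #include <stdint.h>
    #include <string.h>

    struct log_record { uint64_t block_index; uint64_t timestamp_us; };

    /* The ring buffer itself plus how many records have ever been written. */
    struct read_log {
        uint8_t *ring_buf;
        size_t   size;              /* bytes; a multiple of sizeof(struct log_record) */
        uint64_t total_written;
    };

    /* Per-reader cursor. */
    struct read_log_state {
        size_t   next_offset;       /* byte offset of the next record to read */
        uint64_t records_read;
    };

    /*
     * Copy out one record and advance the cursor, wrapping at the end.
     * The caller has already checked that an unread record exists
     * (records_read < total_written).
     */
    static void read_one_record(const struct read_log *rl, struct read_log_state *rs,
                                struct log_record *out)
    {
        const struct log_record *record =
            (const struct log_record *)(rl->ring_buf + rs->next_offset);

        memcpy(out, record, sizeof(*out));
        rs->records_read++;

        rs->next_offset += sizeof(*record);
        if (rs->next_offset > rl->size - sizeof(*record))
            rs->next_offset = 0;    /* no room for a whole record: wrap */
    }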
|
/fs/ |
D | binfmt_flat.c |
   339  static void old_reloc(unsigned long rl)   in old_reloc() argument
   346  r.value = rl;   in old_reloc()
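binfmt_flat.c's old_reloc() takes the raw relocation word and drops it into a union so that its bitfields can be pulled back out of the single value. A small stand-in showing the union trick; the field widths and type values below are made up for illustration and are not bFLT's actual encoding.

    #include <stdio.h>
    #include <stdint.h>

    /*
     * Pack a relocation as one 32-bit word: a 2-bit segment tag plus a
     * 30-bit offset.  (Illustrative layout only; bitfield order is
     * compiler-specific, which is why the word is built through the union.)
     */
    union old_reloc_word {
        uint32_t value;
        struct {
            uint32_t offset : 30;
            uint32_t type   : 2;
        } reloc;
    };

    enum { RELOC_TEXT = 0, RELOC_DATA = 1, RELOC_BSS = 2 };

    static void decode_old_reloc(uint32_t rl)
    {
        union old_reloc_word r;
        static const char *const seg[] = { "TEXT", "DATA", "BSS", "?" };

        r.value = rl;   /* reinterpret the raw word through the union */
        printf("reloc: segment=%s offset=0x%x\n",
               seg[r.reloc.type], (unsigned)r.reloc.offset);
    }

    int main(void)
    {
        union old_reloc_word w = { 0 };

        w.reloc.type = RELOC_DATA;
        w.reloc.offset = 0x1234;
        decode_old_reloc(w.value);  /* round-trips regardless of bitfield order */
        return 0;
    }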
|