/fs/ntfs3/ (matches for the identifier evcn, grouped by file)
attrib.c
    92  CLST evcn = le64_to_cpu(attr->nres.evcn);  [in attr_load_runs(), local]
    96  if (svcn >= evcn + 1 || run_is_mapped_full(run, svcn, evcn))  [in attr_load_runs()]
    99  if (vcn && (evcn < *vcn || *vcn < svcn))  [in attr_load_runs()]
   108  err = run_unpack_ex(run, ni->mi.sbi, ni->mi.rno, svcn, evcn,  [in attr_load_runs()]
   424  CLST alen, vcn, lcn, new_alen, old_alen, svcn, evcn;  [in attr_set_size(), local]
   474  evcn = le64_to_cpu(attr_b->nres.evcn);  [in attr_set_size()]
   476  if (svcn <= vcn && vcn <= evcn) {  [in attr_set_size()]
   494  evcn = le64_to_cpu(attr->nres.evcn);  [in attr_set_size()]
   589  next_svcn = le64_to_cpu(attr->nres.evcn) + 1;  [in attr_set_size()]
   610  evcn = old_alen - 1;  [in attr_set_size()]
   [all …]
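The attr_load_runs() hits above treat [svcn, evcn] as an inclusive VCN window: bail out when the window is empty or already mapped (line 96), and reject a caller-supplied vcn that falls outside it (line 99). A minimal standalone sketch of that gate; the helper name and the CLST typedef are stand-ins, not driver code:

    #include <errno.h>

    typedef unsigned int CLST;   /* stand-in for the driver's cluster type */

    /* Mirrors the checks at lines 96 and 99: [svcn, evcn] is inclusive,
     * so svcn >= evcn + 1 means the window is empty, and any requested
     * vcn must lie inside the window. */
    static int check_load_window(CLST svcn, CLST evcn, const CLST *vcn)
    {
        if (svcn >= evcn + 1)
            return 0;                 /* empty window: nothing to load */
        if (vcn && (evcn < *vcn || *vcn < svcn))
            return -EINVAL;           /* requested vcn outside [svcn, evcn] */
        return 1;                     /* caller should go on and unpack */
    }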
frecord.c
   229  *vcn > le64_to_cpu(attr->nres.evcn)) {  [in ni_find_attr()]
   326  vcn <= le64_to_cpu(attr->nres.evcn))  [in ni_load_attr()]
   526  CLST svcn, evcn = 0, svcn_p, evcn_p, next_svcn;  [in ni_repack(), local]
   546  } else if (svcn != evcn + 1) {  [in ni_repack()]
   551  evcn = le64_to_cpu(attr->nres.evcn);  [in ni_repack()]
   553  if (svcn > evcn + 1) {  [in ni_repack()]
   564  if (evcn + 1 == alloc)  [in ni_repack()]
   576  err = run_unpack(&run, sbi, ni->mi.rno, svcn, evcn, svcn,  [in ni_repack()]
   586  evcn_p = evcn;  [in ni_repack()]
   596  err = mi_pack_runs(mi_p, attr_p, &run, evcn + 1 - svcn_p);  [in ni_repack()]
   [all …]
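The ni_repack() hits encode a tiling rule for multi-record attributes: each segment's svcn must be exactly the previous segment's evcn + 1 (lines 546 and 553), and the final segment's evcn + 1 should reach the allocated cluster count (line 564); line 596 uses the same inclusive convention to compute a length as evcn + 1 - svcn. An illustrative checker under those assumptions; the struct and function are hypothetical:

    #include <stdbool.h>

    typedef unsigned int CLST;

    struct seg { CLST svcn, evcn; };   /* one segment, inclusive VCN range */

    /* True when n segments tile [0, alloc) with no gap, overlap, or
     * inverted range; segment length is evcn + 1 - svcn. */
    static bool segments_tile(const struct seg *s, int n, CLST alloc)
    {
        CLST next = 0;                 /* expected svcn of the next segment */
        for (int i = 0; i < n; i++) {
            if (s[i].svcn != next || s[i].evcn < s[i].svcn)
                return false;
            next = s[i].evcn + 1;
        }
        return next == alloc;          /* last segment ends the allocation */
    }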
run.c
   167  bool run_is_mapped_full(const struct runs_tree *run, CLST svcn, CLST evcn)  [in run_is_mapped_full(), argument]
   181  if (next_vcn > evcn)  [in run_is_mapped_full()]
   874  CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,  [in run_unpack(), argument]
   885  if (evcn + 1 == svcn)  [in run_unpack()]
   888  if (evcn < svcn)  [in run_unpack()]
   945  if (next_vcn > evcn + 1)  [in run_unpack()]
   987  if (vcn64 != evcn + 1) {  [in run_unpack()]
  1004  CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,  [in run_unpack_ex(), argument]
  1013  ret = run_unpack(run, sbi, ino, svcn, evcn, vcn, run_buf, run_buf_size);  [in run_unpack_ex()]
  1027  next_vcn <= evcn;  [in run_unpack_ex()]
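The run_unpack() hits spell out the boundary cases for the same inclusive range: evcn + 1 == svcn is the legal empty run (line 885), evcn < svcn is corruption (line 888), and after decoding, the cursor must land exactly on evcn + 1 (lines 945 and 987). A compact sketch of just those checks, with the actual mapping-pair decoding elided; the validator is invented for illustration:

    #include <errno.h>

    typedef unsigned long long u64;

    /* vcn_after is where a decoder's cursor ended up; the driver checks
     * overshoot per extent while decoding, then the final position. */
    static int validate_unpack_window(u64 svcn, u64 evcn, u64 vcn_after)
    {
        if (evcn + 1 == svcn)
            return 0;                  /* empty run: valid, nothing decoded */
        if (evcn < svcn)
            return -EINVAL;            /* inverted range: corrupt attribute */
        if (vcn_after != evcn + 1)
            return -EINVAL;            /* runs overshoot or fall short of evcn */
        return 0;
    }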
fslog.c
  2678  u64 dsize, svcn, evcn;  [in check_attr(), local]
  2703  evcn = le64_to_cpu(attr->nres.evcn);  [in check_attr()]
  2706  if (svcn > evcn + 1 || run_off >= asize ||  [in check_attr()]
  2715  if (run_unpack(NULL, sbi, 0, svcn, evcn, svcn,  [in check_attr()]
  2990  attr->nres.evcn = cpu_to_le64((u64)bytes_to_cluster(sbi, size) - 1);  [in attr_create_nonres_log()]
  3344  attr->nres.evcn = cpu_to_le64(t64);  [in do_action()]
  3347  oa2->attr->nres.evcn = attr->nres.evcn;  [in do_action()]
  4768  le64_to_cpu(attr->nres.evcn), svcn,  [in log_replay()]
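Line 2990 derives an end VCN from a byte size: round the size up to whole clusters, then subtract one, since evcn indexes the last cluster inclusively. A sketch of that arithmetic, assuming bytes_to_cluster() rounds up and cluster_size > 0 (here a plain parameter where the driver reads it from sbi):

    typedef unsigned long long u64;

    /* Clusters needed for `size` bytes, minus one; yields (u64)-1 for
     * size == 0, matching the empty-attribute encoding in inode.c below. */
    static u64 size_to_evcn(u64 size, u64 cluster_size)
    {
        u64 clusters = (size + cluster_size - 1) / cluster_size;  /* round up */
        return clusters - 1;
    }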
ntfs_fs.h
   789  bool run_is_mapped_full(const struct runs_tree *run, CLST svcn, CLST evcn);
   794  CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,
   799  CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,
attrlist.c
    78  0, le64_to_cpu(attr->nres.evcn), 0,  [in ntfs_load_attr_list()]
inode.c
   392  err = run_unpack_ex(run, sbi, ino, t64, le64_to_cpu(attr->nres.evcn),  [in ntfs_read_mft()]
  1457  attr->nres.evcn = cpu_to_le64(-1ll);  [in ntfs_create_inode()]
  1528  attr->nres.evcn = cpu_to_le64(clst - 1);  [in ntfs_create_inode()]
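Lines 1457 and 1528 show the same encoding from two sides: an attribute holding clst clusters starting at VCN 0 has evcn = clst - 1, so a still-empty attribute stores -1 (all ones). A one-line illustration; the helper name is invented and the le64 conversion is elided:

    #include <stdint.h>

    /* evcn for clst clusters starting at VCN 0; -1 when still empty. */
    static int64_t clusters_to_evcn(uint64_t clst)
    {
        return (int64_t)clst - 1;
    }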
record.c
   620  attr->nres.evcn = cpu_to_le64(svcn + plen - 1);  [in mi_pack_runs()]
ntfs.h
   333  __le64 evcn; // 0x18: End VCN of this segment.  [member]