Lines Matching refs:vcn
88 struct runs_tree *run, const CLST *vcn) in attr_load_runs() argument
99 if (vcn && (evcn < *vcn || *vcn < svcn)) in attr_load_runs()
109 vcn ? *vcn : svcn, Add2Ptr(attr, run_off), in attr_load_runs()
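
(On each hit, the leading number is the line in the source file and the trailing name is the enclosing function.) The attr_load_runs() hits show the usual guard before decoding an attribute's mapping pairs: when a target VCN is supplied, it must fall inside the segment's [svcn, evcn] window, since a segment cannot describe clusters outside it. A minimal userspace model of that guard; the CLST typedef is illustrative (ntfs3 uses a 32-bit cluster number):

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint32_t CLST;  /* cluster number; 32-bit in ntfs3 */

    /* Accept a lookup only if the requested VCN lies inside the segment
     * window -- the inverse of the `evcn < *vcn || *vcn < svcn` reject
     * on line 99 above. A NULL vcn means "load from svcn". */
    bool vcn_in_segment(const CLST *vcn, CLST svcn, CLST evcn)
    {
        return !vcn || (svcn <= *vcn && *vcn <= evcn);
    }
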
121 CLST vcn, CLST len, CLST *done, bool trim) in run_deallocate_ex() argument
124 CLST vcn_next, vcn0 = vcn, lcn, clen, dn = 0; in run_deallocate_ex()
130 if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx)) { in run_deallocate_ex()
155 vcn_next = vcn + clen; in run_deallocate_ex()
156 if (!run_get_entry(run, ++idx, &vcn, &lcn, &clen) || in run_deallocate_ex()
157 vcn != vcn_next) { in run_deallocate_ex()
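
run_deallocate_ex() is the canonical run-list walk: seed a cursor with run_lookup_entry(), then advance by index with run_get_entry(), insisting that each entry begin exactly where the previous one ended (the vcn_next check on lines 155-157). A self-contained sketch of that traversal over a sorted extent array standing in for the kernel's runs_tree:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef uint32_t CLST;

    struct run { CLST vcn, lcn, len; };   /* one extent: VCN -> LCN */

    /* Visit `count` clusters starting at `vcn`, requiring contiguity in
     * VCN space as run_deallocate_ex() does; returns false on a gap. */
    bool walk_runs(const struct run *runs, size_t n, CLST vcn, CLST count)
    {
        size_t idx;

        /* stand-in for run_lookup_entry(): find the run containing vcn */
        for (idx = 0; idx < n; idx++)
            if (runs[idx].vcn <= vcn && vcn < runs[idx].vcn + runs[idx].len)
                break;
        if (idx == n)
            return false;

        for (;;) {
            CLST clen = runs[idx].len - (vcn - runs[idx].vcn);

            if (clen >= count)
                return true;            /* range fully covered */
            count -= clen;

            /* stand-in for run_get_entry(run, ++idx, ...) */
            CLST vcn_next = vcn + clen;
            if (++idx == n || runs[idx].vcn != vcn_next)
                return false;           /* hole in the mapping */
            vcn = vcn_next;
        }
    }
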
174 CLST vcn, CLST lcn, CLST len, CLST *pre_alloc, in attr_allocate_clusters() argument
179 CLST flen, vcn0 = vcn, pre = pre_alloc ? *pre_alloc : 0; in attr_allocate_clusters()
197 if (new_lcn && vcn == vcn0) in attr_allocate_clusters()
201 if (!run_add_entry(run, vcn, lcn, flen, opt == ALLOCATE_MFT)) { in attr_allocate_clusters()
210 vcn += flen; in attr_allocate_clusters()
214 *alen = vcn - vcn0; in attr_allocate_clusters()
223 if (vcn - vcn0) { in attr_allocate_clusters()
224 run_deallocate_ex(sbi, run, vcn0, vcn - vcn0, NULL, false); in attr_allocate_clusters()
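
attr_allocate_clusters() grows the mapping in chunks: the volume bitmap allocator may satisfy less than the full request at once (flen), so the loop keeps appending runs and advancing vcn until the request is met, and on any failure it rolls back everything added since vcn0 (line 224). A compact model of that grow-or-undo shape; alloc_chunk(), add_run() and free_range() are hypothetical stand-ins for the allocator, run_add_entry() and run_deallocate_ex():

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint32_t CLST;

    /* Hypothetical allocator: grabs up to `want` clusters near `hint`,
     * returning the count actually obtained and their starting LCN. */
    CLST alloc_chunk(CLST hint, CLST want, CLST *lcn);
    bool add_run(CLST vcn, CLST lcn, CLST len);     /* run_add_entry()     */
    void free_range(CLST vcn, CLST len);            /* run_deallocate_ex() */

    bool allocate_clusters(CLST vcn, CLST len)
    {
        CLST vcn0 = vcn, lcn = 0;

        while (len) {
            CLST flen = alloc_chunk(lcn, len, &lcn);

            if (!flen || !add_run(vcn, lcn, flen))
                goto undo;
            vcn += flen;
            len -= flen;
        }
        return true;

    undo:
        /* undo every cluster added since vcn0, as line 224 does */
        if (vcn > vcn0)
            free_range(vcn0, vcn - vcn0);
        return false;
    }
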
424 CLST alen, vcn, lcn, new_alen, old_alen, svcn, evcn; in attr_set_size() local
471 vcn = old_alen - 1; in attr_set_size()
476 if (svcn <= vcn && vcn <= evcn) { in attr_set_size()
485 attr = ni_find_attr(ni, attr_b, &le, type, name, name_len, &vcn, in attr_set_size()
534 !run_lookup_entry(run, vcn, &lcn, NULL, NULL)) { in attr_set_size()
553 vcn = old_alen; in attr_set_size()
556 if (!run_add_entry(run, vcn, SPARSE_LCN, to_allocate, in attr_set_size()
565 sbi, run, vcn, lcn, to_allocate, &pre_alloc, in attr_set_size()
578 vcn += alen; in attr_set_size()
585 err = mi_pack_runs(mi, attr, run, vcn - svcn); in attr_set_size()
594 if (next_svcn >= vcn && !to_allocate) { in attr_set_size()
601 if (is_mft && next_svcn == vcn && in attr_set_size()
622 if (next_svcn < vcn) in attr_set_size()
636 if (next_svcn >= vcn) { in attr_set_size()
643 next_svcn, vcn - next_svcn, in attr_set_size()
666 attr_b->nres.alloc_size = cpu_to_le64((u64)vcn << cluster_bits); in attr_set_size()
675 vcn = max(svcn, new_alen); in attr_set_size()
676 new_alloc_tmp = (u64)vcn << cluster_bits; in attr_set_size()
679 err = run_deallocate_ex(sbi, run, vcn, evcn - vcn + 1, &alen, in attr_set_size()
684 run_truncate(run, vcn); in attr_set_size()
686 if (vcn > svcn) { in attr_set_size()
687 err = mi_pack_runs(mi, attr, run, vcn - svcn); in attr_set_size()
690 } else if (le && le->vcn) { in attr_set_size()
709 attr->nres.evcn = cpu_to_le64((u64)vcn - 1); in attr_set_size()
715 if (vcn == new_alen) { in attr_set_size()
741 vcn = svcn - 1; in attr_set_size()
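
The attr_set_size() hits split into the two halves of a resize. Growing adds sparse (line 556) or real (line 565) runs until vcn reaches new_alen, then packs the delta with mi_pack_runs(mi, attr, run, vcn - svcn). Shrinking frees evcn - vcn + 1 clusters from vcn = max(svcn, new_alen), truncates the run list there, and pulls evcn back to vcn - 1; the byte counters are plain shifts by cluster_bits. The shrink arithmetic with illustrative numbers (4 KiB clusters, one segment):

    #include <stdint.h>
    #include <stdio.h>

    typedef uint32_t CLST;

    int main(void)
    {
        unsigned cluster_bits = 12;        /* 4 KiB clusters */
        CLST svcn = 0, evcn = 1023;        /* segment maps VCNs 0..1023 */
        CLST new_alen = 256;               /* shrink to 256 clusters */

        CLST vcn = svcn > new_alen ? svcn : new_alen;  /* max(), line 675 */
        CLST to_free = evcn - vcn + 1;                 /* line 679 */
        uint64_t alloc_size = (uint64_t)vcn << cluster_bits;  /* line 676 */

        /* prints: free 768 clusters from VCN 256; alloc_size=1048576;
         * evcn=255 -- the vcn - 1 matching line 709 */
        printf("free %u clusters from VCN %u; alloc_size=%llu; evcn=%u\n",
               (unsigned)to_free, (unsigned)vcn,
               (unsigned long long)alloc_size, (unsigned)(vcn - 1));
        return 0;
    }
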
810 int attr_data_get_block(struct ntfs_inode *ni, CLST vcn, CLST clen, CLST *lcn, in attr_data_get_block() argument
829 ok = run_lookup_entry(run, vcn, lcn, len, NULL); in attr_data_get_block()
863 if (vcn >= asize) { in attr_data_get_block()
871 if (vcn + to_alloc > asize) in attr_data_get_block()
872 to_alloc = asize - vcn; in attr_data_get_block()
881 if (le_b && (vcn < svcn || evcn1 <= vcn)) { in attr_data_get_block()
882 attr = ni_find_attr(ni, attr_b, &le, ATTR_DATA, NULL, 0, &vcn, in attr_data_get_block()
897 ok = run_lookup_entry(run, vcn, lcn, len, NULL); in attr_data_get_block()
925 if (vcn > evcn1) { in attr_data_get_block()
926 if (!run_add_entry(run, evcn1, SPARSE_LCN, vcn - evcn1, in attr_data_get_block()
931 } else if (vcn && !run_lookup_entry(run, vcn - 1, &hint, NULL, NULL)) { in attr_data_get_block()
936 sbi, run, vcn, hint + 1, to_alloc, NULL, 0, len, in attr_data_get_block()
943 end = vcn + *len; in attr_data_get_block()
1044 le->vcn = cpu_to_le64(next_svcn); in attr_data_get_block()
1059 run_truncate_around(run, vcn); in attr_data_get_block()
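
attr_data_get_block() tries the cached run list first (line 829), reloads the attribute segment covering vcn on a miss (lines 881-897), and only then allocates. Two details recur: a gap between evcn1 and vcn is first plugged with SPARSE_LCN (lines 925-926), and the allocator is hinted with the LCN just before vcn so new extents extend the previous one (line 931). A small model of the hint derivation; lookup_lcn() is a hypothetical stand-in for run_lookup_entry():

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint32_t CLST;

    bool lookup_lcn(CLST vcn, CLST *lcn);   /* run_lookup_entry() stand-in */

    /* Placement hint for a new allocation at `vcn`: if the preceding VCN
     * is mapped, ask for the cluster right after it, as the `hint + 1`
     * on line 936 does. Zero means "no preference". */
    CLST alloc_hint(CLST vcn)
    {
        CLST hint;

        if (vcn && lookup_lcn(vcn - 1, &hint))
            return hint + 1;
        return 0;
    }
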
1142 CLST vcn) in attr_load_runs_vcn() argument
1154 attr = ni_find_attr(ni, NULL, NULL, type, name, name_len, &vcn, NULL); in attr_load_runs_vcn()
1163 if (evcn < vcn || vcn < svcn) { in attr_load_runs_vcn()
1189 CLST vcn = from >> cluster_bits; in attr_load_runs_range() local
1194 for (vcn = from >> cluster_bits; vcn <= vcn_last; vcn += clen) { in attr_load_runs_range()
1195 if (!run_lookup_entry(run, vcn, &lcn, &clen, NULL)) { in attr_load_runs_range()
1197 vcn); in attr_load_runs_range()
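
attr_load_runs_range() converts the byte range to VCNs and strides across it: a successful run_lookup_entry() returns clen, so each iteration can skip a whole mapped extent, and a miss triggers attr_load_runs_vcn() for that VCN before retrying. The same loop shape in a self-contained sketch; lookup() and load_segment() are hypothetical stand-ins for those two calls:

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint32_t CLST;

    bool lookup(CLST vcn, CLST *lcn, CLST *clen);   /* run_lookup_entry()   */
    bool load_segment(CLST vcn);                    /* attr_load_runs_vcn() */

    /* Ensure bytes [from, to) have their runs cached; to must be > from. */
    bool load_runs_range(uint64_t from, uint64_t to, unsigned cluster_bits)
    {
        CLST vcn_last = (CLST)((to - 1) >> cluster_bits);
        CLST lcn, clen;

        for (CLST vcn = (CLST)(from >> cluster_bits); vcn <= vcn_last;
             vcn += clen) {
            if (!lookup(vcn, &lcn, &clen) &&
                (!load_segment(vcn) || !lookup(vcn, &lcn, &clen)))
                return false;
            /* clen clusters now known mapped; stride over the extent */
        }
        return true;
    }
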
1363 CLST clen, lcn, vcn, alen, slen, vcn_next; in attr_is_frame_compressed() local
1376 vcn = frame * clst_frame; in attr_is_frame_compressed()
1379 if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx)) { in attr_is_frame_compressed()
1381 attr->name_len, run, vcn); in attr_is_frame_compressed()
1385 if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx)) in attr_is_frame_compressed()
1411 while ((vcn += clen) < alen) { in attr_is_frame_compressed()
1412 vcn_next = vcn; in attr_is_frame_compressed()
1414 if (!run_get_entry(run, ++idx, &vcn, &lcn, &clen) || in attr_is_frame_compressed()
1415 vcn_next != vcn) { in attr_is_frame_compressed()
1421 vcn = vcn_next; in attr_is_frame_compressed()
1423 if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx)) in attr_is_frame_compressed()
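
attr_is_frame_compressed() positions at the frame's first VCN (frame * clst_frame, line 1376) and walks the frame's runs with the same vcn_next contiguity check seen earlier, reloading segments as needed (lines 1411-1423); a frame backed by fewer real clusters than its nominal size is stored compressed, the remainder being SPARSE_LCN. A toy census of a frame's real clusters over a sorted extent array (SPARSE_LCN as all-ones follows ntfs3's convention):

    #include <stddef.h>
    #include <stdint.h>

    typedef uint32_t CLST;
    #define SPARSE_LCN ((CLST)-1)   /* hole marker, as in ntfs3 */

    struct run { CLST vcn, lcn, len; };

    /* Count non-sparse clusters inside one compression frame; a result
     * below clst_frame means the frame is stored compressed. Assumes
     * `runs` is sorted by vcn; unmapped gaps count as sparse. */
    CLST frame_data_clusters(const struct run *runs, size_t n,
                             CLST frame, CLST clst_frame)
    {
        CLST vcn = frame * clst_frame;
        CLST end = vcn + clst_frame;
        CLST clst_data = 0;

        for (size_t i = 0; i < n && vcn < end; i++) {
            if (vcn >= runs[i].vcn + runs[i].len)
                continue;               /* run ends before the cursor */
            if (vcn < runs[i].vcn)
                vcn = runs[i].vcn;      /* skip an unmapped gap */
            if (vcn >= end)
                break;

            CLST take = runs[i].vcn + runs[i].len - vcn;
            if (take > end - vcn)
                take = end - vcn;
            if (runs[i].lcn != SPARSE_LCN)
                clst_data += take;
            vcn += take;
        }
        return clst_data;
    }
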
1472 CLST vcn, end, clst_data; in attr_allocate_frame() local
1483 vcn = frame << NTFS_LZNT_CUNIT; in attr_allocate_frame()
1490 if (svcn <= vcn && vcn < evcn1) { in attr_allocate_frame()
1499 attr = ni_find_attr(ni, attr_b, &le, ATTR_DATA, NULL, 0, &vcn, in attr_allocate_frame()
1525 err = run_deallocate_ex(sbi, run, vcn + len, clst_data - len, in attr_allocate_frame()
1530 if (!run_add_entry(run, vcn + len, SPARSE_LCN, clst_data - len, in attr_allocate_frame()
1535 end = vcn + clst_data; in attr_allocate_frame()
1540 if (vcn + clst_data && in attr_allocate_frame()
1541 !run_lookup_entry(run, vcn + clst_data - 1, &hint, NULL, in attr_allocate_frame()
1546 err = attr_allocate_clusters(sbi, run, vcn + clst_data, in attr_allocate_frame()
1552 end = vcn + len; in attr_allocate_frame()
1653 le->vcn = cpu_to_le64(next_svcn); in attr_allocate_frame()
1668 run_truncate_around(run, vcn); in attr_allocate_frame()
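
attr_allocate_frame() addresses frames by shift, vcn = frame << NTFS_LZNT_CUNIT (line 1483), and then either frees a shrinking frame's tail and re-maps it sparse (lines 1525-1530) or allocates the shortfall at vcn + clst_data with an LCN hint from the last mapped cluster (lines 1540-1546). The frame arithmetic with sample numbers, assuming the usual 16-cluster LZNT compression unit:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint32_t CLST;
    #define NTFS_LZNT_CUNIT 4   /* 2^4 = 16 clusters per frame */

    int main(void)
    {
        CLST frame = 3;
        CLST vcn = frame << NTFS_LZNT_CUNIT;   /* first VCN of frame: 48 */
        CLST clst_data = 16, len = 10;         /* keep 10 real clusters */

        /* prints: free 6 clusters at VCN 58, re-map them as sparse */
        printf("free %u clusters at VCN %u, re-map them as sparse\n",
               (unsigned)(clst_data - len), (unsigned)(vcn + len));
        return 0;
    }
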
1696 CLST vcn, end; in attr_collapse_range() local
1754 vcn = vbo >> sbi->cluster_bits; in attr_collapse_range()
1756 end = vcn + len; in attr_collapse_range()
1762 if (svcn <= vcn && vcn < evcn1) { in attr_collapse_range()
1771 attr = ni_find_attr(ni, attr_b, &le, ATTR_DATA, NULL, 0, &vcn, in attr_collapse_range()
1788 le->vcn = attr->nres.svcn; in attr_collapse_range()
1792 } else if (svcn < vcn || end < evcn1) { in attr_collapse_range()
1799 vcn1 = max(vcn, svcn); in attr_collapse_range()
1812 if (svcn >= vcn) { in attr_collapse_range()
1814 attr->nres.svcn = cpu_to_le64(vcn); in attr_collapse_range()
1816 le->vcn = attr->nres.svcn; in attr_collapse_range()
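
attr_collapse_range() measures each attribute segment against the collapse window [vcn, end): segments wholly inside are removed, a straddling one (line 1792's svcn < vcn || end < evcn1 test) is trimmed from vcn1 = max(vcn, svcn), and a segment that began inside the window restarts at vcn afterwards (lines 1812-1816). A minimal classifier over the window-overlap cases; evcn1 = evcn + 1 is half-open as in the listing, and this models the decision only, not the kernel's control flow:

    #include <stdint.h>

    typedef uint32_t CLST;

    enum overlap { SEG_BEFORE, SEG_INSIDE, SEG_STRADDLES, SEG_AFTER };

    /* Classify segment [svcn, evcn1) against collapse window [vcn, end). */
    enum overlap classify(CLST svcn, CLST evcn1, CLST vcn, CLST end)
    {
        if (evcn1 <= vcn)
            return SEG_BEFORE;      /* untouched */
        if (svcn >= end)
            return SEG_AFTER;       /* only shifts down by the window */
        if (svcn >= vcn && evcn1 <= end)
            return SEG_INSIDE;      /* whole segment goes away */
        return SEG_STRADDLES;       /* trim from max(vcn, svcn) */
    }
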
1959 CLST svcn, evcn1, vcn, len, end, alen, dealloc; in attr_punch_hole() local
2017 vcn = vbo >> sbi->cluster_bits; in attr_punch_hole()
2019 end = vcn + len; in attr_punch_hole()
2025 if (svcn <= vcn && vcn < evcn1) { in attr_punch_hole()
2034 attr = ni_find_attr(ni, attr_b, &le, ATTR_DATA, NULL, 0, &vcn, in attr_punch_hole()
2051 vcn1 = max(vcn, svcn); in attr_punch_hole()
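
attr_punch_hole() shares the setup: vcn = vbo >> cluster_bits, end = vcn + len, and work inside each overlapping segment starts at vcn1 = max(vcn, svcn) (line 2051); the freed clusters are re-added as SPARSE_LCN so the file keeps its size. The rounding with sample numbers:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        unsigned cluster_bits = 12;          /* 4 KiB clusters */
        uint64_t vbo = 0x5000;               /* hole starts at byte 20480 */
        uint32_t len = 4;                    /* hole length in clusters */

        uint32_t vcn = (uint32_t)(vbo >> cluster_bits);  /* 20480>>12 = 5 */
        uint32_t end = vcn + len;                        /* 9 */
        uint32_t svcn = 7;                               /* sample segment */
        uint32_t vcn1 = vcn > svcn ? vcn : svcn;         /* max() = 7 */

        /* prints: punch VCNs [5, 9), this segment from 7 */
        printf("punch VCNs [%u, %u), this segment from %u\n",
               (unsigned)vcn, (unsigned)end, (unsigned)vcn1);
        return 0;
    }
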