
Searched refs:vcn (Results 1 – 25 of 49) sorted by relevance

/third_party/ntfs-3g/libntfs-3g/
runlist.c 174 if ((dst->vcn + dst->length) != src->vcn) in ntfs_rl_are_mergeable()
259 dst[loc].length = dst[loc + 1].vcn - dst[loc].vcn; in ntfs_rl_append()
263 dst[marker].vcn = dst[marker-1].vcn + dst[marker-1].length; in ntfs_rl_append()
306 disc = (src[0].vcn > 0); in ntfs_rl_insert()
316 disc = (src[0].vcn > dst[loc - 1].vcn + merged_length); in ntfs_rl_insert()
346 dst[marker].vcn = dst[marker - 1].vcn + dst[marker - 1].length; in ntfs_rl_insert()
349 dst[marker].length = dst[marker + 1].vcn - dst[marker].vcn; in ntfs_rl_insert()
354 dst[loc].vcn = dst[loc - 1].vcn + dst[loc - 1].length; in ntfs_rl_insert()
355 dst[loc].length = dst[loc + 1].vcn - dst[loc].vcn; in ntfs_rl_insert()
357 dst[loc].vcn = 0; in ntfs_rl_insert()
[all …]
compress.c 659 cb_clusters -= rl->length - (cb_start_vcn - rl->vcn); in ntfs_is_cb_compressed()
665 cb_start_vcn = rl->vcn; in ntfs_is_cb_compressed()
666 rl = ntfs_attr_find_vcn(na, rl->vcn); in ntfs_is_cb_compressed()
673 if (rl->vcn < cb_start_vcn) in ntfs_is_cb_compressed()
713 VCN start_vcn, vcn, end_vcn; in ntfs_compressed_attr_pread() local
808 vcn = start_vcn; in ntfs_compressed_attr_pread()
812 rl = ntfs_attr_find_vcn(na, vcn); in ntfs_compressed_attr_pread()
831 } else if (!ntfs_is_cb_compressed(na, rl, vcn, cb_clusters)) { in ntfs_compressed_attr_pread()
847 ofs += vcn << vol->cluster_size_bits; in ntfs_compressed_attr_pread()
913 (vcn << vol->cluster_size_bits) + in ntfs_compressed_attr_pread()
[all …]
index.c 73 static s64 ntfs_ib_vcn_to_pos(ntfs_index_context *icx, VCN vcn) in ntfs_ib_vcn_to_pos() argument
75 return vcn << icx->vcn_size_bits; in ntfs_ib_vcn_to_pos()
85 s64 ret, vcn = sle64_to_cpu(ib->index_block_vcn); in ntfs_ib_write() local
87 ntfs_log_trace("vcn: %lld\n", (long long)vcn); in ntfs_ib_write()
89 ret = ntfs_attr_mst_pwrite(icx->ia_na, ntfs_ib_vcn_to_pos(icx, vcn), in ntfs_ib_write()
93 (long long)vcn, (unsigned long long)icx->ni->mft_no); in ntfs_ib_write()
339 static void ntfs_ie_set_vcn(INDEX_ENTRY *ie, VCN vcn) in ntfs_ie_set_vcn() argument
341 *ntfs_ie_get_vcn_addr(ie) = cpu_to_sle64(vcn); in ntfs_ie_set_vcn()
461 u64 inum, VCN vcn) in ntfs_index_block_inconsistent() argument
468 "%llu\n", (long long)vcn, in ntfs_index_block_inconsistent()
[all …]
lcnalloc.c 268 rl[0].vcn = start_vcn; in ntfs_cluster_alloc()
394 rl[rlpos].vcn = rl[rlpos - 1].vcn + in ntfs_cluster_alloc()
397 rl[rlpos].vcn = start_vcn; in ntfs_cluster_alloc()
408 (long long)rl[rlpos - 1].vcn, in ntfs_cluster_alloc()
534 rl[rlpos].vcn = rl[rlpos - 1].vcn + rl[rlpos - 1].length; in ntfs_cluster_alloc()
560 rl[rlpos].vcn = rl[rlpos - 1].vcn + rl[rlpos - 1].length; in ntfs_cluster_alloc()
699 delta = start_vcn - rl->vcn; in ntfs_cluster_free()
attrib.c 611 int ntfs_attr_map_runlist(ntfs_attr *na, VCN vcn) in ntfs_attr_map_runlist() argument
617 (unsigned long long)na->ni->mft_no, le32_to_cpu(na->type), (long long)vcn); in ntfs_attr_map_runlist()
619 lcn = ntfs_rl_vcn_to_lcn(na->rl, vcn); in ntfs_attr_map_runlist()
629 vcn, NULL, 0, ctx)) { in ntfs_attr_map_runlist()
655 static int ntfs_attr_map_partial_runlist(ntfs_attr *na, VCN vcn) in ntfs_attr_map_partial_runlist() argument
677 needed = vcn; in ntfs_attr_map_partial_runlist()
878 LCN ntfs_attr_vcn_to_lcn(ntfs_attr *na, const VCN vcn) in ntfs_attr_vcn_to_lcn() argument
883 if (!na || !NAttrNonResident(na) || vcn < 0) in ntfs_attr_vcn_to_lcn()
890 lcn = ntfs_rl_vcn_to_lcn(na->rl, vcn); in ntfs_attr_vcn_to_lcn()
893 if (!is_retry && !ntfs_attr_map_runlist(na, vcn)) { in ntfs_attr_vcn_to_lcn()
[all …]
debug.c 67 (long long)rl[i].vcn, lcn_str[idx], in ntfs_debug_runlist_dump()
72 (long long)rl[i].vcn, (long long)rl[i].lcn, in ntfs_debug_runlist_dump()
mft.c 746 rl2 = ntfs_cluster_alloc(vol, rl[1].vcn, 1, lcn, DATA_ZONE); in ntfs_mft_bitmap_extend_allocation_i()
778 mftbmp_na->name_len, 0, rl[1].vcn, NULL, 0, ctx)) { in ntfs_mft_bitmap_extend_allocation_i()
826 a->highest_vcn = cpu_to_sle64(rl[1].vcn - 1); in ntfs_mft_bitmap_extend_allocation_i()
858 mftbmp_na->name_len, 0, rl[1].vcn, NULL, 0, ctx)) { in ntfs_mft_bitmap_extend_allocation_i()
872 a->highest_vcn = cpu_to_sle64(rl[1].vcn - 2); in ntfs_mft_bitmap_extend_allocation_i()
1082 old_last_vcn = rl[1].vcn; in ntfs_mft_data_extend_allocation()
1127 rl[1].vcn, NULL, 0, ctx)) { in ntfs_mft_data_extend_allocation()
1176 a->highest_vcn = cpu_to_sle64(rl[1].vcn - 1); in ntfs_mft_data_extend_allocation()
1213 rl[1].vcn, NULL, 0, ctx)) { in ntfs_mft_data_extend_allocation()
dir.c 252 VCN vcn; in ntfs_inode_lookup_by_name() local
394 vcn = sle64_to_cpup((sle64*)((u8*)ie + le16_to_cpu(ie->length) - 8)); in ntfs_inode_lookup_by_name()
399 br = ntfs_attr_mst_pread(ia_na, vcn << index_vcn_size_bits, 1, in ntfs_inode_lookup_by_name()
405 (unsigned long long)vcn, in ntfs_inode_lookup_by_name()
411 ia_na->ni->mft_no, vcn)) { in ntfs_inode_lookup_by_name()
487 vcn = sle64_to_cpup((sle64*)((u8*)ie + le16_to_cpu(ie->length) - 8)); in ntfs_inode_lookup_by_name()
488 if (vcn >= 0) in ntfs_inode_lookup_by_name()
/third_party/ntfs-3g/ntfsprogs/
ntfsfallocate.c 432 begin_common = max(grl->vcn, brl->vcn); in free_common()
433 end_common = min(grl->vcn + glth, brl->vcn + blth); in free_common()
436 brl->lcn + begin_common - brl->vcn, in free_common()
442 - brl->vcn)); in free_common()
465 && (grl->vcn<=(brl->vcn+brl->length)); grl++) { in ntfs_restore_rl()
473 && ((brl->vcn + brl->length) > grl->vcn)) { in ntfs_restore_rl()
475 brl->vcn + brl->length - grl->vcn); in ntfs_restore_rl()
506 pos = zrl->vcn << vol->cluster_size_bits; in ntfs_inner_zero()
525 pos = zrl->vcn << vol->cluster_size_bits; in ntfs_inner_zero()
549 if ((rl->vcn << vol->cluster_size_bits) < na->initialized_size) { in ntfs_merge_allocation()
[all …]
ntfsmove.c 289 if (rl->vcn < -1)
300 rl->vcn, lcn_str[j], rl->length);
303 rl->vcn, rl->lcn, rl->length);
429 res[0].vcn = 0; in find_unused()
684 to->vcn = run->vcn; in move_datarun()
694 (long long)run->vcn, (long long)run->lcn, (long long)run->length, in move_datarun()
695 (long long)to->vcn, (long long)to->lcn, (long long)to->length); in move_datarun()
703 if (from[i].vcn == run->vcn) { in move_datarun()
ntfsck.c 343 (long long)rl->vcn, (long long)rl->length); in get_last_vcn()
344 if (rl->vcn<0) in get_last_vcn()
345 res = rl->vcn; in get_last_vcn()
347 res = rl->vcn + rl->length; in get_last_vcn()
364 VCN vcn; in mft_bitmap_load() local
367 vcn = get_last_vcn(mft_bitmap_rl); in mft_bitmap_load()
368 if (vcn<=LCN_EINVAL) { in mft_bitmap_load()
374 mft_bitmap_length = vcn * rawvol->cluster_size; in mft_bitmap_load()
ntfsresize.c 663 static void rl_set(runlist *rl, VCN vcn, LCN lcn, s64 len) in rl_set() argument
665 rl->vcn = vcn; in rl_set()
682 ntfs_log_verbose(" %8lld %8lld (0x%08llx) %lld\n", (long long)r->vcn, in dump_run()
756 if (rl[1].length != 0 || rl->vcn) in collect_resize_constraints()
1188 tmp->vcn -= unmapped_len; in rl_fixup()
1238 && ((pold->vcn + pold->length) in replace_runlist()
1239 <= (reprl[0].vcn + lowest_vcn))) { in replace_runlist()
1246 && (pold->vcn < (reprl[0].vcn + lowest_vcn))) { in replace_runlist()
1247 pnew->vcn = pold->vcn; in replace_runlist()
1249 pnew->length = reprl[0].vcn + lowest_vcn - pold->vcn; in replace_runlist()
[all …]
ntfscp.c 559 VCN vcn; in sort_runlist() local
581 vcn = 0; in sort_runlist()
583 alctx->rl[k].vcn = vcn; in sort_runlist()
584 vcn += alctx->rl[k].length; in sort_runlist()
587 if (vcn > alctx->wanted_clusters) { in sort_runlist()
589 alctx->rl[k].length -= vcn - alctx->wanted_clusters; in sort_runlist()
590 vcn = alctx->wanted_clusters; in sort_runlist()
597 alctx->rl[k].vcn = vcn; in sort_runlist()
mkntfs.c 1073 VCN vcn = 0LL; in allocate_scattered_clusters() local
1102 vcn++; in allocate_scattered_clusters()
1104 rl[rlpos].vcn = vcn++; in allocate_scattered_clusters()
1114 rl[rlpos].vcn = vcn; in allocate_scattered_clusters()
1132 rl[rlpos].vcn = vcn; in allocate_scattered_clusters()
1752 a->highest_vcn = cpu_to_sle64(rl[i].vcn - 1); in insert_non_resident_attr_in_mft_record()
3914 g_rl_mft_bmp[0].vcn = 0LL; in mkntfs_initialize_bitmaps()
3922 g_rl_mft_bmp[1].vcn = 1LL; in mkntfs_initialize_bitmaps()
3979 g_rl_mft[0].vcn = 0LL; in mkntfs_initialize_rl_mft()
3983 g_rl_mft[1].vcn = j; in mkntfs_initialize_rl_mft()
[all …]
ntfsfix.c 848 && (rl[1].vcn == rl[0].length) in short_mft_selfloc_condition()
879 VCN vcn; in attrlist_selfloc_condition() local
911 vcn = (SELFLOC_LIMIT*vol->mft_record_size) in attrlist_selfloc_condition()
913 levcn = cpu_to_sle64(vcn); in attrlist_selfloc_condition()
985 && !rl[0].vcn in self_mapped_selfloc_condition()
987 && (rl[1].vcn == lowest_vcn) in self_mapped_selfloc_condition()
cluster.c 99 (long long)runs[j].vcn, in cluster_find()
ntfswipe.c 687 offset - (restart->vcn in wipe_compressed_attribute()
703 if (offset >= (((rlt->vcn) << in wipe_compressed_attribute()
707 size = (rlt->vcn << vol->cluster_size_bits) - offset; in wipe_compressed_attribute()
740 restart->vcn << vol->cluster_size_bits, in wipe_compressed_attribute()
748 (long long)size, (long long)rlc->vcn); in wipe_compressed_attribute()
/third_party/mesa3d/src/gallium/drivers/radeonsi/
si_uvd.c 125 bool vcn = ctx->family >= CHIP_RAVEN; in si_uvd_create_decoder() local
128 if (vcn) { in si_uvd_create_decoder()
138 return (vcn) ? radeon_create_decoder(context, templ) in si_uvd_create_decoder()
/third_party/ntfs-3g/include/ntfs-3g/
runlist.h 47 VCN vcn; /* vcn = Starting virtual cluster number. */ member
55 extern LCN ntfs_rl_vcn_to_lcn(const runlist_element *rl, const VCN vcn);
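The runlist.h hits above show the structure that the libntfs-3g matches in this list operate on: a runlist is an array of (vcn, lcn, length) extents, and ntfs_rl_vcn_to_lcn() resolves a VCN by locating the extent that contains it and adding the offset into that run. A minimal, self-contained sketch of that lookup follows; the stand-in types, the zero-length terminator, and the -1 "not found" return are assumptions of the sketch, not the actual ntfs-3g definitions.

    /* Illustrative sketch only -- simplified stand-ins, not the ntfs-3g types. */
    #include <stdio.h>

    typedef long long VCN;  /* virtual cluster number (offset within the attribute) */
    typedef long long LCN;  /* logical cluster number (offset on the volume) */

    struct extent {
        VCN vcn;          /* starting virtual cluster of this run */
        LCN lcn;          /* starting logical cluster; negative values stand in for holes/unmapped runs here */
        long long length; /* run length in clusters; 0 terminates the list in this sketch */
    };

    /* Walk the extent list and translate vcn to an on-disk cluster,
     * mirroring what ntfs_rl_vcn_to_lcn() does conceptually. */
    static LCN vcn_to_lcn(const struct extent *rl, VCN vcn)
    {
        for (; rl->length; rl++)
            if (vcn >= rl->vcn && vcn < rl->vcn + rl->length)
                return rl->lcn < 0 ? rl->lcn : rl->lcn + (vcn - rl->vcn);
        return -1; /* past the end of the mapped runlist */
    }

    int main(void)
    {
        /* Two runs: VCN 0-3 -> LCN 100-103, VCN 4-9 -> LCN 500-505. */
        struct extent rl[] = { {0, 100, 4}, {4, 500, 6}, {10, 0, 0} };
        printf("vcn 6 -> lcn %lld\n", vcn_to_lcn(rl, 6)); /* prints 502 */
        return 0;
    }
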
attrib.h 313 extern int ntfs_attr_map_runlist(ntfs_attr *na, VCN vcn);
316 extern LCN ntfs_attr_vcn_to_lcn(ntfs_attr *na, const VCN vcn);
317 extern runlist_element *ntfs_attr_find_vcn(ntfs_attr *na, const VCN vcn);
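The attrib.h declarations above are the per-attribute layer over the raw runlist: as the attrib.c hits show, ntfs_attr_vcn_to_lcn() maps the runlist on demand via ntfs_attr_map_runlist() before translating. A hedged usage sketch, assuming an installed libntfs-3g with these headers, a throwaway device path, and trimmed error handling; this is not code from this tree.

    /* Usage sketch only. The device path and the choice of $MFT are assumptions. */
    #include <stdio.h>
    #include <ntfs-3g/volume.h>
    #include <ntfs-3g/inode.h>
    #include <ntfs-3g/attrib.h>

    int main(void)
    {
        ntfs_volume *vol = ntfs_mount("/dev/sdb1", NTFS_MNT_RDONLY); /* assumed device */
        if (!vol)
            return 1;

        ntfs_inode *ni = ntfs_inode_open(vol, 0); /* MFT_REF 0: $MFT, whose unnamed $DATA is non-resident */
        ntfs_attr *na = ni ? ntfs_attr_open(ni, AT_DATA, AT_UNNAMED, 0) : NULL;
        if (na) {
            s64 offset = 0;                             /* byte offset of interest */
            VCN vcn = offset >> vol->cluster_size_bits; /* byte offset -> VCN */
            LCN lcn = ntfs_attr_vcn_to_lcn(na, vcn);    /* VCN -> LCN; maps the runlist on demand */
            printf("vcn %lld -> lcn %lld\n", (long long)vcn, (long long)lcn);
            ntfs_attr_close(na);
        }
        if (ni)
            ntfs_inode_close(ni);
        ntfs_umount(vol, FALSE);
        return 0;
    }
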
index.h 143 u64 inum, VCN vcn);
/third_party/mesa3d/docs/relnotes/
19.0.2.rst 76 - radeon/vcn: add H.264 constrained baseline support
77 - radeon/vcn/vp9: search the render target from the whole list
18.3.6.rst 106 - radeon/vcn: add H.264 constrained baseline support
107 - radeon/vcn/vp9: search the render target from the whole list
19.1.4.rst 83 - radeon/vcn: fix poc for hevc encode
85 - radeon/vcn: enable rate control for hevc encoding
18.0.3.rst 45 - radeon/vcn: fix mpeg4 msg buffer settings