Home
last modified time | relevance | path

Searched refs: vcn (Results 1 – 19 of 19) sorted by relevance

/Linux-v4.19/fs/ntfs/
runlist.c:165 if ((dst->vcn + dst->length) != src->vcn) in ntfs_are_rl_mergeable()
252 dst[loc].length = dst[loc + 1].vcn - dst[loc].vcn; in ntfs_rl_append()
256 dst[marker].vcn = dst[marker - 1].vcn + dst[marker - 1].length; in ntfs_rl_append()
300 disc = (src[0].vcn > 0); in ntfs_rl_insert()
310 disc = (src[0].vcn > dst[loc - 1].vcn + merged_length); in ntfs_rl_insert()
339 dst[marker].vcn = dst[marker - 1].vcn + dst[marker - 1].length; in ntfs_rl_insert()
342 dst[marker].length = dst[marker + 1].vcn - dst[marker].vcn; in ntfs_rl_insert()
347 dst[loc].vcn = dst[loc - 1].vcn + dst[loc - 1].length; in ntfs_rl_insert()
348 dst[loc].length = dst[loc + 1].vcn - dst[loc].vcn; in ntfs_rl_insert()
350 dst[loc].vcn = 0; in ntfs_rl_insert()
[all …]
index.c:122 VCN vcn, old_vcn; in ntfs_index_lookup() local
262 vcn = sle64_to_cpup((sle64*)((u8*)ie + le16_to_cpu(ie->length) - 8)); in ntfs_index_lookup()
278 page = ntfs_map_page(ia_mapping, vcn << in ntfs_index_lookup()
290 ia = (INDEX_ALLOCATION*)(kaddr + ((vcn << in ntfs_index_lookup()
302 (long long)vcn, idx_ni->mft_no); in ntfs_index_lookup()
305 if (sle64_to_cpu(ia->index_block_vcn) != vcn) { in ntfs_index_lookup()
311 (unsigned long long)vcn, idx_ni->mft_no); in ntfs_index_lookup()
319 "driver bug.", (unsigned long long)vcn, in ntfs_index_lookup()
330 "driver.", (unsigned long long)vcn, in ntfs_index_lookup()
338 (unsigned long long)vcn, idx_ni->mft_no); in ntfs_index_lookup()
[all …]
logfile.c:729 VCN vcn, end_vcn; in ntfs_empty_logfile() local
753 vcn = 0; in ntfs_empty_logfile()
761 if (unlikely(!rl || vcn < rl->vcn || !rl->length)) { in ntfs_empty_logfile()
763 err = ntfs_map_runlist_nolock(log_ni, vcn, NULL); in ntfs_empty_logfile()
770 BUG_ON(!rl || vcn < rl->vcn || !rl->length); in ntfs_empty_logfile()
773 while (rl->length && vcn >= rl[1].vcn) in ntfs_empty_logfile()
786 vcn = rl->vcn; in ntfs_empty_logfile()
797 if (rl[1].vcn > end_vcn) in ntfs_empty_logfile()
798 len = end_vcn - rl->vcn; in ntfs_empty_logfile()
834 } while ((++rl)->vcn < end_vcn); in ntfs_empty_logfile()
dir.c:93 VCN vcn, old_vcn; in ntfs_lookup_inode_by_name() local
305 vcn = sle64_to_cpup((sle64*)((u8*)ie + le16_to_cpu(ie->length) - 8)); in ntfs_lookup_inode_by_name()
321 page = ntfs_map_page(ia_mapping, vcn << in ntfs_lookup_inode_by_name()
333 ia = (INDEX_ALLOCATION*)(kaddr + ((vcn << in ntfs_lookup_inode_by_name()
345 (unsigned long long)vcn, dir_ni->mft_no); in ntfs_lookup_inode_by_name()
348 if (sle64_to_cpu(ia->index_block_vcn) != vcn) { in ntfs_lookup_inode_by_name()
354 (unsigned long long)vcn, dir_ni->mft_no); in ntfs_lookup_inode_by_name()
363 (unsigned long long)vcn, dir_ni->mft_no, in ntfs_lookup_inode_by_name()
373 "driver.", (unsigned long long)vcn, in ntfs_lookup_inode_by_name()
381 (unsigned long long)vcn, dir_ni->mft_no); in ntfs_lookup_inode_by_name()
[all …]
attrib.h:63 extern int ntfs_map_runlist_nolock(ntfs_inode *ni, VCN vcn,
65 extern int ntfs_map_runlist(ntfs_inode *ni, VCN vcn);
67 extern LCN ntfs_attr_vcn_to_lcn_nolock(ntfs_inode *ni, const VCN vcn,
71 const VCN vcn, ntfs_attr_search_ctx *ctx);
aops.c:184 VCN vcn; in ntfs_read_block() local
259 vcn = (VCN)iblock << blocksize_bits >> in ntfs_read_block()
270 while (rl->length && rl[1].vcn <= vcn) in ntfs_read_block()
272 lcn = ntfs_rl_vcn_to_lcn(rl, vcn); in ntfs_read_block()
300 err = ntfs_map_runlist(ni, vcn); in ntfs_read_block()
325 ni->type, (unsigned long long)vcn, in ntfs_read_block()
549 VCN vcn; in ntfs_write_block() local
716 vcn = (VCN)block << blocksize_bits; in ntfs_write_block()
717 vcn_ofs = vcn & vol->cluster_size_mask; in ntfs_write_block()
718 vcn >>= vol->cluster_size_bits; in ntfs_write_block()
[all …]
runlist.h:43 VCN vcn; /* vcn = Starting virtual cluster number. */ member
79 extern LCN ntfs_rl_vcn_to_lcn(const runlist_element *rl, const VCN vcn);
84 const VCN vcn);
attrib.c:84 int ntfs_map_runlist_nolock(ntfs_inode *ni, VCN vcn, ntfs_attr_search_ctx *ctx) in ntfs_map_runlist_nolock() argument
98 (unsigned long long)vcn); in ntfs_map_runlist_nolock()
134 if (vcn >= allocated_size_vcn || (a->type == ni->type && in ntfs_map_runlist_nolock()
139 <= vcn && end_vcn >= vcn)) in ntfs_map_runlist_nolock()
167 CASE_SENSITIVE, vcn, NULL, 0, ctx); in ntfs_map_runlist_nolock()
183 if (unlikely(vcn && vcn >= end_vcn)) { in ntfs_map_runlist_nolock()
298 int ntfs_map_runlist(ntfs_inode *ni, VCN vcn) in ntfs_map_runlist() argument
304 if (likely(ntfs_rl_vcn_to_lcn(ni->runlist.rl, vcn) <= in ntfs_map_runlist()
306 err = ntfs_map_runlist_nolock(ni, vcn, NULL); in ntfs_map_runlist()
341 LCN ntfs_attr_vcn_to_lcn_nolock(ntfs_inode *ni, const VCN vcn, in ntfs_attr_vcn_to_lcn_nolock() argument
[all …]
compress.c:492 VCN vcn; in ntfs_read_compressed_block() local
610 for (vcn = start_vcn, start_vcn += cb_clusters; vcn < start_vcn; in ntfs_read_compressed_block()
611 vcn++) { in ntfs_read_compressed_block()
621 while (rl->length && rl[1].vcn <= vcn) in ntfs_read_compressed_block()
623 lcn = ntfs_rl_vcn_to_lcn(rl, vcn); in ntfs_read_compressed_block()
627 (unsigned long long)vcn, in ntfs_read_compressed_block()
644 if (!ntfs_map_runlist(ni, vcn)) in ntfs_read_compressed_block()
744 if (vcn == start_vcn - cb_clusters) { in ntfs_read_compressed_block()
787 } else if (vcn == start_vcn) { in ntfs_read_compressed_block()
lcnalloc.c:402 rl[rlpos].vcn = rl[rlpos - 1].vcn + in ntfs_cluster_alloc()
407 rl[rlpos].vcn = start_vcn; in ntfs_cluster_alloc()
740 rl[rlpos].vcn = rl[rlpos - 1].vcn + rl[rlpos - 1].length; in ntfs_cluster_alloc()
897 delta = start_vcn - rl->vcn; in __ntfs_cluster_free()
930 VCN vcn; in __ntfs_cluster_free() local
933 vcn = rl->vcn; in __ntfs_cluster_free()
934 rl = ntfs_attr_find_vcn_nolock(ni, vcn, ctx); in __ntfs_cluster_free()
mft.c:539 VCN vcn; in ntfs_sync_mft_mirror() local
545 vcn = ((VCN)mft_no << vol->mft_record_size_bits) + in ntfs_sync_mft_mirror()
547 vcn_ofs = vcn & vol->cluster_size_mask; in ntfs_sync_mft_mirror()
548 vcn >>= vol->cluster_size_bits; in ntfs_sync_mft_mirror()
560 while (rl->length && rl[1].vcn <= vcn) in ntfs_sync_mft_mirror()
562 lcn = ntfs_rl_vcn_to_lcn(rl, vcn); in ntfs_sync_mft_mirror()
732 VCN vcn; in write_mft_record_nolock() local
738 vcn = ((VCN)ni->mft_no << vol->mft_record_size_bits) + in write_mft_record_nolock()
740 vcn_ofs = vcn & vol->cluster_size_mask; in write_mft_record_nolock()
741 vcn >>= vol->cluster_size_bits; in write_mft_record_nolock()
[all …]
file.c:587 VCN vcn, highest_vcn = 0, cpos, cend, bh_cpos, bh_cend; in ntfs_prepare_pages_for_non_resident_write() local
642 vcn = lcn = -1; in ntfs_prepare_pages_for_non_resident_write()
720 cdelta = bh_cpos - vcn; in ntfs_prepare_pages_for_non_resident_write()
850 while (rl->length && rl[1].vcn <= bh_cpos) in ntfs_prepare_pages_for_non_resident_write()
859 vcn = bh_cpos; in ntfs_prepare_pages_for_non_resident_write()
860 vcn_len = rl[1].vcn - vcn; in ntfs_prepare_pages_for_non_resident_write()
871 if (likely(vcn + vcn_len >= cend)) { in ntfs_prepare_pages_for_non_resident_write()
1056 vcn = sle64_to_cpu(a->data.non_resident.lowest_vcn); in ntfs_prepare_pages_for_non_resident_write()
1057 rl2 = ntfs_rl_find_vcn_nolock(rl, vcn); in ntfs_prepare_pages_for_non_resident_write()
1074 mp_size = ntfs_get_size_for_mapping_pairs(vol, rl2, vcn, in ntfs_prepare_pages_for_non_resident_write()
[all …]
debug.c:157 (long long)(rl + i)->vcn, lcn_str[index], in ntfs_debug_dump_runlist()
163 (long long)(rl + i)->vcn, in ntfs_debug_dump_runlist()
super.c:1180 rl2[0].vcn = 0; in check_mft_mirror()
1184 rl2[1].vcn = rl2[0].length; in check_mft_mirror()
1197 if (rl2[i].vcn != rl[i].vcn || rl2[i].lcn != rl[i].lcn || in check_mft_mirror()
/Linux-v4.19/drivers/gpu/drm/amd/amdgpu/
amdgpu_vcn.c:58 INIT_DELAYED_WORK(&adev->vcn.idle_work, amdgpu_vcn_idle_work_handler); in amdgpu_vcn_sw_init()
68 r = request_firmware(&adev->vcn.fw, fw_name, adev->dev); in amdgpu_vcn_sw_init()
75 r = amdgpu_ucode_validate(adev->vcn.fw); in amdgpu_vcn_sw_init()
79 release_firmware(adev->vcn.fw); in amdgpu_vcn_sw_init()
80 adev->vcn.fw = NULL; in amdgpu_vcn_sw_init()
84 hdr = (const struct common_firmware_header *)adev->vcn.fw->data; in amdgpu_vcn_sw_init()
85 adev->vcn.fw_version = le32_to_cpu(hdr->ucode_version); in amdgpu_vcn_sw_init()
119 AMDGPU_GEM_DOMAIN_VRAM, &adev->vcn.vcpu_bo, in amdgpu_vcn_sw_init()
120 &adev->vcn.gpu_addr, &adev->vcn.cpu_addr); in amdgpu_vcn_sw_init()
133 kvfree(adev->vcn.saved_bo); in amdgpu_vcn_sw_fini()
[all …]
vcn_v1_0.c:58 adev->vcn.num_enc_rings = 2; in vcn_v1_0_early_init()
82 …_add_id(adev, SOC15_IH_CLIENTID_VCN, VCN_1_0__SRCID__UVD_SYSTEM_MESSAGE_INTERRUPT, &adev->vcn.irq); in vcn_v1_0_sw_init()
87 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v1_0_sw_init()
89 &adev->vcn.irq); in vcn_v1_0_sw_init()
95 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_VCN, 126, &adev->vcn.irq); in vcn_v1_0_sw_init()
105 hdr = (const struct common_firmware_header *)adev->vcn.fw->data; in vcn_v1_0_sw_init()
107 adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].fw = adev->vcn.fw; in vcn_v1_0_sw_init()
117 ring = &adev->vcn.ring_dec; in vcn_v1_0_sw_init()
119 r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.irq, 0); in vcn_v1_0_sw_init()
123 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v1_0_sw_init()
[all …]
amdgpu_queue_mgr.c:89 *out_ring = &adev->vcn.ring_dec; in amdgpu_identity_map()
92 *out_ring = &adev->vcn.ring_enc[ring]; in amdgpu_identity_map()
95 *out_ring = &adev->vcn.ring_jpeg; in amdgpu_identity_map()
266 ip_num_rings = adev->vcn.num_enc_rings; in amdgpu_queue_mgr_map()
amdgpu_kms.c:195 fw_info->ver = adev->vcn.fw_version; in amdgpu_firmware_info()
379 ring_mask = adev->vcn.ring_dec.ready; in amdgpu_info_ioctl()
385 for (i = 0; i < adev->vcn.num_enc_rings; i++) in amdgpu_info_ioctl()
386 ring_mask |= adev->vcn.ring_enc[i].ready << i; in amdgpu_info_ioctl()
392 ring_mask = adev->vcn.ring_jpeg.ready; in amdgpu_info_ioctl()
amdgpu.h:1496 struct amdgpu_vcn vcn; member