/Linux-v6.6/fs/ntfs3/ (occurrences of the identifier "svcn")
D | attrib.c
     64  CLST svcn = le64_to_cpu(attr->nres.svcn);  in attr_load_runs()  (local)
     69  if (svcn >= evcn + 1 || run_is_mapped_full(run, svcn, evcn))  in attr_load_runs()
     72  if (vcn && (evcn < *vcn || *vcn < svcn))  in attr_load_runs()
     81  err = run_unpack_ex(run, ni->mi.sbi, ni->mi.rno, svcn, evcn,  in attr_load_runs()
     82      vcn ? *vcn : svcn, Add2Ptr(attr, run_off),  in attr_load_runs()
    414  CLST alen, vcn, lcn, new_alen, old_alen, svcn, evcn;  in attr_set_size()  (local)
    470  svcn = le64_to_cpu(attr_b->nres.svcn);  in attr_set_size()
    473  if (svcn <= vcn && vcn <= evcn) {  in attr_set_size()
    490  svcn = le64_to_cpu(attr->nres.svcn);  in attr_set_size()
    593  err = mi_pack_runs(mi, attr, run, vcn - svcn);  in attr_set_size()
    [all …]
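The attrib.c hits show two recurring tests on the [svcn, evcn] window of a non-resident attribute record: attr_load_runs() appears to skip unpacking when the window is empty or already mapped, and attr_set_size() checks whether the target VCN falls inside the window. Below is a minimal, self-contained sketch of those two tests only; CLST is stood in by a plain 64-bit integer and struct attr_segment / seg_contains / seg_is_empty are hypothetical names, not the kernel structures or API.

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint64_t CLST;          /* stand-in for the kernel's cluster type */

    struct attr_segment {
            CLST svcn;              /* first VCN covered by this attribute record */
            CLST evcn;              /* last VCN covered by this attribute record */
    };

    /* Mirrors the "svcn <= vcn && vcn <= evcn" containment test seen in attr_set_size(). */
    static bool seg_contains(const struct attr_segment *seg, CLST vcn)
    {
            return seg->svcn <= vcn && vcn <= seg->evcn;
    }

    /* An empty window is encoded as evcn + 1 == svcn, i.e. svcn >= evcn + 1. */
    static bool seg_is_empty(const struct attr_segment *seg)
    {
            return seg->svcn >= seg->evcn + 1;
    }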
D | frecord.c
    227  if (attr->nres.svcn)  in ni_find_attr()
    229  } else if (le64_to_cpu(attr->nres.svcn) > *vcn ||  in ni_find_attr()
    327  if (le64_to_cpu(attr->nres.svcn) <= vcn &&  in ni_load_attr()
    459  CLST svcn, struct ATTR_LIST_ENTRY **ins_le)  in ni_ins_new_attr()  (argument)
    469  err = al_add_le(ni, type, name, name_len, svcn, cpu_to_le16(-1),  in ni_ins_new_attr()
    531  CLST svcn, evcn = 0, svcn_p, evcn_p, next_svcn;  in ni_repack()
    541  svcn = le64_to_cpu(attr->nres.svcn);  in ni_repack()
    542  if (svcn != le64_to_cpu(le->vcn)) {  in ni_repack()
    547  if (!svcn) {  in ni_repack()
    551  } else if (svcn != evcn + 1) {  in ni_repack()
    [all …]
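In ni_repack() the segments of a non-resident attribute are walked in order: a segment with svcn == 0 is treated as the start of the chain, and a later segment whose svcn is not evcn + 1 of the previous one is flagged as a break in continuity. A minimal sketch of that continuity rule, under the assumption of a simplified struct seg rather than the real attribute records:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef uint64_t CLST;

    struct seg { CLST svcn, evcn; };

    /* First segment must start at VCN 0; each next one at the previous evcn + 1. */
    static bool segments_are_contiguous(const struct seg *s, size_t n)
    {
            CLST expect = 0;

            for (size_t i = 0; i < n; i++) {
                    if (s[i].svcn != expect)
                            return false;
                    expect = s[i].evcn + 1;        /* where the next segment must begin */
            }
            return true;
    }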
D | run.c
    167  bool run_is_mapped_full(const struct runs_tree *run, CLST svcn, CLST evcn)  in run_is_mapped_full()  (argument)
    173  if (!run_lookup(run, svcn, &i))  in run_is_mapped_full()
    817  int run_pack(const struct runs_tree *run, CLST svcn, CLST len, u8 *run_buf,  in run_pack()  (argument)
    822  CLST evcn1 = svcn + len;  in run_pack()
    835  if (!run_lookup(run, svcn, &i))  in run_pack()
    849  len = svcn - r->vcn;  in run_pack()
    850  vcn = svcn;  in run_pack()
    921  CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,  in run_unpack()  (argument)
    932  if (evcn + 1 == svcn)  in run_unpack()
    935  if (evcn < svcn)  in run_unpack()
    [all …]
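run_unpack() begins by validating the requested window: evcn + 1 == svcn describes an empty window with nothing to decode, while evcn < svcn can only come from a corrupt record. A hedged sketch of that check in isolation; check_unpack_window() is a hypothetical helper for illustration, not a kernel function:

    #include <errno.h>
    #include <stdint.h>

    typedef uint64_t CLST;

    static int check_unpack_window(CLST svcn, CLST evcn)
    {
            if (evcn + 1 == svcn)
                    return 0;               /* empty window: nothing mapped, not an error */
            if (evcn < svcn)
                    return -EINVAL;         /* window ends before it starts: corrupt record */
            return 1;                       /* non-empty window, safe to unpack */
    }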
D | attrlist.c
     62  } else if (attr->nres.svcn) {  in ntfs_load_attr_list()
    172  CLST svcn = attr_svcn(attr);  in al_find_le()  (local)
    175      &svcn);  in al_find_le()
    287  u8 name_len, CLST svcn, __le16 id, const struct MFT_REF *ref,  in al_add_le()  (argument)
    309  le = al_find_le_to_insert(ni, type, name, name_len, svcn);  in al_add_le()
    334  le->vcn = cpu_to_le64(svcn);  in al_add_le()
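al_add_le() records the segment's svcn in the attribute list entry (le->vcn = cpu_to_le64(svcn)), and the lookups in frecord.c restrict themselves to records whose svcn does not exceed the wanted VCN. A small sketch of that selection over a sorted array; struct al_entry and al_entry_for_vcn() are hypothetical stand-ins, not the on-disk ATTR_LIST_ENTRY:

    #include <stddef.h>
    #include <stdint.h>

    typedef uint64_t CLST;

    struct al_entry {
            CLST vcn;       /* svcn of the segment this entry points to */
    };

    /* Entries are assumed sorted by vcn; return the last entry with vcn <= @vcn. */
    static const struct al_entry *al_entry_for_vcn(const struct al_entry *e,
                                                   size_t n, CLST vcn)
    {
            const struct al_entry *found = NULL;

            for (size_t i = 0; i < n && e[i].vcn <= vcn; i++)
                    found = &e[i];
            return found;
    }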
D | record.c
    305  if (le64_to_cpu(attr->nres.svcn) > le64_to_cpu(attr->nres.evcn) + 1)  in mi_enum_attr()
    320  if (!attr->nres.svcn && is_attr_ext(attr)) {  in mi_enum_attr()
    612  CLST svcn = le64_to_cpu(attr->nres.svcn);  in mi_pack_runs()  (local)
    626  err = run_pack(run, svcn, len, Add2Ptr(attr, run_off), run_size + dsize,  in mi_pack_runs()
    638  attr->nres.evcn = cpu_to_le64(svcn + plen - 1);  in mi_pack_runs()
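Two relations stand out here: mi_enum_attr() rejects records whose window ends before it starts (svcn > evcn + 1), and mi_pack_runs() recomputes evcn as svcn + plen - 1 after packing plen clusters. A purely illustrative sketch of both, with hypothetical helper names:

    #include <errno.h>
    #include <stdint.h>

    typedef uint64_t CLST;

    /* Window sanity test applied while enumerating attribute records. */
    static int check_nres_window(CLST svcn, CLST evcn)
    {
            return svcn > evcn + 1 ? -EINVAL : 0;
    }

    /* After packing "plen" clusters starting at "svcn", the last covered VCN. */
    static CLST evcn_after_pack(CLST svcn, CLST plen)
    {
            /* plen == 0 would wrap; a non-zero length is assumed in this sketch. */
            return svcn + plen - 1;
    }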
D | ntfs_fs.h
    468  u8 name_len, CLST svcn, __le16 id, const struct MFT_REF *ref,
    535  const struct runs_tree *run, CLST svcn, CLST len,
    799  bool run_is_mapped_full(const struct runs_tree *run, CLST svcn, CLST evcn);
    801  int run_pack(const struct runs_tree *run, CLST svcn, CLST len, u8 *run_buf,
    804  CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,
    809  CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,
D | fslog.c
   2676  u64 dsize, svcn, evcn;  in check_attr()  (local)
   2700  svcn = le64_to_cpu(attr->nres.svcn);  in check_attr()
   2704  if (svcn > evcn + 1 || run_off >= asize ||  in check_attr()
   2713  if (run_unpack(NULL, sbi, 0, svcn, evcn, svcn,  in check_attr()
   3337  if (run_get_highest_vcn(le64_to_cpu(attr->nres.svcn),  in do_action()
   4758  CLST svcn = le64_to_cpu(attr->nres.svcn);  in log_replay()  (local)
   4766  err = run_unpack(&oa->run0, sbi, inode->i_ino, svcn,  in log_replay()
   4767      le64_to_cpu(attr->nres.evcn), svcn,  in log_replay()
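check_attr() applies the same kind of sanity test to a non-resident attribute pulled from the log before trying to unpack it: the VCN window must not end before it starts, and the packed run data must begin inside the attribute record. A minimal sketch under those assumptions; the struct and field names below are illustrative, not the kernel's:

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint64_t CLST;

    struct nres_attr_view {
            CLST svcn, evcn;        /* VCN window described by the record */
            uint32_t run_off;       /* offset of the packed runs inside the record */
            uint32_t asize;         /* total size of the attribute record */
    };

    static bool nres_attr_looks_sane(const struct nres_attr_view *a)
    {
            if (a->svcn > a->evcn + 1)      /* window ends before it starts */
                    return false;
            if (a->run_off >= a->asize)     /* packed runs would lie outside the record */
                    return false;
            return true;
    }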
D | ntfs.h
    338  __le64 svcn;    // 0x10: Starting VCN of this segment.  (member)
    442  return attr->non_res ? le64_to_cpu(attr->nres.svcn) : 0;  in attr_svcn()
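ntfs.h keeps svcn at offset 0x10 of the non-resident attribute header, and attr_svcn() treats a resident attribute as starting at VCN 0. A simplified sketch of that accessor with a hypothetical flattened struct (the real one nests the field under nres and stores it little-endian):

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint64_t CLST;

    struct simple_attr {
            bool non_res;           /* resident (data inside the record) or not */
            uint64_t svcn;          /* stand-in for the le64 nres.svcn field at 0x10 */
    };

    static CLST simple_attr_svcn(const struct simple_attr *attr)
    {
            /* A resident attribute has no VCN range, so it behaves as starting at 0. */
            return attr->non_res ? attr->svcn : 0;
    }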
D | inode.c
    398  t64 = le64_to_cpu(attr->nres.svcn);  in ntfs_read_mft()