Lines Matching full:run (fs/ntfs3/attrib.c, Linux ntfs3 driver)
88 struct runs_tree *run, const CLST *vcn) in attr_load_runs() argument
96 if (svcn >= evcn + 1 || run_is_mapped_full(run, svcn, evcn)) in attr_load_runs()
104 err = run_unpack_ex(run, ni->mi.sbi, ni->mi.rno, svcn, evcn, in attr_load_runs()
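Throughout this listing, `run` is a struct runs_tree, the in-memory cache of an attribute's VCN-to-LCN mapping. The attr_load_runs() hits show the lazy-mapping guard: the packed mapping-pairs array is decoded only when the requested range is not already in the tree. A minimal sketch of that guard, assuming the declarations in fs/ntfs3/ntfs_fs.h; run_buf/run_size stand for the attribute's packed run data, and the exact run_unpack_ex() parameter types are an assumption:

/*
 * Decode the packed mapping pairs into @run only if the VCN range
 * [svcn, evcn] is non-empty and not yet fully cached.
 */
static int load_runs_once(struct ntfs_inode *ni, struct runs_tree *run,
                          const u8 *run_buf, u32 run_size,
                          CLST svcn, CLST evcn)
{
        /* Fast path: empty range, or every fragment already mapped. */
        if (svcn >= evcn + 1 || run_is_mapped_full(run, svcn, evcn))
                return 0;

        /* Decode and validate mapping pairs into the tree. */
        return run_unpack_ex(run, ni->mi.sbi, ni->mi.rno, svcn, evcn, svcn,
                             run_buf, run_size);
}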
116 static int run_deallocate_ex(struct ntfs_sb_info *sbi, struct runs_tree *run, in run_deallocate_ex() argument
126 if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx)) { in run_deallocate_ex()
128 run_truncate(run, vcn0); in run_deallocate_ex()
152 if (!run_get_entry(run, ++idx, &vcn, &lcn, &clen) || in run_deallocate_ex()
154 /* Save memory - don't load entire run. */ in run_deallocate_ex()
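run_deallocate_ex() walks the mapped fragments one at a time: run_lookup_entry() positions @idx once, then run_get_entry() steps fragment by fragment, which is why the whole run never has to be resident (the "Save memory" comment above). A sketch of that walk under the same ntfs_fs.h assumptions; the per-fragment work is elided:

static int walk_fragments(struct runs_tree *run, CLST vcn, CLST len)
{
        CLST end = vcn + len, lcn, clen, vcn_next;
        size_t idx;

        if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx))
                return -ENOENT; /* Start of the range is not mapped. */

        for (;;) {
                /* ... process clusters [lcn, lcn + clen), clamped to @end ... */

                vcn_next = vcn + clen;
                if (vcn_next >= end)
                        return 0;
                /* Step to the next cached fragment; it must be contiguous. */
                if (!run_get_entry(run, ++idx, &vcn, &lcn, &clen) ||
                    vcn != vcn_next)
                        return -EINVAL;
        }
}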
167 * attr_allocate_clusters - Find free space, mark it as used and store in @run.
169 int attr_allocate_clusters(struct ntfs_sb_info *sbi, struct runs_tree *run, in attr_allocate_clusters() argument
177 size_t cnt = run->count; in attr_allocate_clusters()
196 /* Add new fragment into run storage. */ in attr_allocate_clusters()
197 if (!run_add_entry(run, vcn, lcn, flen, opt == ALLOCATE_MFT)) { in attr_allocate_clusters()
209 (fr && run->count - cnt >= fr)) { in attr_allocate_clusters()
220 run_deallocate_ex(sbi, run, vcn0, vcn - vcn0, NULL, false); in attr_allocate_clusters()
221 run_truncate(run, vcn0); in attr_allocate_clusters()
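attr_allocate_clusters() snapshots run->count up front so it can cap fragmentation (the `run->count - cnt >= fr` hit), records each fragment the allocator returns with run_add_entry(), and on failure unwinds with run_deallocate_ex() plus run_truncate(). A condensed sketch of that bookkeeping; find_free_clusters() is a hypothetical stand-in for the volume-bitmap allocator, and the fragmentation cap is left out:

/* Hypothetical stand-in for the real volume-bitmap allocator. */
int find_free_clusters(struct ntfs_sb_info *sbi, CLST hint, CLST want,
                       CLST *lcn, CLST *flen);

static int alloc_into_run(struct ntfs_sb_info *sbi, struct runs_tree *run,
                          CLST vcn0, CLST hint, CLST len)
{
        CLST vcn = vcn0, lcn, flen;
        int err;

        while (len) {
                err = find_free_clusters(sbi, hint, len, &lcn, &flen);
                if (err)
                        goto undo;

                /* Add new fragment into run storage (the source passes
                 * opt == ALLOCATE_MFT as the last argument).
                 */
                if (!run_add_entry(run, vcn, lcn, flen, false)) {
                        err = -ENOMEM;
                        goto undo;
                }
                vcn += flen;
                len -= flen;
                hint = lcn + flen;
        }
        return 0;

undo:
        /* Return the freshly allocated clusters, drop the partial mapping. */
        run_deallocate_ex(sbi, run, vcn0, vcn - vcn0, NULL, false);
        run_truncate(run, vcn0);
        return err;
}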
235 u64 new_size, struct runs_tree *run, in attr_make_nonresident() argument
267 run_init(run); in attr_make_nonresident()
282 err = attr_allocate_clusters(sbi, run, 0, 0, len, NULL, in attr_make_nonresident()
290 err = ntfs_sb_write_run(sbi, run, 0, data, rsize, 0); in attr_make_nonresident()
322 attr_s->name_len, run, 0, alen, in attr_make_nonresident()
347 run_deallocate(sbi, run, false); in attr_make_nonresident()
348 run_close(run); in attr_make_nonresident()
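attr_make_nonresident() shows the whole life cycle of a fresh tree: run_init(), cluster allocation, ntfs_sb_write_run() to copy the resident payload into the new clusters, and on failure run_deallocate() before run_close(). A sketch of that ordering, reusing the hypothetical alloc_into_run() from above; @data/@rsize are the resident payload, @len its size in clusters:

static int push_payload_out(struct ntfs_sb_info *sbi, struct runs_tree *run,
                            const void *data, u32 rsize, CLST len)
{
        int err;

        run_init(run);                  /* Start from an empty mapping. */

        err = alloc_into_run(sbi, run, 0, 0, len);
        if (err)
                goto out;

        /* Copy the formerly resident bytes out to disk. */
        err = ntfs_sb_write_run(sbi, run, 0, data, rsize, 0);
        if (!err)
                return 0;

        run_deallocate(sbi, run, false);        /* Give the clusters back. */
out:
        run_close(run);                         /* Free the tree storage. */
        return err;
}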
360 u64 new_size, struct runs_tree *run, in attr_set_size_res() argument
378 run, ins_attr, NULL); in attr_set_size_res()
407 const __le16 *name, u8 name_len, struct runs_tree *run, in attr_set_size() argument
436 err = attr_set_size_res(ni, attr_b, le_b, mi_b, new_size, run, in attr_set_size()
496 err = attr_load_runs(attr, ni, run, NULL); in attr_set_size()
530 !run_lookup_entry(run, vcn, &lcn, NULL, NULL)) { in attr_set_size()
552 if (!run_add_entry(run, vcn, SPARSE_LCN, to_allocate, in attr_set_size()
561 sbi, run, vcn, lcn, to_allocate, &pre_alloc, in attr_set_size()
581 err = mi_pack_runs(mi, attr, run, vcn - svcn); in attr_set_size()
638 err = ni_insert_nonresident(ni, type, name, name_len, run, in attr_set_size()
645 run_truncate_head(run, evcn + 1); in attr_set_size()
675 err = run_deallocate_ex(sbi, run, vcn, evcn - vcn + 1, &alen, in attr_set_size()
680 run_truncate(run, vcn); in attr_set_size()
683 err = mi_pack_runs(mi, attr, run, vcn - svcn); in attr_set_size()
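In the growth path of attr_set_size(), a hole costs no bitmap work at all: run_add_entry() with SPARSE_LCN records [vcn, vcn + to_allocate) as unallocated, and mi_pack_runs() re-encodes the tree into the attribute record's mapping-pairs array. A sketch of that pair of steps, same assumptions as above:

static int extend_sparse(struct mft_inode *mi, struct ATTRIB *attr,
                         struct runs_tree *run, CLST svcn, CLST vcn,
                         CLST to_allocate)
{
        /* Cover the new tail with a sparse (unallocated) fragment. */
        if (!run_add_entry(run, vcn, SPARSE_LCN, to_allocate, false))
                return -ENOMEM;

        vcn += to_allocate;

        /* Encode entries [svcn, vcn) back into @attr inside @mi. */
        return mi_pack_runs(mi, attr, run, vcn - svcn);
}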
810 struct runs_tree *run = &ni->file.run; in attr_data_get_block() local
825 ok = run_lookup_entry(run, vcn, lcn, len, NULL); in attr_data_get_block()
888 err = attr_load_runs(attr, ni, run, NULL); in attr_data_get_block()
893 ok = run_lookup_entry(run, vcn, lcn, len, NULL); in attr_data_get_block()
922 if (!run_add_entry(run, evcn1, SPARSE_LCN, vcn - evcn1, in attr_data_get_block()
927 } else if (vcn && !run_lookup_entry(run, vcn - 1, &hint, NULL, NULL)) { in attr_data_get_block()
932 sbi, run, vcn, hint + 1, to_alloc, NULL, 0, len, in attr_data_get_block()
945 err = mi_pack_runs(mi, attr, run, max(end, evcn1) - svcn); in attr_data_get_block()
1030 err = attr_load_runs(attr, ni, run, &end); in attr_data_get_block()
1036 err = mi_pack_runs(mi, attr, run, evcn1 - next_svcn); in attr_data_get_block()
1048 err = ni_insert_nonresident(ni, ATTR_DATA, NULL, 0, run, in attr_data_get_block()
1055 run_truncate_around(run, vcn); in attr_data_get_block()
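attr_data_get_block() tries the cheap path first: a run_lookup_entry() hit answers the mapping without touching the MFT; only on a miss are the runs decoded and, if the block is still unmapped, new clusters allocated next to the previous fragment (the `hint + 1` hit above). run_truncate_around() at the end keeps the cached tree from growing without bound. A sketch of the lookup side, assuming the file-local attr_load_runs() helper shown earlier:

/* Resolve vcn -> (lcn, len), decoding runs lazily on a cache miss. */
static int map_block(struct ntfs_inode *ni, struct ATTRIB *attr,
                     struct runs_tree *run, CLST vcn, CLST *lcn, CLST *len)
{
        int err;

        if (run_lookup_entry(run, vcn, lcn, len, NULL))
                return 0;               /* Already cached in the tree. */

        err = attr_load_runs(attr, ni, run, NULL);
        if (err)
                return err;

        if (run_lookup_entry(run, vcn, lcn, len, NULL))
                return 0;

        return -ENOENT;         /* Caller decides whether to allocate. */
}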
1137 const __le16 *name, u8 name_len, struct runs_tree *run, in attr_load_runs_vcn() argument
1160 err = run_unpack_ex(run, ni->mi.sbi, ni->mi.rno, svcn, evcn, svcn, in attr_load_runs_vcn()
1171 const __le16 *name, u8 name_len, struct runs_tree *run, in attr_load_runs_range() argument
1182 if (!run_lookup_entry(run, vcn, &lcn, &clen, NULL)) { in attr_load_runs_range()
1183 err = attr_load_runs_vcn(ni, type, name, name_len, run, in attr_load_runs_range()
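attr_load_runs_range() applies the same miss-driven loading over a whole range: walk [vcn, vcn_end) and call attr_load_runs_vcn() only for the pieces the tree cannot resolve. A condensed reconstruction of the loop these hits outline:

static int load_range(struct ntfs_inode *ni, enum ATTR_TYPE type,
                      const __le16 *name, u8 name_len, struct runs_tree *run,
                      CLST vcn, CLST vcn_end)
{
        CLST lcn, clen;
        int err;

        while (vcn < vcn_end) {
                if (!run_lookup_entry(run, vcn, &lcn, &clen, NULL)) {
                        /* Fragment not cached: decode it from the MFT. */
                        err = attr_load_runs_vcn(ni, type, name, name_len,
                                                 run, vcn);
                        if (err)
                                return err;
                        if (!run_lookup_entry(run, vcn, &lcn, &clen, NULL))
                                return -ENOENT;
                }
                vcn += clen;    /* Skip past the fragment just resolved. */
        }
        return 0;
}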
1201 struct runs_tree *run, u64 frame, u64 frames, in attr_wof_frame_info() argument
1286 ARRAY_SIZE(WOF_NAME), run, in attr_wof_frame_info()
1291 err = ntfs_bio_pages(sbi, run, &page, 1, from, in attr_wof_frame_info()
1352 struct runs_tree *run; in attr_is_frame_compressed() local
1364 run = &ni->file.run; in attr_is_frame_compressed()
1366 if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx)) { in attr_is_frame_compressed()
1368 attr->name_len, run, vcn); in attr_is_frame_compressed()
1372 if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx)) in attr_is_frame_compressed()
1401 if (!run_get_entry(run, ++idx, &vcn, &lcn, &clen) || in attr_is_frame_compressed()
1405 attr->name_len, run, vcn_next); in attr_is_frame_compressed()
1410 if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx)) in attr_is_frame_compressed()
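attr_is_frame_compressed() sizes a compression frame by walking its fragments, and the hits above show the recovery path: when run_get_entry() steps off the cached entries, or the next fragment is not contiguous, the missing piece is pulled in with attr_load_runs_vcn() and looked up again. A sketch of that step; attr_name() is assumed from ntfs_fs.h:

/* Advance to the next contiguous fragment, reloading the tree on a miss. */
static int next_fragment(struct ntfs_inode *ni, struct ATTRIB *attr,
                         struct runs_tree *run, CLST *vcn, CLST *lcn,
                         CLST *clen, size_t *idx)
{
        CLST vcn_next = *vcn + *clen;
        int err;

        if (run_get_entry(run, ++*idx, vcn, lcn, clen) && *vcn == vcn_next)
                return 0;       /* Next fragment was cached and contiguous. */

        /* Cache miss or inconsistency: decode around vcn_next and retry. */
        err = attr_load_runs_vcn(ni, attr->type, attr_name(attr),
                                 attr->name_len, run, vcn_next);
        if (err)
                return err;

        *vcn = vcn_next;
        if (!run_lookup_entry(run, *vcn, lcn, clen, idx))
                return -EINVAL;
        return 0;
}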
1453 struct runs_tree *run = &ni->file.run; in attr_allocate_frame() local
1496 err = attr_load_runs(attr, ni, run, NULL); in attr_allocate_frame()
1512 err = run_deallocate_ex(sbi, run, vcn + len, clst_data - len, in attr_allocate_frame()
1517 if (!run_add_entry(run, vcn + len, SPARSE_LCN, clst_data - len, in attr_allocate_frame()
1523 /* Run contains updated range [vcn + len : end). */ in attr_allocate_frame()
1528 !run_lookup_entry(run, vcn + clst_data - 1, &hint, NULL, in attr_allocate_frame()
1533 err = attr_allocate_clusters(sbi, run, vcn + clst_data, in attr_allocate_frame()
1540 /* Run contains updated range [vcn + clst_data : end). */ in attr_allocate_frame()
1546 err = mi_pack_runs(mi, attr, run, max(end, evcn1) - svcn); in attr_allocate_frame()
1630 err = attr_load_runs(attr, ni, run, &end); in attr_allocate_frame()
1636 err = mi_pack_runs(mi, attr, run, evcn1 - next_svcn); in attr_allocate_frame()
1648 err = ni_insert_nonresident(ni, ATTR_DATA, NULL, 0, run, in attr_allocate_frame()
1655 run_truncate_around(run, vcn); in attr_allocate_frame()
1675 struct runs_tree *run = &ni->file.run; in attr_collapse_range() local
1726 err = attr_set_size(ni, ATTR_DATA, NULL, 0, &ni->file.run, vbo, in attr_collapse_range()
1781 err = attr_load_runs(attr, ni, run, &svcn); in attr_collapse_range()
1787 err = run_deallocate_ex(sbi, run, vcn1, eat, &dealloc, in attr_collapse_range()
1792 if (!run_collapse_range(run, vcn1, eat)) { in attr_collapse_range()
1806 err = mi_pack_runs(mi, attr, run, evcn1 - svcn - eat); in attr_collapse_range()
1813 ni, ATTR_DATA, NULL, 0, run, next_svcn, in attr_collapse_range()
1830 run_truncate(run, 0); in attr_collapse_range()
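attr_collapse_range() removes whole clusters from the middle of the attribute: run_deallocate_ex() frees the backing clusters, run_collapse_range() slides every later fragment down by @eat VCNs, and mi_pack_runs() writes the shortened mapping back. Sketched from the hits above:

static int collapse(struct ntfs_sb_info *sbi, struct mft_inode *mi,
                    struct ATTRIB *attr, struct runs_tree *run,
                    CLST svcn, CLST evcn1, CLST vcn1, CLST eat)
{
        CLST dealloc = 0;
        int err;

        /* Free the clusters backing the eaten range. */
        err = run_deallocate_ex(sbi, run, vcn1, eat, &dealloc, true);
        if (err)
                return err;

        /* Shift all later fragments down by @eat VCNs. */
        if (!run_collapse_range(run, vcn1, eat))
                return -ENOMEM;

        /* Re-encode the shorter run list into the record. */
        return mi_pack_runs(mi, attr, run, evcn1 - svcn - eat);
}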
1934 struct runs_tree *run = &ni->file.run; in attr_punch_hole() local
2028 err = attr_load_runs(attr, ni, run, &svcn); in attr_punch_hole()
2035 err = run_deallocate_ex(sbi, run, vcn1, zero, &dealloc, true); in attr_punch_hole()
2042 if (!run_add_entry(run, vcn1, SPARSE_LCN, zero, in attr_punch_hole()
2048 err = mi_pack_runs(mi, attr, run, evcn1 - svcn); in attr_punch_hole()
2053 run_truncate(run, 0); in attr_punch_hole()
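attr_punch_hole() combines the two primitives seen throughout this listing: run_deallocate_ex() returns the clusters to the volume bitmap, then run_add_entry() with SPARSE_LCN re-covers the same VCNs as a hole, so the attribute keeps its size but loses its backing store; the final run_truncate(run, 0) drops the now stale cached tree. Sketched under the same assumptions:

static int punch(struct ntfs_sb_info *sbi, struct mft_inode *mi,
                 struct ATTRIB *attr, struct runs_tree *run,
                 CLST svcn, CLST evcn1, CLST vcn1, CLST zero)
{
        CLST dealloc = 0;
        int err;

        /* Give the backing clusters back to the volume bitmap. */
        err = run_deallocate_ex(sbi, run, vcn1, zero, &dealloc, true);
        if (err)
                return err;

        /* Keep the VCN range, but mark it as a sparse hole. */
        if (!run_add_entry(run, vcn1, SPARSE_LCN, zero, false))
                return -ENOMEM;

        /* Re-encode the mapping for [svcn, evcn1). */
        return mi_pack_runs(mi, attr, run, evcn1 - svcn);
}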