Lines matching full:vi

All matches are uses of the local variable vi, the erofs-private inode
returned by EROFS_I(), across the z-erofs extent-mapping helpers (the
function names correspond to fs/erofs/zmap.c in the Linux kernel). The
leading number on each match is its line number within that file.

29 	struct erofs_inode *const vi = EROFS_I(inode);  in z_erofs_load_full_lcluster()  local
31 vi->inode_isize + vi->xattr_isize) + in z_erofs_load_full_lcluster()
50 m->clusterofs = 1 << vi->z_logical_clusterbits; in z_erofs_load_full_lcluster()
53 if (!(vi->z_advise & (Z_EROFS_ADVISE_BIG_PCLUSTER_1 | in z_erofs_load_full_lcluster()
70 if (m->clusterofs >= 1 << vi->z_logical_clusterbits) { in z_erofs_load_full_lcluster()
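
Most of the helpers listed here open with the same pattern: vi = EROFS_I(inode)
recovers the erofs-private inode from the generic VFS inode, and one logical
cluster spans 1 << vi->z_logical_clusterbits bytes, so any decoded in-cluster
offset must stay below that bound. A minimal userspace sketch of that pattern,
using stand-in structures rather than the kernel's real definitions:

/* Stand-in types only -- not the kernel's actual erofs definitions. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct inode {				/* placeholder for the VFS inode */
	uint64_t i_size;
};

struct erofs_inode {			/* placeholder for the private inode */
	uint8_t z_logical_clusterbits;
	struct inode vfs_inode;		/* VFS inode embedded inside */
};

/* container_of-style cast behind "vi = EROFS_I(inode)" */
static inline struct erofs_inode *EROFS_I(struct inode *inode)
{
	return (struct erofs_inode *)((char *)inode -
			offsetof(struct erofs_inode, vfs_inode));
}

int main(void)
{
	struct erofs_inode ei = { .z_logical_clusterbits = 12 };
	struct erofs_inode *vi = EROFS_I(&ei.vfs_inode);
	unsigned int clusterofs = 4096;	/* example offset to validate */

	/* an in-cluster offset must be smaller than one logical cluster */
	if (clusterofs >= 1U << vi->z_logical_clusterbits)
		printf("bogus clusterofs %u\n", clusterofs);
	return 0;
}
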
124 struct erofs_inode *const vi = EROFS_I(m->inode); in unpack_compacted_index() local
125 const unsigned int lclusterbits = vi->z_logical_clusterbits; in unpack_compacted_index()
142 big_pcluster = vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_1; in unpack_compacted_index()
233 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_load_compact_lcluster() local
235 ALIGN(erofs_iloc(inode) + vi->inode_isize + vi->xattr_isize, 8); in z_erofs_load_compact_lcluster()
250 if ((vi->z_advise & Z_EROFS_ADVISE_COMPACTED_2B) && in z_erofs_load_compact_lcluster()
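
The compacted-index matches show where the lcluster index area begins: right
after the on-disk inode and its inline xattrs, rounded up to an 8-byte
boundary (the ALIGN(..., 8) above), with vi->z_advise supplying layout flags
such as COMPACTED_2B and BIG_PCLUSTER_1. A small sketch of that alignment
arithmetic with made-up numbers:

/* Sketch of the 8-byte round-up for the index base; all values are
 * arbitrary examples, not on-disk constants. */
#include <stdint.h>
#include <stdio.h>

#define ALIGN_UP(x, a)	(((x) + (a) - 1) & ~((uint64_t)(a) - 1))

int main(void)
{
	uint64_t iloc = 8192;		/* hypothetical inode byte offset */
	uint64_t inode_isize = 64;	/* on-disk inode record size */
	uint64_t xattr_isize = 12;	/* inline xattr area size */

	/* index records start at the next 8-byte boundary past the inode */
	uint64_t ebase = ALIGN_UP(iloc + inode_isize + xattr_isize, 8);

	printf("index base = %llu\n", (unsigned long long)ebase); /* 8272 */
	return 0;
}
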
297 struct erofs_inode *const vi = EROFS_I(m->inode); in z_erofs_extent_lookback() local
298 const unsigned int lclusterbits = vi->z_logical_clusterbits; in z_erofs_extent_lookback()
322 m->type, lcn, vi->nid); in z_erofs_extent_lookback()
329 lookback_distance, m->lcn, vi->nid); in z_erofs_extent_lookback()
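
The lookback matches report corruption when a backward walk goes wrong: a
non-head lcluster records how far back its head lcluster lies, and the mapper
keeps stepping lcn backwards until it reaches a head. Roughly, and much
simplified (this is not the kernel implementation):

/* Simplified shape of the lookback walk. */
#include <stdio.h>

enum lcluster_type { TYPE_HEAD, TYPE_NONHEAD };

struct lcluster {
	enum lcluster_type type;
	unsigned int lookback;	/* distance to the head, NONHEAD only */
};

/* returns the lcn of the governing head lcluster, or -1 on corruption */
static long find_head(const struct lcluster *tbl, unsigned long lcn)
{
	while (tbl[lcn].type == TYPE_NONHEAD) {
		unsigned int d = tbl[lcn].lookback;

		if (!d || d > lcn)	/* bogus distance: would underflow */
			return -1;
		lcn -= d;
	}
	return (long)lcn;
}

int main(void)
{
	struct lcluster tbl[] = {
		{ TYPE_HEAD, 0 }, { TYPE_NONHEAD, 1 }, { TYPE_NONHEAD, 2 },
	};

	printf("head of lcn 2 is lcn %ld\n", find_head(tbl, 2));	/* 0 */
	return 0;
}
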
338 struct erofs_inode *const vi = EROFS_I(m->inode); in z_erofs_get_extent_compressedlen() local
340 const unsigned int lclusterbits = vi->z_logical_clusterbits; in z_erofs_get_extent_compressedlen()
351 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_1)) || in z_erofs_get_extent_compressedlen()
353 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_2))) { in z_erofs_get_extent_compressedlen()
394 vi->nid); in z_erofs_get_extent_compressedlen()
402 erofs_err(sb, "bogus CBLKCNT @ lcn %lu of nid %llu", lcn, vi->nid); in z_erofs_get_extent_compressedlen()
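
These matches implement the compressed-length rule: each head type has its own
big-pcluster advise bit (BIG_PCLUSTER_1 for HEAD1, BIG_PCLUSTER_2 for HEAD2);
without it the pcluster has a fixed size, and with it a separate CBLKCNT record
carries the block count, which is rejected as corruption when bogus. A
schematic sketch (illustrative names and bit values, not the on-disk ones):

/* Illustrative only: fixed-size vs. "big pcluster" (CBLKCNT-described). */
#include <stdbool.h>
#include <stdio.h>

#define ADVISE_BIG_PCLUSTER_1	0x0002	/* example bit values */
#define ADVISE_BIG_PCLUSTER_2	0x0004

enum head { HEAD1, HEAD2 };

static bool is_big_pcluster(unsigned int z_advise, enum head h)
{
	/* each head type checks its own advise bit, as the lines above show */
	return z_advise & (h == HEAD1 ? ADVISE_BIG_PCLUSTER_1
				      : ADVISE_BIG_PCLUSTER_2);
}

/* fixed-size pcluster unless the matching big-pcluster bit is set,
 * in which case a CBLKCNT record supplies the block count */
static unsigned int pcluster_blocks(unsigned int z_advise, enum head h,
				    unsigned int cblkcnt)
{
	return is_big_pcluster(z_advise, h) ? cblkcnt : 1;
}

int main(void)
{
	/* HEAD1 extent on an inode with only BIG_PCLUSTER_1 set */
	printf("%u block(s)\n",
	       pcluster_blocks(ADVISE_BIG_PCLUSTER_1, HEAD1, 4));
	return 0;
}
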
410 struct erofs_inode *vi = EROFS_I(inode); in z_erofs_get_extent_decompressedlen() local
412 unsigned int lclusterbits = vi->z_logical_clusterbits; in z_erofs_get_extent_decompressedlen()
439 m->type, lcn, vi->nid); in z_erofs_get_extent_decompressedlen()
453 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_do_map_blocks() local
454 bool ztailpacking = vi->z_advise & Z_EROFS_ADVISE_INLINE_PCLUSTER; in z_erofs_do_map_blocks()
455 bool fragment = vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER; in z_erofs_do_map_blocks()
465 lclusterbits = vi->z_logical_clusterbits; in z_erofs_do_map_blocks()
475 vi->z_idataoff = m.nextpackoff; in z_erofs_do_map_blocks()
500 vi->nid); in z_erofs_do_map_blocks()
517 m.type, ofs, vi->nid); in z_erofs_do_map_blocks()
526 vi->z_tailextent_headlcn = m.lcn; in z_erofs_do_map_blocks()
528 if (fragment && vi->datalayout == EROFS_INODE_COMPRESSED_FULL) in z_erofs_do_map_blocks()
529 vi->z_fragmentoff |= (u64)m.pblk << 32; in z_erofs_do_map_blocks()
531 if (ztailpacking && m.lcn == vi->z_tailextent_headlcn) { in z_erofs_do_map_blocks()
533 map->m_pa = vi->z_idataoff; in z_erofs_do_map_blocks()
534 map->m_plen = vi->z_idata_size; in z_erofs_do_map_blocks()
535 } else if (fragment && m.lcn == vi->z_tailextent_headlcn) { in z_erofs_do_map_blocks()
550 if (vi->z_advise & Z_EROFS_ADVISE_INTERLACED_PCLUSTER) in z_erofs_do_map_blocks()
557 map->m_algorithmformat = vi->z_algorithmtype[1]; in z_erofs_do_map_blocks()
559 map->m_algorithmformat = vi->z_algorithmtype[0]; in z_erofs_do_map_blocks()
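
The z_erofs_do_map_blocks() matches show how the final mapping is assembled:
an inline ("ztailpacking") tail is served straight from vi->z_idataoff /
vi->z_idata_size, a fragment tail from vi->z_fragmentoff, and the reported
algorithm comes from one of two per-inode slots selected by the extent's head
type. A sketch of that two-slot selection with stand-in types (simplified;
uncompressed and interlaced extents are ignored here):

/* Illustrative stand-ins, not the kernel's structures. */
#include <stdio.h>

enum head { HEAD1, HEAD2 };

struct zinfo {
	unsigned char z_algorithmtype[2];	/* slot 0: HEAD1, slot 1: HEAD2 */
};

static unsigned char map_algorithm(const struct zinfo *vi, enum head h)
{
	return vi->z_algorithmtype[h == HEAD2 ? 1 : 0];
}

int main(void)
{
	struct zinfo vi = { .z_algorithmtype = { 0, 1 } };	/* example ids */

	printf("HEAD2 extents decode with algorithm %d\n",
	       map_algorithm(&vi, HEAD2));
	return 0;
}
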
579 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_fill_inode_lazy() local
587 if (test_bit(EROFS_I_Z_INITED_BIT, &vi->flags)) { in z_erofs_fill_inode_lazy()
596 if (wait_on_bit_lock(&vi->flags, EROFS_I_BL_Z_BIT, TASK_KILLABLE)) in z_erofs_fill_inode_lazy()
600 if (test_bit(EROFS_I_Z_INITED_BIT, &vi->flags)) in z_erofs_fill_inode_lazy()
603 pos = ALIGN(erofs_iloc(inode) + vi->inode_isize + vi->xattr_isize, 8); in z_erofs_fill_inode_lazy()
616 vi->z_advise = Z_EROFS_ADVISE_FRAGMENT_PCLUSTER; in z_erofs_fill_inode_lazy()
617 vi->z_fragmentoff = le64_to_cpu(*(__le64 *)h) ^ (1ULL << 63); in z_erofs_fill_inode_lazy()
618 vi->z_tailextent_headlcn = 0; in z_erofs_fill_inode_lazy()
621 vi->z_advise = le16_to_cpu(h->h_advise); in z_erofs_fill_inode_lazy()
622 vi->z_algorithmtype[0] = h->h_algorithmtype & 15; in z_erofs_fill_inode_lazy()
623 vi->z_algorithmtype[1] = h->h_algorithmtype >> 4; in z_erofs_fill_inode_lazy()
626 if (vi->z_algorithmtype[0] >= Z_EROFS_COMPRESSION_MAX || in z_erofs_fill_inode_lazy()
627 vi->z_algorithmtype[++headnr] >= Z_EROFS_COMPRESSION_MAX) { in z_erofs_fill_inode_lazy()
629 headnr + 1, vi->z_algorithmtype[headnr], vi->nid); in z_erofs_fill_inode_lazy()
634 vi->z_logical_clusterbits = sb->s_blocksize_bits + (h->h_clusterbits & 7); in z_erofs_fill_inode_lazy()
636 vi->z_advise & (Z_EROFS_ADVISE_BIG_PCLUSTER_1 | in z_erofs_fill_inode_lazy()
639 vi->nid); in z_erofs_fill_inode_lazy()
643 if (vi->datalayout == EROFS_INODE_COMPRESSED_COMPACT && in z_erofs_fill_inode_lazy()
644 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_1) ^ in z_erofs_fill_inode_lazy()
645 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_2)) { in z_erofs_fill_inode_lazy()
647 vi->nid); in z_erofs_fill_inode_lazy()
652 if (vi->z_advise & Z_EROFS_ADVISE_INLINE_PCLUSTER) { in z_erofs_fill_inode_lazy()
657 vi->z_idata_size = le16_to_cpu(h->h_idata_size); in z_erofs_fill_inode_lazy()
672 if (vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER && in z_erofs_fill_inode_lazy()
678 vi->z_fragmentoff = le32_to_cpu(h->h_fragmentoff); in z_erofs_fill_inode_lazy()
688 set_bit(EROFS_I_Z_INITED_BIT, &vi->flags); in z_erofs_fill_inode_lazy()
692 clear_and_wake_up_bit(EROFS_I_BL_Z_BIT, &vi->flags); in z_erofs_fill_inode_lazy()
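
z_erofs_fill_inode_lazy() is the one-time decoder of the per-inode compression
header: the EROFS_I_Z_INITED_BIT fast path plus wait_on_bit_lock() and
clear_and_wake_up_bit() serialize concurrent initializers, and the header
fields are unpacked as the lines show: two 4-bit algorithm ids from
h_algorithmtype, and a logical cluster shift of s_blocksize_bits plus the low
three bits of h_clusterbits. The field decoding, with invented example values
(field names follow the lines above):

/* Decoding sketch for the header fields referenced above. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint8_t h_algorithmtype = 0x10;	/* low nibble: slot 0, high: slot 1 */
	uint8_t h_clusterbits = 0x02;	/* low 3 bits: extra cluster shift */
	unsigned int blocksize_bits = 12;	/* 4 KiB blocks */

	unsigned int alg0 = h_algorithmtype & 15;	/* -> z_algorithmtype[0] */
	unsigned int alg1 = h_algorithmtype >> 4;	/* -> z_algorithmtype[1] */
	unsigned int lclusterbits = blocksize_bits + (h_clusterbits & 7);

	printf("alg0=%u alg1=%u, logical cluster = %u bytes\n",
	       alg0, alg1, 1U << lclusterbits);	/* 0, 1, 16384 */
	return 0;
}
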
699 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_map_blocks_iter() local
716 if ((vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER) && in z_erofs_map_blocks_iter()
717 !vi->z_tailextent_headlcn) { in z_erofs_map_blocks_iter()