Lines matching full:vi (all full-token occurrences of the local variable vi, each listed with its line number and enclosing function; "local" marks the declaration sites)

27 	struct erofs_inode *const vi = EROFS_I(inode);  in z_erofs_load_full_lcluster()  local
29 vi->inode_isize + vi->xattr_isize) + in z_erofs_load_full_lcluster()
43 m->clusterofs = 1 << vi->z_lclusterbits; in z_erofs_load_full_lcluster()
46 if (!(vi->z_advise & (Z_EROFS_ADVISE_BIG_PCLUSTER_1 | in z_erofs_load_full_lcluster()
58 if (m->clusterofs >= 1 << vi->z_lclusterbits) { in z_erofs_load_full_lcluster()
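
The matches above, from z_erofs_load_full_lcluster(), locate the full-format lcluster index area right after the on-disk inode and its xattrs (erofs_iloc(inode) + vi->inode_isize + vi->xattr_isize) and treat 1 << vi->z_lclusterbits as the logical cluster size, rejecting any clusterofs that reaches it. Below is a minimal userspace sketch of that arithmetic; the struct is an illustrative subset with assumed field widths, not the kernel's struct erofs_inode.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative subset of the fields visible in the matches; layout is assumed. */
struct demo_inode {
	uint64_t iloc;           /* byte position of the on-disk inode (stand-in for erofs_iloc()) */
	uint8_t  inode_isize;    /* size of the core inode */
	uint32_t xattr_isize;    /* size of the xattr area */
	uint8_t  z_lclusterbits; /* log2 of the logical cluster size */
};

/* Where the compressed index area starts: right after inode + xattrs. */
static uint64_t demo_index_base(const struct demo_inode *vi)
{
	return vi->iloc + vi->inode_isize + vi->xattr_isize;
}

/* Mirror of the check "m->clusterofs >= 1 << vi->z_lclusterbits". */
static bool demo_clusterofs_valid(const struct demo_inode *vi, unsigned int clusterofs)
{
	return clusterofs < (1U << vi->z_lclusterbits);
}

int main(void)
{
	struct demo_inode vi = { .iloc = 8192, .inode_isize = 64,
				 .xattr_isize = 16, .z_lclusterbits = 12 };

	printf("index base: %llu\n", (unsigned long long)demo_index_base(&vi));  /* 8272 */
	printf("clusterofs 4096 valid: %d\n", demo_clusterofs_valid(&vi, 4096)); /* 0 */
	return 0;
}
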
104 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_load_compact_lcluster() local
106 vi->inode_isize + vi->xattr_isize); in z_erofs_load_compact_lcluster()
107 const unsigned int lclusterbits = vi->z_lclusterbits; in z_erofs_load_compact_lcluster()
111 bool big_pcluster = vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_1; in z_erofs_load_compact_lcluster()
123 if ((vi->z_advise & Z_EROFS_ADVISE_COMPACTED_2B) && in z_erofs_load_compact_lcluster()
257 struct erofs_inode *const vi = EROFS_I(m->inode); in z_erofs_extent_lookback() local
258 const unsigned int lclusterbits = vi->z_lclusterbits; in z_erofs_extent_lookback()
270 m->type, lcn, vi->nid); in z_erofs_extent_lookback()
285 lookback_distance, m->lcn, vi->nid); in z_erofs_extent_lookback()
295 struct erofs_inode *vi = EROFS_I(inode); in z_erofs_get_extent_compressedlen() local
296 bool bigpcl1 = vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_1; in z_erofs_get_extent_compressedlen()
297 bool bigpcl2 = vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_2; in z_erofs_get_extent_compressedlen()
307 (lcn << vi->z_lclusterbits) >= inode->i_size) in z_erofs_get_extent_compressedlen()
330 erofs_err(sb, "bogus CBLKCNT @ lcn %lu of nid %llu", lcn, vi->nid); in z_erofs_get_extent_compressedlen()
344 erofs_err(sb, "cannot found CBLKCNT @ lcn %lu of nid %llu", lcn, vi->nid); in z_erofs_get_extent_compressedlen()
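
In z_erofs_get_extent_compressedlen(), the two BIG_PCLUSTER advise bits are lowered into booleans and an lcluster number is turned into a byte offset with lcn << vi->z_lclusterbits, which is then compared against inode->i_size to catch out-of-range clusters. A small sketch of that conversion and bound check; the helper names are mine, not the kernel's.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* lcn -> byte offset, as in "(lcn << vi->z_lclusterbits)". */
static uint64_t lcn_to_offset(uint64_t lcn, unsigned int lclusterbits)
{
	return lcn << lclusterbits;
}

/* An lcn is out of range once its byte offset reaches the file size. */
static bool lcn_beyond_eof(uint64_t lcn, unsigned int lclusterbits, uint64_t i_size)
{
	return lcn_to_offset(lcn, lclusterbits) >= i_size;
}

int main(void)
{
	/* 4KiB logical clusters (lclusterbits == 12), a 10000-byte file. */
	printf("lcn 2 beyond EOF: %d\n", lcn_beyond_eof(2, 12, 10000)); /* 0 */
	printf("lcn 3 beyond EOF: %d\n", lcn_beyond_eof(3, 12, 10000)); /* 1 */
	return 0;
}
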
355 struct erofs_inode *vi = EROFS_I(inode); in z_erofs_get_extent_decompressedlen() local
357 unsigned int lclusterbits = vi->z_lclusterbits; in z_erofs_get_extent_decompressedlen()
384 m->type, lcn, vi->nid); in z_erofs_get_extent_decompressedlen()
397 struct erofs_inode *vi = EROFS_I(inode); in z_erofs_map_blocks_fo() local
399 bool fragment = vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER; in z_erofs_map_blocks_fo()
400 bool ztailpacking = vi->z_idata_size; in z_erofs_map_blocks_fo()
401 unsigned int lclusterbits = vi->z_lclusterbits; in z_erofs_map_blocks_fo()
413 !vi->z_tailextent_headlcn) { in z_erofs_map_blocks_fo()
428 vi->z_fragmentoff = m.nextpackoff; in z_erofs_map_blocks_fo()
451 vi->nid); in z_erofs_map_blocks_fo()
467 m.type, ofs, vi->nid); in z_erofs_map_blocks_fo()
476 vi->z_tailextent_headlcn = m.lcn; in z_erofs_map_blocks_fo()
478 if (fragment && vi->datalayout == EROFS_INODE_COMPRESSED_FULL) in z_erofs_map_blocks_fo()
479 vi->z_fragmentoff |= (u64)m.pblk << 32; in z_erofs_map_blocks_fo()
481 if (ztailpacking && m.lcn == vi->z_tailextent_headlcn) { in z_erofs_map_blocks_fo()
483 map->m_pa = vi->z_fragmentoff; in z_erofs_map_blocks_fo()
484 map->m_plen = vi->z_idata_size; in z_erofs_map_blocks_fo()
491 } else if (fragment && m.lcn == vi->z_tailextent_headlcn) { in z_erofs_map_blocks_fo()
506 afmt = vi->z_advise & Z_EROFS_ADVISE_INTERLACED_PCLUSTER ? in z_erofs_map_blocks_fo()
511 vi->z_algorithmtype[1] : vi->z_algorithmtype[0]; in z_erofs_map_blocks_fo()
514 afmt, vi->nid); in z_erofs_map_blocks_fo()
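
The z_erofs_map_blocks_fo() matches show fragment and tail-packing state kept in compact per-inode fields: z_idata_size doubles as the ztailpacking flag and inline length, and for the FULL layout the physical block of the packed pcluster is OR-ed into the upper 32 bits of z_fragmentoff ((u64)m.pblk << 32) while the lower half keeps the byte offset. A hedged sketch of that 32/32 packing, with hypothetical helper names:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Pack: a 32-bit offset in the low half, a 32-bit block number above it. */
static uint64_t pack_fragmentoff(uint32_t fragmentoff, uint32_t pblk)
{
	return (uint64_t)fragmentoff | ((uint64_t)pblk << 32);
}

static uint32_t unpack_offset(uint64_t v) { return (uint32_t)v; }
static uint32_t unpack_pblk(uint64_t v)   { return (uint32_t)(v >> 32); }

int main(void)
{
	uint64_t v = pack_fragmentoff(0x123, 0x42);

	printf("packed 0x%016" PRIx64 "\n", v);
	printf("offset 0x%" PRIx32 "\n", unpack_offset(v)); /* 0x123 */
	printf("pblk   0x%" PRIx32 "\n", unpack_pblk(v));   /* 0x42 */
	return 0;
}
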
540 struct erofs_inode *vi = EROFS_I(inode); in z_erofs_map_blocks_ext() local
542 bool interlaced = vi->z_advise & Z_EROFS_ADVISE_INTERLACED_PCLUSTER; in z_erofs_map_blocks_ext()
543 unsigned int recsz = z_erofs_extent_recsize(vi->z_advise); in z_erofs_map_blocks_ext()
545 vi->inode_isize + vi->xattr_isize), recsz); in z_erofs_map_blocks_ext()
562 lstart = round_down(map->m_la, 1 << vi->z_lclusterbits); in z_erofs_map_blocks_ext()
563 pos += (lstart >> vi->z_lclusterbits) * recsz; in z_erofs_map_blocks_ext()
567 for (; lstart <= map->m_la; lstart += 1 << vi->z_lclusterbits) { in z_erofs_map_blocks_ext()
580 last = (lstart >= round_up(lend, 1 << vi->z_lclusterbits)); in z_erofs_map_blocks_ext()
582 lstart -= 1 << vi->z_lclusterbits; in z_erofs_map_blocks_ext()
585 for (l = 0, r = vi->z_extents; l < r; ) { in z_erofs_map_blocks_ext()
610 last = (l >= vi->z_extents); in z_erofs_map_blocks_ext()
615 if (last && (vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER)) { in z_erofs_map_blocks_ext()
617 vi->z_fragmentoff = map->m_plen; in z_erofs_map_blocks_ext()
619 vi->z_fragmentoff |= map->m_pa << 32; in z_erofs_map_blocks_ext()
640 map->m_llen, map->m_la, vi->nid); in z_erofs_map_blocks_ext()
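
For the extent-table layout handled by z_erofs_map_blocks_ext(), the matches show the logical address being rounded down to an lcluster boundary, and the resulting cluster index, multiplied by the record size recsz, giving the byte position of the fixed-size extent record; variable-size cases fall back to a binary search over vi->z_extents, and a trailing fragment again packs its length and physical address into z_fragmentoff. A minimal sketch of the record-position arithmetic; the function names are mine.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* round_down(la, 1 << lclusterbits) without the kernel macro. */
static uint64_t lcluster_round_down(uint64_t la, unsigned int lclusterbits)
{
	return la & ~((1ULL << lclusterbits) - 1);
}

/* Byte position of the fixed-size extent record covering "la". */
static uint64_t extent_record_pos(uint64_t base, uint64_t la,
				  unsigned int lclusterbits, unsigned int recsz)
{
	uint64_t lstart = lcluster_round_down(la, lclusterbits);

	return base + (lstart >> lclusterbits) * recsz;
}

int main(void)
{
	/* 4KiB lclusters, 32-byte records, record area starting at byte 4096. */
	printf("record @ %" PRIu64 "\n",
	       extent_record_pos(4096, 10000, 12, 32)); /* lcn 2 -> 4096 + 64 */
	return 0;
}
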
649 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_fill_inode_lazy() local
656 if (test_bit(EROFS_I_Z_INITED_BIT, &vi->flags)) { in z_erofs_fill_inode_lazy()
665 if (wait_on_bit_lock(&vi->flags, EROFS_I_BL_Z_BIT, TASK_KILLABLE)) in z_erofs_fill_inode_lazy()
669 if (test_bit(EROFS_I_Z_INITED_BIT, &vi->flags)) in z_erofs_fill_inode_lazy()
672 pos = ALIGN(erofs_iloc(inode) + vi->inode_isize + vi->xattr_isize, 8); in z_erofs_fill_inode_lazy()
684 vi->z_advise = Z_EROFS_ADVISE_FRAGMENT_PCLUSTER; in z_erofs_fill_inode_lazy()
685 vi->z_fragmentoff = le64_to_cpu(*(__le64 *)h) ^ (1ULL << 63); in z_erofs_fill_inode_lazy()
686 vi->z_tailextent_headlcn = 0; in z_erofs_fill_inode_lazy()
689 vi->z_advise = le16_to_cpu(h->h_advise); in z_erofs_fill_inode_lazy()
690 vi->z_lclusterbits = sb->s_blocksize_bits + (h->h_clusterbits & 15); in z_erofs_fill_inode_lazy()
691 if (vi->datalayout == EROFS_INODE_COMPRESSED_FULL && in z_erofs_fill_inode_lazy()
692 (vi->z_advise & Z_EROFS_ADVISE_EXTENTS)) { in z_erofs_fill_inode_lazy()
693 vi->z_extents = le32_to_cpu(h->h_extents_lo) | in z_erofs_fill_inode_lazy()
698 vi->z_algorithmtype[0] = h->h_algorithmtype & 15; in z_erofs_fill_inode_lazy()
699 vi->z_algorithmtype[1] = h->h_algorithmtype >> 4; in z_erofs_fill_inode_lazy()
700 if (vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER) in z_erofs_fill_inode_lazy()
701 vi->z_fragmentoff = le32_to_cpu(h->h_fragmentoff); in z_erofs_fill_inode_lazy()
702 else if (vi->z_advise & Z_EROFS_ADVISE_INLINE_PCLUSTER) in z_erofs_fill_inode_lazy()
703 vi->z_idata_size = le16_to_cpu(h->h_idata_size); in z_erofs_fill_inode_lazy()
706 if (vi->z_algorithmtype[0] >= Z_EROFS_COMPRESSION_MAX || in z_erofs_fill_inode_lazy()
707 vi->z_algorithmtype[++headnr] >= Z_EROFS_COMPRESSION_MAX) { in z_erofs_fill_inode_lazy()
709 headnr + 1, vi->z_algorithmtype[headnr], vi->nid); in z_erofs_fill_inode_lazy()
715 vi->z_advise & (Z_EROFS_ADVISE_BIG_PCLUSTER_1 | in z_erofs_fill_inode_lazy()
718 vi->nid); in z_erofs_fill_inode_lazy()
722 if (vi->datalayout == EROFS_INODE_COMPRESSED_COMPACT && in z_erofs_fill_inode_lazy()
723 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_1) ^ in z_erofs_fill_inode_lazy()
724 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_2)) { in z_erofs_fill_inode_lazy()
726 vi->nid); in z_erofs_fill_inode_lazy()
731 if (vi->z_idata_size || in z_erofs_fill_inode_lazy()
732 (vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER)) { in z_erofs_fill_inode_lazy()
746 set_bit(EROFS_I_Z_INITED_BIT, &vi->flags); in z_erofs_fill_inode_lazy()
750 clear_and_wake_up_bit(EROFS_I_BL_Z_BIT, &vi->flags); in z_erofs_fill_inode_lazy()
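
z_erofs_fill_inode_lazy() is where most of these fields are populated from the on-disk map header: the logical cluster bits come from the block size bits plus the low nibble of h_clusterbits, the two compression algorithm ids share one byte (low and high nibble), and for the COMPACT layout the BIG_PCLUSTER_1/2 advise bits must be either both set or both clear, which the !(A) ^ !(B) test enforces. A userspace sketch of that decode follows; the flag values are assumptions chosen for illustration, not taken from the on-disk format definition.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Assumed demo values; only the two-bit consistency rule matters here. */
#define DEMO_ADVISE_BIG_PCLUSTER_1 0x0002
#define DEMO_ADVISE_BIG_PCLUSTER_2 0x0004

/* Split h_algorithmtype into the two per-head algorithm ids. */
static void demo_split_algorithms(uint8_t h_algorithmtype, uint8_t out[2])
{
	out[0] = h_algorithmtype & 15;
	out[1] = h_algorithmtype >> 4;
}

/* lclusterbits = block size bits + low nibble of h_clusterbits. */
static unsigned int demo_lclusterbits(unsigned int blkszbits, uint8_t h_clusterbits)
{
	return blkszbits + (h_clusterbits & 15);
}

/*
 * "!(A) ^ !(B)" is true exactly when one big-pcluster bit is set without
 * the other; the COMPACT layout rejects that combination.
 */
static bool demo_big_pcluster_bits_consistent(uint16_t z_advise)
{
	return !(!(z_advise & DEMO_ADVISE_BIG_PCLUSTER_1) ^
		 !(z_advise & DEMO_ADVISE_BIG_PCLUSTER_2));
}

int main(void)
{
	uint8_t alg[2];

	demo_split_algorithms(0x10, alg);           /* head1 = 0, head2 = 1 */
	printf("alg[0]=%u alg[1]=%u\n", alg[0], alg[1]);
	printf("lclusterbits=%u\n", demo_lclusterbits(12, 0));
	printf("consistent=%d\n",
	       demo_big_pcluster_bits_consistent(DEMO_ADVISE_BIG_PCLUSTER_1));
	/* prints 0: only one of the two bits is set */
	return 0;
}
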
757 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_map_blocks_iter() local
768 if (vi->datalayout == EROFS_INODE_COMPRESSED_FULL && in z_erofs_map_blocks_iter()
769 (vi->z_advise & Z_EROFS_ADVISE_EXTENTS)) in z_erofs_map_blocks_iter()
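
Finally, z_erofs_map_blocks_iter() only dispatches on datalayout and the EXTENTS advise bit once z_erofs_fill_inode_lazy() has run, and the earlier matches on vi->flags (test_bit, wait_on_bit_lock, set_bit, clear_and_wake_up_bit) show the usual once-per-inode initialization protocol behind that: a lockless fast path, a bit lock, a recheck, then publish and wake. A portable sketch of the same init-once shape using C11 atomics and a mutex in place of the kernel's bit lock; it is an analogy, not a reproduction of the kernel primitives. Build with -pthread.

#include <pthread.h>
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

/* Stand-ins for EROFS_I_Z_INITED_BIT / EROFS_I_BL_Z_BIT. */
struct demo_zinfo {
	atomic_bool     z_inited;   /* ~ EROFS_I_Z_INITED_BIT */
	pthread_mutex_t z_bitlock;  /* ~ EROFS_I_BL_Z_BIT bit lock */
	unsigned int    z_lclusterbits;
};

/* Decode the per-inode header once; later callers take only the fast path. */
static void demo_fill_zinfo_lazy(struct demo_zinfo *vi)
{
	if (atomic_load_explicit(&vi->z_inited, memory_order_acquire))
		return;                       /* fast path: already initialized */

	pthread_mutex_lock(&vi->z_bitlock);   /* ~ wait_on_bit_lock() */
	if (!atomic_load_explicit(&vi->z_inited, memory_order_relaxed)) {
		vi->z_lclusterbits = 12;      /* pretend-parse the map header */
		atomic_store_explicit(&vi->z_inited, true, memory_order_release);
	}
	pthread_mutex_unlock(&vi->z_bitlock); /* ~ clear_and_wake_up_bit() */
}

int main(void)
{
	struct demo_zinfo vi = {
		.z_inited = false,
		.z_bitlock = PTHREAD_MUTEX_INITIALIZER,
	};

	demo_fill_zinfo_lazy(&vi);
	demo_fill_zinfo_lazy(&vi);            /* second call is a no-op */
	printf("lclusterbits=%u\n", vi.z_lclusterbits);
	return 0;
}
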