Searched refs:XFS_INODES_PER_CHUNK (Results 1 – 10 of 10) sorted by relevance
225 nr_inodes = min_t(unsigned int, XFS_INODES_PER_CHUNK, in xchk_iallocbt_check_cluster()
327 cluster_base < XFS_INODES_PER_CHUNK; in xchk_iallocbt_check_clusters()
367 imask = min_t(unsigned int, XFS_INODES_PER_CHUNK, in xchk_iallocbt_rec_alignment()
385 iabt->next_startino += XFS_INODES_PER_CHUNK; in xchk_iallocbt_rec_alignment()
406 if (igeo->inodes_per_cluster <= XFS_INODES_PER_CHUNK) in xchk_iallocbt_rec_alignment()
414 iabt->next_startino = irec->ir_startino + XFS_INODES_PER_CHUNK; in xchk_iallocbt_rec_alignment()
439 if (irec.ir_count > XFS_INODES_PER_CHUNK || in xchk_iallocbt_rec()
440 irec.ir_freecount > XFS_INODES_PER_CHUNK) in xchk_iallocbt_rec()
444 (XFS_INODES_PER_CHUNK - irec.ir_count); in xchk_iallocbt_rec()
451 !xfs_verify_agino(mp, agno, agino + XFS_INODES_PER_CHUNK - 1)) { in xchk_iallocbt_rec()
[all …]
468 rec.ir_startino + XFS_INODES_PER_CHUNK <= agino) in xchk_inode_xref_finobt()
252 XFS_INODES_PER_CHUNK); in xrep_calc_ag_resblks()
104 for (i = 0; i < XFS_INODES_PER_CHUNK; i += igeo->inodes_per_cluster) { in xfs_iwalk_ichunk_ra()
205 for (j = 0; j < XFS_INODES_PER_CHUNK; j++) { in xfs_iwalk_ag_recs()
308 irec->ir_startino + XFS_INODES_PER_CHUNK - 1); in xfs_iwalk_ag_start()
314 if (irec->ir_startino + XFS_INODES_PER_CHUNK <= agino) in xfs_iwalk_ag_start()
366 ASSERT(next_agino == irec->ir_startino + XFS_INODES_PER_CHUNK); in xfs_iwalk_run_callbacks()
426 iwag->lastino = rec_fsino + XFS_INODES_PER_CHUNK - 1; in xfs_iwalk_ag()
505 inodes = round_up(inodes, XFS_INODES_PER_CHUNK); in xfs_iwalk_prefetch()
520 inobt_records = (inodes * 5) / (4 * XFS_INODES_PER_CHUNK); in xfs_iwalk_prefetch()
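The two xfs_iwalk_prefetch() hits round the requested prefetch up to whole 64-inode chunks and then apply a 5/4 factor when converting inodes into inobt records. A standalone sketch of that arithmetic, assuming XFS_INODES_PER_CHUNK is 64; round_up_chunk() and the example numbers are illustrative, not the kernel helper:

#include <stdio.h>

#define XFS_INODES_PER_CHUNK	64U

/* Round up to the next multiple of the (power-of-two) chunk size. */
static unsigned int round_up_chunk(unsigned int x)
{
	return (x + XFS_INODES_PER_CHUNK - 1) & ~(XFS_INODES_PER_CHUNK - 1);
}

int main(void)
{
	unsigned int inodes = 1000;	/* example prefetch request, in inodes */

	/* Mirrors the arithmetic in the two xfs_iwalk_prefetch() hits:
	 * round up to whole chunks, then scale by 5/4 when converting
	 * inode counts into inobt records. */
	inodes = round_up_chunk(inodes);				/* 1024 */
	unsigned int inobt_records = (inodes * 5) / (4 * XFS_INODES_PER_CHUNK);

	printf("inobt records to prefetch: %u\n", inobt_records);	/* 20 */
	return 0;
}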
349 XFS_INODES_PER_CHUNK; in xfs_inumbers_walk()
91 irec->ir_count = XFS_INODES_PER_CHUNK; in xfs_inobt_btrec_to_irec()
122 irec->ir_count > XFS_INODES_PER_CHUNK) in xfs_inobt_get_rec()
124 if (irec->ir_freecount > XFS_INODES_PER_CHUNK) in xfs_inobt_get_rec()
190 thisino += XFS_INODES_PER_CHUNK) { in xfs_inobt_insert()
199 XFS_INODES_PER_CHUNK, in xfs_inobt_insert()
200 XFS_INODES_PER_CHUNK, in xfs_inobt_insert()
468 if (trec->ir_count + srec->ir_count > XFS_INODES_PER_CHUNK) in __xfs_inobt_can_merge()
794 ASSERT(newlen <= XFS_INODES_PER_CHUNK); in xfs_ialloc_ag_alloc()
1240 (trec.ir_startino + XFS_INODES_PER_CHUNK - 1) < in xfs_dialloc_ag_inobt()
1365 ASSERT(offset < XFS_INODES_PER_CHUNK); in xfs_dialloc_ag_inobt()
[all …]
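Several of these hits are bounds checks against the fixed chunk size: the xfs_inobt_get_rec() hits reject records whose ir_count or ir_freecount exceed one chunk, and the __xfs_inobt_can_merge() hit refuses to merge two sparse records whose combined count would overflow a chunk. A condensed sketch of those two checks, assuming XFS_INODES_PER_CHUNK is 64; struct irec and its field widths are illustrative stand-ins, not the kernel's incore record type:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define XFS_INODES_PER_CHUNK	64U

/* Minimal stand-in for the inobt record fields used by these checks. */
struct irec {
	uint32_t	ir_startino;	/* first inode number in the chunk */
	uint8_t		ir_count;	/* inodes backed by this record */
	uint8_t		ir_freecount;	/* of those, how many are free */
};

/* Reject records claiming more inodes than fit in one chunk, as in the
 * xfs_inobt_get_rec() hits on lines 122 and 124. */
static bool irec_count_ok(const struct irec *irec)
{
	return irec->ir_count <= XFS_INODES_PER_CHUNK &&
	       irec->ir_freecount <= XFS_INODES_PER_CHUNK;
}

/* Two sparse records may only merge if the combined count still fits in
 * one chunk, as in the __xfs_inobt_can_merge() hit on line 468. */
static bool irec_can_merge(const struct irec *t, const struct irec *s)
{
	return t->ir_count + s->ir_count <= XFS_INODES_PER_CHUNK;
}

int main(void)
{
	struct irec a = { .ir_startino = 128, .ir_count = 32, .ir_freecount = 8 };
	struct irec b = { .ir_startino = 160, .ir_count = 48, .ir_freecount = 0 };

	printf("a sane: %d, a+b mergeable: %d\n",
	       irec_count_ok(&a), irec_can_merge(&a, &b));	/* 1, 0 (32+48 > 64) */
	return 0;
}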
189 *min = XFS_INODES_PER_CHUNK; in xfs_icount_range()
205 x += XFS_INODES_PER_CHUNK - 1; in xfs_inobt_init_high_key_from_rec()
377 return be32_to_cpu(r1->inobt.ir_startino) + XFS_INODES_PER_CHUNK <= in xfs_inobt_recs_inorder()
651 XFS_INODES_PER_CHUNK); in xfs_inobt_max_size()
1502 #define XFS_INODES_PER_CHUNK (NBBY * sizeof(xfs_inofree_t)) macro
1510 (XFS_INODES_PER_CHUNK / (NBBY * sizeof(uint16_t)))
1514 return ((n >= XFS_INODES_PER_CHUNK ? 0 : XFS_INOBT_MASK(n)) - 1) << i; in xfs_inobt_maskn()
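The definition on line 1502 pins the chunk size to the width of the xfs_inofree_t free-inode bitmap: with NBBY = 8 bits per byte and a 64-bit xfs_inofree_t, XFS_INODES_PER_CHUNK works out to 64, and the expression on line 1510 (the per-holemask-bit granularity) works out to 64 / 16 = 4 inodes. A minimal userspace sketch of that arithmetic and of the xfs_inobt_maskn() hit; the typedef and macros below are stand-ins for illustration, not the kernel headers:

#include <stdint.h>
#include <stdio.h>

/* Stand-ins for the kernel definitions referenced in the hits above;
 * xfs_inofree_t is the 64-bit per-chunk free-inode bitmap. */
#define NBBY 8
typedef uint64_t xfs_inofree_t;

#define XFS_INODES_PER_CHUNK	(NBBY * sizeof(xfs_inofree_t))	/* 8 * 8 = 64 */
#define XFS_INOBT_MASK(i)	((xfs_inofree_t)1 << (i))

/* n-bit mask of inodes starting at chunk offset i, mirroring the
 * xfs_inobt_maskn() hit on line 1514. */
static xfs_inofree_t xfs_inobt_maskn(int i, int n)
{
	return ((n >= (int)XFS_INODES_PER_CHUNK ? 0 : XFS_INOBT_MASK(n)) - 1) << i;
}

int main(void)
{
	printf("inodes per chunk: %zu\n", XFS_INODES_PER_CHUNK);	/* 64 */
	printf("inodes per holemask bit: %zu\n",
	       XFS_INODES_PER_CHUNK / (NBBY * sizeof(uint16_t)));	/* 64 / 16 = 4 */
	printf("maskn(4, 8) = %#llx\n",
	       (unsigned long long)xfs_inobt_maskn(4, 8));		/* 0xff0 */
	return 0;
}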
258 align = XFS_INODES_PER_CHUNK * sbp->sb_inodesize in xfs_validate_sb_common()
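The xfs_validate_sb_common() hit sizes one inode chunk from the superblock's inode size. A back-of-the-envelope sketch of that arithmetic; the shift by sb_blocklog and the example geometry below are assumptions for illustration, not quoted from the hit:

#include <stdint.h>
#include <stdio.h>

#define XFS_INODES_PER_CHUNK	64U	/* NBBY * sizeof(xfs_inofree_t), per the definition above */

int main(void)
{
	/* Example geometry: 512-byte inodes, 4096-byte fs blocks. */
	uint16_t sb_inodesize = 512;
	uint8_t  sb_blocklog = 12;

	/* One 64-inode chunk spans 64 * 512 bytes = 32 KiB = 8 fs blocks. */
	uint32_t align = XFS_INODES_PER_CHUNK * sb_inodesize >> sb_blocklog;

	printf("inode chunk alignment: %u blocks\n", align);	/* 8 */
	return 0;
}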