Lines Matching refs:sc
(References to the struct xfs_scrub pointer 'sc' in the XFS inode btree scrubber, fs/xfs/scrub/ialloc.c in the mainline kernel. Each match shows the kernel source line number, the matched code, and the enclosing function; "argument" marks a hit in a parameter list.)
32 struct xfs_scrub *sc, in xchk_setup_ag_iallocbt() argument
35 return xchk_setup_ag_btree(sc, ip, sc->flags & XCHK_TRY_HARDER); in xchk_setup_ag_iallocbt()
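The first two matches, lines 32 and 35, are the scrubber's setup hook, which defers to the generic per-AG btree setup. A minimal reconstruction from the matched fragments; the 'ip' parameter and the return type are inferred rather than shown in the listing:

int
xchk_setup_ag_iallocbt(
	struct xfs_scrub	*sc,
	struct xfs_inode	*ip)
{
	/*
	 * Pass XCHK_TRY_HARDER through as the force_log flag so that a
	 * rescrub flushes the log before locking the AG headers.
	 */
	return xchk_setup_ag_btree(sc, ip, sc->flags & XCHK_TRY_HARDER);
}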
58 struct xfs_scrub *sc, in xchk_iallocbt_chunk_xref_other() argument
66 if (sc->sm->sm_type == XFS_SCRUB_TYPE_FINOBT) in xchk_iallocbt_chunk_xref_other()
67 pcur = &sc->sa.ino_cur; in xchk_iallocbt_chunk_xref_other()
69 pcur = &sc->sa.fino_cur; in xchk_iallocbt_chunk_xref_other()
73 if (!xchk_should_check_xref(sc, &error, pcur)) in xchk_iallocbt_chunk_xref_other()
77 xchk_btree_xref_set_corrupt(sc, *pcur, 0); in xchk_iallocbt_chunk_xref_other()
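Lines 58-77 belong to the helper that cross-references an inode chunk against the other inode btree: while scrubbing the finobt it picks the inobt cursor, and vice versa. A hedged sketch of the body around the matched lines; the xfs_ialloc_has_inode_record() lookup and the freecount comparison are assumptions about the elided code:

STATIC void
xchk_iallocbt_chunk_xref_other(
	struct xfs_scrub		*sc,
	struct xfs_inobt_rec_incore	*irec,
	xfs_agino_t			agino)
{
	struct xfs_btree_cur		**pcur;
	bool				has_irec;
	int				error;

	/* Pick whichever inode btree we are NOT currently scrubbing. */
	if (sc->sm->sm_type == XFS_SCRUB_TYPE_FINOBT)
		pcur = &sc->sa.ino_cur;
	else
		pcur = &sc->sa.fino_cur;
	if (!(*pcur))
		return;

	/* Assumed: ask the other btree whether it has a record for agino. */
	error = xfs_ialloc_has_inode_record(*pcur, agino, agino, &has_irec);
	if (!xchk_should_check_xref(sc, &error, pcur))
		return;

	/* A chunk with free inodes must be in the finobt, and only then. */
	if ((irec->ir_freecount > 0 && !has_irec) ||
	    (irec->ir_freecount == 0 && has_irec))
		xchk_btree_xref_set_corrupt(sc, *pcur, 0);
}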
83 struct xfs_scrub *sc, in xchk_iallocbt_chunk_xref() argument
89 if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_iallocbt_chunk_xref()
92 xchk_xref_is_used_space(sc, agbno, len); in xchk_iallocbt_chunk_xref()
93 xchk_iallocbt_chunk_xref_other(sc, irec, agino); in xchk_iallocbt_chunk_xref()
94 xchk_xref_is_owned_by(sc, agbno, len, &XFS_RMAP_OINFO_INODES); in xchk_iallocbt_chunk_xref()
95 xchk_xref_is_not_shared(sc, agbno, len); in xchk_iallocbt_chunk_xref()
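Lines 83-95 form the chunk cross-reference dispatcher, and apart from the parameter list it is almost fully recoverable from the matches: bail if the record is already known to be corrupt, then verify that the chunk's blocks are marked used, agree with the other inode btree, carry inode rmap ownership, and are not shared:

STATIC void
xchk_iallocbt_chunk_xref(
	struct xfs_scrub		*sc,
	struct xfs_inobt_rec_incore	*irec,
	xfs_agino_t			agino,
	xfs_agblock_t			agbno,
	xfs_extlen_t			len)
{
	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
		return;

	xchk_xref_is_used_space(sc, agbno, len);
	xchk_iallocbt_chunk_xref_other(sc, irec, agino);
	xchk_xref_is_owned_by(sc, agbno, len, &XFS_RMAP_OINFO_INODES);
	xchk_xref_is_not_shared(sc, agbno, len);
}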
114 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_chunk()
116 xchk_iallocbt_chunk_xref(bs->sc, irec, agino, bno, len); in xchk_iallocbt_chunk()
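Lines 114 and 116 sit in xchk_iallocbt_chunk(), which bounds-checks the block range covered by one chunk before invoking the dispatcher above. A hedged sketch; the agbno conversion and the overflow test are inferred from the pattern used elsewhere in scrub:

STATIC bool
xchk_iallocbt_chunk(
	struct xchk_btree		*bs,
	struct xfs_inobt_rec_incore	*irec,
	xfs_agino_t			agino,
	xfs_extlen_t			len)
{
	struct xfs_mount		*mp = bs->cur->bc_mp;
	xfs_agnumber_t			agno = bs->cur->bc_private.a.agno;
	xfs_agblock_t			bno;

	/* Assumed: convert the chunk's first inode to an AG block number. */
	bno = XFS_AGINO_TO_AGBNO(mp, agino);

	/* Flag wraparound or a range that runs past the end of the AG. */
	if (bno + len <= bno ||
	    !xfs_verify_agbno(mp, agno, bno) ||
	    !xfs_verify_agbno(mp, agno, bno + len - 1))
		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);

	xchk_iallocbt_chunk_xref(bs->sc, irec, agino, bno, len);
	return true;
}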
159 if (xchk_should_terminate(bs->sc, &error)) in xchk_iallocbt_check_cluster_ifree()
172 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster_ifree()
181 if (!(bs->sc->flags & XCHK_TRY_HARDER) && !freemask_ok) in xchk_iallocbt_check_cluster_ifree()
195 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster_ifree()
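Lines 159-195 come from the per-inode free-state check: for each inode backed by a cluster buffer, the record's free bit must agree with whether the inode is actually in use. A condensed, hedged sketch; the in-core lookup, the fallback to on-disk mode bits, and the local names are assumptions from context:

STATIC int
xchk_iallocbt_check_cluster_ifree(
	struct xchk_btree		*bs,
	struct xfs_inobt_rec_incore	*irec,
	unsigned int			irec_ino,
	struct xfs_dinode		*dip)
{
	struct xfs_mount		*mp = bs->cur->bc_mp;
	xfs_ino_t			fsino;
	bool				irec_free;
	bool				ino_inuse;
	bool				freemask_ok;
	int				error = 0;

	if (xchk_should_terminate(bs->sc, &error))
		return error;

	/* Which inode is this, and does the record claim it is free? */
	fsino = XFS_AGINO_TO_INO(mp, bs->cur->bc_private.a.agno,
			irec->ir_startino + irec_ino);
	irec_free = (irec->ir_free & XFS_INOBT_MASK(irec_ino));

	if (be16_to_cpu(dip->di_magic) != XFS_DINODE_MAGIC) {
		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
		goto out;
	}

	/* Assumed: prefer the in-core allocation state when available. */
	error = xfs_icache_inode_is_allocated(mp, bs->cur->bc_tp, fsino,
			&ino_inuse);
	if (error == -ENODATA) {
		/* Not cached; fall back to the on-disk mode bits. */
		freemask_ok = irec_free ^ !!(dip->di_mode);
		/* Mismatch could be a race; ask the caller to try harder. */
		if (!(bs->sc->flags & XCHK_TRY_HARDER) && !freemask_ok)
			return -EDEADLOCK;
	} else if (error < 0) {
		goto out;	/* sketch: real code runs error processing here */
	} else {
		freemask_ok = irec_free ^ ino_inuse;
	}
	if (!freemask_ok)
		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
out:
	return 0;
}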
253 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster()
265 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster()
271 xchk_xref_is_not_owned_by(bs->sc, agbno, in xchk_iallocbt_check_cluster()
277 xchk_xref_is_owned_by(bs->sc, agbno, M_IGEO(mp)->blocks_per_cluster, in xchk_iallocbt_check_cluster()
283 if (!xchk_btree_xref_process_error(bs->sc, bs->cur, 0, &error)) in xchk_iallocbt_check_cluster()
291 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster()
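Lines 253-291 are from the cluster-level check. The visible core is the rmap cross-reference: a cluster that the sparse holemask marks absent must not be owned by inodes in the rmapbt, while a present cluster must be, after which the cluster buffer is read so each inode can be inspected. An excerpt-style sketch; cluster_mask, imap, and the surrounding declarations are assumptions:

/* Excerpt (sketch) from xchk_iallocbt_check_cluster(); declarations trimmed. */
if (irec->ir_holemask & cluster_mask)
	xchk_xref_is_not_owned_by(bs->sc, agbno,
			M_IGEO(mp)->blocks_per_cluster,
			&XFS_RMAP_OINFO_INODES);
else
	xchk_xref_is_owned_by(bs->sc, agbno, M_IGEO(mp)->blocks_per_cluster,
			&XFS_RMAP_OINFO_INODES);

/* A hole has no cluster buffer to read, so we are done with it. */
if (irec->ir_holemask & cluster_mask)
	return 0;

/* Assumed: map and read the on-disk inode cluster buffer. */
error = xfs_imap_to_bp(mp, bs->cur->bc_tp, &imap, &dip, &cluster_bp, 0, 0);
if (!xchk_btree_xref_process_error(bs->sc, bs->cur, 0, &error))
	return 0;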
329 cluster_base += M_IGEO(bs->sc->mp)->inodes_per_cluster) { in xchk_iallocbt_check_clusters()
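Line 329 is the loop header of xchk_iallocbt_check_clusters(), which walks one 64-inode chunk a cluster buffer at a time. Nearly the whole function follows from that single line; the sketch below fills in only the obvious scaffolding:

STATIC int
xchk_iallocbt_check_clusters(
	struct xchk_btree		*bs,
	struct xfs_inobt_rec_incore	*irec)
{
	unsigned int			cluster_base;
	int				error = 0;

	/* Visit each inode cluster backing this chunk record. */
	for (cluster_base = 0;
	     cluster_base < XFS_INODES_PER_CHUNK;
	     cluster_base += M_IGEO(bs->sc->mp)->inodes_per_cluster) {
		error = xchk_iallocbt_check_cluster(bs, irec, cluster_base);
		if (error)
			break;
	}

	return error;
}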
349 struct xfs_mount *mp = bs->sc->mp; in xchk_iallocbt_rec_alignment()
371 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec_alignment()
382 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec_alignment()
398 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec_alignment()
403 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec_alignment()
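Lines 349-403 come from the record-alignment check; the four corruption sites in the listing map onto four distinct misalignment cases. An excerpt-style sketch of those triggers; the geometry fields come from M_IGEO() but the exact masks and the iabt sequencing state are reconstructed from context:

/* Excerpt (sketch) from xchk_iallocbt_rec_alignment(); declarations trimmed. */
struct xfs_ino_geometry	*igeo = M_IGEO(mp);

/* finobt records need only sparse-chunk alignment. */
if (bs->cur->bc_btnum == XFS_BTNUM_FINO) {
	unsigned int	imask;

	imask = min_t(unsigned int, XFS_INODES_PER_CHUNK,
			igeo->cluster_align_inodes) - 1;
	if (irec->ir_startino & imask)
		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
	return;
}

/* Mid-cluster records must continue the startino sequence we expect. */
if (iabt->next_startino != NULLAGINO) {
	if (irec->ir_startino != iabt->next_startino)
		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
	return;
}

/* Otherwise records must be cluster-aligned... */
if (irec->ir_startino & (igeo->cluster_align_inodes - 1)) {
	xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
	return;
}

/* ...and aligned to the inodes-per-cluster boundary. */
if (irec->ir_startino & (igeo->inodes_per_cluster - 1))
	xchk_btree_set_corrupt(bs->sc, bs->cur, 0);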
442 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
447 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
453 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
458 if (bs->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_iallocbt_rec()
468 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
483 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
496 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
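Lines 442-496 are the per-record callback, where most single-record sanity checks live. A condensed sketch of the checks that set the corrupt flag; the record decoding and the xchk_iallocbt_freecount() helper are assumptions about how the surrounding code is structured:

/* Excerpt (sketch) from xchk_iallocbt_rec(); declarations trimmed. */
xfs_inobt_btrec_to_irec(mp, rec, &irec);

/* Neither count may exceed the 64 inodes in a chunk. */
if (irec.ir_count > XFS_INODES_PER_CHUNK ||
    irec.ir_freecount > XFS_INODES_PER_CHUNK)
	xchk_btree_set_corrupt(bs->sc, bs->cur, 0);

/* The free bitmap must agree with the freecount. */
real_freecount = irec.ir_freecount +
		(XFS_INODES_PER_CHUNK - irec.ir_count);
if (real_freecount != xchk_iallocbt_freecount(irec.ir_free))
	xchk_btree_set_corrupt(bs->sc, bs->cur, 0);

/* The whole chunk must land inside the AG's valid inode space. */
agino = irec.ir_startino;
if (!xfs_verify_agino(mp, agno, agino) ||
    !xfs_verify_agino(mp, agno, agino + XFS_INODES_PER_CHUNK - 1)) {
	xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
	goto out;
}

/* Alignment checks; line 458 above bails out if they flagged corruption. */
xchk_iallocbt_rec_alignment(bs, &irec);
if (bs->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
	goto out;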
513 struct xfs_scrub *sc, in xchk_iallocbt_xref_rmap_btreeblks() argument
521 if (!sc->sa.ino_cur || !sc->sa.rmap_cur || in xchk_iallocbt_xref_rmap_btreeblks()
522 (xfs_sb_version_hasfinobt(&sc->mp->m_sb) && !sc->sa.fino_cur) || in xchk_iallocbt_xref_rmap_btreeblks()
523 xchk_skip_xref(sc->sm)) in xchk_iallocbt_xref_rmap_btreeblks()
527 error = xfs_btree_count_blocks(sc->sa.ino_cur, &inobt_blocks); in xchk_iallocbt_xref_rmap_btreeblks()
528 if (!xchk_process_error(sc, 0, 0, &error)) in xchk_iallocbt_xref_rmap_btreeblks()
531 if (sc->sa.fino_cur) { in xchk_iallocbt_xref_rmap_btreeblks()
532 error = xfs_btree_count_blocks(sc->sa.fino_cur, &finobt_blocks); in xchk_iallocbt_xref_rmap_btreeblks()
533 if (!xchk_process_error(sc, 0, 0, &error)) in xchk_iallocbt_xref_rmap_btreeblks()
537 error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur, in xchk_iallocbt_xref_rmap_btreeblks()
539 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur)) in xchk_iallocbt_xref_rmap_btreeblks()
542 xchk_btree_set_corrupt(sc, sc->sa.ino_cur, 0); in xchk_iallocbt_xref_rmap_btreeblks()
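Lines 513-542 reconstruct cleanly into the helper that compares the block counts of both inode btrees against the blocks the rmapbt attributes to the inobt owner. Only the declarations and the final comparison are inferred here:

STATIC void
xchk_iallocbt_xref_rmap_btreeblks(
	struct xfs_scrub	*sc,
	int			which)
{
	xfs_filblks_t		blocks;
	xfs_extlen_t		inobt_blocks = 0;
	xfs_extlen_t		finobt_blocks = 0;
	int			error;

	if (!sc->sa.ino_cur || !sc->sa.rmap_cur ||
	    (xfs_sb_version_hasfinobt(&sc->mp->m_sb) && !sc->sa.fino_cur) ||
	    xchk_skip_xref(sc->sm))
		return;

	/* Count the blocks in each inode btree... */
	error = xfs_btree_count_blocks(sc->sa.ino_cur, &inobt_blocks);
	if (!xchk_process_error(sc, 0, 0, &error))
		return;

	if (sc->sa.fino_cur) {
		error = xfs_btree_count_blocks(sc->sa.fino_cur, &finobt_blocks);
		if (!xchk_process_error(sc, 0, 0, &error))
			return;
	}

	/* ...and compare against what the rmap says the inobt owner holds. */
	error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur,
			&XFS_RMAP_OINFO_INOBT, &blocks);
	if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
		return;
	if (blocks != inobt_blocks + finobt_blocks)
		xchk_btree_set_corrupt(sc, sc->sa.ino_cur, 0);
}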
551 struct xfs_scrub *sc, in xchk_iallocbt_xref_rmap_inodes() argument
559 if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm)) in xchk_iallocbt_xref_rmap_inodes()
563 error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur, in xchk_iallocbt_xref_rmap_inodes()
565 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur)) in xchk_iallocbt_xref_rmap_inodes()
567 inode_blocks = XFS_B_TO_FSB(sc->mp, inodes * sc->mp->m_sb.sb_inodesize); in xchk_iallocbt_xref_rmap_inodes()
569 xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0); in xchk_iallocbt_xref_rmap_inodes()
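Lines 551-569 are the companion check: count the blocks the rmapbt says are owned by inode clusters and compare against the inode count accumulated by the btree walk, converted to filesystem blocks. Again only the declarations and the final comparison are filled in:

STATIC void
xchk_iallocbt_xref_rmap_inodes(
	struct xfs_scrub	*sc,
	int			which,
	unsigned long long	inodes)
{
	xfs_filblks_t		blocks;
	xfs_filblks_t		inode_blocks;
	int			error;

	if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))
		return;

	/* How many blocks does the rmap attribute to inode clusters? */
	error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur,
			&XFS_RMAP_OINFO_INODES, &blocks);
	if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
		return;
	inode_blocks = XFS_B_TO_FSB(sc->mp, inodes * sc->mp->m_sb.sb_inodesize);
	if (blocks != inode_blocks)
		xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);
}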
575 struct xfs_scrub *sc, in xchk_iallocbt() argument
586 cur = which == XFS_BTNUM_INO ? sc->sa.ino_cur : sc->sa.fino_cur; in xchk_iallocbt()
587 error = xchk_btree(sc, cur, xchk_iallocbt_rec, &XFS_RMAP_OINFO_INOBT, in xchk_iallocbt()
592 xchk_iallocbt_xref_rmap_btreeblks(sc, which); in xchk_iallocbt()
602 xchk_iallocbt_xref_rmap_inodes(sc, which, iabt.inodes); in xchk_iallocbt()
609 struct xfs_scrub *sc) in xchk_inobt() argument
611 return xchk_iallocbt(sc, XFS_BTNUM_INO); in xchk_inobt()
616 struct xfs_scrub *sc) in xchk_finobt() argument
618 return xchk_iallocbt(sc, XFS_BTNUM_FINO); in xchk_finobt()
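Lines 575-618 are the driver and its two entry points. The wrappers are fully determined by the matches; in the driver, the cursor selection, the record callback, and both rmap cross-checks are visible above, while the xchk_iallocbt bookkeeping struct is inferred:

STATIC int
xchk_iallocbt(
	struct xfs_scrub	*sc,
	xfs_btnum_t		which)
{
	struct xfs_btree_cur	*cur;
	struct xchk_iallocbt	iabt = {
		.inodes		= 0,
		.next_startino	= NULLAGINO,
		.next_cluster_ino = NULLAGINO,
	};
	int			error;

	cur = which == XFS_BTNUM_INO ? sc->sa.ino_cur : sc->sa.fino_cur;
	error = xchk_btree(sc, cur, xchk_iallocbt_rec, &XFS_RMAP_OINFO_INOBT,
			&iabt);
	if (error)
		return error;

	xchk_iallocbt_xref_rmap_btreeblks(sc, which);

	/*
	 * Only the inobt maps every inode chunk, so only the inobt walk can
	 * be compared against the rmap's notion of inode-owned blocks; the
	 * finobt covers just the chunks that still have free inodes.
	 */
	if (which == XFS_BTNUM_INO)
		xchk_iallocbt_xref_rmap_inodes(sc, which, iabt.inodes);

	return error;
}

int
xchk_inobt(
	struct xfs_scrub	*sc)
{
	return xchk_iallocbt(sc, XFS_BTNUM_INO);
}

int
xchk_finobt(
	struct xfs_scrub	*sc)
{
	return xchk_iallocbt(sc, XFS_BTNUM_FINO);
}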
624 struct xfs_scrub *sc, in xchk_xref_inode_check() argument
633 if (!(*icur) || xchk_skip_xref(sc->sm)) in xchk_xref_inode_check()
637 if (!xchk_should_check_xref(sc, &error, icur)) in xchk_xref_inode_check()
640 xchk_btree_xref_set_corrupt(sc, *icur, 0); in xchk_xref_inode_check()
646 struct xfs_scrub *sc, in xchk_xref_is_not_inode_chunk() argument
650 xchk_xref_inode_check(sc, agbno, len, &sc->sa.ino_cur, false); in xchk_xref_is_not_inode_chunk()
651 xchk_xref_inode_check(sc, agbno, len, &sc->sa.fino_cur, false); in xchk_xref_is_not_inode_chunk()
657 struct xfs_scrub *sc, in xchk_xref_is_inode_chunk() argument
661 xchk_xref_inode_check(sc, agbno, len, &sc->sa.ino_cur, true); in xchk_xref_is_inode_chunk()
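The last three groups, lines 624-661, are the exported cross-reference helpers other scrubbers call to assert that an AG extent either is or is not covered by inode chunks. The shared worker comes first; its extent lookup is an assumption, the rest follows from the matches. Note that the positive check consults only the inobt, since a chunk appears in the finobt only while it has free inodes:

STATIC void
xchk_xref_inode_check(
	struct xfs_scrub	*sc,
	xfs_agblock_t		agbno,
	xfs_extlen_t		len,
	struct xfs_btree_cur	**icur,
	bool			should_have_inodes)
{
	bool			has_inodes;
	int			error;

	if (!(*icur) || xchk_skip_xref(sc->sm))
		return;

	/* Assumed lookup: do any inode chunks overlap this extent? */
	error = xfs_ialloc_has_inodes_at_extent(*icur, agbno, len,
			&has_inodes);
	if (!xchk_should_check_xref(sc, &error, icur))
		return;
	if (has_inodes != should_have_inodes)
		xchk_btree_xref_set_corrupt(sc, *icur, 0);
}

void
xchk_xref_is_not_inode_chunk(
	struct xfs_scrub	*sc,
	xfs_agblock_t		agbno,
	xfs_extlen_t		len)
{
	xchk_xref_inode_check(sc, agbno, len, &sc->sa.ino_cur, false);
	xchk_xref_inode_check(sc, agbno, len, &sc->sa.fino_cur, false);
}

void
xchk_xref_is_inode_chunk(
	struct xfs_scrub	*sc,
	xfs_agblock_t		agbno,
	xfs_extlen_t		len)
{
	xchk_xref_inode_check(sc, agbno, len, &sc->sa.ino_cur, true);
}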