
Lines matching refs:sc in fs/xfs/scrub/ialloc.c

Each hit below is prefixed with its source line number; the trailing "in function()" names the enclosing function, and "argument" marks hits where sc is declared as a function parameter.

32 struct xfs_scrub *sc, in xchk_setup_ag_iallocbt() argument
35 return xchk_setup_ag_btree(sc, ip, sc->flags & XCHK_TRY_HARDER); in xchk_setup_ag_iallocbt()
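These two hits cover essentially the whole setup function. A minimal reconstruction (the struct xfs_inode *ip parameter name is inferred from the call on line 35, not shown in the hits):

	int
	xchk_setup_ag_iallocbt(
		struct xfs_scrub	*sc,
		struct xfs_inode	*ip)
	{
		/* Lock the AG headers and set up inobt/finobt cursors. */
		return xchk_setup_ag_btree(sc, ip, sc->flags & XCHK_TRY_HARDER);
	}

Passing sc->flags & XCHK_TRY_HARDER lets the generic setup behave more aggressively when the scrub is re-run after an earlier retry request.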
58 struct xfs_scrub *sc, in xchk_iallocbt_chunk_xref_other() argument
66 if (sc->sm->sm_type == XFS_SCRUB_TYPE_FINOBT) in xchk_iallocbt_chunk_xref_other()
67 pcur = &sc->sa.ino_cur; in xchk_iallocbt_chunk_xref_other()
69 pcur = &sc->sa.fino_cur; in xchk_iallocbt_chunk_xref_other()
73 if (!xchk_should_check_xref(sc, &error, pcur)) in xchk_iallocbt_chunk_xref_other()
77 xchk_btree_xref_set_corrupt(sc, *pcur, 0); in xchk_iallocbt_chunk_xref_other()
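Lines 66-77 are the heart of the cross-check against the other inode btree: when scrubbing the finobt, each chunk is checked against the inobt, and vice versa. The else branch and the record lookup contain no "sc" and so don't appear above; a sketch of the full flow (the xfs_ialloc_has_inode_record() lookup and the disagreement test are reconstructed, not shown in the hits):

	if (sc->sm->sm_type == XFS_SCRUB_TYPE_FINOBT)
		pcur = &sc->sa.ino_cur;		/* scrubbing finobt: xref the inobt */
	else
		pcur = &sc->sa.fino_cur;	/* scrubbing inobt: xref the finobt */
	if (!(*pcur))
		return;
	error = xfs_ialloc_has_inode_record(*pcur, agino, agino, &has_irec);
	if (!xchk_should_check_xref(sc, &error, pcur))
		return;
	/* A chunk with free inodes must appear in the finobt; one without must not. */
	if ((irec->ir_freecount > 0) != has_irec)
		xchk_btree_xref_set_corrupt(sc, *pcur, 0);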
83 struct xfs_scrub *sc, in xchk_iallocbt_chunk_xref() argument
89 if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_iallocbt_chunk_xref()
92 xchk_xref_is_used_space(sc, agbno, len); in xchk_iallocbt_chunk_xref()
93 xchk_iallocbt_chunk_xref_other(sc, irec, agino); in xchk_iallocbt_chunk_xref()
94 xchk_xref_is_owned_by(sc, agbno, len, &XFS_RMAP_OINFO_INODES); in xchk_iallocbt_chunk_xref()
95 xchk_xref_is_not_shared(sc, agbno, len); in xchk_iallocbt_chunk_xref()
114 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_chunk()
116 xchk_iallocbt_chunk_xref(bs->sc, irec, agino, bno, len); in xchk_iallocbt_chunk()
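Lines 89-95 show xchk_iallocbt_chunk_xref skipping cross-referencing once the record is already known to be corrupt, then running the battery of cross-checks; lines 114-116 show the per-chunk helper flagging a bad record and then invoking it. Annotated, the battery reads roughly:

	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
		return;					/* already corrupt; skip xrefs */

	xchk_xref_is_used_space(sc, agbno, len);	/* blocks not in the free space btrees */
	xchk_iallocbt_chunk_xref_other(sc, irec, agino);	/* the other inode btree */
	xchk_xref_is_owned_by(sc, agbno, len, &XFS_RMAP_OINFO_INODES);	/* rmapbt owner is "inodes" */
	xchk_xref_is_not_shared(sc, agbno, len);	/* refcountbt: not reflink-shared */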
159 if (xchk_should_terminate(bs->sc, &error)) in xchk_iallocbt_check_cluster_ifree()
172 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster_ifree()
181 if (!(bs->sc->flags & XCHK_TRY_HARDER) && !freemask_ok) in xchk_iallocbt_check_cluster_ifree()
195 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster_ifree()
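xchk_iallocbt_check_cluster_ifree compares the inobt's idea of whether an inode is free against the on-disk inode core. Line 181 is the interesting hit: when the two disagree but the scrubber could not pin the inode down, it returns -EDEADLOCK to ask for a re-run with XCHK_TRY_HARDER set rather than declaring corruption outright. A condensed sketch of that logic (irec_ino, dip, and the freemask computation are assumptions; only the bs->sc expressions appear in the hits):

	/* Does the inobt free bit agree with the on-disk inode? */
	irec_free = irec->ir_free & XFS_INOBT_MASK(irec_ino);
	freemask_ok = !!irec_free == (dip->di_mode == 0);	/* free <=> mode cleared */
	if (!(bs->sc->flags & XCHK_TRY_HARDER) && !freemask_ok)
		return -EDEADLOCK;	/* re-run the scrub with heavier locking */
	if (!freemask_ok)
		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);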
253 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster()
265 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster()
271 xchk_xref_is_not_owned_by(bs->sc, agbno, in xchk_iallocbt_check_cluster()
277 xchk_xref_is_owned_by(bs->sc, agbno, M_IGEO(mp)->blocks_per_cluster, in xchk_iallocbt_check_cluster()
282 if (!xchk_btree_xref_process_error(bs->sc, bs->cur, 0, &error)) in xchk_iallocbt_check_cluster()
290 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster()
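xchk_iallocbt_check_cluster maps one inode cluster and cross-references its disk blocks with the rmapbt: cluster blocks punched out of a sparse chunk must not be owned by inodes, while present clusters must be. Lines 271 and 277 are the two sides of that test; the holemask condition around them is reconstructed (cluster_mask is an assumption based on the sparse-inode layout):

	if (irec->ir_holemask & cluster_mask) {
		/*
		 * This cluster is a hole in a sparse chunk: the rmapbt
		 * must not claim these blocks are owned by inodes.
		 */
		xchk_xref_is_not_owned_by(bs->sc, agbno,
				M_IGEO(mp)->blocks_per_cluster,
				&XFS_RMAP_OINFO_INODES);
		return 0;
	}

	/* Cluster is present: the rmapbt must record it as inode blocks. */
	xchk_xref_is_owned_by(bs->sc, agbno, M_IGEO(mp)->blocks_per_cluster,
			&XFS_RMAP_OINFO_INODES);

Line 282 then uses xchk_btree_xref_process_error() to decide whether a failed cluster buffer read should abort the scrub or merely be recorded.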
328 cluster_base += M_IGEO(bs->sc->mp)->inodes_per_cluster) { in xchk_iallocbt_check_clusters()
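Line 328 is the loop increment of xchk_iallocbt_check_clusters, which walks a 64-inode chunk one cluster at a time; the rest of the loop has no "sc" hit. Reconstructed:

	/* Check each inode cluster backing this inobt record. */
	for (cluster_base = 0;
	     cluster_base < XFS_INODES_PER_CHUNK;
	     cluster_base += M_IGEO(bs->sc->mp)->inodes_per_cluster) {
		error = xchk_iallocbt_check_cluster(bs, irec, cluster_base);
		if (error)
			break;
	}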
348 struct xfs_mount *mp = bs->sc->mp; in xchk_iallocbt_rec_alignment()
370 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec_alignment()
381 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec_alignment()
397 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec_alignment()
402 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec_alignment()
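xchk_iallocbt_rec_alignment flags records whose ir_startino violates the inode alignment rules; the four set_corrupt hits above correspond to the different alignment cases. The finobt case, sketched (the imask construction is an assumption; finobt records need only cluster alignment, not full chunk alignment):

	if (bs->cur->bc_btnum == XFS_BTNUM_FINO) {
		unsigned int	imask;

		/* finobt startino needs cluster alignment, not chunk alignment */
		imask = min_t(unsigned int, XFS_INODES_PER_CHUNK,
				M_IGEO(mp)->cluster_align_inodes) - 1;
		if (irec->ir_startino & imask)
			xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
		return;
	}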
441 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
446 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
452 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
457 if (bs->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_iallocbt_rec()
467 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
482 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
495 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
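xchk_iallocbt_rec is the per-record callback handed to xchk_btree() at line 586 below: it validates the record's counts and free mask, checks alignment, then cross-references the chunk and walks its clusters. One representative check, the free count versus the free bitmap, might look like this (real_freecount and the hweight64() popcount are assumptions; only the set_corrupt calls appear in the hits):

	/*
	 * The free count must match the free bitmap; sparse chunks count
	 * their unallocated slots (XFS_INODES_PER_CHUNK - ir_count) as free.
	 */
	real_freecount = irec.ir_freecount +
			(XFS_INODES_PER_CHUNK - irec.ir_count);
	if (real_freecount != hweight64(irec.ir_free))
		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);

Line 457 shows the early exit: once the record itself is marked corrupt, the more expensive cluster checks are skipped.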
512 struct xfs_scrub *sc, in xchk_iallocbt_xref_rmap_btreeblks() argument
520 if (!sc->sa.ino_cur || !sc->sa.rmap_cur || in xchk_iallocbt_xref_rmap_btreeblks()
521 (xfs_sb_version_hasfinobt(&sc->mp->m_sb) && !sc->sa.fino_cur) || in xchk_iallocbt_xref_rmap_btreeblks()
522 xchk_skip_xref(sc->sm)) in xchk_iallocbt_xref_rmap_btreeblks()
526 error = xfs_btree_count_blocks(sc->sa.ino_cur, &inobt_blocks); in xchk_iallocbt_xref_rmap_btreeblks()
527 if (!xchk_process_error(sc, 0, 0, &error)) in xchk_iallocbt_xref_rmap_btreeblks()
530 if (sc->sa.fino_cur) { in xchk_iallocbt_xref_rmap_btreeblks()
531 error = xfs_btree_count_blocks(sc->sa.fino_cur, &finobt_blocks); in xchk_iallocbt_xref_rmap_btreeblks()
532 if (!xchk_process_error(sc, 0, 0, &error)) in xchk_iallocbt_xref_rmap_btreeblks()
536 error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur, in xchk_iallocbt_xref_rmap_btreeblks()
538 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur)) in xchk_iallocbt_xref_rmap_btreeblks()
541 xchk_btree_set_corrupt(sc, sc->sa.ino_cur, 0); in xchk_iallocbt_xref_rmap_btreeblks()
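xchk_iallocbt_xref_rmap_btreeblks totals the blocks in the inobt (line 526) and, if present, the finobt (line 531), then asks the rmapbt how many blocks it attributes to inode btrees (lines 536-538; the owner argument, presumably &XFS_RMAP_OINFO_INOBT, is elided from the hits). The comparison feeding the set_corrupt at line 541 presumably reads:

	/* The rmapbt's total for inode-btree blocks must match the btrees. */
	if (blocks != inobt_blocks + finobt_blocks)
		xchk_btree_set_corrupt(sc, sc->sa.ino_cur, 0);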
550 struct xfs_scrub *sc, in xchk_iallocbt_xref_rmap_inodes() argument
558 if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm)) in xchk_iallocbt_xref_rmap_inodes()
562 error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur, in xchk_iallocbt_xref_rmap_inodes()
564 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur)) in xchk_iallocbt_xref_rmap_inodes()
566 inode_blocks = XFS_B_TO_FSB(sc->mp, inodes * sc->mp->m_sb.sb_inodesize); in xchk_iallocbt_xref_rmap_inodes()
568 xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0); in xchk_iallocbt_xref_rmap_inodes()
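xchk_iallocbt_xref_rmap_inodes does the same for the inodes themselves: the running inode count gathered by the record callback is converted to filesystem blocks at line 566 and compared with the rmapbt's count of blocks owned by inodes. The test between lines 566 and 568 presumably reads (blocks being the rmapbt tally, an assumption):

	inode_blocks = XFS_B_TO_FSB(sc->mp, inodes * sc->mp->m_sb.sb_inodesize);
	if (blocks != inode_blocks)
		xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);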
574 struct xfs_scrub *sc, in xchk_iallocbt() argument
585 cur = which == XFS_BTNUM_INO ? sc->sa.ino_cur : sc->sa.fino_cur; in xchk_iallocbt()
586 error = xchk_btree(sc, cur, xchk_iallocbt_rec, &XFS_RMAP_OINFO_INOBT, in xchk_iallocbt()
591 xchk_iallocbt_xref_rmap_btreeblks(sc, which); in xchk_iallocbt()
601 xchk_iallocbt_xref_rmap_inodes(sc, which, iabt.inodes); in xchk_iallocbt()
608 struct xfs_scrub *sc) in xchk_inobt() argument
610 return xchk_iallocbt(sc, XFS_BTNUM_INO); in xchk_inobt()
615 struct xfs_scrub *sc) in xchk_finobt() argument
617 return xchk_iallocbt(sc, XFS_BTNUM_FINO); in xchk_finobt()
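xchk_iallocbt picks the cursor for whichever btree is being scrubbed (line 585), runs the generic btree walker with xchk_iallocbt_rec, and finishes with the rmap cross-checks (lines 591 and 601). The two exported entry points are one-liners and can be reconstructed in full from the hits:

	/* Scrub the inode btree. */
	int
	xchk_inobt(
		struct xfs_scrub	*sc)
	{
		return xchk_iallocbt(sc, XFS_BTNUM_INO);
	}

	/* Scrub the free inode btree. */
	int
	xchk_finobt(
		struct xfs_scrub	*sc)
	{
		return xchk_iallocbt(sc, XFS_BTNUM_FINO);
	}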
623 struct xfs_scrub *sc, in xchk_xref_inode_check() argument
632 if (!(*icur) || xchk_skip_xref(sc->sm)) in xchk_xref_inode_check()
636 if (!xchk_should_check_xref(sc, &error, icur)) in xchk_xref_inode_check()
639 xchk_btree_xref_set_corrupt(sc, *icur, 0); in xchk_xref_inode_check()
645 struct xfs_scrub *sc, in xchk_xref_is_not_inode_chunk() argument
649 xchk_xref_inode_check(sc, agbno, len, &sc->sa.ino_cur, false); in xchk_xref_is_not_inode_chunk()
650 xchk_xref_inode_check(sc, agbno, len, &sc->sa.fino_cur, false); in xchk_xref_is_not_inode_chunk()
656 struct xfs_scrub *sc, in xchk_xref_is_inode_chunk() argument
660 xchk_xref_inode_check(sc, agbno, len, &sc->sa.ino_cur, true); in xchk_xref_is_inode_chunk()
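Finally, xchk_xref_inode_check is the helper other scrubbers use to ask "does this extent overlap inode chunks?"; the wrappers at lines 649-650 and 660 run it against the inobt and finobt cursors with the expected answer in the last argument. A sketch of the helper (the xfs_ialloc_has_inodes_at_extent() lookup and the parameter names outside the hits are assumptions):

	static void
	xchk_xref_inode_check(
		struct xfs_scrub	*sc,
		xfs_agblock_t		agbno,
		xfs_extlen_t		len,
		struct xfs_btree_cur	**icur,
		bool			should_have_inodes)
	{
		bool			has_inodes;
		int			error;

		if (!(*icur) || xchk_skip_xref(sc->sm))
			return;

		/* Ask the inode btree whether any chunk covers this extent. */
		error = xfs_ialloc_has_inodes_at_extent(*icur, agbno, len,
				&has_inodes);
		if (!xchk_should_check_xref(sc, &error, icur))
			return;
		if (has_inodes != should_have_inodes)
			xchk_btree_xref_set_corrupt(sc, *icur, 0);
	}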