Lines Matching refs:sc

25	struct xfs_scrub	*sc) in xchk_setup_ag_refcountbt() argument
27 return xchk_setup_ag_btree(sc, false); in xchk_setup_ag_refcountbt()
74 struct xfs_scrub *sc; member
105 if (xchk_should_terminate(refchk->sc, &error)) in xchk_refcountbt_rmap_check()
113 xchk_btree_xref_set_corrupt(refchk->sc, cur, 0); in xchk_refcountbt_rmap_check()
271 struct xfs_scrub *sc, in xchk_refcountbt_xref_rmap() argument
275 .sc = sc, in xchk_refcountbt_xref_rmap()
287 if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm)) in xchk_refcountbt_xref_rmap()
297 error = xfs_rmap_query_range(sc->sa.rmap_cur, &low, &high, in xchk_refcountbt_xref_rmap()
299 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur)) in xchk_refcountbt_xref_rmap()
304 xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0); in xchk_refcountbt_xref_rmap()
316 struct xfs_scrub *sc, in xchk_refcountbt_xref() argument
319 if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_refcountbt_xref()
322 xchk_xref_is_used_space(sc, irec->rc_startblock, irec->rc_blockcount); in xchk_refcountbt_xref()
323 xchk_xref_is_not_inode_chunk(sc, irec->rc_startblock, in xchk_refcountbt_xref()
325 xchk_refcountbt_xref_rmap(sc, irec); in xchk_refcountbt_xref()
342 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_refcountbt_rec()
349 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_refcountbt_rec()
352 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_refcountbt_rec()
354 xchk_refcountbt_xref(bs->sc, &irec); in xchk_refcountbt_rec()
362 struct xfs_scrub *sc, in xchk_refcount_xref_rmap() argument
369 if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm)) in xchk_refcount_xref_rmap()
373 error = xfs_btree_count_blocks(sc->sa.refc_cur, &refcbt_blocks); in xchk_refcount_xref_rmap()
374 if (!xchk_btree_process_error(sc, sc->sa.refc_cur, 0, &error)) in xchk_refcount_xref_rmap()
376 error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur, in xchk_refcount_xref_rmap()
378 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur)) in xchk_refcount_xref_rmap()
381 xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0); in xchk_refcount_xref_rmap()
384 error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur, in xchk_refcount_xref_rmap()
386 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur)) in xchk_refcount_xref_rmap()
389 xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0); in xchk_refcount_xref_rmap()
395 struct xfs_scrub *sc) in xchk_refcountbt() argument
400 error = xchk_btree(sc, sc->sa.refc_cur, xchk_refcountbt_rec, in xchk_refcountbt()
405 xchk_refcount_xref_rmap(sc, cow_blocks); in xchk_refcountbt()
413 struct xfs_scrub *sc, in xchk_xref_is_cow_staging() argument
421 if (!sc->sa.refc_cur || xchk_skip_xref(sc->sm)) in xchk_xref_is_cow_staging()
425 error = xfs_refcount_lookup_le(sc->sa.refc_cur, XFS_REFC_DOMAIN_COW, in xchk_xref_is_cow_staging()
427 if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur)) in xchk_xref_is_cow_staging()
430 xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0); in xchk_xref_is_cow_staging()
434 error = xfs_refcount_get_rec(sc->sa.refc_cur, &rc, &has_refcount); in xchk_xref_is_cow_staging()
435 if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur)) in xchk_xref_is_cow_staging()
438 xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0); in xchk_xref_is_cow_staging()
444 xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0); in xchk_xref_is_cow_staging()
448 xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0); in xchk_xref_is_cow_staging()
457 struct xfs_scrub *sc, in xchk_xref_is_not_shared() argument
464 if (!sc->sa.refc_cur || xchk_skip_xref(sc->sm)) in xchk_xref_is_not_shared()
467 error = xfs_refcount_has_record(sc->sa.refc_cur, XFS_REFC_DOMAIN_SHARED, in xchk_xref_is_not_shared()
469 if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur)) in xchk_xref_is_not_shared()
472 xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0); in xchk_xref_is_not_shared()
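
Nearly every hit above follows the same cross-reference shape: return early when the relevant AG cursor is not set up or cross-referencing is disabled via xchk_skip_xref(sc->sm), run a query against the cursor, let xchk_should_check_xref() decide whether an error aborts the check, and call xchk_btree_xref_set_corrupt() when the result disagrees with the record being scrubbed. The sketch below reconstructs that shape from the fragments listed for xchk_xref_is_not_shared(); it is an illustration only, and the function name, argument names, and exact signature details are assumptions, not a copy of the file.

/*
 * Illustrative sketch (hypothetical name, not the file's code): the
 * cross-reference pattern visible in the xchk_xref_is_not_shared() hits.
 */
static void
xchk_xref_sketch_not_shared(
	struct xfs_scrub	*sc,
	xfs_agblock_t		agbno,
	xfs_extlen_t		len)
{
	bool			shared;
	int			error;

	/* No refcount cursor, or cross-referencing was disabled. */
	if (!sc->sa.refc_cur || xchk_skip_xref(sc->sm))
		return;

	/* Does any shared-domain refcount record overlap this extent? */
	error = xfs_refcount_has_record(sc->sa.refc_cur, XFS_REFC_DOMAIN_SHARED,
			agbno, len, &shared);
	if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur))
		return;

	/* The caller expected this extent to be unshared. */
	if (shared)
		xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0);
}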