Lines matching the identifier "sc" (full-word matches) in the XFS rmap btree scrub code (fs/xfs/scrub/rmap.c):
35 struct xfs_scrub *sc) in xchk_setup_ag_rmapbt() argument
37 if (xchk_need_intent_drain(sc)) in xchk_setup_ag_rmapbt()
38 xchk_fsgates_enable(sc, XCHK_FSGATES_DRAIN); in xchk_setup_ag_rmapbt()
40 if (xchk_could_repair(sc)) { in xchk_setup_ag_rmapbt()
43 error = xrep_setup_ag_rmapbt(sc); in xchk_setup_ag_rmapbt()
48 return xchk_setup_ag_btree(sc, false); in xchk_setup_ag_rmapbt()
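
Taken together, the setup fragments above suggest the following shape for xchk_setup_ag_rmapbt(). This is a sketch reassembled from the quoted lines; the error check after xrep_setup_ag_rmapbt() fills an elided gap and is an assumption, not a quote.

int
xchk_setup_ag_rmapbt(
        struct xfs_scrub        *sc)
{
        /* If the scan must coordinate with deferred ops, hook the intent drain. */
        if (xchk_need_intent_drain(sc))
                xchk_fsgates_enable(sc, XCHK_FSGATES_DRAIN);

        /* If a repair might follow, do its extra setup first. */
        if (xchk_could_repair(sc)) {
                int     error;

                error = xrep_setup_ag_rmapbt(sc);
                if (error)      /* assumed from the elided lines */
                        return error;
        }

        /* Finish with the generic per-AG btree scrub setup. */
        return xchk_setup_ag_btree(sc, false);
}
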
81 struct xfs_scrub *sc, in xchk_rmapbt_xref_refc() argument
92 if (!sc->sa.refc_cur || xchk_skip_xref(sc->sm)) in xchk_rmapbt_xref_refc()
101 error = xfs_refcount_find_shared(sc->sa.refc_cur, irec->rm_startblock, in xchk_rmapbt_xref_refc()
103 if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur)) in xchk_rmapbt_xref_refc()
106 xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0); in xchk_rmapbt_xref_refc()
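
The xchk_rmapbt_xref_refc() fragments show the standard cross-reference guard followed by a refcount btree query. A sketch of that flow; the final corruption test is condensed here in terms of xchk_rmapbt_is_shareable() (the kernel may spell out individual flag tests), and the lines between the quoted ones are assumptions:

STATIC void
xchk_rmapbt_xref_refc(
        struct xfs_scrub        *sc,
        struct xfs_rmap_irec    *irec)
{
        xfs_agblock_t           fbno;
        xfs_extlen_t            flen;
        int                     error;

        /* No refcount cursor, or cross-referencing disabled: nothing to do. */
        if (!sc->sa.refc_cur || xchk_skip_xref(sc->sm))
                return;

        /* Ask the refcount btree whether any part of this extent is shared. */
        error = xfs_refcount_find_shared(sc->sa.refc_cur, irec->rm_startblock,
                        irec->rm_blockcount, &fbno, &flen, false);
        if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur))
                return;

        /*
         * Only ordinary written data-fork mappings may be shared, so a
         * nonzero shared length for anything else means the rmap and
         * refcount btrees disagree.  (Condensed condition; assumption.)
         */
        if (flen != 0 && !xchk_rmapbt_is_shareable(sc, irec))
                xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0);
}
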
112 struct xfs_scrub *sc, in xchk_rmapbt_xref() argument
118 if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_rmapbt_xref()
121 xchk_xref_is_used_space(sc, agbno, len); in xchk_rmapbt_xref()
123 xchk_xref_is_inode_chunk(sc, agbno, len); in xchk_rmapbt_xref()
125 xchk_xref_is_not_inode_chunk(sc, agbno, len); in xchk_rmapbt_xref()
127 xchk_xref_is_cow_staging(sc, irec->rm_startblock, in xchk_rmapbt_xref()
130 xchk_rmapbt_xref_refc(sc, irec); in xchk_rmapbt_xref()
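
The xchk_rmapbt_xref() fragments describe a dispatcher: bail out once corruption has already been recorded, then fan out to per-owner cross-reference checks. The owner comparisons below fill the elided conditionals and are assumptions:

STATIC void
xchk_rmapbt_xref(
        struct xfs_scrub        *sc,
        struct xfs_rmap_irec    *irec)
{
        xfs_agblock_t           agbno = irec->rm_startblock;
        xfs_extlen_t            len = irec->rm_blockcount;

        /* Don't pile cross-reference errors onto known corruption. */
        if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
                return;

        /* Every mapped extent must be marked used in the free space btrees. */
        xchk_xref_is_used_space(sc, agbno, len);

        /* Inode-chunk ownership must agree with the inode btrees (assumed test). */
        if (irec->rm_owner == XFS_RMAP_OWN_INODES)
                xchk_xref_is_inode_chunk(sc, agbno, len);
        else
                xchk_xref_is_not_inode_chunk(sc, agbno, len);

        /* CoW staging extents are checked against the refcount btree differently. */
        if (irec->rm_owner == XFS_RMAP_OWN_COW)
                xchk_xref_is_cow_staging(sc, irec->rm_startblock,
                                irec->rm_blockcount);
        else
                xchk_rmapbt_xref_refc(sc, irec);
}
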
149 struct xfs_scrub *sc = bs->sc; in xchk_rmapbt_check_unwritten_in_keyflags() local
156 if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_PREEN) in xchk_rmapbt_check_unwritten_in_keyflags()
172 xchk_btree_set_preen(sc, cur, level); in xchk_rmapbt_check_unwritten_in_keyflags()
178 xchk_btree_set_preen(sc, cur, level); in xchk_rmapbt_check_unwritten_in_keyflags()
187 struct xfs_scrub *sc, in xchk_rmapbt_is_shareable() argument
190 if (!xfs_has_reflink(sc->mp)) in xchk_rmapbt_is_shareable()
209 if (bs->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_rmapbt_check_overlapping()
222 if (!xchk_rmapbt_is_shareable(bs->sc, &cr->overlap_rec) || in xchk_rmapbt_check_overlapping()
223 !xchk_rmapbt_is_shareable(bs->sc, irec)) in xchk_rmapbt_check_overlapping()
224 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_rmapbt_check_overlapping()
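
The overlap fragments encode one rule: two rmap records may overlap only if both describe shareable (reflinked, written, data-fork) extents, which is what xchk_rmapbt_is_shareable() tests, and which trivially fails on filesystems without reflink. A sketch of the check; the context type name (struct xchk_rmap), the overlap test, and the bookkeeping of the furthest-reaching record are assumptions built around the quoted lines:

STATIC void
xchk_rmapbt_check_overlapping(
        struct xchk_btree               *bs,
        struct xchk_rmap                *cr,
        const struct xfs_rmap_irec      *irec)
{
        if (bs->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
                return;

        /*
         * Records arrive in ascending rm_startblock order, so the only
         * record that can overlap irec is the furthest-reaching one seen
         * so far, saved in cr->overlap_rec.
         */
        if (cr->overlap_rec.rm_startblock + cr->overlap_rec.rm_blockcount >
            irec->rm_startblock) {
                /* Overlap is legal only between two shareable data extents. */
                if (!xchk_rmapbt_is_shareable(bs->sc, &cr->overlap_rec) ||
                    !xchk_rmapbt_is_shareable(bs->sc, irec))
                        xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
        }

        /* Remember whichever record reaches furthest into the AG. */
        if (irec->rm_startblock + irec->rm_blockcount >
            cr->overlap_rec.rm_startblock + cr->overlap_rec.rm_blockcount)
                memcpy(&cr->overlap_rec, irec, sizeof(*irec));
}
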
271 if (bs->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_rmapbt_check_mergeable()
275 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_rmapbt_check_mergeable()
287 struct xfs_scrub *sc = bs->sc; in xchk_rmapbt_mark_bitmap() local
296 if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_rmapbt_mark_bitmap()
335 xchk_btree_xref_set_corrupt(bs->sc, in xchk_rmapbt_mark_bitmap()
336 bs->sc->sa.rmap_cur, 0); in xchk_rmapbt_mark_bitmap()
344 xchk_btree_xref_set_corrupt(bs->sc, bs->sc->sa.rmap_cur, 0); in xchk_rmapbt_mark_bitmap()
362 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_rmapbt_rec()
369 xchk_rmapbt_xref(bs->sc, &irec); in xchk_rmapbt_rec()
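
The xchk_rmapbt_rec() fragments belong to the per-record callback, which strings the individual checks together. A sketch of the probable control flow; the record-decoding test, the return type of xchk_rmapbt_mark_bitmap(), and the exact ordering of the helpers are assumptions:

STATIC int
xchk_rmapbt_rec(
        struct xchk_btree       *bs,
        const union xfs_btree_rec *rec)
{
        struct xfs_rmap_irec    irec;
        struct xchk_rmap        *cr = bs->private;
        int                     error;

        /* A record that fails to decode sanely is btree corruption. */
        if (xfs_rmap_btrec_to_irec(rec, &irec) != NULL) {
                xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
                return 0;
        }

        xchk_rmapbt_check_unwritten_in_keyflags(bs);
        xchk_rmapbt_check_mergeable(bs, cr, &irec);
        xchk_rmapbt_check_overlapping(bs, cr, &irec);

        /* Cross this record's blocks off the per-owner bitmaps. */
        error = xchk_rmapbt_mark_bitmap(bs, cr, &irec);
        if (error)
                return error;

        /* Finally, cross-reference the record against the other btrees. */
        xchk_rmapbt_xref(bs->sc, &irec);
        return 0;
}
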
397 struct xfs_scrub *sc, in xchk_rmapbt_walk_ag_metadata() argument
400 struct xfs_mount *mp = sc->mp; in xchk_rmapbt_walk_ag_metadata()
402 struct xfs_agf *agf = sc->sa.agf_bp->b_addr; in xchk_rmapbt_walk_ag_metadata()
413 if (xfs_ag_contains_log(mp, sc->sa.pag->pag_agno)) { in xchk_rmapbt_walk_ag_metadata()
422 cur = sc->sa.bno_cur; in xchk_rmapbt_walk_ag_metadata()
424 cur = xfs_bnobt_init_cursor(sc->mp, sc->tp, sc->sa.agf_bp, in xchk_rmapbt_walk_ag_metadata()
425 sc->sa.pag); in xchk_rmapbt_walk_ag_metadata()
427 if (cur != sc->sa.bno_cur) in xchk_rmapbt_walk_ag_metadata()
432 cur = sc->sa.cnt_cur; in xchk_rmapbt_walk_ag_metadata()
434 cur = xfs_cntbt_init_cursor(sc->mp, sc->tp, sc->sa.agf_bp, in xchk_rmapbt_walk_ag_metadata()
435 sc->sa.pag); in xchk_rmapbt_walk_ag_metadata()
437 if (cur != sc->sa.cnt_cur) in xchk_rmapbt_walk_ag_metadata()
442 error = xagb_bitmap_set_btblocks(&cr->ag_owned, sc->sa.rmap_cur); in xchk_rmapbt_walk_ag_metadata()
446 error = xfs_alloc_read_agfl(sc->sa.pag, sc->tp, &agfl_bp); in xchk_rmapbt_walk_ag_metadata()
450 error = xfs_agfl_walk(sc->mp, agf, agfl_bp, xchk_rmapbt_walk_agfl, in xchk_rmapbt_walk_ag_metadata()
452 xfs_trans_brelse(sc->tp, agfl_bp); in xchk_rmapbt_walk_ag_metadata()
457 cur = sc->sa.ino_cur; in xchk_rmapbt_walk_ag_metadata()
459 cur = xfs_inobt_init_cursor(sc->sa.pag, sc->tp, sc->sa.agi_bp); in xchk_rmapbt_walk_ag_metadata()
461 if (cur != sc->sa.ino_cur) in xchk_rmapbt_walk_ag_metadata()
466 if (xfs_has_finobt(sc->mp)) { in xchk_rmapbt_walk_ag_metadata()
467 cur = sc->sa.fino_cur; in xchk_rmapbt_walk_ag_metadata()
469 cur = xfs_finobt_init_cursor(sc->sa.pag, sc->tp, in xchk_rmapbt_walk_ag_metadata()
470 sc->sa.agi_bp); in xchk_rmapbt_walk_ag_metadata()
472 if (cur != sc->sa.fino_cur) in xchk_rmapbt_walk_ag_metadata()
479 if (xfs_has_reflink(sc->mp)) { in xchk_rmapbt_walk_ag_metadata()
480 cur = sc->sa.refc_cur; in xchk_rmapbt_walk_ag_metadata()
482 cur = xfs_refcountbt_init_cursor(sc->mp, sc->tp, in xchk_rmapbt_walk_ag_metadata()
483 sc->sa.agf_bp, sc->sa.pag); in xchk_rmapbt_walk_ag_metadata()
485 if (cur != sc->sa.refc_cur) in xchk_rmapbt_walk_ag_metadata()
497 xchk_btree_xref_process_error(sc, sc->sa.rmap_cur, in xchk_rmapbt_walk_ag_metadata()
498 sc->sa.rmap_cur->bc_nlevels - 1, &error); in xchk_rmapbt_walk_ag_metadata()
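
Most of xchk_rmapbt_walk_ag_metadata() is the same three-step pattern repeated per AG btree: borrow the scrub context's cursor if one is already open, otherwise build a private one; record every block of that btree in the AG-owned bitmap; then free the cursor only if it was built here. One instance of the pattern, assembled from the bnobt fragments (the error handling between the quoted lines is assumed); the closing fragments show any failure being funnelled through xchk_btree_xref_process_error() at the rmap cursor's top level.

        struct xfs_btree_cur    *cur;
        int                     error;

        /* Record all blocks of the by-block free space btree. */
        if (sc->sa.bno_cur)
                cur = sc->sa.bno_cur;           /* borrow the scrub cursor */
        else
                cur = xfs_bnobt_init_cursor(sc->mp, sc->tp, sc->sa.agf_bp,
                                sc->sa.pag);    /* ...or build a private one */
        error = xagb_bitmap_set_btblocks(&cr->ag_owned, cur);
        if (cur != sc->sa.bno_cur)
                xfs_btree_del_cursor(cur, error);
        if (error)                              /* assumed; label elided */
                goto out;
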
510 struct xfs_scrub *sc, in xchk_rmapbt_check_bitmaps() argument
513 struct xfs_btree_cur *cur = sc->sa.rmap_cur; in xchk_rmapbt_check_bitmaps()
516 if (sc->sm->sm_flags & (XFS_SCRUB_OFLAG_CORRUPT | in xchk_rmapbt_check_bitmaps()
528 xchk_btree_xref_set_corrupt(sc, cur, level); in xchk_rmapbt_check_bitmaps()
531 xchk_btree_xref_set_corrupt(sc, cur, level); in xchk_rmapbt_check_bitmaps()
534 xchk_btree_xref_set_corrupt(sc, cur, level); in xchk_rmapbt_check_bitmaps()
537 xchk_btree_xref_set_corrupt(sc, cur, level); in xchk_rmapbt_check_bitmaps()
540 xchk_btree_xref_set_corrupt(sc, cur, level); in xchk_rmapbt_check_bitmaps()
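
After the btree walk, every block claimed by the AG metadata walk should have been matched by an rmap record, so xchk_rmapbt_check_bitmaps() only has to ask whether anything is left over. A sketch; the second skip flag, the bitmap field names other than ag_owned, and the xagb_bitmap_hweight() helper are assumptions:

STATIC void
xchk_rmapbt_check_bitmaps(
        struct xfs_scrub        *sc,
        struct xchk_rmap        *cr)
{
        struct xfs_btree_cur    *cur = sc->sa.rmap_cur;
        unsigned int            level;

        /* Skip if corruption (direct or cross-referenced) was already found. */
        if (sc->sm->sm_flags & (XFS_SCRUB_OFLAG_CORRUPT |
                                XFS_SCRUB_OFLAG_XCORRUPT))
                return;
        if (!cur)
                return;
        level = cur->bc_nlevels - 1;

        /* Any bit still set is a metadata block with no rmap record covering it. */
        if (xagb_bitmap_hweight(&cr->fs_owned) != 0)
                xchk_btree_xref_set_corrupt(sc, cur, level);
        if (xagb_bitmap_hweight(&cr->log_owned) != 0)
                xchk_btree_xref_set_corrupt(sc, cur, level);
        if (xagb_bitmap_hweight(&cr->ag_owned) != 0)
                xchk_btree_xref_set_corrupt(sc, cur, level);
        if (xagb_bitmap_hweight(&cr->inobt_owned) != 0)
                xchk_btree_xref_set_corrupt(sc, cur, level);
        if (xagb_bitmap_hweight(&cr->refcbt_owned) != 0)
                xchk_btree_xref_set_corrupt(sc, cur, level);
}
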
546 struct xfs_scrub *sc) in xchk_rmapbt() argument
561 error = xchk_rmapbt_walk_ag_metadata(sc, cr); in xchk_rmapbt()
565 error = xchk_btree(sc, sc->sa.rmap_cur, xchk_rmapbt_rec, in xchk_rmapbt()
570 xchk_rmapbt_check_bitmaps(sc, cr); in xchk_rmapbt()
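
The xchk_rmapbt() entry point ties everything together: build the ownership bitmaps, walk the rmap btree with xchk_rmapbt_rec() as the per-record callback, then look for leftovers. A sketch of the flow; the allocation flags, the owner-info argument to xchk_btree(), and the bitmap setup/teardown are assumptions filling the elided lines:

int
xchk_rmapbt(
        struct xfs_scrub        *sc)
{
        struct xchk_rmap        *cr;
        int                     error;

        cr = kzalloc(sizeof(struct xchk_rmap), XCHK_GFP_FLAGS);
        if (!cr)
                return -ENOMEM;

        /* (per-owner bitmap initialization elided) */

        /* 1. Record every block owned by AG metadata in the bitmaps. */
        error = xchk_rmapbt_walk_ag_metadata(sc, cr);
        if (error)
                goto out;

        /* 2. Check each rmap record, crossing its blocks off the bitmaps. */
        error = xchk_btree(sc, sc->sa.rmap_cur, xchk_rmapbt_rec,
                        &XFS_RMAP_OINFO_AG, cr);
        if (error)
                goto out;

        /* 3. Whatever is still set in the bitmaps has no rmap record. */
        xchk_rmapbt_check_bitmaps(sc, cr);

out:
        /* (per-owner bitmap teardown elided) */
        kfree(cr);
        return error;
}
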
585 struct xfs_scrub *sc, in xchk_xref_is_only_owned_by() argument
593 if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm)) in xchk_xref_is_only_owned_by()
596 error = xfs_rmap_count_owners(sc->sa.rmap_cur, bno, len, oinfo, &res); in xchk_xref_is_only_owned_by()
597 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur)) in xchk_xref_is_only_owned_by()
600 xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0); in xchk_xref_is_only_owned_by()
602 xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0); in xchk_xref_is_only_owned_by()
604 xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0); in xchk_xref_is_only_owned_by()
610 struct xfs_scrub *sc, in xchk_xref_is_not_owned_by() argument
618 if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm)) in xchk_xref_is_not_owned_by()
621 error = xfs_rmap_count_owners(sc->sa.rmap_cur, bno, len, oinfo, &res); in xchk_xref_is_not_owned_by()
622 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur)) in xchk_xref_is_not_owned_by()
625 xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0); in xchk_xref_is_not_owned_by()
627 xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0); in xchk_xref_is_not_owned_by()
633 struct xfs_scrub *sc, in xchk_xref_has_no_owner() argument
640 if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm)) in xchk_xref_has_no_owner()
643 error = xfs_rmap_has_records(sc->sa.rmap_cur, bno, len, &outcome); in xchk_xref_has_no_owner()
644 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur)) in xchk_xref_has_no_owner()
647 xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0); in xchk_xref_has_no_owner()
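
The last three helpers (xchk_xref_is_only_owned_by(), xchk_xref_is_not_owned_by(), xchk_xref_has_no_owner()) share one skeleton: return early if there is no rmap cursor or cross-referencing is being skipped, query the rmap btree, and flag cross-reference corruption whenever the answer contradicts what the caller asserted. The owner-count variants do this with xfs_rmap_count_owners(); the simplest instance is the has-no-owner case, sketched below with the final comparison as an assumption:

void
xchk_xref_has_no_owner(
        struct xfs_scrub        *sc,
        xfs_agblock_t           bno,
        xfs_extlen_t            len)
{
        enum xbtree_recpacking  outcome;
        int                     error;

        if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))
                return;

        error = xfs_rmap_has_records(sc->sa.rmap_cur, bno, len, &outcome);
        if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
                return;

        /* Any record at all in this range contradicts "has no owner". */
        if (outcome != XBTREE_RECPACKING_EMPTY)
                xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);
}
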