1 // SPDX-License-Identifier: CDDL-1.0
10 * or https://opensource.org/licenses/CDDL-1.0.
193 return (livelist_compare(&l->svbr_blk, &r->svbr_blk)); in sublivelist_block_refcnt_compare()
207 * This field is not used for our B-Tree comparisons in sublivelist_verify_blkptr()
215 zfs_btree_find(&sv->sv_pair, &current, &where); in sublivelist_verify_blkptr()
219 zfs_btree_add(&sv->sv_pair, &current); in sublivelist_verify_blkptr()
221 pair->svbr_refcnt++; in sublivelist_verify_blkptr()
227 if (DVA_IS_EMPTY(&bp->blk_dva[i])) in sublivelist_verify_blkptr()
230 .svb_dva = bp->blk_dva[i], in sublivelist_verify_blkptr()
235 if (zfs_btree_find(&sv->sv_leftover, &svb, in sublivelist_verify_blkptr()
237 zfs_btree_add_idx(&sv->sv_leftover, in sublivelist_verify_blkptr()
243 pair->svbr_refcnt--; in sublivelist_verify_blkptr()
244 if (pair->svbr_refcnt == 0) { in sublivelist_verify_blkptr()
246 zfs_btree_remove_idx(&sv->sv_pair, &where); in sublivelist_verify_blkptr()
260 zfs_btree_create(&sv->sv_pair, sublivelist_block_refcnt_compare, NULL, in sublivelist_verify_func()
263 err = bpobj_iterate_nofree(&dle->dle_bpobj, sublivelist_verify_blkptr, in sublivelist_verify_func()
268 while ((e = zfs_btree_destroy_nodes(&sv->sv_pair, &cookie)) != NULL) { in sublivelist_verify_func()
271 &e->svbr_blk, B_TRUE); in sublivelist_verify_func()
273 e->svbr_refcnt, blkbuf); in sublivelist_verify_func()
276 zfs_btree_destroy(&sv->sv_pair); in sublivelist_verify_func()
287 if (DVA_GET_VDEV(&l->svb_dva) < DVA_GET_VDEV(&r->svb_dva)) in livelist_block_compare()
288 return (-1); in livelist_block_compare()
289 else if (DVA_GET_VDEV(&l->svb_dva) > DVA_GET_VDEV(&r->svb_dva)) in livelist_block_compare()
292 if (DVA_GET_OFFSET(&l->svb_dva) < DVA_GET_OFFSET(&r->svb_dva)) in livelist_block_compare()
293 return (-1); in livelist_block_compare()
294 else if (DVA_GET_OFFSET(&l->svb_dva) > DVA_GET_OFFSET(&r->svb_dva)) in livelist_block_compare()
297 if (DVA_GET_ASIZE(&l->svb_dva) < DVA_GET_ASIZE(&r->svb_dva)) in livelist_block_compare()
298 return (-1); in livelist_block_compare()
299 else if (DVA_GET_ASIZE(&l->svb_dva) > DVA_GET_ASIZE(&r->svb_dva)) in livelist_block_compare()
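The comparator above orders leftover blocks by DVA vdev id, then offset, then allocated size, returning -1/0/+1 as btree/avl comparators conventionally do. A minimal standalone sketch of the same three-key ordering, using a hypothetical example_blk_t rather than the zdb types:

#include <stdint.h>

/* Hypothetical stand-in for the DVA fields compared above. */
typedef struct example_blk {
        uint64_t eb_vdev;
        uint64_t eb_offset;
        uint64_t eb_asize;
} example_blk_t;

static int
example_blk_compare(const void *larg, const void *rarg)
{
        const example_blk_t *l = larg;
        const example_blk_t *r = rarg;

        if (l->eb_vdev != r->eb_vdev)
                return (l->eb_vdev < r->eb_vdev ? -1 : 1);
        if (l->eb_offset != r->eb_offset)
                return (l->eb_offset < r->eb_offset ? -1 : 1);
        if (l->eb_asize != r->eb_asize)
                return (l->eb_asize < r->eb_asize ? -1 : 1);
        return (0);
}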
307 * sublivelist_verify_t: sv->sv_leftover
370 return (uic->uic_cb(uic->uic_spa, sme, uic->uic_txg, uic->uic_arg)); in iterate_through_spacemap_logs_cb()
380 for (spa_log_sm_t *sls = avl_first(&spa->spa_sm_logs_by_txg); in iterate_through_spacemap_logs()
381 sls; sls = AVL_NEXT(&spa->spa_sm_logs_by_txg, sls)) { in iterate_through_spacemap_logs()
384 sls->sls_sm_obj, 0, UINT64_MAX, SPA_MINBLOCKSHIFT)); in iterate_through_spacemap_logs()
388 .uic_txg = sls->sls_txg, in iterate_through_spacemap_logs()
404 DVA_SET_VDEV(&svb.svb_dva, mv->mv_vdid); in verify_livelist_allocs()
416 zfs_btree_find(&mv->mv_livelist_allocs, &svb, &where); in verify_livelist_allocs()
418 found = zfs_btree_next(&mv->mv_livelist_allocs, &where, &where); in verify_livelist_allocs()
420 for (; found != NULL && DVA_GET_VDEV(&found->svb_dva) == mv->mv_vdid && in verify_livelist_allocs()
421 DVA_GET_OFFSET(&found->svb_dva) < end_offset; in verify_livelist_allocs()
422 found = zfs_btree_next(&mv->mv_livelist_allocs, &where, &where)) { in verify_livelist_allocs()
423 if (found->svb_allocated_txg <= txg) { in verify_livelist_allocs()
426 (u_longlong_t)DVA_GET_OFFSET(&found->svb_dva), in verify_livelist_allocs()
427 (u_longlong_t)DVA_GET_ASIZE(&found->svb_dva), in verify_livelist_allocs()
428 (u_longlong_t)found->svb_allocated_txg, in verify_livelist_allocs()
439 uint64_t offset = sme->sme_offset; in metaslab_spacemap_validation_cb()
440 uint64_t size = sme->sme_run; in metaslab_spacemap_validation_cb()
441 uint64_t txg = sme->sme_txg; in metaslab_spacemap_validation_cb()
443 if (sme->sme_type == SM_ALLOC) { in metaslab_spacemap_validation_cb()
444 if (zfs_range_tree_contains(mv->mv_allocated, in metaslab_spacemap_validation_cb()
450 (u_longlong_t)size, (u_longlong_t)mv->mv_vdid, in metaslab_spacemap_validation_cb()
451 (u_longlong_t)mv->mv_msid); in metaslab_spacemap_validation_cb()
454 zfs_range_tree_add(mv->mv_allocated, in metaslab_spacemap_validation_cb()
458 if (!zfs_range_tree_contains(mv->mv_allocated, in metaslab_spacemap_validation_cb()
464 (u_longlong_t)size, (u_longlong_t)mv->mv_vdid, in metaslab_spacemap_validation_cb()
465 (u_longlong_t)mv->mv_msid); in metaslab_spacemap_validation_cb()
468 zfs_range_tree_remove(mv->mv_allocated, in metaslab_spacemap_validation_cb()
473 if (sme->sme_type != SM_ALLOC) { in metaslab_spacemap_validation_cb()
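The callback above treats an ALLOC record overlapping already-allocated space, or a FREE record covering unallocated space, as evidence of a double allocation or double free. A toy sketch of the same check, substituting a small byte map (ex_allocated, a hypothetical name) for the zfs_range_tree_t:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define EX_UNITS 1024           /* toy metaslab size, arbitrary units */

static uint8_t ex_allocated[EX_UNITS];

/* Apply one entry; -1 flags a double ALLOC or a FREE of free space. */
static int
example_apply_sm_entry(int is_alloc, uint64_t off, uint64_t run)
{
        if (off + run > EX_UNITS)
                return (-1);
        for (uint64_t i = off; i < off + run; i++) {
                if (ex_allocated[i] == is_alloc) {
                        (void) printf("%s of %sallocated space [%llu, %llu)\n",
                            is_alloc ? "ALLOC" : "FREE", is_alloc ? "" : "un",
                            (unsigned long long)off,
                            (unsigned long long)(off + run));
                        return (-1);
                }
        }
        (void) memset(&ex_allocated[off], is_alloc, run);
        return (0);
}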
488 uint64_t offset = sme->sme_offset; in spacemap_check_sm_log_cb()
489 uint64_t vdev_id = sme->sme_vdev; in spacemap_check_sm_log_cb()
497 if (vdev_id != mv->mv_vdid) in spacemap_check_sm_log_cb()
500 metaslab_t *ms = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in spacemap_check_sm_log_cb()
501 if (ms->ms_id != mv->mv_msid) in spacemap_check_sm_log_cb()
508 ASSERT3U(txg, ==, sme->sme_txg); in spacemap_check_sm_log_cb()
539 ASSERT3U(zfs_btree_numnodes(&mv->mv_livelist_allocs), ==, 0); in mv_populate_livelist_allocs()
540 for (svb = zfs_btree_first(&sv->sv_leftover, &where); in mv_populate_livelist_allocs()
542 svb = zfs_btree_next(&sv->sv_leftover, &where, &where)) { in mv_populate_livelist_allocs()
543 if (DVA_GET_VDEV(&svb->svb_dva) != mv->mv_vdid) in mv_populate_livelist_allocs()
546 if (DVA_GET_OFFSET(&svb->svb_dva) < mv->mv_start && in mv_populate_livelist_allocs()
547 (DVA_GET_OFFSET(&svb->svb_dva) + in mv_populate_livelist_allocs()
548 DVA_GET_ASIZE(&svb->svb_dva)) > mv->mv_start) { in mv_populate_livelist_allocs()
551 (u_longlong_t)DVA_GET_VDEV(&svb->svb_dva), in mv_populate_livelist_allocs()
552 (u_longlong_t)DVA_GET_OFFSET(&svb->svb_dva), in mv_populate_livelist_allocs()
553 (u_longlong_t)DVA_GET_ASIZE(&svb->svb_dva)); in mv_populate_livelist_allocs()
558 if (DVA_GET_OFFSET(&svb->svb_dva) < mv->mv_start) in mv_populate_livelist_allocs()
561 if (DVA_GET_OFFSET(&svb->svb_dva) >= mv->mv_end) in mv_populate_livelist_allocs()
564 if ((DVA_GET_OFFSET(&svb->svb_dva) + in mv_populate_livelist_allocs()
565 DVA_GET_ASIZE(&svb->svb_dva)) > mv->mv_end) { in mv_populate_livelist_allocs()
568 (u_longlong_t)DVA_GET_VDEV(&svb->svb_dva), in mv_populate_livelist_allocs()
569 (u_longlong_t)DVA_GET_OFFSET(&svb->svb_dva), in mv_populate_livelist_allocs()
570 (u_longlong_t)DVA_GET_ASIZE(&svb->svb_dva)); in mv_populate_livelist_allocs()
575 zfs_btree_add(&mv->mv_livelist_allocs, svb); in mv_populate_livelist_allocs()
578 for (svb = zfs_btree_first(&mv->mv_livelist_allocs, &where); in mv_populate_livelist_allocs()
580 svb = zfs_btree_next(&mv->mv_livelist_allocs, &where, &where)) { in mv_populate_livelist_allocs()
581 zfs_btree_remove(&sv->sv_leftover, svb); in mv_populate_livelist_allocs()
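The boundary checks above classify each leftover block against the metaslab's [mv_start, mv_end) range: blocks belonging to an earlier or later metaslab are skipped, blocks straddling either boundary are reported, and only fully contained blocks are added to mv_livelist_allocs. A minimal sketch of that classification (the example_* names are hypothetical):

#include <stdint.h>

typedef enum {
        EX_BEFORE,      /* belongs to an earlier metaslab */
        EX_AFTER,       /* belongs to a later metaslab */
        EX_INSIDE,      /* fully contained; would be added to the tree */
        EX_STRADDLES    /* crosses a boundary; reported as an error above */
} ex_overlap_t;

static ex_overlap_t
example_classify(uint64_t off, uint64_t asize, uint64_t start, uint64_t end)
{
        uint64_t past = off + asize;    /* exclusive end of the block */

        if (past <= start)
                return (EX_BEFORE);
        if (off >= end)
                return (EX_AFTER);
        if (off >= start && past <= end)
                return (EX_INSIDE);
        return (EX_STRADDLES);
}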
588 * - report leftover frees (**)
589 * - record leftover ALLOCs together with their TXG [see Cross Check]
598 * - iterate over spacemap and then the metaslab's entries in the
627 vdev_t *rvd = spa->spa_root_vdev; in livelist_metaslab_validate()
628 for (uint64_t c = 0; c < rvd->vdev_children; c++) { in livelist_metaslab_validate()
629 vdev_t *vd = rvd->vdev_child[c]; in livelist_metaslab_validate()
634 for (uint64_t mid = 0; mid < vd->vdev_ms_count; mid++) { in livelist_metaslab_validate()
635 metaslab_t *m = vd->vdev_ms[mid]; in livelist_metaslab_validate()
640 (longlong_t)vd->vdev_id, in livelist_metaslab_validate()
642 (longlong_t)vd->vdev_ms_count); in livelist_metaslab_validate()
652 mv.mv_vdid = vd->vdev_id; in livelist_metaslab_validate()
653 mv.mv_msid = m->ms_id; in livelist_metaslab_validate()
654 mv.mv_start = m->ms_start; in livelist_metaslab_validate()
655 mv.mv_end = m->ms_start + m->ms_size; in livelist_metaslab_validate()
662 spacemap_check_ms_sm(m->ms_sm, &mv); in livelist_metaslab_validate()
691 int vdev_id = DVA_GET_VDEV(&svb->svb_dva); in livelist_metaslab_validate()
692 ASSERT3U(vdev_id, <, rvd->vdev_children); in livelist_metaslab_validate()
693 vdev_t *vd = rvd->vdev_child[vdev_id]; in livelist_metaslab_validate()
696 vdev_id, (u_longlong_t)DVA_GET_OFFSET(&svb->svb_dva), in livelist_metaslab_validate()
697 (u_longlong_t)DVA_GET_ASIZE(&svb->svb_dva), in livelist_metaslab_validate()
698 (u_longlong_t)svb->svb_allocated_txg); in livelist_metaslab_validate()
724 "Usage:\t%s [-AbcdDFGhikLMPsvXy] [-e [-V] [-p <path> ...]] " in usage()
725 "[-I <inflight I/Os>]\n" in usage()
726 "\t\t[-o <var>=<value>]... [-t <txg>] [-U <cache>] [-x <dumpdir>]\n" in usage()
727 "\t\t[-K <key>]\n" in usage()
729 "\t%s [-AdiPv] [-e [-V] [-p <path> ...]] [-U <cache>] [-K <key>]\n" in usage()
731 "\t%s -B [-e [-V] [-p <path> ...]] [-I <inflight I/Os>]\n" in usage()
732 "\t\t[-o <var>=<value>]... [-t <txg>] [-U <cache>] [-x <dumpdir>]\n" in usage()
733 "\t\t[-K <key>] <poolname>/<objset id> [<backupflags>]\n" in usage()
734 "\t%s [-v] <bookmark>\n" in usage()
735 "\t%s -C [-A] [-U <cache>] [<poolname>]\n" in usage()
736 "\t%s -l [-Aqu] <device>\n" in usage()
737 "\t%s -m [-AFLPX] [-e [-V] [-p <path> ...]] [-t <txg>] " in usage()
738 "[-U <cache>]\n\t\t<poolname> [<vdev> [<metaslab> ...]]\n" in usage()
739 "\t%s -O [-K <key>] <dataset> <path>\n" in usage()
740 "\t%s -r [-K <key>] <dataset> <path> <destination>\n" in usage()
741 "\t%s -R [-A] [-e [-V] [-p <path> ...]] [-U <cache>]\n" in usage()
743 "\t%s -E [-A] word0:word1:...:word15\n" in usage()
744 "\t%s -S [-AP] [-e [-V] [-p <path> ...]] [-U <cache>] " in usage()
759 " end Ending object number, or -1 for no upper bound\n" in usage()
766 " - Negate effect of next flag\n\n"); in usage()
768 (void) fprintf(stderr, " -b --block-stats " in usage()
770 (void) fprintf(stderr, " --bin=(lsize|psize|asize) " in usage()
773 " --class=(normal|special|dedup|other)[,...]\n" in usage()
776 (void) fprintf(stderr, " -B --backup " in usage()
778 (void) fprintf(stderr, " -c --checksum " in usage()
780 (void) fprintf(stderr, " -C --config " in usage()
782 (void) fprintf(stderr, " -d --datasets " in usage()
784 (void) fprintf(stderr, " -D --dedup-stats " in usage()
786 (void) fprintf(stderr, " -E --embedded-block-pointer=INTEGER\n" in usage()
789 (void) fprintf(stderr, " -h --history " in usage()
791 (void) fprintf(stderr, " -i --intent-logs " in usage()
793 (void) fprintf(stderr, " -l --label " in usage()
795 (void) fprintf(stderr, " -k --checkpointed-state " in usage()
797 (void) fprintf(stderr, " -L --disable-leak-tracking " in usage()
799 (void) fprintf(stderr, " -m --metaslabs " in usage()
801 (void) fprintf(stderr, " -M --metaslab-groups " in usage()
803 (void) fprintf(stderr, " -O --object-lookups " in usage()
805 (void) fprintf(stderr, " -r --copy-object " in usage()
807 (void) fprintf(stderr, " -R --read-block " in usage()
809 (void) fprintf(stderr, " -s --io-stats " in usage()
811 (void) fprintf(stderr, " -S --simulate-dedup " in usage()
813 (void) fprintf(stderr, " -v --verbose " in usage()
815 (void) fprintf(stderr, " -y --livelist " in usage()
820 (void) fprintf(stderr, " -A --ignore-assertions " in usage()
821 "ignore assertions (-A), enable panic recovery (-AA) or both " in usage()
822 "(-AAA)\n"); in usage()
823 (void) fprintf(stderr, " -e --exported " in usage()
825 (void) fprintf(stderr, " -F --automatic-rewind " in usage()
828 (void) fprintf(stderr, " -G --dump-debug-msg " in usage()
830 (void) fprintf(stderr, " -I --inflight=INTEGER " in usage()
833 (void) fprintf(stderr, " -K --key=KEY " in usage()
835 (void) fprintf(stderr, " -o --option=\"NAME=VALUE\" " in usage()
837 (void) fprintf(stderr, " -p --path==PATH " in usage()
838 "use one or more with -e to specify path to vdev dir\n"); in usage()
839 (void) fprintf(stderr, " -P --parseable " in usage()
841 (void) fprintf(stderr, " -q --skip-label " in usage()
843 (void) fprintf(stderr, " -t --txg=INTEGER " in usage()
845 (void) fprintf(stderr, " -T --brt-stats " in usage()
847 (void) fprintf(stderr, " -u --uberblock " in usage()
849 (void) fprintf(stderr, " -U --cachefile=PATH " in usage()
851 (void) fprintf(stderr, " -V --verbatim " in usage()
853 (void) fprintf(stderr, " -x --dump-blocks=PATH " in usage()
855 (void) fprintf(stderr, " -X --extreme-rewind " in usage()
857 (void) fprintf(stderr, " -Y --all-reconstruction " in usage()
859 (void) fprintf(stderr, " -Z --zstd-headers " in usage()
861 (void) fprintf(stderr, "Specify an option more than once (e.g. -bb) " in usage()
863 (void) fprintf(stderr, "Default is to dump everything non-verbosely\n"); in usage()
890 * Restore default action and re-raise signal so SIGSEGV and in sig_handler()
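The comment fragment above refers to the usual crash-handler idiom: after any reporting, restore the default disposition and re-raise the same signal so the process still terminates with the expected SIGSEGV/SIGABRT status. A minimal sketch of that idiom (not zdb's actual handler):

#include <signal.h>
#include <unistd.h>

static void
example_sig_handler(int signo)
{
        static const char msg[] = "fatal signal, re-raising\n";

        /* async-signal-safe reporting only */
        (void) write(STDERR_FILENO, msg, sizeof (msg) - 1);

        /* restore the default action, then deliver the signal again */
        (void) signal(signo, SIG_DFL);
        (void) raise(signo);
}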
950 (u_longlong_t)shp->sh_pool_create_len); in dump_history_offsets()
952 (u_longlong_t)shp->sh_phys_max_off); in dump_history_offsets()
954 (u_longlong_t)shp->sh_bof); in dump_history_offsets()
956 (u_longlong_t)shp->sh_eof); in dump_history_offsets()
958 (u_longlong_t)shp->sh_records_lost); in dump_history_offsets()
980 static const uint64_t histo_width = sizeof (histo_stars) - 1;
986 int minidx = size - 1; in dump_histogram()
1007 &histo_stars[(max - histo[i]) * histo_width / max]); in dump_histogram()
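The indexing above is the whole trick behind the ASCII bars: printing a suffix of a fixed string of '*' characters whose length is proportional to histo[i] relative to the largest bucket. A standalone sketch (assumes max > 0 and v <= max):

#include <stdio.h>

static void
example_print_bar(unsigned long v, unsigned long max)
{
        static const char stars[] = "****************************************";
        const unsigned long width = sizeof (stars) - 1;

        /*
         * Skipping (max - v) * width / max characters leaves a suffix of
         * roughly v * width / max stars: the largest bucket gets a
         * full-width bar, near-empty buckets get almost none.
         */
        (void) printf("%s\n", &stars[(max - v) * width / max]);
}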
1108 * allocation failures and nigh-infinite printing if the in dump_uint64()
1171 *(uint64_t *)attrp->za_name); in dump_zap()
1173 (void) printf("\t\t%s = ", attrp->za_name); in dump_zap()
1175 if (attrp->za_num_integers == 0) { in dump_zap()
1179 prop = umem_zalloc(attrp->za_num_integers * in dump_zap()
1180 attrp->za_integer_length, UMEM_NOFAIL); in dump_zap()
1184 (const uint64_t *)attrp->za_name, 1, in dump_zap()
1185 attrp->za_integer_length, attrp->za_num_integers, in dump_zap()
1188 (void) zap_lookup(os, object, attrp->za_name, in dump_zap()
1189 attrp->za_integer_length, attrp->za_num_integers, in dump_zap()
1192 if (attrp->za_integer_length == 1 && !key64) { in dump_zap()
1193 if (strcmp(attrp->za_name, in dump_zap()
1195 strcmp(attrp->za_name, in dump_zap()
1197 strcmp(attrp->za_name, DSL_CRYPTO_KEY_IV) == 0 || in dump_zap()
1198 strcmp(attrp->za_name, DSL_CRYPTO_KEY_MAC) == 0 || in dump_zap()
1199 strcmp(attrp->za_name, in dump_zap()
1203 for (i = 0; i < attrp->za_num_integers; i++) { in dump_zap()
1210 for (i = 0; i < attrp->za_num_integers; i++) { in dump_zap()
1211 switch (attrp->za_integer_length) { in dump_zap()
1233 attrp->za_num_integers * attrp->za_integer_length); in dump_zap()
1254 zdb_nicenum(bpop->bpo_bytes, bytes, sizeof (bytes)); in dump_bpobj()
1255 zdb_nicenum(bpop->bpo_comp, comp, sizeof (comp)); in dump_bpobj()
1256 zdb_nicenum(bpop->bpo_uncomp, uncomp, sizeof (uncomp)); in dump_bpobj()
1259 (u_longlong_t)bpop->bpo_num_blkptrs); in dump_bpobj()
1267 (u_longlong_t)bpop->bpo_subobjs); in dump_bpobj()
1269 (u_longlong_t)bpop->bpo_num_subobjs); in dump_bpobj()
1273 (u_longlong_t)bpop->bpo_num_freed); in dump_bpobj()
1279 for (i = 0; i < bpop->bpo_num_blkptrs; i++) { in dump_bpobj()
1312 int64_t last_nonzero = -1; in dump_bpobj_subobjs()
1345 (void) printf("\t\t%s = ", attrp->za_name); in dump_sa_attrs()
1346 if (attrp->za_num_integers == 0) { in dump_sa_attrs()
1351 (u_longlong_t)attrp->za_first_integer, in dump_sa_attrs()
1352 (int)ATTR_LENGTH(attrp->za_first_integer), in dump_sa_attrs()
1353 (int)ATTR_BSWAP(attrp->za_first_integer), in dump_sa_attrs()
1354 (int)ATTR_NUM(attrp->za_first_integer)); in dump_sa_attrs()
1375 (void) printf("\t\t%s = [", attrp->za_name); in dump_sa_layouts()
1376 if (attrp->za_num_integers == 0) { in dump_sa_layouts()
1381 VERIFY(attrp->za_integer_length == 2); in dump_sa_layouts()
1382 layout_attrs = umem_zalloc(attrp->za_num_integers * in dump_sa_layouts()
1383 attrp->za_integer_length, UMEM_NOFAIL); in dump_sa_layouts()
1385 VERIFY(zap_lookup(os, object, attrp->za_name, in dump_sa_layouts()
1386 attrp->za_integer_length, in dump_sa_layouts()
1387 attrp->za_num_integers, layout_attrs) == 0); in dump_sa_layouts()
1389 for (i = 0; i != attrp->za_num_integers; i++) in dump_sa_layouts()
1393 attrp->za_num_integers * attrp->za_integer_length); in dump_sa_layouts()
1431 attrp->za_name, ZFS_DIRENT_OBJ(attrp->za_first_integer), in dump_zpldir()
1432 typenames[ZFS_DIRENT_TYPE(attrp->za_first_integer)]); in dump_zpldir()
1443 if (vd->vdev_ops->vdev_op_leaf) { in get_dtl_refcount()
1444 space_map_t *sm = vd->vdev_dtl_sm; in get_dtl_refcount()
1447 sm->sm_dbuf->db_size == sizeof (space_map_phys_t)) in get_dtl_refcount()
1452 for (unsigned c = 0; c < vd->vdev_children; c++) in get_dtl_refcount()
1453 refcount += get_dtl_refcount(vd->vdev_child[c]); in get_dtl_refcount()
1462 if (vd->vdev_top == vd) { in get_metaslab_refcount()
1463 for (uint64_t m = 0; m < vd->vdev_ms_count; m++) { in get_metaslab_refcount()
1464 space_map_t *sm = vd->vdev_ms[m]->ms_sm; in get_metaslab_refcount()
1467 sm->sm_dbuf->db_size == sizeof (space_map_phys_t)) in get_metaslab_refcount()
1471 for (unsigned c = 0; c < vd->vdev_children; c++) in get_metaslab_refcount()
1472 refcount += get_metaslab_refcount(vd->vdev_child[c]); in get_metaslab_refcount()
1484 if (vd->vdev_top == vd && obsolete_sm_object != 0) { in get_obsolete_refcount()
1486 VERIFY0(dmu_object_info(vd->vdev_spa->spa_meta_objset, in get_obsolete_refcount()
1492 ASSERT0P(vd->vdev_obsolete_sm); in get_obsolete_refcount()
1495 for (unsigned c = 0; c < vd->vdev_children; c++) { in get_obsolete_refcount()
1496 refcount += get_obsolete_refcount(vd->vdev_child[c]); in get_obsolete_refcount()
1506 spa->spa_condensing_indirect_phys.scip_prev_obsolete_sm_object; in get_prev_obsolete_spacemap_refcount()
1509 VERIFY0(dmu_object_info(spa->spa_meta_objset, prev_obj, &doi)); in get_prev_obsolete_spacemap_refcount()
1522 if (vd->vdev_top == vd && vd->vdev_top_zap != 0 && in get_checkpoint_refcount()
1523 zap_contains(spa_meta_objset(vd->vdev_spa), in get_checkpoint_refcount()
1524 vd->vdev_top_zap, VDEV_TOP_ZAP_POOL_CHECKPOINT_SM) == 0) in get_checkpoint_refcount()
1527 for (uint64_t c = 0; c < vd->vdev_children; c++) in get_checkpoint_refcount()
1528 refcount += get_checkpoint_refcount(vd->vdev_child[c]); in get_checkpoint_refcount()
1536 return (avl_numnodes(&spa->spa_sm_logs_by_txg)); in get_log_spacemap_refcount()
1548 actual_refcount = get_dtl_refcount(spa->spa_root_vdev); in verify_spacemap_refcounts()
1549 actual_refcount += get_metaslab_refcount(spa->spa_root_vdev); in verify_spacemap_refcounts()
1550 actual_refcount += get_obsolete_refcount(spa->spa_root_vdev); in verify_spacemap_refcounts()
1552 actual_refcount += get_checkpoint_refcount(spa->spa_root_vdev); in verify_spacemap_refcounts()
1575 (longlong_t)sm->sm_object); in dump_spacemap()
1577 (longlong_t)sm->sm_phys->smp_length); in dump_spacemap()
1579 (longlong_t)sm->sm_phys->smp_alloc); in dump_spacemap()
1587 uint8_t mapshift = sm->sm_shift; in dump_spacemap()
1622 sm->sm_start; in dump_spacemap()
1626 "range: %012llx-%012llx size: %08llx\n", in dump_spacemap()
1629 (u_longlong_t)(entry_off + entry_run - 1), in dump_spacemap()
1632 /* it is a two-word entry so we read another word */ in dump_spacemap()
1647 mapshift) + sm->sm_start; in dump_spacemap()
1652 "range: %012llx-%012llx size: %08llx " in dump_spacemap()
1656 (u_longlong_t)(entry_off + entry_run - 1), in dump_spacemap()
1665 alloc -= entry_run; in dump_spacemap()
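A small sketch of the offset arithmetic used above, assuming the entry's offset and run have already been decoded from the raw space map words: both are stored in units of 2^sm_shift bytes, so the printable byte range is [entry_off, entry_off + entry_run - 1].

#include <stdint.h>
#include <stdio.h>

static void
example_sm_entry_range(uint64_t raw_offset, uint64_t raw_run,
    uint64_t sm_start, uint8_t mapshift)
{
        uint64_t entry_off = (raw_offset << mapshift) + sm_start;
        uint64_t entry_run = raw_run << mapshift;

        (void) printf("range: %012llx-%012llx size: %08llx\n",
            (unsigned long long)entry_off,
            (unsigned long long)(entry_off + entry_run - 1),
            (unsigned long long)entry_run);
}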
1679 zfs_range_tree_t *rt = msp->ms_allocatable; in dump_metaslab_stats()
1680 zfs_btree_t *t = &msp->ms_allocatable_by_size; in dump_metaslab_stats()
1681 int free_pct = zfs_range_tree_space(rt) * 100 / msp->ms_size; in dump_metaslab_stats()
1691 (void) printf("\tIn-memory histogram:\n"); in dump_metaslab_stats()
1692 dump_histogram(rt->rt_histogram, ZFS_RANGE_TREE_HISTOGRAM_SIZE, 0); in dump_metaslab_stats()
1701 start - *off); in dump_allocated()
1708 vdev_t *vd = msp->ms_group->mg_vd; in dump_metaslab()
1709 spa_t *spa = vd->vdev_spa; in dump_metaslab()
1710 space_map_t *sm = msp->ms_sm; in dump_metaslab()
1713 zdb_nicenum(msp->ms_size - space_map_allocated(sm), freebuf, in dump_metaslab()
1718 (u_longlong_t)msp->ms_id, (u_longlong_t)msp->ms_start, in dump_metaslab()
1723 mutex_enter(&msp->ms_lock); in dump_metaslab()
1728 zfs_range_tree_stat_verify(msp->ms_allocatable); in dump_metaslab()
1733 uint64_t off = msp->ms_start; in dump_metaslab()
1734 zfs_range_tree_walk(msp->ms_allocatable, dump_allocated, in dump_metaslab()
1736 if (off != msp->ms_start + msp->ms_size) in dump_metaslab()
1738 msp->ms_size - off); in dump_metaslab()
1747 (void) printf("\tOn-disk histogram:\t\tfragmentation %llu\n", in dump_metaslab()
1748 (u_longlong_t)msp->ms_fragmentation); in dump_metaslab()
1749 dump_histogram(sm->sm_phys->smp_histogram, in dump_metaslab()
1750 SPACE_MAP_HISTOGRAM_SIZE, sm->sm_shift); in dump_metaslab()
1756 mutex_exit(&msp->ms_lock); in dump_metaslab()
1759 if (vd->vdev_ops == &vdev_draid_ops) in dump_metaslab()
1760 ASSERT3U(msp->ms_size, <=, 1ULL << vd->vdev_ms_shift); in dump_metaslab()
1762 ASSERT3U(msp->ms_size, ==, 1ULL << vd->vdev_ms_shift); in dump_metaslab()
1764 dump_spacemap(spa->spa_meta_objset, msp->ms_sm); in dump_metaslab()
1775 vdev_alloc_bias_t alloc_bias = vd->vdev_alloc_bias; in print_vdev_metaslab_header()
1777 if (alloc_bias == VDEV_BIAS_LOG || vd->vdev_islog) { in print_vdev_metaslab_header()
1786 if (vd->vdev_top_zap != 0) { in print_vdev_metaslab_header()
1787 int error = zap_lookup(spa_meta_objset(vd->vdev_spa), in print_vdev_metaslab_header()
1788 vd->vdev_top_zap, VDEV_TOP_ZAP_MS_UNFLUSHED_PHYS_TXGS, in print_vdev_metaslab_header()
1796 (u_longlong_t)vd->vdev_id, bias_str, in print_vdev_metaslab_header()
1797 (u_longlong_t)vd->vdev_ms_shift); in print_vdev_metaslab_header()
1804 (void) printf("\n\t%-10s%5llu %-19s %-15s %-12s\n", in print_vdev_metaslab_header()
1805 "metaslabs", (u_longlong_t)vd->vdev_ms_count, in print_vdev_metaslab_header()
1808 "---------------", "-------------------", in print_vdev_metaslab_header()
1809 "---------------", "------------"); in print_vdev_metaslab_header()
1815 vdev_t *rvd = spa->spa_root_vdev; in dump_metaslab_groups()
1822 for (unsigned c = 0; c < rvd->vdev_children; c++) { in dump_metaslab_groups()
1823 vdev_t *tvd = rvd->vdev_child[c]; in dump_metaslab_groups()
1824 metaslab_group_t *mg = tvd->vdev_mg; in dump_metaslab_groups()
1826 if (mg == NULL || (mg->mg_class != mc && in dump_metaslab_groups()
1827 (!show_special || mg->mg_class != smc))) in dump_metaslab_groups()
1831 mg->mg_fragmentation = metaslab_group_fragmentation(mg); in dump_metaslab_groups()
1835 (u_longlong_t)tvd->vdev_id, in dump_metaslab_groups()
1836 (u_longlong_t)tvd->vdev_ms_count); in dump_metaslab_groups()
1837 if (mg->mg_fragmentation == ZFS_FRAG_INVALID) { in dump_metaslab_groups()
1838 (void) printf("%3s\n", "-"); in dump_metaslab_groups()
1841 (u_longlong_t)mg->mg_fragmentation); in dump_metaslab_groups()
1843 dump_histogram(mg->mg_histogram, in dump_metaslab_groups()
1850 (void) printf("\t%3s\n", "-"); in dump_metaslab_groups()
1853 dump_histogram(mc->mc_histogram, ZFS_RANGE_TREE_HISTOGRAM_SIZE, 0); in dump_metaslab_groups()
1859 vdev_indirect_config_t *vic = &vd->vdev_indirect_config; in print_vdev_indirect()
1860 vdev_indirect_mapping_t *vim = vd->vdev_indirect_mapping; in print_vdev_indirect()
1861 vdev_indirect_births_t *vib = vd->vdev_indirect_births; in print_vdev_indirect()
1869 vic->vic_mapping_object); in print_vdev_indirect()
1871 vic->vic_births_object); in print_vdev_indirect()
1874 (longlong_t)vic->vic_births_object); in print_vdev_indirect()
1879 &vib->vib_entries[i]; in print_vdev_indirect()
1880 (void) printf("\toffset %llx -> txg %llu\n", in print_vdev_indirect()
1881 (longlong_t)cur_vibe->vibe_offset, in print_vdev_indirect()
1882 (longlong_t)cur_vibe->vibe_phys_birth_txg); in print_vdev_indirect()
1887 (longlong_t)vic->vic_mapping_object); in print_vdev_indirect()
1902 &vim->vim_entries[i]; in print_vdev_indirect()
1903 (void) printf("\t<%llx:%llx:%llx> -> " in print_vdev_indirect()
1905 (longlong_t)vd->vdev_id, in print_vdev_indirect()
1907 (longlong_t)DVA_GET_ASIZE(&vimep->vimep_dst), in print_vdev_indirect()
1908 (longlong_t)DVA_GET_VDEV(&vimep->vimep_dst), in print_vdev_indirect()
1909 (longlong_t)DVA_GET_OFFSET(&vimep->vimep_dst), in print_vdev_indirect()
1910 (longlong_t)DVA_GET_ASIZE(&vimep->vimep_dst), in print_vdev_indirect()
1918 objset_t *mos = vd->vdev_spa->spa_meta_objset; in print_vdev_indirect()
1921 ASSERT(vd->vdev_obsolete_sm != NULL); in print_vdev_indirect()
1922 ASSERT3U(space_map_object(vd->vdev_obsolete_sm), ==, in print_vdev_indirect()
1924 dump_spacemap(mos, vd->vdev_obsolete_sm); in print_vdev_indirect()
1932 vdev_t *vd, *rvd = spa->spa_root_vdev; in dump_metaslabs()
1933 uint64_t m, c = 0, children = rvd->vdev_children; in dump_metaslabs()
1944 vd = rvd->vdev_child[c]; in dump_metaslabs()
1948 if (zopt_metaslab[m] < vd->vdev_ms_count) in dump_metaslabs()
1950 vd->vdev_ms[zopt_metaslab[m]]); in dump_metaslabs()
1962 vd = rvd->vdev_child[c]; in dump_metaslabs()
1967 for (m = 0; m < vd->vdev_ms_count; m++) in dump_metaslabs()
1968 dump_metaslab(vd->vdev_ms[m]); in dump_metaslabs()
1980 for (spa_log_sm_t *sls = avl_first(&spa->spa_sm_logs_by_txg); in dump_log_spacemaps()
1981 sls; sls = AVL_NEXT(&spa->spa_sm_logs_by_txg, sls)) { in dump_log_spacemaps()
1984 sls->sls_sm_obj, 0, UINT64_MAX, SPA_MINBLOCKSHIFT)); in dump_log_spacemaps()
1987 (u_longlong_t)sls->sls_sm_obj, (u_longlong_t)sls->sls_txg); in dump_log_spacemaps()
1988 dump_spacemap(spa->spa_meta_objset, sm); in dump_log_spacemaps()
1998 const ddt_key_t *ddk = &ddlwe->ddlwe_key; in dump_ddt_entry()
2004 const ddt_univ_phys_t *ddp = &ddlwe->ddlwe_phys; in dump_ddt_entry()
2009 ddt_bp_create(ddt->ddt_checksum, ddk, ddp, v, &blk); in dump_ddt_entry()
2022 if (dds->dds_blocks == 0) in dump_dedup_ratio()
2025 rL = (double)dds->dds_ref_lsize; in dump_dedup_ratio()
2026 rP = (double)dds->dds_ref_psize; in dump_dedup_ratio()
2027 rD = (double)dds->dds_ref_dsize; in dump_dedup_ratio()
2028 D = (double)dds->dds_dsize; in dump_dedup_ratio()
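From these sums the function goes on to derive the usual ratios, where the r-prefixed values count every reference and D counts each allocated block once. A compact sketch of that arithmetic (a restatement under the variable naming above, not necessarily the verbatim code):

#include <stdio.h>

static void
example_dedup_ratios(double rL, double rP, double rD, double D)
{
        double dedup = rD / D;          /* average references per allocated byte */
        double compress = rL / rP;      /* logical vs. physical size */
        double copies = rD / rP;        /* allocated vs. physical (copies, parity, ganging) */

        (void) printf("dedup = %.2f, compress = %.2f, copies = %.2f, "
            "dedup * compress / copies = %.2f\n",
            dedup, compress, copies, dedup * compress / copies);
}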
2042 if (ddt->ddt_version != DDT_VERSION_FDT || in dump_ddt_log()
2043 !(ddt->ddt_flags & DDT_FLAG_LOG)) in dump_ddt_log()
2047 ddt_log_t *ddl = &ddt->ddt_log[n]; in dump_ddt_log()
2050 if (ddl->ddl_flags > 0) { in dump_ddt_log()
2053 if (ddl->ddl_flags & DDL_FLAG_FLUSHING) in dump_ddt_log()
2055 sizeof (flagstr) - c); in dump_ddt_log()
2056 if (ddl->ddl_flags & DDL_FLAG_CHECKPOINT) in dump_ddt_log()
2058 sizeof (flagstr) - c); in dump_ddt_log()
2059 if (ddl->ddl_flags & in dump_ddt_log()
2062 sizeof (flagstr) - c); in dump_ddt_log()
2067 uint64_t count = avl_numnodes(&ddl->ddl_tree); in dump_ddt_log()
2071 zio_checksum_table[ddt->ddt_checksum].ci_name, n, in dump_ddt_log()
2072 ddl->ddl_flags, flagstr, in dump_ddt_log()
2073 (u_longlong_t)ddl->ddl_object, in dump_ddt_log()
2074 (u_longlong_t)ddl->ddl_length, in dump_ddt_log()
2075 (u_longlong_t)ddl->ddl_first_txg, (u_longlong_t)count); in dump_ddt_log()
2077 if (ddl->ddl_flags & DDL_FLAG_CHECKPOINT) { in dump_ddt_log()
2078 const ddt_key_t *ddk = &ddl->ddl_checkpoint; in dump_ddt_log()
2081 (u_longlong_t)ddk->ddk_cksum.zc_word[0], in dump_ddt_log()
2082 (u_longlong_t)ddk->ddk_cksum.zc_word[1], in dump_ddt_log()
2083 (u_longlong_t)ddk->ddk_cksum.zc_word[2], in dump_ddt_log()
2084 (u_longlong_t)ddk->ddk_cksum.zc_word[3], in dump_ddt_log()
2085 (u_longlong_t)ddk->ddk_prop); in dump_ddt_log()
2093 for (ddt_log_entry_t *ddle = avl_first(&ddl->ddl_tree); in dump_ddt_log()
2094 ddle; ddle = AVL_NEXT(&ddl->ddl_tree, ddle)) { in dump_ddt_log()
2134 (u_longlong_t)ddt->ddt_object[type][class]); in dump_ddt_object()
2135 zpool_dump_ddt(NULL, &ddt->ddt_histogram[type][class]); in dump_ddt_object()
2156 if (!ddt || ddt->ddt_version == DDT_VERSION_UNCONFIGURED) in dump_ddt()
2160 if (ddt->ddt_flags > 0) { in dump_ddt()
2163 if (ddt->ddt_flags & DDT_FLAG_FLAT) in dump_ddt()
2165 sizeof (flagstr) - c); in dump_ddt()
2166 if (ddt->ddt_flags & DDT_FLAG_LOG) in dump_ddt()
2168 sizeof (flagstr) - c); in dump_ddt()
2169 if (ddt->ddt_flags & ~DDT_FLAG_MASK) in dump_ddt()
2171 sizeof (flagstr) - c); in dump_ddt()
2176 printf("DDT-%s: version=%llu [%s]; flags=0x%02llx%s; rootobj=%llu\n", in dump_ddt()
2177 zio_checksum_table[ddt->ddt_checksum].ci_name, in dump_ddt()
2178 (u_longlong_t)ddt->ddt_version, in dump_ddt()
2179 (ddt->ddt_version == 0) ? "LEGACY" : in dump_ddt()
2180 (ddt->ddt_version == 1) ? "FDT" : "UNKNOWN", in dump_ddt()
2181 (u_longlong_t)ddt->ddt_flags, flagstr, in dump_ddt()
2182 (u_longlong_t)ddt->ddt_dir_object); in dump_ddt()
2198 dump_ddt(spa->spa_ddt[c]); in dump_all_ddts()
2233 "-----", "---------", "----"); in dump_all_ddts()
2267 for (uint64_t vdevid = 0; vdevid < spa->spa_brt_nvdevs; vdevid++) { in dump_brt()
2268 brt_vdev_t *brtvd = spa->spa_brt_vdevs[vdevid]; in dump_brt()
2269 if (!brtvd->bv_initiated) { in dump_brt()
2274 zdb_nicenum(brtvd->bv_totalcount, count, sizeof (count)); in dump_brt()
2275 zdb_nicebytes(brtvd->bv_usedspace, used, sizeof (used)); in dump_brt()
2276 zdb_nicebytes(brtvd->bv_savedspace, saved, sizeof (saved)); in dump_brt()
2284 /* -TTT shows per-vdev histograms; -TTTT shows all entries */ in dump_brt()
2290 printf("\n%-16s %-10s\n", "DVA", "REFCNT"); in dump_brt()
2292 for (uint64_t vdevid = 0; vdevid < spa->spa_brt_nvdevs; vdevid++) { in dump_brt()
2293 brt_vdev_t *brtvd = spa->spa_brt_vdevs[vdevid]; in dump_brt()
2294 if (!brtvd->bv_initiated) in dump_brt()
2301 for (zap_cursor_init(&zc, spa->spa_meta_objset, in dump_brt()
2302 brtvd->bv_mos_entries); in dump_brt()
2306 VERIFY0(zap_lookup_uint64(spa->spa_meta_objset, in dump_brt()
2307 brtvd->bv_mos_entries, in dump_brt()
2308 (const uint64_t *)za->za_name, 1, in dump_brt()
2309 za->za_integer_length, za->za_num_integers, in dump_brt()
2316 *(const uint64_t *)za->za_name; in dump_brt()
2320 printf("%-16s %-10llu\n", dva, in dump_brt()
2350 spa_t *spa = vd->vdev_spa; in dump_dtl()
2364 vd->vdev_path ? vd->vdev_path : in dump_dtl()
2365 vd->vdev_parent ? vd->vdev_ops->vdev_op_type : spa_name(spa), in dump_dtl()
2366 required ? "DTL-required" : "DTL-expendable"); in dump_dtl()
2369 zfs_range_tree_t *rt = vd->vdev_dtl[t]; in dump_dtl()
2375 if (dump_opt['d'] > 5 && vd->vdev_children == 0) in dump_dtl()
2376 dump_spacemap(spa->spa_meta_objset, in dump_dtl()
2377 vd->vdev_dtl_sm); in dump_dtl()
2380 for (unsigned c = 0; c < vd->vdev_children; c++) in dump_dtl()
2381 dump_dtl(vd->vdev_child[c], indent + 4); in dump_dtl()
2413 off -= resid; in dump_history()
2514 ASSERT(zb->zb_level < 0); in blkid2offset()
2515 if (zb->zb_object == 0) in blkid2offset()
2516 return (zb->zb_blkid); in blkid2offset()
2517 return (zb->zb_blkid * BP_GET_LSIZE(bp)); in blkid2offset()
2520 ASSERT(zb->zb_level >= 0); in blkid2offset()
2522 return ((zb->zb_blkid << in blkid2offset()
2523 (zb->zb_level * (dnp->dn_indblkshift - SPA_BLKPTRSHIFT))) * in blkid2offset()
2524 dnp->dn_datablkszsec << SPA_MINBLOCKSHIFT); in blkid2offset()
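A worked instance of the level >= 0 arithmetic above, under common assumptions: 128K indirect blocks (dn_indblkshift = 17) holding 128-byte block pointers, 128K data blocks (dn_datablkszsec = 256), and 512-byte sectors. The example_* names and local constants are illustrative, not the spa.h macros.

#include <stdint.h>

#define EX_BLKPTRSHIFT   7      /* 128-byte block pointers */
#define EX_MINBLOCKSHIFT 9      /* 512-byte sectors */

static uint64_t
example_blkid2offset(uint64_t blkid, int level)
{
        const int indblkshift = 17;     /* 2^(17-7) = 1024 blkptrs per indirect */
        const int datablkszsec = 256;   /* 256 * 512 = 128K data blocks */

        return ((blkid << (level * (indblkshift - EX_BLKPTRSHIFT))) *
            (uint64_t)datablkszsec << EX_MINBLOCKSHIFT);
}

/*
 * example_blkid2offset(3, 1) == 0x18000000: the level-1 block pointer with
 * blkid 3 covers file data starting at 3 * 1024 * 128K bytes.
 */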
2555 buflen - strlen(blkbuf), in snprintf_zstd_header()
2581 buflen - strlen(blkbuf), in snprintf_zstd_header()
2593 const dva_t *dva = bp->blk_dva; in snprintf_blkptr_compact()
2601 buflen - strlen(blkbuf), " %s", "FREE"); in snprintf_blkptr_compact()
2620 buflen - strlen(blkbuf), "%llu:%llx:%llx%s ", in snprintf_blkptr_compact()
2629 buflen - strlen(blkbuf), in snprintf_blkptr_compact()
2635 buflen - strlen(blkbuf), in snprintf_blkptr_compact()
2644 buflen - strlen(blkbuf), " %s", "FREE"); in snprintf_blkptr_compact()
2646 buflen - strlen(blkbuf), in snprintf_blkptr_compact()
2648 (u_longlong_t)bp->blk_cksum.zc_word[0], in snprintf_blkptr_compact()
2649 (u_longlong_t)bp->blk_cksum.zc_word[1], in snprintf_blkptr_compact()
2650 (u_longlong_t)bp->blk_cksum.zc_word[2], in snprintf_blkptr_compact()
2651 (u_longlong_t)bp->blk_cksum.zc_word[3]); in snprintf_blkptr_compact()
2667 ASSERT(zb->zb_level >= 0); in print_indirect()
2669 for (l = dnp->dn_nlevels - 1; l >= -1; l--) { in print_indirect()
2670 if (l == zb->zb_level) { in print_indirect()
2671 (void) printf("L%llx", (u_longlong_t)zb->zb_level); in print_indirect()
2683 if (BP_GET_TYPE(bp) != dnp->dn_type) { in print_indirect()
2686 BP_GET_TYPE(bp), dnp->dn_type); in print_indirect()
2689 if (BP_GET_LEVEL(bp) != zb->zb_level) { in print_indirect()
2692 BP_GET_LEVEL(bp), (longlong_t)zb->zb_level); in print_indirect()
2726 ASSERT(buf->b_data); in visit_indirect()
2729 cbp = buf->b_data; in visit_indirect()
2733 SET_BOOKMARK(&czb, zb->zb_objset, zb->zb_object, in visit_indirect()
2734 zb->zb_level - 1, in visit_indirect()
2735 zb->zb_blkid * epb + i); in visit_indirect()
2759 dnode_phys_t *dnp = dn->dn_phys; in dump_indirect()
2764 SET_BOOKMARK(&czb, dmu_objset_id(dn->dn_objset), in dump_indirect()
2765 dn->dn_object, dnp->dn_nlevels - 1, 0); in dump_indirect()
2766 for (int j = 0; j < dnp->dn_nblkptr; j++) { in dump_indirect()
2768 (void) visit_indirect(dmu_objset_spa(dn->dn_objset), dnp, in dump_indirect()
2769 &dnp->dn_blkptr[j], &czb); in dump_indirect()
2791 crtime = dd->dd_creation_time; in dump_dsl_dir()
2794 (u_longlong_t)dd->dd_head_dataset_obj); in dump_dsl_dir()
2796 (u_longlong_t)dd->dd_parent_obj); in dump_dsl_dir()
2798 (u_longlong_t)dd->dd_origin_obj); in dump_dsl_dir()
2800 (u_longlong_t)dd->dd_child_dir_zapobj); in dump_dsl_dir()
2801 zdb_nicenum(dd->dd_used_bytes, nice, sizeof (nice)); in dump_dsl_dir()
2803 zdb_nicenum(dd->dd_compressed_bytes, nice, sizeof (nice)); in dump_dsl_dir()
2805 zdb_nicenum(dd->dd_uncompressed_bytes, nice, sizeof (nice)); in dump_dsl_dir()
2807 zdb_nicenum(dd->dd_quota, nice, sizeof (nice)); in dump_dsl_dir()
2809 zdb_nicenum(dd->dd_reserved, nice, sizeof (nice)); in dump_dsl_dir()
2812 (u_longlong_t)dd->dd_props_zapobj); in dump_dsl_dir()
2814 (u_longlong_t)dd->dd_deleg_zapobj); in dump_dsl_dir()
2816 (u_longlong_t)dd->dd_flags); in dump_dsl_dir()
2819 zdb_nicenum(dd->dd_used_breakdown[DD_USED_ ## which], nice, \ in dump_dsl_dir()
2829 (u_longlong_t)dd->dd_clones); in dump_dsl_dir()
2853 crtime = ds->ds_creation_time; in dump_dsl_dataset()
2854 zdb_nicenum(ds->ds_referenced_bytes, used, sizeof (used)); in dump_dsl_dataset()
2855 zdb_nicenum(ds->ds_compressed_bytes, compressed, sizeof (compressed)); in dump_dsl_dataset()
2856 zdb_nicenum(ds->ds_uncompressed_bytes, uncompressed, in dump_dsl_dataset()
2858 zdb_nicenum(ds->ds_unique_bytes, unique, sizeof (unique)); in dump_dsl_dataset()
2859 snprintf_blkptr(blkbuf, sizeof (blkbuf), &ds->ds_bp); in dump_dsl_dataset()
2862 (u_longlong_t)ds->ds_dir_obj); in dump_dsl_dataset()
2864 (u_longlong_t)ds->ds_prev_snap_obj); in dump_dsl_dataset()
2866 (u_longlong_t)ds->ds_prev_snap_txg); in dump_dsl_dataset()
2868 (u_longlong_t)ds->ds_next_snap_obj); in dump_dsl_dataset()
2870 (u_longlong_t)ds->ds_snapnames_zapobj); in dump_dsl_dataset()
2872 (u_longlong_t)ds->ds_num_children); in dump_dsl_dataset()
2874 (u_longlong_t)ds->ds_userrefs_obj); in dump_dsl_dataset()
2877 (u_longlong_t)ds->ds_creation_txg); in dump_dsl_dataset()
2879 (u_longlong_t)ds->ds_deadlist_obj); in dump_dsl_dataset()
2885 (u_longlong_t)ds->ds_fsid_guid); in dump_dsl_dataset()
2887 (u_longlong_t)ds->ds_guid); in dump_dsl_dataset()
2889 (u_longlong_t)ds->ds_flags); in dump_dsl_dataset()
2891 (u_longlong_t)ds->ds_next_clones_obj); in dump_dsl_dataset()
2893 (u_longlong_t)ds->ds_props_obj); in dump_dsl_dataset()
2924 bt = db->db_data; in dump_bptree()
2925 zdb_nicenum(bt->bt_bytes, bytes, sizeof (bytes)); in dump_bptree()
2927 name, (unsigned long long)(bt->bt_end - bt->bt_begin), bytes); in dump_bptree()
2966 zdb_nicenum(bpo->bpo_phys->bpo_bytes, bytes, sizeof (bytes)); in dump_full_bpobj()
2967 if (bpo->bpo_havesubobj && bpo->bpo_phys->bpo_subobjs != 0) { in dump_full_bpobj()
2968 zdb_nicenum(bpo->bpo_phys->bpo_comp, comp, sizeof (comp)); in dump_full_bpobj()
2969 zdb_nicenum(bpo->bpo_phys->bpo_uncomp, uncomp, sizeof (uncomp)); in dump_full_bpobj()
2970 if (bpo->bpo_havefreed) { in dump_full_bpobj()
2975 (u_longlong_t)bpo->bpo_object, in dump_full_bpobj()
2976 (u_longlong_t)bpo->bpo_phys->bpo_num_blkptrs, in dump_full_bpobj()
2977 (u_longlong_t)bpo->bpo_phys->bpo_num_freed, in dump_full_bpobj()
2978 (u_longlong_t)bpo->bpo_phys->bpo_num_subobjs, in dump_full_bpobj()
2979 (u_longlong_t)bpo->bpo_phys->bpo_subobjs, in dump_full_bpobj()
2986 (u_longlong_t)bpo->bpo_object, in dump_full_bpobj()
2987 (u_longlong_t)bpo->bpo_phys->bpo_num_blkptrs, in dump_full_bpobj()
2988 (u_longlong_t)bpo->bpo_phys->bpo_num_subobjs, in dump_full_bpobj()
2989 (u_longlong_t)bpo->bpo_phys->bpo_subobjs, in dump_full_bpobj()
2993 for (i = 0; i < bpo->bpo_phys->bpo_num_subobjs; i++) { in dump_full_bpobj()
2997 VERIFY0(dmu_read(bpo->bpo_os, in dump_full_bpobj()
2998 bpo->bpo_phys->bpo_subobjs, in dump_full_bpobj()
3000 error = bpobj_open(&subbpo, bpo->bpo_os, subobj); in dump_full_bpobj()
3012 if (bpo->bpo_havefreed) { in dump_full_bpobj()
3016 (u_longlong_t)bpo->bpo_object, in dump_full_bpobj()
3017 (u_longlong_t)bpo->bpo_phys->bpo_num_blkptrs, in dump_full_bpobj()
3018 (u_longlong_t)bpo->bpo_phys->bpo_num_freed, in dump_full_bpobj()
3024 (u_longlong_t)bpo->bpo_object, in dump_full_bpobj()
3025 (u_longlong_t)bpo->bpo_phys->bpo_num_blkptrs, in dump_full_bpobj()
3046 objset_t *mos = dp->dp_spa->spa_meta_objset; in dump_bookmark()
3068 redaction_list_phys_t *rlp = rl->rl_phys; in dump_bookmark()
3070 if (rlp->rlp_last_object != UINT64_MAX || in dump_bookmark()
3071 rlp->rlp_last_blkid != UINT64_MAX) { in dump_bookmark()
3073 (u_longlong_t)rlp->rlp_last_object, in dump_bookmark()
3074 (u_longlong_t)rlp->rlp_last_blkid); in dump_bookmark()
3079 for (unsigned int i = 0; i < rlp->rlp_num_snaps; i++) { in dump_bookmark()
3083 (u_longlong_t)rlp->rlp_snaps[i]); in dump_bookmark()
3086 (u_longlong_t)rlp->rlp_num_entries); in dump_bookmark()
3093 if (rlp->rlp_num_entries == 0) { in dump_bookmark()
3122 for (size_t i = 1; i < rlp->rlp_num_entries; i++) { in dump_bookmark()
3142 dsl_pool_t *dp = spa_get_dsl(os->os_spa); in dump_bookmarks()
3143 objset_t *mos = os->os_spa->spa_meta_objset; in dump_bookmarks()
3149 for (zap_cursor_init(&zc, mos, ds->ds_bookmarks_obj); in dump_bookmarks()
3157 attrp->za_name); in dump_bookmarks()
3169 mos_obj_refd(bpo->bpo_object); in bpobj_count_refd()
3171 if (bpo->bpo_havesubobj && bpo->bpo_phys->bpo_subobjs != 0) { in bpobj_count_refd()
3172 mos_obj_refd(bpo->bpo_phys->bpo_subobjs); in bpobj_count_refd()
3173 for (uint64_t i = 0; i < bpo->bpo_phys->bpo_num_subobjs; i++) { in bpobj_count_refd()
3177 VERIFY0(dmu_read(bpo->bpo_os, in bpobj_count_refd()
3178 bpo->bpo_phys->bpo_subobjs, in bpobj_count_refd()
3180 error = bpobj_open(&subbpo, bpo->bpo_os, subobj); in bpobj_count_refd()
3198 uint64_t empty_bpobj = spa->spa_dsl_pool->dp_empty_bpobj; in dsl_deadlist_entry_count_refd()
3199 if (dle->dle_bpobj.bpo_object != empty_bpobj) in dsl_deadlist_entry_count_refd()
3200 bpobj_count_refd(&dle->dle_bpobj); in dsl_deadlist_entry_count_refd()
3211 "mintxg %llu -> obj %llu", in dsl_deadlist_entry_dump()
3212 (longlong_t)dle->dle_mintxg, in dsl_deadlist_entry_dump()
3213 (longlong_t)dle->dle_bpobj.bpo_object); in dsl_deadlist_entry_dump()
3215 dump_full_bpobj(&dle->dle_bpobj, buf, 0); in dsl_deadlist_entry_dump()
3217 (void) printf("mintxg %llu -> obj %llu\n", in dsl_deadlist_entry_dump()
3218 (longlong_t)dle->dle_mintxg, in dsl_deadlist_entry_dump()
3219 (longlong_t)dle->dle_bpobj.bpo_object); in dsl_deadlist_entry_dump()
3231 spa_t *spa = dmu_objset_spa(dl->dl_os); in dump_blkptr_list()
3232 uint64_t empty_bpobj = spa->spa_dsl_pool->dp_empty_bpobj; in dump_blkptr_list()
3234 if (dl->dl_oldfmt) { in dump_blkptr_list()
3235 if (dl->dl_bpobj.bpo_object != empty_bpobj) in dump_blkptr_list()
3236 bpobj_count_refd(&dl->dl_bpobj); in dump_blkptr_list()
3238 mos_obj_refd(dl->dl_object); in dump_blkptr_list()
3251 if (dl->dl_oldfmt) { in dump_blkptr_list()
3252 dump_full_bpobj(&dl->dl_bpobj, "old-format deadlist", 0); in dump_blkptr_list()
3256 zdb_nicenum(dl->dl_phys->dl_used, bytes, sizeof (bytes)); in dump_blkptr_list()
3257 zdb_nicenum(dl->dl_phys->dl_comp, comp, sizeof (comp)); in dump_blkptr_list()
3258 zdb_nicenum(dl->dl_phys->dl_uncomp, uncomp, sizeof (uncomp)); in dump_blkptr_list()
3259 zdb_nicenum(avl_numnodes(&dl->dl_tree), entries, sizeof (entries)); in dump_blkptr_list()
3275 dsl_pool_t *dp = spa_get_dsl(os->os_spa); in verify_dd_livelist()
3276 dsl_dir_t *dd = os->os_dsl_dataset->ds_dir; in verify_dd_livelist()
3279 if (!dsl_deadlist_is_open(&dd->dd_livelist)) in verify_dd_livelist()
3283 dsl_deadlist_iterate(&dd->dd_livelist, sublivelist_verify_lightweight, in verify_dd_livelist()
3287 dsl_deadlist_space(&dd->dd_livelist, &ll_used, in verify_dd_livelist()
3293 dsl_dir_phys(dd)->dd_origin_obj, FTAG, &origin_ds)); in verify_dd_livelist()
3294 VERIFY0(dsl_dataset_space_written(origin_ds, os->os_dsl_dataset, in verify_dd_livelist()
3330 VERIFY0(zap_lookup(dd->dd_pool->dp_meta_objset, dd->dd_crypto_obj, in zdb_derive_key()
3347 VERIFY0(zap_lookup(dd->dd_pool->dp_meta_objset, in zdb_derive_key()
3348 dd->dd_crypto_obj, zfs_prop_to_name(ZFS_PROP_PBKDF2_SALT), in zdb_derive_key()
3350 VERIFY0(zap_lookup(dd->dd_pool->dp_meta_objset, in zdb_derive_key()
3351 dd->dd_crypto_obj, zfs_prop_to_name(ZFS_PROP_PBKDF2_ITERS), in zdb_derive_key()
3400 dp = spa_get_dsl(os->os_spa); in zdb_load_key()
3401 dd = os->os_dsl_dataset->ds_dir; in zdb_load_key()
3404 VERIFY0(zap_lookup(dd->dd_pool->dp_meta_objset, dd->dd_crypto_obj, in zdb_load_key()
3406 VERIFY0(dsl_dir_hold_obj(dd->dd_pool, rddobj, NULL, FTAG, &rdd)); in zdb_load_key()
3505 (key_loaded || !(*osp)->os_encrypted)) { in open_objset()
3532 if (os->os_sa != NULL) in close_objset()
3567 ddt_t *ddt = spa->spa_ddt[c]; in zdb_ddt_cleanup()
3573 ddt_entry_t *dde = avl_first(&ddt->ddt_tree), *next; in zdb_ddt_cleanup()
3575 next = AVL_NEXT(&ddt->ddt_tree, dde); in zdb_ddt_cleanup()
3576 dde->dde_io = NULL; in zdb_ddt_cleanup()
3609 * the domain-rid string.
3617 (void) printf("\t%s %llx [%s-%d]\n", id_type, in print_idstr()
3743 uint64_t uid, gid, mode, fsize, parent, links; in dump_znode() local
3763 &mode, 8); in dump_znode()
3801 if (S_ISLNK(mode)) in dump_znode()
3809 (void) printf("\tmode %llo\n", (u_longlong_t)mode); in dump_znode()
3928 * set, the user combined the all-types flag (A) with in match_object_type()
3929 * a negated flag to exclude some types (e.g. A-f to in match_object_type()
3984 if (!key_loaded && os->os_encrypted && in dump_object()
3995 bonus = db->db_data; in dump_object()
3996 bsize = db->db_size; in dump_object()
4024 (void) snprintf(aux + strlen(aux), sizeof (aux) - strlen(aux), in dump_object()
4029 ZIO_COMPRESS_HASLEVEL(os->os_compress) && verbosity >= 6) { in dump_object()
4032 ZIO_COMPRESS_RAW(os->os_compress, os->os_complevel), in dump_object()
4035 sizeof (aux) - strlen(aux), " (Z=inherit=%s)", in dump_object()
4039 sizeof (aux) - strlen(aux), in dump_object()
4040 " (Z=inherit=%s-unknown)", in dump_object()
4041 ZDB_COMPRESS_NAME(os->os_compress)); in dump_object()
4044 (void) snprintf(aux + strlen(aux), sizeof (aux) - strlen(aux), in dump_object()
4045 " (Z=inherit=%s)", ZDB_COMPRESS_NAME(os->os_compress)); in dump_object()
4047 (void) snprintf(aux + strlen(aux), sizeof (aux) - strlen(aux), in dump_object()
4063 (dn->dn_phys->dn_flags & DNODE_FLAG_USED_BYTES) ? in dump_object()
4065 (dn->dn_phys->dn_flags & DNODE_FLAG_USERUSED_ACCOUNTED) ? in dump_object()
4067 (dn->dn_phys->dn_flags & DNODE_FLAG_USEROBJUSED_ACCOUNTED) ? in dump_object()
4069 (dn->dn_phys->dn_flags & DNODE_FLAG_SPILL_BLKPTR) ? in dump_object()
4072 (longlong_t)dn->dn_phys->dn_maxblkid); in dump_object()
4082 (!os->os_encrypted || !DMU_OT_IS_ENCRYPTED(doi.doi_type))) { in dump_object()
4093 if (dn->dn_phys->dn_flags & DNODE_FLAG_SPILL_BLKPTR) { in dump_object()
4096 DN_SPILL_BLKPTR(dn->dn_phys), B_FALSE); in dump_object()
4111 if (dn->dn_type == DMU_OT_DNODE) { in dump_object()
4128 zdb_nicenum(end - start, segsize, sizeof (segsize)); in dump_object()
4148 mos_obj_refd(dd->dd_object); in count_dir_mos_objects()
4149 mos_obj_refd(dsl_dir_phys(dd)->dd_child_dir_zapobj); in count_dir_mos_objects()
4150 mos_obj_refd(dsl_dir_phys(dd)->dd_deleg_zapobj); in count_dir_mos_objects()
4151 mos_obj_refd(dsl_dir_phys(dd)->dd_props_zapobj); in count_dir_mos_objects()
4152 mos_obj_refd(dsl_dir_phys(dd)->dd_clones); in count_dir_mos_objects()
4158 mos_obj_refd_multiple(dd->dd_crypto_obj); in count_dir_mos_objects()
4164 mos_obj_refd(ds->ds_object); in count_ds_mos_objects()
4165 mos_obj_refd(dsl_dataset_phys(ds)->ds_next_clones_obj); in count_ds_mos_objects()
4166 mos_obj_refd(dsl_dataset_phys(ds)->ds_props_obj); in count_ds_mos_objects()
4167 mos_obj_refd(dsl_dataset_phys(ds)->ds_userrefs_obj); in count_ds_mos_objects()
4168 mos_obj_refd(dsl_dataset_phys(ds)->ds_snapnames_zapobj); in count_ds_mos_objects()
4169 mos_obj_refd(ds->ds_bookmarks_obj); in count_ds_mos_objects()
4172 count_dir_mos_objects(ds->ds_dir); in count_ds_mos_objects()
4195 zor->zor_obj_start = strtoull(range, &p, 0); in parse_object_range()
4200 zor->zor_obj_start = ZDB_MAP_OBJECT_ID(zor->zor_obj_start); in parse_object_range()
4201 zor->zor_obj_end = zor->zor_obj_start; in parse_object_range()
4212 if (range[len - 1] == ':') { in parse_object_range()
4220 zor->zor_obj_start = strtoull(s, &p, 0); in parse_object_range()
4229 zor->zor_obj_end = strtoull(s, &p, 0); in parse_object_range()
4237 if (zor->zor_obj_start > zor->zor_obj_end) { in parse_object_range()
4245 zor->zor_flags = ZOR_FLAG_ALL_TYPES; in parse_object_range()
4248 *msg = "Invalid colon-delimited field after flags"; in parse_object_range()
4256 boolean_t negation = (flagstr[i] == '-'); in parse_object_range()
4277 zor->zor_flags = flags; in parse_object_range()
4279 zor->zor_obj_start = ZDB_MAP_OBJECT_ID(zor->zor_obj_start); in parse_object_range()
4280 zor->zor_obj_end = ZDB_MAP_OBJECT_ID(zor->zor_obj_end); in parse_object_range()
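As the usage text's " - Negate effect of next flag" line and the A-f example above indicate, a '-' in the flags field flips the effect of the letter that follows it. A toy sketch of that parsing rule; the EX_FLAG_* letters and bit values are hypothetical, not zdb's real object-type flags:

#include <stdio.h>

#define EX_FLAG_FILES   (1u << 0)
#define EX_FLAG_DIRS    (1u << 1)
#define EX_FLAG_ALL     (~0u)

static int
example_parse_flags(const char *flagstr, unsigned *flagsp)
{
        unsigned flags = 0;
        int negation = 0;

        for (const char *p = flagstr; *p != '\0'; p++) {
                unsigned bit;

                if (*p == '-') {        /* negate the effect of the next flag */
                        negation = 1;
                        continue;
                }
                switch (*p) {
                case 'f': bit = EX_FLAG_FILES; break;
                case 'd': bit = EX_FLAG_DIRS; break;
                case 'A': bit = EX_FLAG_ALL; break;
                default:
                        (void) fprintf(stderr, "unknown flag '%c'\n", *p);
                        return (-1);
                }
                if (negation)
                        flags &= ~bit;
                else
                        flags |= bit;
                negation = 0;
        }
        *flagsp = flags;
        return (0);     /* e.g. "A-f" yields every bit except EX_FLAG_FILES */
}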
4322 usedobjs = BP_GET_FILL(os->os_rootbp); in dump_objset()
4323 refdbytes = dsl_dir_phys(os->os_spa->spa_dsl_pool->dp_mos_dir)-> in dump_objset()
4329 ASSERT3U(usedobjs, ==, BP_GET_FILL(os->os_rootbp)); in dump_objset()
4336 sizeof (blkbuf) - strlen(blkbuf), os->os_rootbp); in dump_objset()
4360 object--; in dump_objset()
4379 dump_blkptr_list(&ds->ds_deadlist, "Deadlist"); in dump_objset()
4380 if (dsl_deadlist_is_open(&ds->ds_dir->dd_livelist) && in dump_objset()
4382 dump_blkptr_list(&ds->ds_dir->dd_livelist, "Livelist"); in dump_objset()
4389 dump_blkptr_list(&ds->ds_remap_deadlist, "Deadlist"); in dump_objset()
4400 if (BP_IS_HOLE(os->os_rootbp)) in dump_objset()
4406 DMU_USERUSED_DNODE(os)->dn_type != 0) { in dump_objset()
4414 DMU_PROJECTUSED_DNODE(os)->dn_type != 0) in dump_objset()
4424 max_slot_used = object + dnode_slots - 1; in dump_objset()
4435 (double)(max_slot_used - total_slots_used)*100 / in dump_objset()
4456 time_t timestamp = ub->ub_timestamp; in dump_uberblock()
4459 (void) printf("\tmagic = %016llx\n", (u_longlong_t)ub->ub_magic); in dump_uberblock()
4460 (void) printf("\tversion = %llu\n", (u_longlong_t)ub->ub_version); in dump_uberblock()
4461 (void) printf("\ttxg = %llu\n", (u_longlong_t)ub->ub_txg); in dump_uberblock()
4462 (void) printf("\tguid_sum = %llu\n", (u_longlong_t)ub->ub_guid_sum); in dump_uberblock()
4464 (u_longlong_t)ub->ub_timestamp, ctime(&timestamp)); in dump_uberblock()
4467 snprintf_blkptr(blkbuf, sizeof (blkbuf), &ub->ub_rootbp); in dump_uberblock()
4471 (u_longlong_t)ub->ub_mmp_magic); in dump_uberblock()
4474 (u_longlong_t)ub->ub_mmp_delay); in dump_uberblock()
4486 (unsigned int) ub->ub_mmp_config & 0xFF); in dump_uberblock()
4491 snprintf_blkptr(blkbuf, sizeof (blkbuf), &ub->ub_rootbp); in dump_uberblock()
4495 (u_longlong_t)ub->ub_checkpoint_txg); in dump_uberblock()
4512 error = dmu_bonus_hold(spa->spa_meta_objset, in dump_config()
4513 spa->spa_config_object, FTAG, &db); in dump_config()
4516 nvsize = *(uint64_t *)db->db_data; in dump_config()
4520 dump_packed_nvlist(spa->spa_meta_objset, in dump_config()
4521 spa->spa_config_object, (void *)&nvsize, 1); in dump_config()
4524 (u_longlong_t)spa->spa_config_object, error); in dump_config()
4595 stats->zns_list_count++; in collect_nvlist_stats()
4602 fnvlist_add_string(stats->zns_string, name, in collect_nvlist_stats()
4606 fnvlist_add_uint64(stats->zns_uint64, name, in collect_nvlist_stats()
4610 fnvlist_add_boolean(stats->zns_boolean, name); in collect_nvlist_stats()
4629 stats->zns_leaf_total += size; in collect_nvlist_stats()
4630 if (size > stats->zns_leaf_largest) in collect_nvlist_stats()
4631 stats->zns_leaf_largest = size; in collect_nvlist_stats()
4632 stats->zns_leaf_count++; in collect_nvlist_stats()
4649 /* requires nvlist with non-unique names for stat collection */ in dump_nvlist_stats()
4659 (int)total, (int)(cap - total), 100.0 * total / cap); in dump_nvlist_stats()
4664 size -= noise; in dump_nvlist_stats()
4671 size -= noise; in dump_nvlist_stats()
4678 size -= noise; in dump_nvlist_stats()
4684 size = total - sum; /* treat remainder as nvlist overhead */ in dump_nvlist_stats()
4698 (int)((cap - total) / average)); in dump_nvlist_stats()
4718 int arraysize = ARRAY_SIZE(l->cksum.zc_word); in cksum_record_compare()
4722 difference = TREE_CMP(l->cksum.zc_word[i], r->cksum.zc_word[i]); in cksum_record_compare()
4736 rec->cksum = *cksum; in cksum_record_alloc()
4737 rec->labels[l] = B_TRUE; in cksum_record_alloc()
4758 rec->labels[l] = B_TRUE; in cksum_record_insert()
4771 if (rec->labels[i]) in first_label()
4774 return (-1); in first_label()
4782 if (rec->labels[i] == B_TRUE) in print_label_numbers()
4807 if (label->header_printed == B_TRUE) in print_label_header()
4810 (void) printf("------------------------------------\n"); in print_label_header()
4812 label->cksum_valid ? "" : "(Bad label cksum)"); in print_label_header()
4813 (void) printf("------------------------------------\n"); in print_label_header()
4815 label->header_printed = B_TRUE; in print_label_header()
4821 (void) printf("------------------------------------\n"); in print_l2arc_header()
4823 (void) printf("------------------------------------\n"); in print_l2arc_header()
4829 (void) printf("------------------------------------\n"); in print_l2arc_log_blocks()
4831 (void) printf("------------------------------------\n"); in print_l2arc_log_blocks()
4849 (u_longlong_t)L2BLK_GET_LSIZE((&le[j])->le_prop)); in dump_l2arc_log_entries()
4851 (u_longlong_t)L2BLK_GET_PSIZE((&le[j])->le_prop)); in dump_l2arc_log_entries()
4853 (u_longlong_t)L2BLK_GET_COMPRESS((&le[j])->le_prop)); in dump_l2arc_log_entries()
4855 (u_longlong_t)(&le[j])->le_complevel); in dump_l2arc_log_entries()
4857 (u_longlong_t)L2BLK_GET_TYPE((&le[j])->le_prop)); in dump_l2arc_log_entries()
4859 (u_longlong_t)L2BLK_GET_PROTECTED((&le[j])->le_prop)); in dump_l2arc_log_entries()
4861 (u_longlong_t)L2BLK_GET_PREFETCH((&le[j])->le_prop)); in dump_l2arc_log_entries()
4865 (u_longlong_t)L2BLK_GET_STATE((&le[j])->le_prop)); in dump_l2arc_log_entries()
4874 (void) printf("|\t\tdaddr: %llu\n", (u_longlong_t)lbps->lbp_daddr); in dump_l2arc_log_blkptr()
4876 (u_longlong_t)lbps->lbp_payload_asize); in dump_l2arc_log_blkptr()
4878 (u_longlong_t)lbps->lbp_payload_start); in dump_l2arc_log_blkptr()
4880 (u_longlong_t)L2BLK_GET_LSIZE(lbps->lbp_prop)); in dump_l2arc_log_blkptr()
4882 (u_longlong_t)L2BLK_GET_PSIZE(lbps->lbp_prop)); in dump_l2arc_log_blkptr()
4884 (u_longlong_t)L2BLK_GET_COMPRESS(lbps->lbp_prop)); in dump_l2arc_log_blkptr()
4886 (u_longlong_t)L2BLK_GET_CHECKSUM(lbps->lbp_prop)); in dump_l2arc_log_blkptr()
4903 memcpy(lbps, l2dhdr->dh_start_lbps, sizeof (lbps)); in dump_l2arc_log_blocks()
4905 dev.l2ad_evict = l2dhdr->dh_evict; in dump_l2arc_log_blocks()
4906 dev.l2ad_start = l2dhdr->dh_start; in dump_l2arc_log_blocks()
4907 dev.l2ad_end = l2dhdr->dh_end; in dump_l2arc_log_blocks()
4909 if (l2dhdr->dh_start_lbps[0].lbp_daddr == 0) { in dump_l2arc_log_blocks()
4918 L2BLK_GET_PSIZE((&lbps[0])->lbp_prop); in dump_l2arc_log_blocks()
4921 dev.l2ad_first = !!(l2dhdr->dh_flags & L2ARC_DEV_HDR_EVICT_FIRST); in dump_l2arc_log_blocks()
4928 asize = L2BLK_GET_PSIZE((&lbps[0])->lbp_prop); in dump_l2arc_log_blocks()
4947 switch (L2BLK_GET_COMPRESS((&lbps[0])->lbp_prop)) { in dump_l2arc_log_blocks()
4957 (&lbps[0])->lbp_prop), abd, &dabd, in dump_l2arc_log_blocks()
4978 rebuild->dh_lb_count++; in dump_l2arc_log_blocks()
4979 rebuild->dh_lb_asize += asize; in dump_l2arc_log_blocks()
4982 (u_longlong_t)rebuild->dh_lb_count, in dump_l2arc_log_blocks()
4988 dump_l2arc_log_entries(l2dhdr->dh_log_entries, in dump_l2arc_log_blocks()
4990 rebuild->dh_lb_count); in dump_l2arc_log_blocks()
5003 (u_longlong_t)rebuild->dh_lb_count); in dump_l2arc_log_blocks()
5006 (u_longlong_t)rebuild->dh_lb_asize); in dump_l2arc_log_blocks()
5094 if ((dump_opt['l'] < 3) && (first_label(label->config) != l)) in dump_config_from_label()
5098 dump_nvlist(label->config_nv, 4); in dump_config_from_label()
5099 print_label_numbers(" labels = ", label->config); in dump_config_from_label()
5102 dump_nvlist_stats(label->config_nv, buflen); in dump_config_from_label()
5119 uberblock_t *ub = (void *)((char *)&label->label + uoff); in dump_label_uberblocks()
5120 cksum_record_t *rec = label->uberblocks[i]; in dump_label_uberblocks()
5134 (ub->ub_mmp_magic == MMP_MAGIC) && ub->ub_mmp_delay && in dump_label_uberblocks()
5135 (i >= VDEV_UBERBLOCK_COUNT(&vd) - MMP_BLOCKS_PER_LABEL)) in dump_label_uberblocks()
5215 (void) fprintf(stderr, "object %llu has non-file/directory " in dump_path_impl()
5270 len -= nwritten; in dump_backup_bytes()
5357 if (fd == -1) in zdb_copy_object()
5361 * allocation failures and nigh-infinite printing if the in zdb_copy_object()
5373 readsize = MIN(size - offset, 1 << 20); in zdb_copy_object()
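Both copy paths above write in chunks (1 MiB here) and, as the earlier `len -= nwritten` fragment suggests, must tolerate short writes from write(2). A minimal sketch of the usual retry loop (EINTR handling omitted):

#include <sys/types.h>
#include <unistd.h>

/* Write the whole buffer, retrying after short writes; -1 on error. */
static int
example_write_all(int fd, const char *buf, size_t len)
{
        while (len > 0) {
                ssize_t nwritten = write(fd, buf, len);

                if (nwritten <= 0)
                        return (-1);
                buf += nwritten;
                len -= (size_t)nwritten;
        }
        return (0);
}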
5420 eck = (zio_eck_t *)((char *)(data) + VDEV_PHYS_SIZE) - 1; in label_cksum_valid()
5425 byteswap = (eck->zec_magic == BSWAP_64(ZEC_MAGIC)); in label_cksum_valid()
5429 expected_cksum = eck->zec_cksum; in label_cksum_valid()
5430 eck->zec_cksum = verifier; in label_cksum_valid()
5433 ci->ci_func[byteswap](abd, VDEV_PHYS_SIZE, NULL, &actual_cksum); in label_cksum_valid()
5471 if (zfs_append_partition(path, MAXPATHLEN) == -1) in dump_label()
5515 char *buf = label->label.vl_vdev_phys.vp_nvlist; in dump_label()
5516 size_t buflen = sizeof (label->label.vl_vdev_phys.vp_nvlist); in dump_label()
5522 label->label_offset = vdev_label_offset(psize, l, 0); in dump_label()
5524 if (pread64(fd, &label->label, sizeof (label->label), in dump_label()
5525 label->label_offset) != sizeof (label->label)) { in dump_label()
5528 label->read_failed = B_TRUE; in dump_label()
5533 label->read_failed = B_FALSE; in dump_label()
5534 label->cksum_valid = label_cksum_valid(&label->label, in dump_label()
5535 label->label_offset); in dump_label()
5562 label->config = rec; in dump_label()
5563 label->config_nv = config; in dump_label()
5582 label->uberblocks[i] = rec; in dump_label()
5591 size_t buflen = sizeof (label->label.vl_vdev_phys.vp_nvlist); in dump_label()
5593 if (label->read_failed == B_TRUE) in dump_label()
5596 if (label->config_nv) { in dump_label()
5606 nvlist_free(label->config_nv); in dump_label()
5661 avl_first(&dmu_objset_ds(os)->ds_bookmarks); dbn != NULL; in dump_one_objset()
5662 dbn = AVL_NEXT(&dmu_objset_ds(os)->ds_bookmarks, dbn)) { in dump_one_objset()
5663 mos_obj_refd(dbn->dbn_phys.zbm_redaction_obj); in dump_one_objset()
5664 if (dbn->dbn_phys.zbm_redaction_obj != 0) { in dump_one_objset()
5667 objset_t *mos = os->os_spa->spa_meta_objset; in dump_one_objset()
5670 dbn->dbn_phys.zbm_redaction_obj, FTAG, &rl)); in dump_one_objset()
5671 if (rl->dn_have_spill) { in dump_one_objset()
5676 if (dbn->dbn_phys.zbm_flags & ZBM_FLAG_HAS_FBN) in dump_one_objset()
5680 if (dsl_deadlist_is_open(&dmu_objset_ds(os)->ds_dir->dd_livelist) && in dump_one_objset()
5707 * Extended object types to report deferred frees and dedup auto-ditto blocks.
5767 uint64_t ms_shift = vd->vdev_ms_shift; in same_metaslab()
5789 * histograms broken down by block size, from 512 bytes to 2^SPA_MAX_FOR_16M
5796 * if the '-P' parameter is specified then the full raw number (parseable)
5825 parm_histo[0].count = zcb->zcb_psize_count; in dump_size_histograms()
5826 parm_histo[0].len = zcb->zcb_psize_len; in dump_size_histograms()
5830 parm_histo[1].count = zcb->zcb_lsize_count; in dump_size_histograms()
5831 parm_histo[1].len = zcb->zcb_lsize_len; in dump_size_histograms()
5835 parm_histo[2].count = zcb->zcb_asize_count; in dump_size_histograms()
5836 parm_histo[2].len = zcb->zcb_asize_len; in dump_size_histograms()
5865 buf[strlen(buf)-2] = '\0'; in dump_size_histograms()
5879 if (j < NUM_HISTO - 1) { in dump_size_histograms()
5886 if (j < NUM_HISTO - 1) { in dump_size_histograms()
5888 (void) printf("%-7s ", in dump_size_histograms()
5987 spa_config_enter(zcb->zcb_spa, SCL_CONFIG, FTAG, RW_READER); in zdb_count_block()
6006 ddt_t *ddt = ddt_select(zcb->zcb_spa, bp); in zdb_count_block()
6041 boolean_t seen = !!(((uintptr_t)dde->dde_io) & (1 << v)); in zdb_count_block()
6043 dde->dde_io = in zdb_count_block()
6044 (void *)(((uintptr_t)dde->dde_io) | (1 << v)); in zdb_count_block()
6047 if (ddt_phys_total_refcnt(ddt, dde->dde_phys) > 0) in zdb_count_block()
6048 ddt_phys_decref(dde->dde_phys, v); in zdb_count_block()
6057 * claimed it as-is, then we would miss the claim on some in zdb_count_block()
6069 ddt_phys_birth(dde->dde_phys, v)); in zdb_count_block()
6071 ddt_bp_fill(dde->dde_phys, v, &tempbp, in zdb_count_block()
6081 zcb->zcb_dedup_asize += BP_GET_ASIZE(bp); in zdb_count_block()
6082 zcb->zcb_dedup_blocks++; in zdb_count_block()
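The dedup branch above charges every reference after the first one to zcb_dedup_asize/zcb_dedup_blocks and walks down the expected on-disk refcounts as references are encountered, so unreferenced leftovers can be reported later. The toy below models only that accounting idea with a flat table in place of a DDT lookup; the entry layout, sizes, and refcount semantics are illustrative assumptions, not the real ddt_entry_t logic.

/* Toy model: count references past the first as dedup savings (not real DDT code). */
#include <stdint.h>
#include <stdio.h>

struct dde_sketch {		/* stand-in for a DDT entry */
	uint64_t refcnt;	/* references still expected on disk */
	int	 seen;		/* has the first reference been counted yet? */
	uint64_t asize;
};

int
main(void)
{
	struct dde_sketch table[2] = { { 3, 0, 4096 }, { 1, 0, 8192 } };
	int refs[] = { 0, 0, 1, 0 };	/* traversal sees block 0 three times, block 1 once */
	uint64_t dedup_asize = 0, dedup_blocks = 0;

	for (size_t i = 0; i < sizeof (refs) / sizeof (refs[0]); i++) {
		struct dde_sketch *dde = &table[refs[i]];
		if (dde->seen) {		/* 2nd, 3rd, ... reference: saved space */
			dedup_asize += dde->asize;
			dedup_blocks++;
		}
		dde->seen = 1;
		if (dde->refcnt > 0)
			dde->refcnt--;		/* each traversed ref consumes one expected ref */
	}
	printf("deduped: %llu blocks, %llu bytes; leftover refs: %llu, %llu\n",
	    (unsigned long long)dedup_blocks, (unsigned long long)dedup_asize,
	    (unsigned long long)table[0].refcnt, (unsigned long long)table[1].refcnt);
	return (0);
}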
6089 } else if (zcb->zcb_brt_is_active && in zdb_count_block()
6090 brt_maybe_exists(zcb->zcb_spa, bp)) { in zdb_count_block()
6096 * To do this, we keep our own in-memory BRT. For each block in zdb_count_block()
6105 zbre_search.zbre_dva = bp->blk_dva[0]; in zdb_count_block()
6106 zbre = avl_find(&zcb->zcb_brt, &zbre_search, &where); in zdb_count_block()
6110 brt_entry_get_refcount(zcb->zcb_spa, bp); in zdb_count_block()
6114 zbre->zbre_dva = bp->blk_dva[0]; in zdb_count_block()
6115 zbre->zbre_refcount = refcnt; in zdb_count_block()
6116 avl_insert(&zcb->zcb_brt, zbre, where); in zdb_count_block()
6123 zcb->zcb_clone_asize += BP_GET_ASIZE(bp); in zdb_count_block()
6124 zcb->zcb_clone_blocks++; in zdb_count_block()
6126 zbre->zbre_refcount--; in zdb_count_block()
6127 if (zbre->zbre_refcount == 0) { in zdb_count_block()
6128 avl_remove(&zcb->zcb_brt, zbre); in zdb_count_block()
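For block cloning zdb keeps its own map keyed by the block's first DVA: the first sighting records how many references the BRT reports, each later sighting is counted as cloned space and decrements that number, and the node is dropped once all expected references have been seen. The sketch replaces the AVL tree with a small linear array and made-up types; it models the bookkeeping flow, not the zcb_brt code itself.

/* Sketch of in-memory clone tracking keyed by (vdev, offset). */
#include <stddef.h>
#include <stdint.h>

typedef struct { uint64_t vdev, offset; } dva_key_t;
typedef struct { dva_key_t dva; uint64_t refcount; int used; } clone_ent_t;

static clone_ent_t clone_map[16];

static clone_ent_t *
clone_map_find(dva_key_t k)
{
	for (int i = 0; i < 16; i++)
		if (clone_map[i].used && clone_map[i].dva.vdev == k.vdev &&
		    clone_map[i].dva.offset == k.offset)
			return (&clone_map[i]);
	return (NULL);
}

static void
count_cloned(dva_key_t k, uint64_t brt_refcnt, uint64_t asize,
    uint64_t *clone_asize, uint64_t *clone_blocks)
{
	clone_ent_t *e = clone_map_find(k);

	if (e == NULL) {
		/* first sighting: remember how many references the BRT expects */
		for (int i = 0; i < 16; i++) {
			if (!clone_map[i].used) {
				clone_map[i] = (clone_ent_t){ k, brt_refcnt, 1 };
				break;
			}
		}
		return;
	}
	*clone_asize += asize;		/* every later sighting is cloned space */
	*clone_blocks += 1;
	if (--e->refcount == 0)
		e->used = 0;		/* all expected references accounted for */
}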
6142 zdb_blkstats_t *zb = &zcb->zcb_type[l][t]; in zdb_count_block()
6144 zb->zb_asize += BP_GET_ASIZE(bp); in zdb_count_block()
6145 zb->zb_lsize += BP_GET_LSIZE(bp); in zdb_count_block()
6146 zb->zb_psize += BP_GET_PSIZE(bp); in zdb_count_block()
6147 zb->zb_count++; in zdb_count_block()
6156 zb->zb_psize_histogram[idx]++; in zdb_count_block()
6158 zb->zb_gangs += BP_COUNT_GANG(bp); in zdb_count_block()
6162 if (DVA_GET_VDEV(&bp->blk_dva[0]) == in zdb_count_block()
6163 DVA_GET_VDEV(&bp->blk_dva[1])) { in zdb_count_block()
6164 zb->zb_ditto_samevdev++; in zdb_count_block()
6166 if (same_metaslab(zcb->zcb_spa, in zdb_count_block()
6167 DVA_GET_VDEV(&bp->blk_dva[0]), in zdb_count_block()
6168 DVA_GET_OFFSET(&bp->blk_dva[0]), in zdb_count_block()
6169 DVA_GET_OFFSET(&bp->blk_dva[1]))) in zdb_count_block()
6170 zb->zb_ditto_same_ms++; in zdb_count_block()
6174 equal = (DVA_GET_VDEV(&bp->blk_dva[0]) == in zdb_count_block()
6175 DVA_GET_VDEV(&bp->blk_dva[1])) + in zdb_count_block()
6176 (DVA_GET_VDEV(&bp->blk_dva[0]) == in zdb_count_block()
6177 DVA_GET_VDEV(&bp->blk_dva[2])) + in zdb_count_block()
6178 (DVA_GET_VDEV(&bp->blk_dva[1]) == in zdb_count_block()
6179 DVA_GET_VDEV(&bp->blk_dva[2])); in zdb_count_block()
6181 zb->zb_ditto_samevdev++; in zdb_count_block()
6183 if (DVA_GET_VDEV(&bp->blk_dva[0]) == in zdb_count_block()
6184 DVA_GET_VDEV(&bp->blk_dva[1]) && in zdb_count_block()
6185 same_metaslab(zcb->zcb_spa, in zdb_count_block()
6186 DVA_GET_VDEV(&bp->blk_dva[0]), in zdb_count_block()
6187 DVA_GET_OFFSET(&bp->blk_dva[0]), in zdb_count_block()
6188 DVA_GET_OFFSET(&bp->blk_dva[1]))) in zdb_count_block()
6189 zb->zb_ditto_same_ms++; in zdb_count_block()
6190 else if (DVA_GET_VDEV(&bp->blk_dva[0]) == in zdb_count_block()
6191 DVA_GET_VDEV(&bp->blk_dva[2]) && in zdb_count_block()
6192 same_metaslab(zcb->zcb_spa, in zdb_count_block()
6193 DVA_GET_VDEV(&bp->blk_dva[0]), in zdb_count_block()
6194 DVA_GET_OFFSET(&bp->blk_dva[0]), in zdb_count_block()
6195 DVA_GET_OFFSET(&bp->blk_dva[2]))) in zdb_count_block()
6196 zb->zb_ditto_same_ms++; in zdb_count_block()
6197 else if (DVA_GET_VDEV(&bp->blk_dva[1]) == in zdb_count_block()
6198 DVA_GET_VDEV(&bp->blk_dva[2]) && in zdb_count_block()
6199 same_metaslab(zcb->zcb_spa, in zdb_count_block()
6200 DVA_GET_VDEV(&bp->blk_dva[1]), in zdb_count_block()
6201 DVA_GET_OFFSET(&bp->blk_dva[1]), in zdb_count_block()
6202 DVA_GET_OFFSET(&bp->blk_dva[2]))) in zdb_count_block()
6203 zb->zb_ditto_same_ms++; in zdb_count_block()
6209 spa_config_exit(zcb->zcb_spa, SCL_CONFIG, FTAG); in zdb_count_block()
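The block of DVA comparisons above is asking, for 2- or 3-copy block pointers, whether any two copies landed on the same top-level vdev (zb_ditto_samevdev) and, if so, whether a pair even shares a metaslab, which same_metaslab() decides by comparing offset >> vdev_ms_shift. Below is a compact sketch of that check with invented types, assuming a single ms_shift applies (i.e. the offsets being compared already belong to the same vdev) and counting each block pointer at most once per statistic, as the code above does.

/* Sketch: ditto-copy placement checks for a multi-DVA block pointer. */
#include <stdint.h>

typedef struct { uint64_t vdev, offset; } dva_sketch_t;

static int
same_metaslab_sketch(uint64_t ms_shift, uint64_t off1, uint64_t off2)
{
	return ((off1 >> ms_shift) == (off2 >> ms_shift));
}

static void
count_ditto(const dva_sketch_t *dva, int ndvas, uint64_t ms_shift,
    uint64_t *samevdev, uint64_t *same_ms)
{
	int equal = 0, ms_hit = 0;

	for (int i = 0; i < ndvas; i++) {
		for (int j = i + 1; j < ndvas; j++) {
			if (dva[i].vdev != dva[j].vdev)
				continue;
			equal++;
			if (!ms_hit && same_metaslab_sketch(ms_shift,
			    dva[i].offset, dva[j].offset))
				ms_hit = 1;
		}
	}
	if (equal != 0)
		*samevdev += 1;		/* at least two copies share a vdev */
	if (ms_hit)
		*same_ms += 1;		/* ... and at least one pair shares a metaslab */
}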
6212 zcb->zcb_embedded_blocks[BPE_GET_ETYPE(bp)]++; in zdb_count_block()
6213 zcb->zcb_embedded_histogram[BPE_GET_ETYPE(bp)] in zdb_count_block()
6219 spa_config_enter(zcb->zcb_spa, SCL_CONFIG, FTAG, RW_READER); in zdb_count_block()
6221 uint64_t vdev = DVA_GET_VDEV(&bp->blk_dva[0]); in zdb_count_block()
6222 uint64_t offset = DVA_GET_OFFSET(&bp->blk_dva[0]); in zdb_count_block()
6223 vdev_t *vd = vdev_lookup_top(zcb->zcb_spa, vdev); in zdb_count_block()
6225 metaslab_t *ms = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in zdb_count_block()
6227 metaslab_group_t *mg = ms->ms_group; in zdb_count_block()
6229 metaslab_class_t *mc = mg->mg_class; in zdb_count_block()
6232 spa_config_exit(zcb->zcb_spa, SCL_CONFIG, FTAG); in zdb_count_block()
6235 if (mc == spa_normal_class(zcb->zcb_spa)) { in zdb_count_block()
6237 } else if (mc == spa_special_class(zcb->zcb_spa)) { in zdb_count_block()
6239 } else if (mc == spa_dedup_class(zcb->zcb_spa)) { in zdb_count_block()
6262 #define BIN(size) (highbit64((size) - 1)) in zdb_count_block()
6275 zcb->zcb_psize_count[bin]++; in zdb_count_block()
6276 zcb->zcb_psize_len[bin] += BP_GET_PSIZE(bp); in zdb_count_block()
6277 zcb->zcb_psize_total += BP_GET_PSIZE(bp); in zdb_count_block()
6282 zcb->zcb_lsize_count[bin]++; in zdb_count_block()
6283 zcb->zcb_lsize_len[bin] += BP_GET_LSIZE(bp); in zdb_count_block()
6284 zcb->zcb_lsize_total += BP_GET_LSIZE(bp); in zdb_count_block()
6289 zcb->zcb_asize_count[bin]++; in zdb_count_block()
6290 zcb->zcb_asize_len[bin] += BP_GET_ASIZE(bp); in zdb_count_block()
6291 zcb->zcb_asize_total += BP_GET_ASIZE(bp); in zdb_count_block()
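The three count/len updates above all use the same BIN() macro: highbit64(size - 1) maps a size to the smallest power-of-two bucket that can hold it, so 512 lands in bucket 9, 513..1024 in bucket 10, and so on. Here is a standalone sketch of that binning; the real code additionally clamps the bucket index to the histogram's minimum and maximum, which is omitted.

/* Sketch: power-of-two size binning for the psize/lsize/asize histograms. */
#include <stdint.h>
#include <stdio.h>

/* 1-based index of the highest set bit; returns 0 for 0, like the ZFS helper. */
static int
highbit64_sketch(uint64_t v)
{
	int h = 0;
	while (v) { h++; v >>= 1; }
	return (h);
}

#define	BIN(size)	(highbit64_sketch((size) - 1))	/* 512 -> 9, 513..1024 -> 10 */

int
main(void)
{
	uint64_t count[64] = { 0 }, len[64] = { 0 };
	uint64_t sizes[] = { 512, 1024, 1536, 4096, 131072 };

	for (size_t i = 0; i < sizeof (sizes) / sizeof (sizes[0]); i++) {
		int bin = BIN(sizes[i]);
		count[bin]++;			/* blocks in this bucket */
		len[bin] += sizes[i];		/* bytes in this bucket */
	}
	for (int b = 9; b <= 17; b++)
		printf("%8llu: %llu blocks, %llu bytes\n",
		    (unsigned long long)(1ULL << b),
		    (unsigned long long)count[b], (unsigned long long)len[b]);
	return (0);
}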
6299 VERIFY0(zio_wait(zio_claim(NULL, zcb->zcb_spa, in zdb_count_block()
6300 spa_min_claim_txg(zcb->zcb_spa), bp, NULL, NULL, in zdb_count_block()
6307 spa_t *spa = zio->io_spa; in zdb_blkptr_done()
6308 blkptr_t *bp = zio->io_bp; in zdb_blkptr_done()
6309 int ioerr = zio->io_error; in zdb_blkptr_done()
6310 zdb_cb_t *zcb = zio->io_private; in zdb_blkptr_done()
6311 zbookmark_phys_t *zb = &zio->io_bookmark; in zdb_blkptr_done()
6313 mutex_enter(&spa->spa_scrub_lock); in zdb_blkptr_done()
6314 spa->spa_load_verify_bytes -= BP_GET_PSIZE(bp); in zdb_blkptr_done()
6315 cv_broadcast(&spa->spa_scrub_io_cv); in zdb_blkptr_done()
6317 if (ioerr && !(zio->io_flags & ZIO_FLAG_SPECULATIVE)) { in zdb_blkptr_done()
6320 zcb->zcb_haderrors = 1; in zdb_blkptr_done()
6321 zcb->zcb_errors[ioerr]++; in zdb_blkptr_done()
6330 "<%llu, %llu, %lld, %llx> %s -- skipping\n", in zdb_blkptr_done()
6332 (u_longlong_t)zb->zb_objset, in zdb_blkptr_done()
6333 (u_longlong_t)zb->zb_object, in zdb_blkptr_done()
6334 (u_longlong_t)zb->zb_level, in zdb_blkptr_done()
6335 (u_longlong_t)zb->zb_blkid, in zdb_blkptr_done()
6338 mutex_exit(&spa->spa_scrub_lock); in zdb_blkptr_done()
6340 abd_free(zio->io_abd); in zdb_blkptr_done()
6351 if (zb->zb_level == ZB_DNODE_LEVEL) in zdb_blkptr_cb()
6359 (u_longlong_t)zb->zb_objset, in zdb_blkptr_cb()
6360 (u_longlong_t)zb->zb_object, in zdb_blkptr_cb()
6361 (longlong_t)zb->zb_level, in zdb_blkptr_cb()
6383 if (zb->zb_level == ZB_ZIL_LEVEL) in zdb_blkptr_cb()
6386 mutex_enter(&spa->spa_scrub_lock); in zdb_blkptr_cb()
6387 while (spa->spa_load_verify_bytes > max_inflight_bytes) in zdb_blkptr_cb()
6388 cv_wait(&spa->spa_scrub_io_cv, &spa->spa_scrub_lock); in zdb_blkptr_cb()
6389 spa->spa_load_verify_bytes += size; in zdb_blkptr_cb()
6390 mutex_exit(&spa->spa_scrub_lock); in zdb_blkptr_cb()
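These two excerpts are the two halves of a simple inflight-bytes throttle: the issuer in zdb_blkptr_cb() sleeps on spa_scrub_io_cv while more than max_inflight_bytes of verification reads are outstanding, and zdb_blkptr_done() subtracts the completed I/O and broadcasts. A userland sketch of the same pattern with pthreads stand-ins; the 64 MB limit is an arbitrary example value, not the real default.

/* Sketch: inflight-bytes throttle (issuer waits, completion callback drains). */
#include <pthread.h>
#include <stdint.h>

static pthread_mutex_t throttle_lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t throttle_cv = PTHREAD_COND_INITIALIZER;
static uint64_t inflight_bytes;
static const uint64_t max_inflight = 64ULL << 20;	/* example limit */

/* called before issuing an asynchronous read of 'size' bytes */
static void
throttle_issue(uint64_t size)
{
	pthread_mutex_lock(&throttle_lock);
	while (inflight_bytes > max_inflight)
		pthread_cond_wait(&throttle_cv, &throttle_lock);
	inflight_bytes += size;
	pthread_mutex_unlock(&throttle_lock);
}

/* called from the read's completion callback */
static void
throttle_done(uint64_t size)
{
	pthread_mutex_lock(&throttle_lock);
	inflight_bytes -= size;
	pthread_cond_broadcast(&throttle_cv);	/* wake any waiting issuer */
	pthread_mutex_unlock(&throttle_lock);
}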
6396 zcb->zcb_readfails = 0; in zdb_blkptr_cb()
6405 if (dump_opt['b'] < 5 && gethrtime() > zcb->zcb_lastprint + NANOSEC) { in zdb_blkptr_cb()
6408 uint64_t bytes = zcb->zcb_type[ZB_TOTAL][ZDB_OT_TOTAL].zb_asize; in zdb_blkptr_cb()
6410 1 + bytes / (1 + ((now - zcb->zcb_start) / 1000 / 1000)); in zdb_blkptr_cb()
6412 (zcb->zcb_totalasize - bytes) / 1024 / kb_per_sec; in zdb_blkptr_cb()
6427 zcb->zcb_lastprint = now; in zdb_blkptr_cb()
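The progress line's ETA is plain arithmetic: bytes accounted so far divided by elapsed milliseconds approximates KB/s, and the remaining bytes divided by that rate give the seconds left; the +1 terms only avoid dividing by zero right after the start. The same computation, isolated with assumed parameter names:

/* Sketch: the ETA arithmetic (nanosecond timestamps, sizes in bytes). */
#include <stdint.h>

static uint64_t
eta_seconds_sketch(uint64_t total_bytes, uint64_t done_bytes,
    uint64_t start_ns, uint64_t now_ns)
{
	/* bytes per elapsed millisecond is roughly kilobytes per second */
	uint64_t kb_per_sec =
	    1 + done_bytes / (1 + ((now_ns - start_ns) / 1000 / 1000));
	return ((total_bytes - done_bytes) / 1024 / kb_per_sec);
}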
6439 (u_longlong_t)vd->vdev_id, (u_longlong_t)start, (u_longlong_t)size); in zdb_leak()
6452 uint64_t offset = sme->sme_offset; in load_unflushed_svr_segs_cb()
6453 uint64_t size = sme->sme_run; in load_unflushed_svr_segs_cb()
6456 if (sme->sme_vdev != svr->svr_vdev_id) in load_unflushed_svr_segs_cb()
6459 vdev_t *vd = vdev_lookup_top(spa, sme->sme_vdev); in load_unflushed_svr_segs_cb()
6460 metaslab_t *ms = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in load_unflushed_svr_segs_cb()
6461 ASSERT(sme->sme_type == SM_ALLOC || sme->sme_type == SM_FREE); in load_unflushed_svr_segs_cb()
6466 if (sme->sme_type == SM_ALLOC) in load_unflushed_svr_segs_cb()
6467 zfs_range_tree_add(svr->svr_allocd_segs, offset, size); in load_unflushed_svr_segs_cb()
6469 zfs_range_tree_remove(svr->svr_allocd_segs, offset, size); in load_unflushed_svr_segs_cb()
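Replaying unflushed log space map entries is purely mechanical: ALLOC entries add the segment to the removing vdev's range tree and FREE entries take it back out, so the tree converges on what is still allocated. The sketch below substitutes a per-512-byte-chunk byte map for the range tree; there is no bounds checking and the types are stand-ins, illustrative only.

/* Sketch: replaying ALLOC/FREE space map entries into an allocation map. */
#include <stdint.h>
#include <string.h>

#define	CHUNK		512
#define	REGION_CHUNKS	1024

typedef enum { SME_ALLOC, SME_FREE } sm_type_sketch_t;
typedef struct { sm_type_sketch_t type; uint64_t offset, run; } sm_entry_sketch_t;

static uint8_t allocated[REGION_CHUNKS];	/* 1 = currently allocated */

static void
replay_entry(const sm_entry_sketch_t *sme)
{
	uint64_t first = sme->offset / CHUNK;
	size_t n = (size_t)(sme->run / CHUNK);

	memset(&allocated[first], sme->type == SME_ALLOC ? 1 : 0, n);
}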
6489 spa_min_claim_txg(vd->vdev_spa))); in claim_segment_impl_cb()
6514 if (spa->spa_vdev_removal == NULL) in zdb_claim_removing()
6519 spa_vdev_removal_t *svr = spa->spa_vdev_removal; in zdb_claim_removing()
6520 vdev_t *vd = vdev_lookup_top(spa, svr->svr_vdev_id); in zdb_claim_removing()
6521 vdev_indirect_mapping_t *vim = vd->vdev_indirect_mapping; in zdb_claim_removing()
6523 ASSERT0(zfs_range_tree_space(svr->svr_allocd_segs)); in zdb_claim_removing()
6528 for (uint64_t msi = 0; msi < vd->vdev_ms_count; msi++) { in zdb_claim_removing()
6529 metaslab_t *msp = vd->vdev_ms[msi]; in zdb_claim_removing()
6532 if (msp->ms_sm != NULL) in zdb_claim_removing()
6533 VERIFY0(space_map_load(msp->ms_sm, allocs, SM_ALLOC)); in zdb_claim_removing()
6535 svr->svr_allocd_segs); in zdb_claim_removing()
6546 zfs_range_tree_clear(svr->svr_allocd_segs, in zdb_claim_removing()
6548 vd->vdev_asize - vdev_indirect_mapping_max_offset(vim)); in zdb_claim_removing()
6550 zcb->zcb_removing_size += zfs_range_tree_space(svr->svr_allocd_segs); in zdb_claim_removing()
6551 zfs_range_tree_vacate(svr->svr_allocd_segs, claim_segment_cb, vd); in zdb_claim_removing()
6562 spa_t *spa = zcb->zcb_spa; in increment_indirect_mapping_cb()
6564 const dva_t *dva = &bp->blk_dva[0]; in increment_indirect_mapping_cb()
6571 vd = vdev_lookup_top(zcb->zcb_spa, DVA_GET_VDEV(dva)); in increment_indirect_mapping_cb()
6575 ASSERT(vd->vdev_indirect_config.vic_mapping_object != 0); in increment_indirect_mapping_cb()
6576 ASSERT3P(zcb->zcb_vd_obsolete_counts[vd->vdev_id], !=, NULL); in increment_indirect_mapping_cb()
6579 vd->vdev_indirect_mapping, in increment_indirect_mapping_cb()
6581 zcb->zcb_vd_obsolete_counts[vd->vdev_id]); in increment_indirect_mapping_cb()
6589 vdev_indirect_mapping_t *vim = vd->vdev_indirect_mapping; in zdb_load_obsolete_counts()
6590 spa_t *spa = vd->vdev_spa; in zdb_load_obsolete_counts()
6592 &spa->spa_condensing_indirect_phys; in zdb_load_obsolete_counts()
6597 EQUIV(obsolete_sm_object != 0, vd->vdev_obsolete_sm != NULL); in zdb_load_obsolete_counts()
6599 if (vd->vdev_obsolete_sm != NULL) { in zdb_load_obsolete_counts()
6601 vd->vdev_obsolete_sm); in zdb_load_obsolete_counts()
6603 if (scip->scip_vdev == vd->vdev_id && in zdb_load_obsolete_counts()
6604 scip->scip_prev_obsolete_sm_object != 0) { in zdb_load_obsolete_counts()
6606 VERIFY0(space_map_open(&prev_obsolete_sm, spa->spa_meta_objset, in zdb_load_obsolete_counts()
6607 scip->scip_prev_obsolete_sm_object, 0, vd->vdev_asize, 0)); in zdb_load_obsolete_counts()
6624 vdev_t *vd = cseea->cseea_vd; in checkpoint_sm_exclude_entry_cb()
6625 metaslab_t *ms = vd->vdev_ms[sme->sme_offset >> vd->vdev_ms_shift]; in checkpoint_sm_exclude_entry_cb()
6626 uint64_t end = sme->sme_offset + sme->sme_run; in checkpoint_sm_exclude_entry_cb()
6628 ASSERT(sme->sme_type == SM_FREE); in checkpoint_sm_exclude_entry_cb()
6644 * that handles metaslab-crossing segments in the future. in checkpoint_sm_exclude_entry_cb()
6646 VERIFY3U(sme->sme_offset, >=, ms->ms_start); in checkpoint_sm_exclude_entry_cb()
6647 VERIFY3U(end, <=, ms->ms_start + ms->ms_size); in checkpoint_sm_exclude_entry_cb()
6653 mutex_enter(&ms->ms_lock); in checkpoint_sm_exclude_entry_cb()
6654 zfs_range_tree_remove(ms->ms_allocatable, sme->sme_offset, in checkpoint_sm_exclude_entry_cb()
6655 sme->sme_run); in checkpoint_sm_exclude_entry_cb()
6656 mutex_exit(&ms->ms_lock); in checkpoint_sm_exclude_entry_cb()
6658 cseea->cseea_checkpoint_size += sme->sme_run; in checkpoint_sm_exclude_entry_cb()
6665 spa_t *spa = vd->vdev_spa; in zdb_leak_init_vdev_exclude_checkpoint()
6673 if (vd->vdev_top_zap == 0) in zdb_leak_init_vdev_exclude_checkpoint()
6688 if (zap_contains(spa_meta_objset(spa), vd->vdev_top_zap, in zdb_leak_init_vdev_exclude_checkpoint()
6692 VERIFY0(zap_lookup(spa_meta_objset(spa), vd->vdev_top_zap, in zdb_leak_init_vdev_exclude_checkpoint()
6701 checkpoint_sm_obj, 0, vd->vdev_asize, vd->vdev_ashift)); in zdb_leak_init_vdev_exclude_checkpoint()
6708 zcb->zcb_checkpoint_size += cseea.cseea_checkpoint_size; in zdb_leak_init_vdev_exclude_checkpoint()
6716 vdev_t *rvd = spa->spa_root_vdev; in zdb_leak_init_exclude_checkpoint()
6717 for (uint64_t c = 0; c < rvd->vdev_children; c++) { in zdb_leak_init_exclude_checkpoint()
6718 ASSERT3U(c, ==, rvd->vdev_child[c]->vdev_id); in zdb_leak_init_exclude_checkpoint()
6719 zdb_leak_init_vdev_exclude_checkpoint(rvd->vdev_child[c], zcb); in zdb_leak_init_exclude_checkpoint()
6729 uint64_t offset = sme->sme_offset; in count_unflushed_space_cb()
6730 uint64_t vdev_id = sme->sme_vdev; in count_unflushed_space_cb()
6736 metaslab_t *ms = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in count_unflushed_space_cb()
6737 ASSERT(sme->sme_type == SM_ALLOC || sme->sme_type == SM_FREE); in count_unflushed_space_cb()
6742 if (sme->sme_type == SM_ALLOC) in count_unflushed_space_cb()
6743 *ualloc_space += sme->sme_run; in count_unflushed_space_cb()
6745 *ualloc_space -= sme->sme_run; in count_unflushed_space_cb()
6767 uint64_t offset = sme->sme_offset; in load_unflushed_cb()
6768 uint64_t size = sme->sme_run; in load_unflushed_cb()
6769 uint64_t vdev_id = sme->sme_vdev; in load_unflushed_cb()
6777 metaslab_t *ms = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in load_unflushed_cb()
6779 ASSERT(sme->sme_type == SM_ALLOC || sme->sme_type == SM_FREE); in load_unflushed_cb()
6785 if (*uic_maptype == sme->sme_type) in load_unflushed_cb()
6786 zfs_range_tree_add(ms->ms_allocatable, offset, size); in load_unflushed_cb()
6788 zfs_range_tree_remove(ms->ms_allocatable, offset, size); in load_unflushed_cb()
6802 vdev_t *rvd = spa->spa_root_vdev; in load_concrete_ms_allocatable_trees()
6803 for (uint64_t i = 0; i < rvd->vdev_children; i++) { in load_concrete_ms_allocatable_trees()
6804 vdev_t *vd = rvd->vdev_child[i]; in load_concrete_ms_allocatable_trees()
6806 ASSERT3U(i, ==, vd->vdev_id); in load_concrete_ms_allocatable_trees()
6808 if (vd->vdev_ops == &vdev_indirect_ops) in load_concrete_ms_allocatable_trees()
6811 for (uint64_t m = 0; m < vd->vdev_ms_count; m++) { in load_concrete_ms_allocatable_trees()
6812 metaslab_t *msp = vd->vdev_ms[m]; in load_concrete_ms_allocatable_trees()
6817 (longlong_t)vd->vdev_id, in load_concrete_ms_allocatable_trees()
6818 (longlong_t)msp->ms_id, in load_concrete_ms_allocatable_trees()
6819 (longlong_t)vd->vdev_ms_count); in load_concrete_ms_allocatable_trees()
6821 mutex_enter(&msp->ms_lock); in load_concrete_ms_allocatable_trees()
6822 zfs_range_tree_vacate(msp->ms_allocatable, NULL, NULL); in load_concrete_ms_allocatable_trees()
6826 * size-ordered tree, so clear the range_tree ops. in load_concrete_ms_allocatable_trees()
6828 msp->ms_allocatable->rt_ops = NULL; in load_concrete_ms_allocatable_trees()
6830 if (msp->ms_sm != NULL) { in load_concrete_ms_allocatable_trees()
6831 VERIFY0(space_map_load(msp->ms_sm, in load_concrete_ms_allocatable_trees()
6832 msp->ms_allocatable, maptype)); in load_concrete_ms_allocatable_trees()
6834 if (!msp->ms_loaded) in load_concrete_ms_allocatable_trees()
6835 msp->ms_loaded = B_TRUE; in load_concrete_ms_allocatable_trees()
6836 mutex_exit(&msp->ms_lock); in load_concrete_ms_allocatable_trees()
6844  * vim_idxp is an in-out parameter which (for indirect vdevs) is the
6852 vdev_indirect_mapping_t *vim = vd->vdev_indirect_mapping; in load_indirect_ms_allocatable_tree()
6854 mutex_enter(&msp->ms_lock); in load_indirect_ms_allocatable_tree()
6855 zfs_range_tree_vacate(msp->ms_allocatable, NULL, NULL); in load_indirect_ms_allocatable_tree()
6859 * size-ordered tree, so clear the range_tree ops. in load_indirect_ms_allocatable_tree()
6861 msp->ms_allocatable->rt_ops = NULL; in load_indirect_ms_allocatable_tree()
6866 &vim->vim_entries[*vim_idxp]; in load_indirect_ms_allocatable_tree()
6868 uint64_t ent_len = DVA_GET_ASIZE(&vimep->vimep_dst); in load_indirect_ms_allocatable_tree()
6869 ASSERT3U(ent_offset, >=, msp->ms_start); in load_indirect_ms_allocatable_tree()
6870 if (ent_offset >= msp->ms_start + msp->ms_size) in load_indirect_ms_allocatable_tree()
6878 msp->ms_start + msp->ms_size); in load_indirect_ms_allocatable_tree()
6879 zfs_range_tree_add(msp->ms_allocatable, ent_offset, ent_len); in load_indirect_ms_allocatable_tree()
6882 if (!msp->ms_loaded) in load_indirect_ms_allocatable_tree()
6883 msp->ms_loaded = B_TRUE; in load_indirect_ms_allocatable_tree()
6884 mutex_exit(&msp->ms_lock); in load_indirect_ms_allocatable_tree()
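An indirect vdev has no space map to load, so its allocatable tree is rebuilt from the indirect mapping itself: entries are consumed in order across metaslabs through *vim_idxp, the loop stops at the first entry that starts beyond the metaslab, and each entry's (source offset, asize) range is added; the real code asserts that entries never cross a metaslab boundary. A sketch of that walk with stand-in types and an assumed add-range callback:

/* Sketch: rebuilding a metaslab's allocated ranges from indirect-mapping entries. */
#include <stdint.h>

typedef struct { uint64_t src_offset, asize; } map_ent_sketch_t;
typedef void (*add_range_f)(uint64_t offset, uint64_t len);

static void
load_ms_from_mapping(const map_ent_sketch_t *ents, uint64_t nents,
    uint64_t *idxp, uint64_t ms_start, uint64_t ms_size, add_range_f add)
{
	uint64_t ms_end = ms_start + ms_size;

	for (; *idxp < nents; (*idxp)++) {
		const map_ent_sketch_t *e = &ents[*idxp];

		if (e->src_offset >= ms_end)
			break;		/* belongs to a later metaslab */
		/* entries start at or after ms_start and do not cross ms_end */
		add(e->src_offset, e->asize);
	}
}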
6892 vdev_t *rvd = spa->spa_root_vdev; in zdb_leak_init_prepare_indirect_vdevs()
6893 for (uint64_t c = 0; c < rvd->vdev_children; c++) { in zdb_leak_init_prepare_indirect_vdevs()
6894 vdev_t *vd = rvd->vdev_child[c]; in zdb_leak_init_prepare_indirect_vdevs()
6896 ASSERT3U(c, ==, vd->vdev_id); in zdb_leak_init_prepare_indirect_vdevs()
6898 if (vd->vdev_ops != &vdev_indirect_ops) in zdb_leak_init_prepare_indirect_vdevs()
6906 zcb->zcb_vd_obsolete_counts[c] = zdb_load_obsolete_counts(vd); in zdb_leak_init_prepare_indirect_vdevs()
6917 vd->vdev_indirect_mapping; in zdb_leak_init_prepare_indirect_vdevs()
6919 for (uint64_t m = 0; m < vd->vdev_ms_count; m++) { in zdb_leak_init_prepare_indirect_vdevs()
6924 (longlong_t)vd->vdev_id, in zdb_leak_init_prepare_indirect_vdevs()
6925 (longlong_t)vd->vdev_ms[m]->ms_id, in zdb_leak_init_prepare_indirect_vdevs()
6926 (longlong_t)vd->vdev_ms_count); in zdb_leak_init_prepare_indirect_vdevs()
6928 load_indirect_ms_allocatable_tree(vd, vd->vdev_ms[m], in zdb_leak_init_prepare_indirect_vdevs()
6938 zcb->zcb_spa = spa; in zdb_leak_init()
6943 dsl_pool_t *dp = spa->spa_dsl_pool; in zdb_leak_init()
6944 vdev_t *rvd = spa->spa_root_vdev; in zdb_leak_init()
6951 spa->spa_normal_class->mc_ops = &zdb_metaslab_ops; in zdb_leak_init()
6952 spa->spa_log_class->mc_ops = &zdb_metaslab_ops; in zdb_leak_init()
6953 spa->spa_embedded_log_class->mc_ops = &zdb_metaslab_ops; in zdb_leak_init()
6954 spa->spa_special_embedded_log_class->mc_ops = &zdb_metaslab_ops; in zdb_leak_init()
6956 zcb->zcb_vd_obsolete_counts = in zdb_leak_init()
6957 umem_zalloc(rvd->vdev_children * sizeof (uint32_t *), in zdb_leak_init()
6976 * order to avoid false-positives during leak-detection, we in zdb_leak_init()
6989 ASSERT3U(zcb->zcb_checkpoint_size, ==, spa_get_checkpoint_space(spa)); in zdb_leak_init()
6994 if (bpobj_is_open(&dp->dp_obsolete_bpobj)) { in zdb_leak_init()
6997 (void) bpobj_iterate_nofree(&dp->dp_obsolete_bpobj, in zdb_leak_init()
7006 vdev_indirect_mapping_t *vim = vd->vdev_indirect_mapping; in zdb_check_for_obsolete_leaks()
7014 &vim->vim_entries[i]; in zdb_check_for_obsolete_leaks()
7017 metaslab_t *msp = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in zdb_check_for_obsolete_leaks()
7024 inner_offset < DVA_GET_ASIZE(&vimep->vimep_dst); in zdb_check_for_obsolete_leaks()
7025 inner_offset += 1ULL << vd->vdev_ashift) { in zdb_check_for_obsolete_leaks()
7026 if (zfs_range_tree_contains(msp->ms_allocatable, in zdb_check_for_obsolete_leaks()
7027 offset + inner_offset, 1ULL << vd->vdev_ashift)) { in zdb_check_for_obsolete_leaks()
7028 obsolete_bytes += 1ULL << vd->vdev_ashift; in zdb_check_for_obsolete_leaks()
7032 int64_t bytes_leaked = obsolete_bytes - in zdb_check_for_obsolete_leaks()
7033 zcb->zcb_vd_obsolete_counts[vd->vdev_id][i]; in zdb_check_for_obsolete_leaks()
7034 ASSERT3U(DVA_GET_ASIZE(&vimep->vimep_dst), >=, in zdb_check_for_obsolete_leaks()
7035 zcb->zcb_vd_obsolete_counts[vd->vdev_id][i]); in zdb_check_for_obsolete_leaks()
7041 (u_longlong_t)vd->vdev_id, in zdb_check_for_obsolete_leaks()
7043 (u_longlong_t)DVA_GET_ASIZE(&vimep->vimep_dst), in zdb_check_for_obsolete_leaks()
7057 (u_longlong_t)vd->vdev_id, pct_leaked, in zdb_check_for_obsolete_leaks()
7061 "for vdev %llu -- %llx total bytes mismatched\n", in zdb_check_for_obsolete_leaks()
7062 (u_longlong_t)vd->vdev_id, in zdb_check_for_obsolete_leaks()
7068 zcb->zcb_vd_obsolete_counts[vd->vdev_id]); in zdb_check_for_obsolete_leaks()
7069 zcb->zcb_vd_obsolete_counts[vd->vdev_id] = NULL; in zdb_check_for_obsolete_leaks()
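The obsolete-leak check above compares two views of the same quantity: a sector-by-sector scan of each mapping entry's range, where anything still present in ms_allocatable after the traversal was mapped but never referenced (i.e. obsolete), versus the precise per-entry obsolete count that was loaded earlier. The sketch condenses that comparison for a single entry; the probe callback and parameter names are invented for illustration.

/* Sketch: per-entry obsolete-bytes scan compared with the recorded count. */
#include <stdint.h>

typedef int (*sector_allocatable_f)(uint64_t offset);

static int64_t
entry_bytes_leaked(uint64_t offset, uint64_t asize, uint64_t ashift,
    uint64_t recorded_obsolete, sector_allocatable_f probe)
{
	uint64_t obsolete_bytes = 0;
	uint64_t sector = 1ULL << ashift;

	for (uint64_t inner = 0; inner < asize; inner += sector) {
		if (probe(offset + inner))
			obsolete_bytes += sector;	/* nothing references this sector */
	}
	/* positive: obsolete space the counts missed; negative: the reverse */
	return ((int64_t)obsolete_bytes - (int64_t)recorded_obsolete);
}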
7081 vdev_t *rvd = spa->spa_root_vdev; in zdb_leak_fini()
7082 for (unsigned c = 0; c < rvd->vdev_children; c++) { in zdb_leak_fini()
7083 vdev_t *vd = rvd->vdev_child[c]; in zdb_leak_fini()
7085 if (zcb->zcb_vd_obsolete_counts[c] != NULL) { in zdb_leak_fini()
7089 for (uint64_t m = 0; m < vd->vdev_ms_count; m++) { in zdb_leak_fini()
7090 metaslab_t *msp = vd->vdev_ms[m]; in zdb_leak_fini()
7091 ASSERT3P(msp->ms_group, ==, (msp->ms_group->mg_class == in zdb_leak_fini()
7093 msp->ms_group->mg_class == in zdb_leak_fini()
7095 vd->vdev_log_mg : vd->vdev_mg); in zdb_leak_fini()
7110 if (vd->vdev_ops == &vdev_indirect_ops) { in zdb_leak_fini()
7111 zfs_range_tree_vacate(msp->ms_allocatable, in zdb_leak_fini()
7114 zfs_range_tree_vacate(msp->ms_allocatable, in zdb_leak_fini()
7117 if (msp->ms_loaded) { in zdb_leak_fini()
7118 msp->ms_loaded = B_FALSE; in zdb_leak_fini()
7123 umem_free(zcb->zcb_vd_obsolete_counts, in zdb_leak_fini()
7124 rvd->vdev_children * sizeof (uint32_t *)); in zdb_leak_fini()
7125 zcb->zcb_vd_obsolete_counts = NULL; in zdb_leak_fini()
7153 objset_t *mos = spa->spa_meta_objset; in iterate_deleted_livelists()
7169 VERIFY0(dsl_deadlist_open(&ll, mos, attrp->za_first_integer)); in iterate_deleted_livelists()
7192 VERIFY0(dsl_process_sub_livelist(&dle->dle_bpobj, &blks, NULL, NULL)); in livelist_entry_count_blocks_cb()
7232 objset_t *mos = spa->spa_meta_objset; in deleted_livelists_dump_mos()
7244 const dva_t *dva1 = &((const zdb_brt_entry_t *)zcn1)->zbre_dva; in zdb_brt_entry_compare()
7245 const dva_t *dva2 = &((const zdb_brt_entry_t *)zcn2)->zbre_dva; in zdb_brt_entry_compare()
7272 avl_create(&zcb->zcb_brt, zdb_brt_entry_compare, in dump_block_stats()
7275 zcb->zcb_brt_is_active = B_TRUE; in dump_block_stats()
7293 * When leak detection is disabled (-L option) we still traverse the in dump_block_stats()
7300 * If there's a deferred-free bplist, process that first. in dump_block_stats()
7302 (void) bpobj_iterate_nofree(&spa->spa_deferred_bpobj, in dump_block_stats()
7306 (void) bpobj_iterate_nofree(&spa->spa_dsl_pool->dp_free_bpobj, in dump_block_stats()
7313 VERIFY3U(0, ==, bptree_iterate(spa->spa_meta_objset, in dump_block_stats()
7314 spa->spa_dsl_pool->dp_bptree_obj, B_FALSE, count_block_cb, in dump_block_stats()
7323 zcb->zcb_totalasize = metaslab_class_get_alloc(spa_normal_class(spa)); in dump_block_stats()
7324 zcb->zcb_totalasize += metaslab_class_get_alloc(spa_special_class(spa)); in dump_block_stats()
7325 zcb->zcb_totalasize += metaslab_class_get_alloc(spa_dedup_class(spa)); in dump_block_stats()
7326 zcb->zcb_totalasize += in dump_block_stats()
7328 zcb->zcb_totalasize += in dump_block_stats()
7330 zcb->zcb_start = zcb->zcb_lastprint = gethrtime(); in dump_block_stats()
7340 (void) zio_wait(spa->spa_async_zio_root[c]); in dump_block_stats()
7341 spa->spa_async_zio_root[c] = zio_root(spa, NULL, NULL, in dump_block_stats()
7346 ASSERT0(spa->spa_load_verify_bytes); in dump_block_stats()
7352 zcb->zcb_haderrors |= err; in dump_block_stats()
7354 if (zcb->zcb_haderrors) { in dump_block_stats()
7358 if (zcb->zcb_errors[e] != 0) { in dump_block_stats()
7360 e, (u_longlong_t)zcb->zcb_errors[e]); in dump_block_stats()
7370 tzb = &zcb->zcb_type[ZB_TOTAL][ZDB_OT_TOTAL]; in dump_block_stats()
7383 tzb->zb_asize - zcb->zcb_dedup_asize - zcb->zcb_clone_asize + in dump_block_stats()
7384 zcb->zcb_removing_size + zcb->zcb_checkpoint_size; in dump_block_stats()
7395 (longlong_t)(total_alloc - total_found)); in dump_block_stats()
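The leak summary is a single reconciliation: the asize the traversal charged, minus the dedup and clone savings (those bytes were counted once per reference), plus space that is deliberately not traversed (an in-progress device removal and the pool checkpoint), should equal what the metaslab classes report as allocated. Isolated as arithmetic, with assumed parameter names:

/* Sketch: the space reconciliation behind the leak report. */
#include <stdint.h>

static int64_t
space_delta_sketch(uint64_t alloc_by_class, uint64_t traversed_asize,
    uint64_t dedup_asize, uint64_t clone_asize,
    uint64_t removing_size, uint64_t checkpoint_size)
{
	uint64_t total_found = traversed_asize - dedup_asize - clone_asize +
	    removing_size + checkpoint_size;

	/* nonzero means leaked (positive) or double-accounted (negative) space */
	return ((int64_t)alloc_by_class - (int64_t)total_found);
}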
7398 if (tzb->zb_count == 0) { in dump_block_stats()
7404 (void) printf("\t%-16s %14llu\n", "bp count:", in dump_block_stats()
7405 (u_longlong_t)tzb->zb_count); in dump_block_stats()
7406 (void) printf("\t%-16s %14llu\n", "ganged count:", in dump_block_stats()
7407 (longlong_t)tzb->zb_gangs); in dump_block_stats()
7408 (void) printf("\t%-16s %14llu avg: %6llu\n", "bp logical:", in dump_block_stats()
7409 (u_longlong_t)tzb->zb_lsize, in dump_block_stats()
7410 (u_longlong_t)(tzb->zb_lsize / tzb->zb_count)); in dump_block_stats()
7411 (void) printf("\t%-16s %14llu avg: %6llu compression: %6.2f\n", in dump_block_stats()
7412 "bp physical:", (u_longlong_t)tzb->zb_psize, in dump_block_stats()
7413 (u_longlong_t)(tzb->zb_psize / tzb->zb_count), in dump_block_stats()
7414 (double)tzb->zb_lsize / tzb->zb_psize); in dump_block_stats()
7415 (void) printf("\t%-16s %14llu avg: %6llu compression: %6.2f\n", in dump_block_stats()
7416 "bp allocated:", (u_longlong_t)tzb->zb_asize, in dump_block_stats()
7417 (u_longlong_t)(tzb->zb_asize / tzb->zb_count), in dump_block_stats()
7418 (double)tzb->zb_lsize / tzb->zb_asize); in dump_block_stats()
7419 (void) printf("\t%-16s %14llu ref>1: %6llu deduplication: %6.2f\n", in dump_block_stats()
7420 "bp deduped:", (u_longlong_t)zcb->zcb_dedup_asize, in dump_block_stats()
7421 (u_longlong_t)zcb->zcb_dedup_blocks, in dump_block_stats()
7422 (double)zcb->zcb_dedup_asize / tzb->zb_asize + 1.0); in dump_block_stats()
7423 (void) printf("\t%-16s %14llu count: %6llu\n", in dump_block_stats()
7424 "bp cloned:", (u_longlong_t)zcb->zcb_clone_asize, in dump_block_stats()
7425 (u_longlong_t)zcb->zcb_clone_blocks); in dump_block_stats()
7426 (void) printf("\t%-16s %14llu used: %5.2f%%\n", "Normal class:", in dump_block_stats()
7429 if (spa_special_class(spa)->mc_allocator[0].mca_rotor != NULL) { in dump_block_stats()
7435 (void) printf("\t%-16s %14llu used: %5.2f%%\n", in dump_block_stats()
7440 if (spa_dedup_class(spa)->mc_allocator[0].mca_rotor != NULL) { in dump_block_stats()
7446 (void) printf("\t%-16s %14llu used: %5.2f%%\n", in dump_block_stats()
7451 if (spa_embedded_log_class(spa)->mc_allocator[0].mca_rotor != NULL) { in dump_block_stats()
7457 (void) printf("\t%-16s %14llu used: %5.2f%%\n", in dump_block_stats()
7462 if (spa_special_embedded_log_class(spa)->mc_allocator[0].mca_rotor in dump_block_stats()
7469 (void) printf("\t%-16s %14llu used: %5.2f%%\n", in dump_block_stats()
7475 if (zcb->zcb_embedded_blocks[i] == 0) in dump_block_stats()
7478 (void) printf("\tadditional, non-pointer bps of type %u: " in dump_block_stats()
7480 i, (u_longlong_t)zcb->zcb_embedded_blocks[i]); in dump_block_stats()
7485 dump_histogram(zcb->zcb_embedded_histogram[i], in dump_block_stats()
7486 sizeof (zcb->zcb_embedded_histogram[i]) / in dump_block_stats()
7487 sizeof (zcb->zcb_embedded_histogram[i][0]), 0); in dump_block_stats()
7491 if (tzb->zb_ditto_samevdev != 0) { in dump_block_stats()
7493 (longlong_t)tzb->zb_ditto_samevdev); in dump_block_stats()
7495 if (tzb->zb_ditto_same_ms != 0) { in dump_block_stats()
7497 (longlong_t)tzb->zb_ditto_same_ms); in dump_block_stats()
7500 for (uint64_t v = 0; v < spa->spa_root_vdev->vdev_children; v++) { in dump_block_stats()
7501 vdev_t *vd = spa->spa_root_vdev->vdev_child[v]; in dump_block_stats()
7502 vdev_indirect_mapping_t *vim = vd->vdev_indirect_mapping; in dump_block_stats()
7514 (longlong_t)vd->vdev_id, in dump_block_stats()
7548 typename = zdb_ot_extname[t - DMU_OT_NUMTYPES]; in dump_block_stats()
7550 if (zcb->zcb_type[ZB_TOTAL][t].zb_asize == 0) { in dump_block_stats()
7553 "-", in dump_block_stats()
7554 "-", in dump_block_stats()
7555 "-", in dump_block_stats()
7556 "-", in dump_block_stats()
7557 "-", in dump_block_stats()
7558 "-", in dump_block_stats()
7559 "-", in dump_block_stats()
7564 for (l = ZB_TOTAL - 1; l >= -1; l--) { in dump_block_stats()
7565 level = (l == -1 ? ZB_TOTAL : l); in dump_block_stats()
7566 zb = &zcb->zcb_type[level][t]; in dump_block_stats()
7568 if (zb->zb_asize == 0) in dump_block_stats()
7573 mdstats->zb_count += zb->zb_count; in dump_block_stats()
7574 mdstats->zb_lsize += zb->zb_lsize; in dump_block_stats()
7575 mdstats->zb_psize += zb->zb_psize; in dump_block_stats()
7576 mdstats->zb_asize += zb->zb_asize; in dump_block_stats()
7577 mdstats->zb_gangs += zb->zb_gangs; in dump_block_stats()
7583 if (level == 0 && zb->zb_asize == in dump_block_stats()
7584 zcb->zcb_type[ZB_TOTAL][t].zb_asize) in dump_block_stats()
7587 zdb_nicenum(zb->zb_count, csize, in dump_block_stats()
7589 zdb_nicenum(zb->zb_lsize, lsize, in dump_block_stats()
7591 zdb_nicenum(zb->zb_psize, psize, in dump_block_stats()
7593 zdb_nicenum(zb->zb_asize, asize, in dump_block_stats()
7595 zdb_nicenum(zb->zb_asize / zb->zb_count, avg, in dump_block_stats()
7597 zdb_nicenum(zb->zb_gangs, gang, sizeof (gang)); in dump_block_stats()
7602 (double)zb->zb_lsize / zb->zb_psize, in dump_block_stats()
7603 100.0 * zb->zb_asize / tzb->zb_asize); in dump_block_stats()
7611 if (dump_opt['b'] >= 3 && zb->zb_gangs > 0) { in dump_block_stats()
7612 (void) printf("\t number of ganged " in dump_block_stats()
7618 "(in 512-byte sectors): " in dump_block_stats()
7620 dump_histogram(zb->zb_psize_histogram, in dump_block_stats()
7625 zdb_nicenum(mdstats->zb_count, csize, in dump_block_stats()
7627 zdb_nicenum(mdstats->zb_lsize, lsize, in dump_block_stats()
7629 zdb_nicenum(mdstats->zb_psize, psize, in dump_block_stats()
7631 zdb_nicenum(mdstats->zb_asize, asize, in dump_block_stats()
7633 zdb_nicenum(mdstats->zb_asize / mdstats->zb_count, avg, in dump_block_stats()
7635 zdb_nicenum(mdstats->zb_gangs, gang, sizeof (gang)); in dump_block_stats()
7640 (double)mdstats->zb_lsize / mdstats->zb_psize, in dump_block_stats()
7641 100.0 * mdstats->zb_asize / tzb->zb_asize); in dump_block_stats()
7659 if (zcb->zcb_haderrors) { in dump_block_stats()
7687 if (zb->zb_level == ZB_DNODE_LEVEL || BP_IS_HOLE(bp) || in zdb_ddt_add_cb()
7691 if (dump_opt['S'] > 1 && zb->zb_level == ZB_ROOT_LEVEL) { in zdb_ddt_add_cb()
7694 (u_longlong_t)zb->zb_objset, in zdb_ddt_add_cb()
7709 zdde->zdde_key = zdde_search.zdde_key; in zdb_ddt_add_cb()
7713 zdde->zdde_ref_blocks += 1; in zdb_ddt_add_cb()
7714 zdde->zdde_ref_lsize += BP_GET_LSIZE(bp); in zdb_ddt_add_cb()
7715 zdde->zdde_ref_psize += BP_GET_PSIZE(bp); in zdb_ddt_add_cb()
7716 zdde->zdde_ref_dsize += bp_get_dsize_sync(spa, bp); in zdb_ddt_add_cb()
7741 uint64_t refcnt = zdde->zdde_ref_blocks; in dump_simulated_ddt()
7744 ddt_stat_t *dds = &ddh_total.ddh_stat[highbit64(refcnt) - 1]; in dump_simulated_ddt()
7746 dds->dds_blocks += zdde->zdde_ref_blocks / refcnt; in dump_simulated_ddt()
7747 dds->dds_lsize += zdde->zdde_ref_lsize / refcnt; in dump_simulated_ddt()
7748 dds->dds_psize += zdde->zdde_ref_psize / refcnt; in dump_simulated_ddt()
7749 dds->dds_dsize += zdde->zdde_ref_dsize / refcnt; in dump_simulated_ddt()
7751 dds->dds_ref_blocks += zdde->zdde_ref_blocks; in dump_simulated_ddt()
7752 dds->dds_ref_lsize += zdde->zdde_ref_lsize; in dump_simulated_ddt()
7753 dds->dds_ref_psize += zdde->zdde_ref_psize; in dump_simulated_ddt()
7754 dds->dds_ref_dsize += zdde->zdde_ref_dsize; in dump_simulated_ddt()
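The simulated-DDT aggregation above works per tracked block: with refcnt references seen during traversal, dividing the accumulated totals by refcnt contributes roughly one "unique" block to the bucket for floor(log2(refcnt)), while the raw totals feed the "referenced" columns. A reduced sketch of that fold, keeping two of the size fields and using stand-in types:

/* Sketch: refcount-bucketed dedup statistics. */
#include <stdint.h>

typedef struct {
	uint64_t blocks, lsize;			/* "unique" columns */
	uint64_t ref_blocks, ref_lsize;		/* "referenced" columns */
} dds_sketch_t;

static int
highbit64_sketch2(uint64_t v)
{
	int h = 0;
	while (v) { h++; v >>= 1; }
	return (h);
}

static void
fold_entry(dds_sketch_t hist[64], uint64_t ref_blocks, uint64_t ref_lsize)
{
	uint64_t refcnt = ref_blocks;		/* one traversal hit per reference */

	if (refcnt == 0)
		return;
	dds_sketch_t *dds = &hist[highbit64_sketch2(refcnt) - 1];
	dds->blocks += ref_blocks / refcnt;	/* contributes ~1 unique block */
	dds->lsize += ref_lsize / refcnt;	/* size of a single copy */
	dds->ref_blocks += ref_blocks;
	dds->ref_lsize += ref_lsize;
}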
7785 &spa->spa_condensing_indirect_phys; in verify_device_removal_feature_counts()
7786 if (scip->scip_next_mapping_object != 0) { in verify_device_removal_feature_counts()
7787 vdev_t *vd = spa->spa_root_vdev->vdev_child[scip->scip_vdev]; in verify_device_removal_feature_counts()
7788 ASSERT(scip->scip_prev_obsolete_sm_object != 0); in verify_device_removal_feature_counts()
7789 ASSERT3P(vd->vdev_ops, ==, &vdev_indirect_ops); in verify_device_removal_feature_counts()
7793 (u_longlong_t)scip->scip_vdev, in verify_device_removal_feature_counts()
7794 (u_longlong_t)scip->scip_next_mapping_object, in verify_device_removal_feature_counts()
7795 (u_longlong_t)scip->scip_prev_obsolete_sm_object); in verify_device_removal_feature_counts()
7796 if (scip->scip_prev_obsolete_sm_object != 0) { in verify_device_removal_feature_counts()
7799 spa->spa_meta_objset, in verify_device_removal_feature_counts()
7800 scip->scip_prev_obsolete_sm_object, in verify_device_removal_feature_counts()
7801 0, vd->vdev_asize, 0)); in verify_device_removal_feature_counts()
7802 dump_spacemap(spa->spa_meta_objset, prev_obsolete_sm); in verify_device_removal_feature_counts()
7810 for (uint64_t i = 0; i < spa->spa_root_vdev->vdev_children; i++) { in verify_device_removal_feature_counts()
7811 vdev_t *vd = spa->spa_root_vdev->vdev_child[i]; in verify_device_removal_feature_counts()
7812 vdev_indirect_config_t *vic = &vd->vdev_indirect_config; in verify_device_removal_feature_counts()
7814 if (vic->vic_mapping_object != 0) { in verify_device_removal_feature_counts()
7815 ASSERT(vd->vdev_ops == &vdev_indirect_ops || in verify_device_removal_feature_counts()
7816 vd->vdev_removing); in verify_device_removal_feature_counts()
7819 if (vd->vdev_indirect_mapping->vim_havecounts) { in verify_device_removal_feature_counts()
7827 ASSERT(vic->vic_mapping_object != 0); in verify_device_removal_feature_counts()
7834 ASSERT(vic->vic_mapping_object != 0); in verify_device_removal_feature_counts()
7904 spa->spa_import_flags |= ZFS_IMPORT_SKIP_MMP; in zdb_set_skip_mmp()
7922 * The function returns a newly-allocated copy of the name of the
7940 size_t poolname_len = path_start - target; in import_checkpointed_state()
7955 if (asprintf(&bogus_name, "%s%s", poolname, BOGUS_SUFFIX) == -1) { in import_checkpointed_state()
7974 path_start != NULL ? path_start : "") == -1) { in import_checkpointed_state()
8002 vdev_t *vd = vcsec->vcsec_vd; in verify_checkpoint_sm_entry_cb()
8003 metaslab_t *ms = vd->vdev_ms[sme->sme_offset >> vd->vdev_ms_shift]; in verify_checkpoint_sm_entry_cb()
8004 uint64_t end = sme->sme_offset + sme->sme_run; in verify_checkpoint_sm_entry_cb()
8006 ASSERT(sme->sme_type == SM_FREE); in verify_checkpoint_sm_entry_cb()
8008 if ((vcsec->vcsec_entryid % ENTRIES_PER_PROGRESS_UPDATE) == 0) { in verify_checkpoint_sm_entry_cb()
8011 (longlong_t)vd->vdev_id, in verify_checkpoint_sm_entry_cb()
8012 (longlong_t)vcsec->vcsec_entryid, in verify_checkpoint_sm_entry_cb()
8013 (longlong_t)vcsec->vcsec_num_entries); in verify_checkpoint_sm_entry_cb()
8015 vcsec->vcsec_entryid++; in verify_checkpoint_sm_entry_cb()
8020 VERIFY3U(sme->sme_offset, >=, ms->ms_start); in verify_checkpoint_sm_entry_cb()
8021 VERIFY3U(end, <=, ms->ms_start + ms->ms_size); in verify_checkpoint_sm_entry_cb()
8028 mutex_enter(&ms->ms_lock); in verify_checkpoint_sm_entry_cb()
8029 zfs_range_tree_verify_not_present(ms->ms_allocatable, in verify_checkpoint_sm_entry_cb()
8030 sme->sme_offset, sme->sme_run); in verify_checkpoint_sm_entry_cb()
8031 mutex_exit(&ms->ms_lock); in verify_checkpoint_sm_entry_cb()
8053 vdev_t *ckpoint_rvd = checkpoint->spa_root_vdev; in verify_checkpoint_vdev_spacemaps()
8054 vdev_t *current_rvd = current->spa_root_vdev; in verify_checkpoint_vdev_spacemaps()
8058 for (uint64_t c = 0; c < ckpoint_rvd->vdev_children; c++) { in verify_checkpoint_vdev_spacemaps()
8059 vdev_t *ckpoint_vd = ckpoint_rvd->vdev_child[c]; in verify_checkpoint_vdev_spacemaps()
8060 vdev_t *current_vd = current_rvd->vdev_child[c]; in verify_checkpoint_vdev_spacemaps()
8065 if (ckpoint_vd->vdev_ops == &vdev_indirect_ops) { in verify_checkpoint_vdev_spacemaps()
8072 ASSERT3P(current_vd->vdev_ops, ==, &vdev_indirect_ops); in verify_checkpoint_vdev_spacemaps()
8080 if (current_vd->vdev_top_zap == 0 || in verify_checkpoint_vdev_spacemaps()
8082 current_vd->vdev_top_zap, in verify_checkpoint_vdev_spacemaps()
8087 current_vd->vdev_top_zap, VDEV_TOP_ZAP_POOL_CHECKPOINT_SM, in verify_checkpoint_vdev_spacemaps()
8091 checkpoint_sm_obj, 0, current_vd->vdev_asize, in verify_checkpoint_vdev_spacemaps()
8092 current_vd->vdev_ashift)); in verify_checkpoint_vdev_spacemaps()
8103 dump_spacemap(current->spa_meta_objset, checkpoint_sm); in verify_checkpoint_vdev_spacemaps()
8111 if (ckpoint_rvd->vdev_children < current_rvd->vdev_children) { in verify_checkpoint_vdev_spacemaps()
8112 for (uint64_t c = ckpoint_rvd->vdev_children; in verify_checkpoint_vdev_spacemaps()
8113 c < current_rvd->vdev_children; c++) { in verify_checkpoint_vdev_spacemaps()
8114 vdev_t *current_vd = current_rvd->vdev_child[c]; in verify_checkpoint_vdev_spacemaps()
8115 VERIFY0P(current_vd->vdev_checkpoint_sm); in verify_checkpoint_vdev_spacemaps()
8138 vdev_t *ckpoint_rvd = checkpoint->spa_root_vdev; in verify_checkpoint_ms_spacemaps()
8139 vdev_t *current_rvd = current->spa_root_vdev; in verify_checkpoint_ms_spacemaps()
8144 for (uint64_t i = 0; i < ckpoint_rvd->vdev_children; i++) { in verify_checkpoint_ms_spacemaps()
8145 vdev_t *ckpoint_vd = ckpoint_rvd->vdev_child[i]; in verify_checkpoint_ms_spacemaps()
8146 vdev_t *current_vd = current_rvd->vdev_child[i]; in verify_checkpoint_ms_spacemaps()
8148 if (ckpoint_vd->vdev_ops == &vdev_indirect_ops) { in verify_checkpoint_ms_spacemaps()
8152 ASSERT3P(current_vd->vdev_ops, ==, &vdev_indirect_ops); in verify_checkpoint_ms_spacemaps()
8156 for (uint64_t m = 0; m < ckpoint_vd->vdev_ms_count; m++) { in verify_checkpoint_ms_spacemaps()
8157 metaslab_t *ckpoint_msp = ckpoint_vd->vdev_ms[m]; in verify_checkpoint_ms_spacemaps()
8158 metaslab_t *current_msp = current_vd->vdev_ms[m]; in verify_checkpoint_ms_spacemaps()
8163 (longlong_t)current_vd->vdev_id, in verify_checkpoint_ms_spacemaps()
8164 (longlong_t)current_rvd->vdev_children, in verify_checkpoint_ms_spacemaps()
8165 (longlong_t)current_vd->vdev_ms[m]->ms_id, in verify_checkpoint_ms_spacemaps()
8166 (longlong_t)current_vd->vdev_ms_count); in verify_checkpoint_ms_spacemaps()
8180 zfs_range_tree_walk(ckpoint_msp->ms_allocatable, in verify_checkpoint_ms_spacemaps()
8183 current_msp->ms_allocatable); in verify_checkpoint_ms_spacemaps()
8205 checkpoint_pool = import_checkpointed_state(spa->spa_name, NULL, B_TRUE, in verify_checkpoint_blocks()
8207 ASSERT(strcmp(spa->spa_name, checkpoint_pool) != 0); in verify_checkpoint_blocks()
8239 vdev_t *rvd = spa->spa_root_vdev; in dump_leftover_checkpoint_blocks()
8241 for (uint64_t i = 0; i < rvd->vdev_children; i++) { in dump_leftover_checkpoint_blocks()
8242 vdev_t *vd = rvd->vdev_child[i]; in dump_leftover_checkpoint_blocks()
8247 if (vd->vdev_top_zap == 0) in dump_leftover_checkpoint_blocks()
8250 if (zap_contains(spa_meta_objset(spa), vd->vdev_top_zap, in dump_leftover_checkpoint_blocks()
8254 VERIFY0(zap_lookup(spa_meta_objset(spa), vd->vdev_top_zap, in dump_leftover_checkpoint_blocks()
8259 checkpoint_sm_obj, 0, vd->vdev_asize, vd->vdev_ashift)); in dump_leftover_checkpoint_blocks()
8260 dump_spacemap(spa->spa_meta_objset, checkpoint_sm); in dump_leftover_checkpoint_blocks()
8274 error = zap_lookup(spa->spa_meta_objset, DMU_POOL_DIRECTORY_OBJECT, in verify_checkpoint()
8340 int error = zap_lookup(spa_meta_objset(vd->vdev_spa), in mos_leak_vdev_top_zap()
8341 vd->vdev_top_zap, VDEV_TOP_ZAP_MS_UNFLUSHED_PHYS_TXGS, in mos_leak_vdev_top_zap()
8353 mos_obj_refd(vd->vdev_dtl_object); in mos_leak_vdev()
8354 mos_obj_refd(vd->vdev_ms_array); in mos_leak_vdev()
8355 mos_obj_refd(vd->vdev_indirect_config.vic_births_object); in mos_leak_vdev()
8356 mos_obj_refd(vd->vdev_indirect_config.vic_mapping_object); in mos_leak_vdev()
8357 mos_obj_refd(vd->vdev_leaf_zap); in mos_leak_vdev()
8358 if (vd->vdev_checkpoint_sm != NULL) in mos_leak_vdev()
8359 mos_obj_refd(vd->vdev_checkpoint_sm->sm_object); in mos_leak_vdev()
8360 if (vd->vdev_indirect_mapping != NULL) { in mos_leak_vdev()
8361 mos_obj_refd(vd->vdev_indirect_mapping-> in mos_leak_vdev()
8362 vim_phys->vimp_counts_object); in mos_leak_vdev()
8364 if (vd->vdev_obsolete_sm != NULL) in mos_leak_vdev()
8365 mos_obj_refd(vd->vdev_obsolete_sm->sm_object); in mos_leak_vdev()
8367 for (uint64_t m = 0; m < vd->vdev_ms_count; m++) { in mos_leak_vdev()
8368 metaslab_t *ms = vd->vdev_ms[m]; in mos_leak_vdev()
8369 mos_obj_refd(space_map_object(ms->ms_sm)); in mos_leak_vdev()
8372 if (vd->vdev_root_zap != 0) in mos_leak_vdev()
8373 mos_obj_refd(vd->vdev_root_zap); in mos_leak_vdev()
8375 if (vd->vdev_top_zap != 0) { in mos_leak_vdev()
8376 mos_obj_refd(vd->vdev_top_zap); in mos_leak_vdev()
8380 for (uint64_t c = 0; c < vd->vdev_children; c++) { in mos_leak_vdev()
8381 mos_leak_vdev(vd->vdev_child[c]); in mos_leak_vdev()
8397 for (spa_log_sm_t *sls = avl_first(&spa->spa_sm_logs_by_txg); in mos_leak_log_spacemaps()
8398 sls; sls = AVL_NEXT(&spa->spa_sm_logs_by_txg, sls)) in mos_leak_log_spacemaps()
8399 mos_obj_refd(sls->sls_sm_obj); in mos_leak_log_spacemaps()
8410 mos_obj_refd(za->za_first_integer); in errorlog_count_refd()
8420 objset_t *mos = spa->spa_meta_objset; in dump_mos_leaks()
8421 dsl_pool_t *dp = spa->spa_dsl_pool; in dump_mos_leaks()
8426 mos_obj_refd(spa->spa_pool_props_object); in dump_mos_leaks()
8427 mos_obj_refd(spa->spa_config_object); in dump_mos_leaks()
8428 mos_obj_refd(spa->spa_ddt_stat_object); in dump_mos_leaks()
8429 mos_obj_refd(spa->spa_feat_desc_obj); in dump_mos_leaks()
8430 mos_obj_refd(spa->spa_feat_enabled_txg_obj); in dump_mos_leaks()
8431 mos_obj_refd(spa->spa_feat_for_read_obj); in dump_mos_leaks()
8432 mos_obj_refd(spa->spa_feat_for_write_obj); in dump_mos_leaks()
8433 mos_obj_refd(spa->spa_history); in dump_mos_leaks()
8434 mos_obj_refd(spa->spa_errlog_last); in dump_mos_leaks()
8435 mos_obj_refd(spa->spa_errlog_scrub); in dump_mos_leaks()
8438 errorlog_count_refd(mos, spa->spa_errlog_last); in dump_mos_leaks()
8439 errorlog_count_refd(mos, spa->spa_errlog_scrub); in dump_mos_leaks()
8442 mos_obj_refd(spa->spa_all_vdev_zaps); in dump_mos_leaks()
8443 mos_obj_refd(spa->spa_dsl_pool->dp_bptree_obj); in dump_mos_leaks()
8444 mos_obj_refd(spa->spa_dsl_pool->dp_tmp_userrefs_obj); in dump_mos_leaks()
8445 mos_obj_refd(spa->spa_dsl_pool->dp_scan->scn_phys.scn_queue_obj); in dump_mos_leaks()
8446 bpobj_count_refd(&spa->spa_deferred_bpobj); in dump_mos_leaks()
8447 mos_obj_refd(dp->dp_empty_bpobj); in dump_mos_leaks()
8448 bpobj_count_refd(&dp->dp_obsolete_bpobj); in dump_mos_leaks()
8449 bpobj_count_refd(&dp->dp_free_bpobj); in dump_mos_leaks()
8450 mos_obj_refd(spa->spa_l2cache.sav_object); in dump_mos_leaks()
8451 mos_obj_refd(spa->spa_spares.sav_object); in dump_mos_leaks()
8453 if (spa->spa_syncing_log_sm != NULL) in dump_mos_leaks()
8454 mos_obj_refd(spa->spa_syncing_log_sm->sm_object); in dump_mos_leaks()
8457 mos_obj_refd(spa->spa_condensing_indirect_phys. in dump_mos_leaks()
8459 mos_obj_refd(spa->spa_condensing_indirect_phys. in dump_mos_leaks()
8461 if (spa->spa_condensing_indirect_phys.scip_next_mapping_object != 0) { in dump_mos_leaks()
8464 spa->spa_condensing_indirect_phys.scip_next_mapping_object); in dump_mos_leaks()
8465 mos_obj_refd(vim->vim_phys->vimp_counts_object); in dump_mos_leaks()
8470 if (dp->dp_origin_snap != NULL) { in dump_mos_leaks()
8475 dsl_dataset_phys(dp->dp_origin_snap)->ds_next_snap_obj, in dump_mos_leaks()
8478 dump_blkptr_list(&ds->ds_deadlist, "Deadlist"); in dump_mos_leaks()
8482 count_ds_mos_objects(dp->dp_origin_snap); in dump_mos_leaks()
8483 dump_blkptr_list(&dp->dp_origin_snap->ds_deadlist, "Deadlist"); in dump_mos_leaks()
8485 count_dir_mos_objects(dp->dp_mos_dir); in dump_mos_leaks()
8486 if (dp->dp_free_dir != NULL) in dump_mos_leaks()
8487 count_dir_mos_objects(dp->dp_free_dir); in dump_mos_leaks()
8488 if (dp->dp_leak_dir != NULL) in dump_mos_leaks()
8489 count_dir_mos_objects(dp->dp_leak_dir); in dump_mos_leaks()
8491 mos_leak_vdev(spa->spa_root_vdev); in dump_mos_leaks()
8494 ddt_t *ddt = spa->spa_ddt[c]; in dump_mos_leaks()
8495 if (!ddt || ddt->ddt_version == DDT_VERSION_UNCONFIGURED) in dump_mos_leaks()
8502 mos_obj_refd(ddt->ddt_object[type][class]); in dump_mos_leaks()
8507 if (ddt->ddt_version == DDT_VERSION_FDT) in dump_mos_leaks()
8508 mos_obj_refd(ddt->ddt_dir_object); in dump_mos_leaks()
8511 if (ddt->ddt_flags & DDT_FLAG_LOG) { in dump_mos_leaks()
8512 mos_obj_refd(ddt->ddt_log[0].ddl_object); in dump_mos_leaks()
8513 mos_obj_refd(ddt->ddt_log[1].ddl_object); in dump_mos_leaks()
8517 for (uint64_t vdevid = 0; vdevid < spa->spa_brt_nvdevs; vdevid++) { in dump_mos_leaks()
8518 brt_vdev_t *brtvd = spa->spa_brt_vdevs[vdevid]; in dump_mos_leaks()
8519 if (brtvd->bv_initiated) { in dump_mos_leaks()
8520 mos_obj_refd(brtvd->bv_mos_brtvdev); in dump_mos_leaks()
8521 mos_obj_refd(brtvd->bv_mos_entries); in dump_mos_leaks()
8573 uint64_t offset = sme->sme_offset; in log_spacemap_obsolete_stats_cb()
8574 uint64_t vdev_id = sme->sme_vdev; in log_spacemap_obsolete_stats_cb()
8576 if (lsos->lsos_current_txg == 0) { in log_spacemap_obsolete_stats_cb()
8578 lsos->lsos_current_txg = txg; in log_spacemap_obsolete_stats_cb()
8579 } else if (lsos->lsos_current_txg < txg) { in log_spacemap_obsolete_stats_cb()
8580 /* we just changed log - print stats and reset */ in log_spacemap_obsolete_stats_cb()
8581 (void) printf("%-8llu valid entries out of %-8llu - txg %llu\n", in log_spacemap_obsolete_stats_cb()
8582 (u_longlong_t)lsos->lsos_valid_sm_entries, in log_spacemap_obsolete_stats_cb()
8583 (u_longlong_t)lsos->lsos_sm_entries, in log_spacemap_obsolete_stats_cb()
8584 (u_longlong_t)lsos->lsos_current_txg); in log_spacemap_obsolete_stats_cb()
8585 lsos->lsos_valid_sm_entries = 0; in log_spacemap_obsolete_stats_cb()
8586 lsos->lsos_sm_entries = 0; in log_spacemap_obsolete_stats_cb()
8587 lsos->lsos_current_txg = txg; in log_spacemap_obsolete_stats_cb()
8589 ASSERT3U(lsos->lsos_current_txg, ==, txg); in log_spacemap_obsolete_stats_cb()
8591 lsos->lsos_sm_entries++; in log_spacemap_obsolete_stats_cb()
8592 lsos->lsos_total_entries++; in log_spacemap_obsolete_stats_cb()
8598 metaslab_t *ms = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in log_spacemap_obsolete_stats_cb()
8599 ASSERT(sme->sme_type == SM_ALLOC || sme->sme_type == SM_FREE); in log_spacemap_obsolete_stats_cb()
8603 lsos->lsos_valid_sm_entries++; in log_spacemap_obsolete_stats_cb()
8604 lsos->lsos_valid_entries++; in log_spacemap_obsolete_stats_cb()
8622 (void) printf("%-8llu valid entries out of %-8llu - txg %llu\n", in dump_log_spacemap_obsolete_stats()
8627 (void) printf("%-8llu valid entries out of %-8llu - total\n\n", in dump_log_spacemap_obsolete_stats()
8649 dump_nvlist(spa->spa_config, 8); in dump_zpool()
8656 dump_uberblock(&spa->spa_uberblock, "\nUberblock:\n", "\n"); in dump_zpool()
8678 dump_objset(dp->dp_meta_objset); in dump_zpool()
8681 dsl_pool_t *dp = spa->spa_dsl_pool; in dump_zpool()
8682 dump_full_bpobj(&spa->spa_deferred_bpobj, in dump_zpool()
8685 dump_full_bpobj(&dp->dp_free_bpobj, in dump_zpool()
8688 if (bpobj_is_open(&dp->dp_obsolete_bpobj)) { in dump_zpool()
8691 dump_full_bpobj(&dp->dp_obsolete_bpobj, in dump_zpool()
8697 dump_bptree(spa->spa_meta_objset, in dump_zpool()
8698 dp->dp_bptree_obj, in dump_zpool()
8701 dump_dtl(spa->spa_root_vdev, 0); in dump_zpool()
8863 * leaf_name - For example: c1t0d0 or /tmp/ztest.0a
8864 * child[.child]* - For example: 0.1.1
8868 * RAID-Zs, you can specify either RAID-Z vdev with 0.0 or 0.1 .
8883 if (i >= vdev->vdev_children) in zdb_vdev_lookup()
8886 vdev = vdev->vdev_child[i]; in zdb_vdev_lookup()
8892 for (i = 0; i < vdev->vdev_children; i++) { in zdb_vdev_lookup()
8893 vdev_t *vc = vdev->vdev_child[i]; in zdb_vdev_lookup()
8895 if (vc->vdev_path == NULL) { in zdb_vdev_lookup()
8903 p = strrchr(vc->vdev_path, '/'); in zdb_vdev_lookup()
8904 p = p ? p + 1 : vc->vdev_path; in zdb_vdev_lookup()
8905 q = &vc->vdev_path[strlen(vc->vdev_path) - 2]; in zdb_vdev_lookup()
8907 if (strcmp(vc->vdev_path, path) == 0) in zdb_vdev_lookup()
8911 if (strcmp(q, "s0") == 0 && strncmp(p, path, q - p) == 0) in zdb_vdev_lookup()
8923 dsl_pool_config_enter(spa->spa_dsl_pool, FTAG); in name_from_objset_id()
8924 int error = dsl_dataset_hold_obj(spa->spa_dsl_pool, objset_id, in name_from_objset_id()
8929 dsl_pool_config_exit(spa->spa_dsl_pool, FTAG); in name_from_objset_id()
8934 dsl_pool_config_exit(spa->spa_dsl_pool, FTAG); in name_from_objset_id()
8963 "Trying %05llx -> %05llx (%s)\n", in try_decompress_block()
9072 return (lsize > maxlsize ? -1 : lsize); in zdb_decompress_block()
9081 * pool - The name of the pool you wish to read from
9082 * vdev_specifier - Which vdev (see comment for zdb_vdev_lookup)
9083 * offset - offset, in hex, in bytes
9084 * size - Amount of data to read, in hex, in bytes
9085 * flags - A string of characters specifying options
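These arguments map directly onto zdb's -R command line, which takes the pool name followed by a colon-separated block specifier. For example, with purely illustrative values:

	zdb -R tank 0:400000:2000

would read 0x2000 bytes at hex offset 400000 from top-level vdev 0 of pool tank; an optional trailing :flags field supplies the option characters described above.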
9100 dva_t *dva = bp->blk_dva; in zdb_read_block()
9128 (void) printf("Invalid block specifier: %s - %s\n", in zdb_read_block()
9186 vd = zdb_vdev_lookup(spa->spa_root_vdev, vdev); in zdb_read_block()
9191 if (vd->vdev_path) in zdb_read_block()
9193 vd->vdev_path); in zdb_read_block()
9196 vd->vdev_ops->vdev_op_type); in zdb_read_block()
9204 DVA_SET_VDEV(&dva[0], vd->vdev_id); in zdb_read_block()
9223 if (vd == vd->vdev_top) { in zdb_read_block()
9254 if (lsize == -1) { in zdb_read_block()
9279 if (lsize == -1 || zfs_blkptr_verify(spa, b, in zdb_read_block()
9320 if (vd == vd->vdev_top) { in zdb_read_block()
9340 ck_zio->io_offset = in zdb_read_block()
9341 DVA_GET_OFFSET(&bp->blk_dva[0]); in zdb_read_block()
9342 ck_zio->io_bp = bp; in zdb_read_block()
9348 (u_longlong_t)bp->blk_cksum.zc_word[0], in zdb_read_block()
9349 (u_longlong_t)bp->blk_cksum.zc_word[1], in zdb_read_block()
9350 (u_longlong_t)bp->blk_cksum.zc_word[2], in zdb_read_block()
9351 (u_longlong_t)bp->blk_cksum.zc_word[3]); in zdb_read_block()
9447 int64_t objset_id = -1; in main()
9462 * Set up signal handlers, so if we crash due to bad on-disk data we in main()
9480 * default spa_config_path setting. If -U flag is specified it will in main()
9495 {"ignore-assertions", no_argument, NULL, 'A'}, in main()
9496 {"block-stats", no_argument, NULL, 'b'}, in main()
9501 {"dedup-stats", no_argument, NULL, 'D'}, in main()
9503 {"embedded-block-pointer", no_argument, NULL, 'E'}, in main()
9504 {"automatic-rewind", no_argument, NULL, 'F'}, in main()
9505 {"dump-debug-msg", no_argument, NULL, 'G'}, in main()
9507 {"intent-logs", no_argument, NULL, 'i'}, in main()
9509 {"checkpointed-state", no_argument, NULL, 'k'}, in main()
9512 {"disable-leak-tracking", no_argument, NULL, 'L'}, in main()
9514 {"metaslab-groups", no_argument, NULL, 'M'}, in main()
9517 {"object-lookups", no_argument, NULL, 'O'}, in main()
9520 {"skip-label", no_argument, NULL, 'q'}, in main()
9521 {"copy-object", no_argument, NULL, 'r'}, in main()
9522 {"read-block", no_argument, NULL, 'R'}, in main()
9523 {"io-stats", no_argument, NULL, 's'}, in main()
9524 {"simulate-dedup", no_argument, NULL, 'S'}, in main()
9526 {"brt-stats", no_argument, NULL, 'T'}, in main()
9531 {"dump-blocks", required_argument, NULL, 'x'}, in main()
9532 {"extreme-rewind", no_argument, NULL, 'X'}, in main()
9533 {"all-reconstruction", no_argument, NULL, 'Y'}, in main()
9535 {"zstd-headers", no_argument, NULL, 'Z'}, in main()
9536 {"allocated-map", no_argument, NULL, in main()
9547 long_options, NULL)) != -1) { in main()
9664 "--bin=\"%s\" must be one of \"lsize\", " in main()
9687 "--class=\"%s\" must be a " in main()
9688 "comma-separated list of either " in main()
9699 "--class= must be a comma-separated " in main()
9715 (void) fprintf(stderr, "-p option requires use of -e\n"); in main()
9720 * ZDB does not typically re-read blocks; therefore limit the ARC in main()
9728 * "zdb -c" uses checksum-verifying scrub i/os which are async reads. in main()
9729 * "zdb -b" uses traversal prefetch which uses async reads. in main()
9741 * to load non-idle pools. in main()
9763 argc -= optind; in main()
9789 value = "-"; in main()
9798 } else if ((strcmp(pbuf, "-") == 0 && in main()
9848 /* -N implies -d */ in main()
9867 if (targetlen && target[targetlen - 1] == '/') in main()
9868 target[targetlen - 1] = '\0'; in main()
9871 * See if an objset ID was supplied (-d <pool>/<objset ID>). in main()
9873 * if -N was given, otherwise 100 is an objsetID iff in main()
9891 objset_id = -1; in main()
9895 printf("Supply a numeric objset ID with -N\n"); in main()
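Deciding whether the trailing component of the -d target is an objset ID comes down to a numeric parse of everything after the last '/', with -N forcing that interpretation and the default path first trying the string as a dataset name. A minimal sketch of just the numeric-parse step; the helper name and behavior are assumptions, not the code in main():

/* Sketch: does the component after the last '/' parse as a pure number? */
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

static int
parse_objset_id_sketch(const char *target, uint64_t *idp)
{
	const char *slash = strrchr(target, '/');
	const char *comp = slash ? slash + 1 : target;
	char *end;

	if (*comp == '\0')
		return (0);
	*idp = strtoull(comp, &end, 10);
	return (*end == '\0');		/* all digits: plausible objset ID */
}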
9953 * We need to make sure to process -O option or call in main()
9954 * dump_path after the -e option has been processed, in main()
9980 * it always after the -e option has been processed, which in main()
10027 spa->spa_log_state == SPA_LOG_MISSING) { in main()
10028 spa->spa_log_state = SPA_LOG_CLEAR; in main()
10059 * If -N was supplied, the user has indicated that in main()
10060 * zdb -d <pool>/<objsetID> is in effect. Otherwise in main()
10107 * Set the pool failure mode to panic in order to prevent the pool in main()
10112 spa->spa_failmode = ZIO_FAILURE_MODE_PANIC; in main()
10115 argc--; in main()
10157 dump_objset(spa->spa_meta_objset); in main()