Lines Matching +full:noise +full:- +full:sensitive

9  * or https://opensource.org/licenses/CDDL-1.0.
171 return (livelist_compare(&l->svbr_blk, &r->svbr_blk)); in sublivelist_block_refcnt_compare()
185 * This field is not used for our B-Tree comparisons in sublivelist_verify_blkptr()
193 zfs_btree_find(&sv->sv_pair, &current, &where); in sublivelist_verify_blkptr()
197 zfs_btree_add(&sv->sv_pair, &current); in sublivelist_verify_blkptr()
199 pair->svbr_refcnt++; in sublivelist_verify_blkptr()
205 if (DVA_IS_EMPTY(&bp->blk_dva[i])) in sublivelist_verify_blkptr()
208 .svb_dva = bp->blk_dva[i], in sublivelist_verify_blkptr()
213 if (zfs_btree_find(&sv->sv_leftover, &svb, in sublivelist_verify_blkptr()
215 zfs_btree_add_idx(&sv->sv_leftover, in sublivelist_verify_blkptr()
221 pair->svbr_refcnt--; in sublivelist_verify_blkptr()
222 if (pair->svbr_refcnt == 0) { in sublivelist_verify_blkptr()
224 zfs_btree_remove_idx(&sv->sv_pair, &where); in sublivelist_verify_blkptr()
238 zfs_btree_create(&sv->sv_pair, sublivelist_block_refcnt_compare, NULL, in sublivelist_verify_func()
241 err = bpobj_iterate_nofree(&dle->dle_bpobj, sublivelist_verify_blkptr, in sublivelist_verify_func()
246 while ((e = zfs_btree_destroy_nodes(&sv->sv_pair, &cookie)) != NULL) { in sublivelist_verify_func()
249 &e->svbr_blk, B_TRUE); in sublivelist_verify_func()
251 e->svbr_refcnt, blkbuf); in sublivelist_verify_func()
253 zfs_btree_destroy(&sv->sv_pair); in sublivelist_verify_func()
264 if (DVA_GET_VDEV(&l->svb_dva) < DVA_GET_VDEV(&r->svb_dva)) in livelist_block_compare()
265 return (-1); in livelist_block_compare()
266 else if (DVA_GET_VDEV(&l->svb_dva) > DVA_GET_VDEV(&r->svb_dva)) in livelist_block_compare()
269 if (DVA_GET_OFFSET(&l->svb_dva) < DVA_GET_OFFSET(&r->svb_dva)) in livelist_block_compare()
270 return (-1); in livelist_block_compare()
271 else if (DVA_GET_OFFSET(&l->svb_dva) > DVA_GET_OFFSET(&r->svb_dva)) in livelist_block_compare()
274 if (DVA_GET_ASIZE(&l->svb_dva) < DVA_GET_ASIZE(&r->svb_dva)) in livelist_block_compare()
275 return (-1); in livelist_block_compare()
276 else if (DVA_GET_ASIZE(&l->svb_dva) > DVA_GET_ASIZE(&r->svb_dva)) in livelist_block_compare()
284 * sublivelist_verify_t: sv->sv_leftover
347 return (uic->uic_cb(uic->uic_spa, sme, uic->uic_txg, uic->uic_arg)); in iterate_through_spacemap_logs_cb()
357 for (spa_log_sm_t *sls = avl_first(&spa->spa_sm_logs_by_txg); in iterate_through_spacemap_logs()
358 sls; sls = AVL_NEXT(&spa->spa_sm_logs_by_txg, sls)) { in iterate_through_spacemap_logs()
361 sls->sls_sm_obj, 0, UINT64_MAX, SPA_MINBLOCKSHIFT)); in iterate_through_spacemap_logs()
365 .uic_txg = sls->sls_txg, in iterate_through_spacemap_logs()
381 DVA_SET_VDEV(&svb.svb_dva, mv->mv_vdid); in verify_livelist_allocs()
393 zfs_btree_find(&mv->mv_livelist_allocs, &svb, &where); in verify_livelist_allocs()
395 found = zfs_btree_next(&mv->mv_livelist_allocs, &where, &where); in verify_livelist_allocs()
397 for (; found != NULL && DVA_GET_VDEV(&found->svb_dva) == mv->mv_vdid && in verify_livelist_allocs()
398 DVA_GET_OFFSET(&found->svb_dva) < end_offset; in verify_livelist_allocs()
399 found = zfs_btree_next(&mv->mv_livelist_allocs, &where, &where)) { in verify_livelist_allocs()
400 if (found->svb_allocated_txg <= txg) { in verify_livelist_allocs()
403 (u_longlong_t)DVA_GET_OFFSET(&found->svb_dva), in verify_livelist_allocs()
404 (u_longlong_t)DVA_GET_ASIZE(&found->svb_dva), in verify_livelist_allocs()
405 (u_longlong_t)found->svb_allocated_txg, in verify_livelist_allocs()
415 uint64_t offset = sme->sme_offset; in metaslab_spacemap_validation_cb()
416 uint64_t size = sme->sme_run; in metaslab_spacemap_validation_cb()
417 uint64_t txg = sme->sme_txg; in metaslab_spacemap_validation_cb()
419 if (sme->sme_type == SM_ALLOC) { in metaslab_spacemap_validation_cb()
420 if (range_tree_contains(mv->mv_allocated, in metaslab_spacemap_validation_cb()
426 (u_longlong_t)size, (u_longlong_t)mv->mv_vdid, in metaslab_spacemap_validation_cb()
427 (u_longlong_t)mv->mv_msid); in metaslab_spacemap_validation_cb()
429 range_tree_add(mv->mv_allocated, in metaslab_spacemap_validation_cb()
433 if (!range_tree_contains(mv->mv_allocated, in metaslab_spacemap_validation_cb()
439 (u_longlong_t)size, (u_longlong_t)mv->mv_vdid, in metaslab_spacemap_validation_cb()
440 (u_longlong_t)mv->mv_msid); in metaslab_spacemap_validation_cb()
442 range_tree_remove(mv->mv_allocated, in metaslab_spacemap_validation_cb()
447 if (sme->sme_type != SM_ALLOC) { in metaslab_spacemap_validation_cb()
462 uint64_t offset = sme->sme_offset; in spacemap_check_sm_log_cb()
463 uint64_t vdev_id = sme->sme_vdev; in spacemap_check_sm_log_cb()
471 if (vdev_id != mv->mv_vdid) in spacemap_check_sm_log_cb()
474 metaslab_t *ms = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in spacemap_check_sm_log_cb()
475 if (ms->ms_id != mv->mv_msid) in spacemap_check_sm_log_cb()
482 ASSERT3U(txg, ==, sme->sme_txg); in spacemap_check_sm_log_cb()
513 ASSERT3U(zfs_btree_numnodes(&mv->mv_livelist_allocs), ==, 0); in mv_populate_livelist_allocs()
514 for (svb = zfs_btree_first(&sv->sv_leftover, &where); in mv_populate_livelist_allocs()
516 svb = zfs_btree_next(&sv->sv_leftover, &where, &where)) { in mv_populate_livelist_allocs()
517 if (DVA_GET_VDEV(&svb->svb_dva) != mv->mv_vdid) in mv_populate_livelist_allocs()
520 if (DVA_GET_OFFSET(&svb->svb_dva) < mv->mv_start && in mv_populate_livelist_allocs()
521 (DVA_GET_OFFSET(&svb->svb_dva) + in mv_populate_livelist_allocs()
522 DVA_GET_ASIZE(&svb->svb_dva)) > mv->mv_start) { in mv_populate_livelist_allocs()
525 (u_longlong_t)DVA_GET_VDEV(&svb->svb_dva), in mv_populate_livelist_allocs()
526 (u_longlong_t)DVA_GET_OFFSET(&svb->svb_dva), in mv_populate_livelist_allocs()
527 (u_longlong_t)DVA_GET_ASIZE(&svb->svb_dva)); in mv_populate_livelist_allocs()
531 if (DVA_GET_OFFSET(&svb->svb_dva) < mv->mv_start) in mv_populate_livelist_allocs()
534 if (DVA_GET_OFFSET(&svb->svb_dva) >= mv->mv_end) in mv_populate_livelist_allocs()
537 if ((DVA_GET_OFFSET(&svb->svb_dva) + in mv_populate_livelist_allocs()
538 DVA_GET_ASIZE(&svb->svb_dva)) > mv->mv_end) { in mv_populate_livelist_allocs()
541 (u_longlong_t)DVA_GET_VDEV(&svb->svb_dva), in mv_populate_livelist_allocs()
542 (u_longlong_t)DVA_GET_OFFSET(&svb->svb_dva), in mv_populate_livelist_allocs()
543 (u_longlong_t)DVA_GET_ASIZE(&svb->svb_dva)); in mv_populate_livelist_allocs()
547 zfs_btree_add(&mv->mv_livelist_allocs, svb); in mv_populate_livelist_allocs()
550 for (svb = zfs_btree_first(&mv->mv_livelist_allocs, &where); in mv_populate_livelist_allocs()
552 svb = zfs_btree_next(&mv->mv_livelist_allocs, &where, &where)) { in mv_populate_livelist_allocs()
553 zfs_btree_remove(&sv->sv_leftover, svb); in mv_populate_livelist_allocs()
560 * - report leftover frees (**)
561 * - record leftover ALLOCs together with their TXG [see Cross Check]
570 * - iterate over spacemap and then the metaslab's entries in the
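The comment fragments above, together with the mv_populate_livelist_allocs() lines, describe how leftover livelist ALLOC entries are sorted into the metaslab whose range [ms_start, ms_end) contains them, with a warning when an extent straddles a metaslab boundary. The short C sketch below illustrates only that membership test; extent_t and classify_extent() are simplified stand-ins, not the zdb structures.

#include <stdint.h>

typedef struct {
	uint64_t offset;	/* DVA offset within the vdev */
	uint64_t asize;		/* allocated size of the extent */
} extent_t;

/*
 * -1: entirely before this metaslab, 1: entirely after, 2: straddles a
 * boundary (worth warning about), 0: fully inside [ms_start, ms_end).
 */
static int
classify_extent(const extent_t *e, uint64_t ms_start, uint64_t ms_end)
{
	uint64_t end = e->offset + e->asize;

	if (end <= ms_start)
		return (-1);
	if (e->offset >= ms_end)
		return (1);
	if (e->offset < ms_start || end > ms_end)
		return (2);
	return (0);
}

Entries that land fully inside the metaslab are the ones that would later be cross-checked against that metaslab's spacemap, as the "Cross Check" note above suggests.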
599 vdev_t *rvd = spa->spa_root_vdev; in livelist_metaslab_validate()
600 for (uint64_t c = 0; c < rvd->vdev_children; c++) { in livelist_metaslab_validate()
601 vdev_t *vd = rvd->vdev_child[c]; in livelist_metaslab_validate()
606 for (uint64_t mid = 0; mid < vd->vdev_ms_count; mid++) { in livelist_metaslab_validate()
607 metaslab_t *m = vd->vdev_ms[mid]; in livelist_metaslab_validate()
612 (longlong_t)vd->vdev_id, in livelist_metaslab_validate()
614 (longlong_t)vd->vdev_ms_count); in livelist_metaslab_validate()
623 mv.mv_vdid = vd->vdev_id; in livelist_metaslab_validate()
624 mv.mv_msid = m->ms_id; in livelist_metaslab_validate()
625 mv.mv_start = m->ms_start; in livelist_metaslab_validate()
626 mv.mv_end = m->ms_start + m->ms_size; in livelist_metaslab_validate()
633 spacemap_check_ms_sm(m->ms_sm, &mv); in livelist_metaslab_validate()
661 int vdev_id = DVA_GET_VDEV(&svb->svb_dva); in livelist_metaslab_validate()
662 ASSERT3U(vdev_id, <, rvd->vdev_children); in livelist_metaslab_validate()
663 vdev_t *vd = rvd->vdev_child[vdev_id]; in livelist_metaslab_validate()
666 vdev_id, (u_longlong_t)DVA_GET_OFFSET(&svb->svb_dva), in livelist_metaslab_validate()
667 (u_longlong_t)DVA_GET_ASIZE(&svb->svb_dva), in livelist_metaslab_validate()
668 (u_longlong_t)svb->svb_allocated_txg); in livelist_metaslab_validate()
694 "Usage:\t%s [-AbcdDFGhikLMPsvXy] [-e [-V] [-p <path> ...]] " in usage()
695 "[-I <inflight I/Os>]\n" in usage()
696 "\t\t[-o <var>=<value>]... [-t <txg>] [-U <cache>] [-x <dumpdir>]\n" in usage()
697 "\t\t[-K <key>]\n" in usage()
699 "\t%s [-AdiPv] [-e [-V] [-p <path> ...]] [-U <cache>] [-K <key>]\n" in usage()
701 "\t%s -B [-e [-V] [-p <path> ...]] [-I <inflight I/Os>]\n" in usage()
702 "\t\t[-o <var>=<value>]... [-t <txg>] [-U <cache>] [-x <dumpdir>]\n" in usage()
703 "\t\t[-K <key>] <poolname>/<objset id> [<backupflags>]\n" in usage()
704 "\t%s [-v] <bookmark>\n" in usage()
705 "\t%s -C [-A] [-U <cache>] [<poolname>]\n" in usage()
706 "\t%s -l [-Aqu] <device>\n" in usage()
707 "\t%s -m [-AFLPX] [-e [-V] [-p <path> ...]] [-t <txg>] " in usage()
708 "[-U <cache>]\n\t\t<poolname> [<vdev> [<metaslab> ...]]\n" in usage()
709 "\t%s -O [-K <key>] <dataset> <path>\n" in usage()
710 "\t%s -r [-K <key>] <dataset> <path> <destination>\n" in usage()
711 "\t%s -R [-A] [-e [-V] [-p <path> ...]] [-U <cache>]\n" in usage()
713 "\t%s -E [-A] word0:word1:...:word15\n" in usage()
714 "\t%s -S [-AP] [-e [-V] [-p <path> ...]] [-U <cache>] " in usage()
729 " end Ending object number, or -1 for no upper bound\n" in usage()
736 " - Negate effect of next flag\n\n"); in usage()
738 (void) fprintf(stderr, " -b --block-stats " in usage()
740 (void) fprintf(stderr, " -B --backup " in usage()
742 (void) fprintf(stderr, " -c --checksum " in usage()
744 (void) fprintf(stderr, " -C --config " in usage()
746 (void) fprintf(stderr, " -d --datasets " in usage()
748 (void) fprintf(stderr, " -D --dedup-stats " in usage()
750 (void) fprintf(stderr, " -E --embedded-block-pointer=INTEGER\n" in usage()
753 (void) fprintf(stderr, " -h --history " in usage()
755 (void) fprintf(stderr, " -i --intent-logs " in usage()
757 (void) fprintf(stderr, " -l --label " in usage()
759 (void) fprintf(stderr, " -k --checkpointed-state " in usage()
761 (void) fprintf(stderr, " -L --disable-leak-tracking " in usage()
763 (void) fprintf(stderr, " -m --metaslabs " in usage()
765 (void) fprintf(stderr, " -M --metaslab-groups " in usage()
767 (void) fprintf(stderr, " -O --object-lookups " in usage()
769 (void) fprintf(stderr, " -r --copy-object " in usage()
771 (void) fprintf(stderr, " -R --read-block " in usage()
773 (void) fprintf(stderr, " -s --io-stats " in usage()
775 (void) fprintf(stderr, " -S --simulate-dedup " in usage()
777 (void) fprintf(stderr, " -v --verbose " in usage()
779 (void) fprintf(stderr, " -y --livelist " in usage()
784 (void) fprintf(stderr, " -A --ignore-assertions " in usage()
785 "ignore assertions (-A), enable panic recovery (-AA) or both " in usage()
786 "(-AAA)\n"); in usage()
787 (void) fprintf(stderr, " -e --exported " in usage()
789 (void) fprintf(stderr, " -F --automatic-rewind " in usage()
792 (void) fprintf(stderr, " -G --dump-debug-msg " in usage()
794 (void) fprintf(stderr, " -I --inflight=INTEGER " in usage()
797 (void) fprintf(stderr, " -K --key=KEY " in usage()
799 (void) fprintf(stderr, " -o --option=\"OPTION=INTEGER\" " in usage()
800 "set global variable to an unsigned 32-bit integer\n"); in usage()
801 (void) fprintf(stderr, " -p --path=PATH " in usage()
802 "use one or more with -e to specify path to vdev dir\n"); in usage()
803 (void) fprintf(stderr, " -P --parseable " in usage()
805 (void) fprintf(stderr, " -q --skip-label " in usage()
807 (void) fprintf(stderr, " -t --txg=INTEGER " in usage()
809 (void) fprintf(stderr, " -T --brt-stats " in usage()
811 (void) fprintf(stderr, " -u --uberblock " in usage()
813 (void) fprintf(stderr, " -U --cachefile=PATH " in usage()
815 (void) fprintf(stderr, " -V --verbatim " in usage()
817 (void) fprintf(stderr, " -x --dump-blocks=PATH " in usage()
819 (void) fprintf(stderr, " -X --extreme-rewind " in usage()
821 (void) fprintf(stderr, " -Y --all-reconstruction " in usage()
823 (void) fprintf(stderr, " -Z --zstd-headers " in usage()
825 (void) fprintf(stderr, "Specify an option more than once (e.g. -bb) " in usage()
827 (void) fprintf(stderr, "Default is to dump everything non-verbosely\n"); in usage()
854 * Restore default action and re-raise signal so SIGSEGV and in sig_handler()
914 (u_longlong_t)shp->sh_pool_create_len); in dump_history_offsets()
916 (u_longlong_t)shp->sh_phys_max_off); in dump_history_offsets()
918 (u_longlong_t)shp->sh_bof); in dump_history_offsets()
920 (u_longlong_t)shp->sh_eof); in dump_history_offsets()
922 (u_longlong_t)shp->sh_records_lost); in dump_history_offsets()
944 static const uint64_t histo_width = sizeof (histo_stars) - 1;
950 int minidx = size - 1; in dump_histogram()
971 &histo_stars[(max - histo[i]) * histo_width / max]); in dump_histogram()
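The dump_histogram() lines above use a compact star-scaling trick: histo_stars is a fixed string of '*' characters, and printing from offset (max - histo[i]) * histo_width / max yields a suffix whose length is proportional to the bucket's count relative to the largest bucket. A minimal, self-contained sketch of that trick follows; the 40-character histo_stars and the sample data are illustrative, not zdb's actual buffer or output format.

#include <stdio.h>
#include <stdint.h>

static const char histo_stars[] = "****************************************";
static const uint64_t histo_width = sizeof (histo_stars) - 1;

static void
print_histogram(const uint64_t *histo, int size)
{
	uint64_t max = 1;	/* avoid dividing by zero for all-empty input */

	for (int i = 0; i < size; i++) {
		if (histo[i] > max)
			max = histo[i];
	}

	for (int i = 0; i < size; i++) {
		/* a larger count leaves a smaller offset, hence more stars */
		(void) printf("%4d: %6llu %s\n", i,
		    (unsigned long long)histo[i],
		    &histo_stars[(max - histo[i]) * histo_width / max]);
	}
}

int
main(void)
{
	uint64_t sample[] = { 3, 12, 40, 7, 0 };

	print_histogram(sample, 5);
	return (0);
}

With these sample counts the bucket holding 40 prints the full row of stars and the empty bucket prints none.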
1072 * allocation failures and nigh-infinite printing if the in dump_uint64()
1135 *(uint64_t *)attrp->za_name); in dump_zap()
1137 (void) printf("\t\t%s = ", attrp->za_name); in dump_zap()
1139 if (attrp->za_num_integers == 0) { in dump_zap()
1143 prop = umem_zalloc(attrp->za_num_integers * in dump_zap()
1144 attrp->za_integer_length, UMEM_NOFAIL); in dump_zap()
1148 (const uint64_t *)attrp->za_name, 1, in dump_zap()
1149 attrp->za_integer_length, attrp->za_num_integers, in dump_zap()
1152 (void) zap_lookup(os, object, attrp->za_name, in dump_zap()
1153 attrp->za_integer_length, attrp->za_num_integers, in dump_zap()
1156 if (attrp->za_integer_length == 1 && !key64) { in dump_zap()
1157 if (strcmp(attrp->za_name, in dump_zap()
1159 strcmp(attrp->za_name, in dump_zap()
1161 strcmp(attrp->za_name, DSL_CRYPTO_KEY_IV) == 0 || in dump_zap()
1162 strcmp(attrp->za_name, DSL_CRYPTO_KEY_MAC) == 0 || in dump_zap()
1163 strcmp(attrp->za_name, in dump_zap()
1167 for (i = 0; i < attrp->za_num_integers; i++) { in dump_zap()
1174 for (i = 0; i < attrp->za_num_integers; i++) { in dump_zap()
1175 switch (attrp->za_integer_length) { in dump_zap()
1197 attrp->za_num_integers * attrp->za_integer_length); in dump_zap()
1218 zdb_nicenum(bpop->bpo_bytes, bytes, sizeof (bytes)); in dump_bpobj()
1219 zdb_nicenum(bpop->bpo_comp, comp, sizeof (comp)); in dump_bpobj()
1220 zdb_nicenum(bpop->bpo_uncomp, uncomp, sizeof (uncomp)); in dump_bpobj()
1223 (u_longlong_t)bpop->bpo_num_blkptrs); in dump_bpobj()
1231 (u_longlong_t)bpop->bpo_subobjs); in dump_bpobj()
1233 (u_longlong_t)bpop->bpo_num_subobjs); in dump_bpobj()
1237 (u_longlong_t)bpop->bpo_num_freed); in dump_bpobj()
1243 for (i = 0; i < bpop->bpo_num_blkptrs; i++) { in dump_bpobj()
1276 int64_t last_nonzero = -1; in dump_bpobj_subobjs()
1309 (void) printf("\t\t%s = ", attrp->za_name); in dump_sa_attrs()
1310 if (attrp->za_num_integers == 0) { in dump_sa_attrs()
1315 (u_longlong_t)attrp->za_first_integer, in dump_sa_attrs()
1316 (int)ATTR_LENGTH(attrp->za_first_integer), in dump_sa_attrs()
1317 (int)ATTR_BSWAP(attrp->za_first_integer), in dump_sa_attrs()
1318 (int)ATTR_NUM(attrp->za_first_integer)); in dump_sa_attrs()
1339 (void) printf("\t\t%s = [", attrp->za_name); in dump_sa_layouts()
1340 if (attrp->za_num_integers == 0) { in dump_sa_layouts()
1345 VERIFY(attrp->za_integer_length == 2); in dump_sa_layouts()
1346 layout_attrs = umem_zalloc(attrp->za_num_integers * in dump_sa_layouts()
1347 attrp->za_integer_length, UMEM_NOFAIL); in dump_sa_layouts()
1349 VERIFY(zap_lookup(os, object, attrp->za_name, in dump_sa_layouts()
1350 attrp->za_integer_length, in dump_sa_layouts()
1351 attrp->za_num_integers, layout_attrs) == 0); in dump_sa_layouts()
1353 for (i = 0; i != attrp->za_num_integers; i++) in dump_sa_layouts()
1357 attrp->za_num_integers * attrp->za_integer_length); in dump_sa_layouts()
1395 attrp->za_name, ZFS_DIRENT_OBJ(attrp->za_first_integer), in dump_zpldir()
1396 typenames[ZFS_DIRENT_TYPE(attrp->za_first_integer)]); in dump_zpldir()
1407 if (vd->vdev_ops->vdev_op_leaf) { in get_dtl_refcount()
1408 space_map_t *sm = vd->vdev_dtl_sm; in get_dtl_refcount()
1411 sm->sm_dbuf->db_size == sizeof (space_map_phys_t)) in get_dtl_refcount()
1416 for (unsigned c = 0; c < vd->vdev_children; c++) in get_dtl_refcount()
1417 refcount += get_dtl_refcount(vd->vdev_child[c]); in get_dtl_refcount()
1426 if (vd->vdev_top == vd) { in get_metaslab_refcount()
1427 for (uint64_t m = 0; m < vd->vdev_ms_count; m++) { in get_metaslab_refcount()
1428 space_map_t *sm = vd->vdev_ms[m]->ms_sm; in get_metaslab_refcount()
1431 sm->sm_dbuf->db_size == sizeof (space_map_phys_t)) in get_metaslab_refcount()
1435 for (unsigned c = 0; c < vd->vdev_children; c++) in get_metaslab_refcount()
1436 refcount += get_metaslab_refcount(vd->vdev_child[c]); in get_metaslab_refcount()
1448 if (vd->vdev_top == vd && obsolete_sm_object != 0) { in get_obsolete_refcount()
1450 VERIFY0(dmu_object_info(vd->vdev_spa->spa_meta_objset, in get_obsolete_refcount()
1456 ASSERT3P(vd->vdev_obsolete_sm, ==, NULL); in get_obsolete_refcount()
1459 for (unsigned c = 0; c < vd->vdev_children; c++) { in get_obsolete_refcount()
1460 refcount += get_obsolete_refcount(vd->vdev_child[c]); in get_obsolete_refcount()
1470 spa->spa_condensing_indirect_phys.scip_prev_obsolete_sm_object; in get_prev_obsolete_spacemap_refcount()
1473 VERIFY0(dmu_object_info(spa->spa_meta_objset, prev_obj, &doi)); in get_prev_obsolete_spacemap_refcount()
1486 if (vd->vdev_top == vd && vd->vdev_top_zap != 0 && in get_checkpoint_refcount()
1487 zap_contains(spa_meta_objset(vd->vdev_spa), in get_checkpoint_refcount()
1488 vd->vdev_top_zap, VDEV_TOP_ZAP_POOL_CHECKPOINT_SM) == 0) in get_checkpoint_refcount()
1491 for (uint64_t c = 0; c < vd->vdev_children; c++) in get_checkpoint_refcount()
1492 refcount += get_checkpoint_refcount(vd->vdev_child[c]); in get_checkpoint_refcount()
1500 return (avl_numnodes(&spa->spa_sm_logs_by_txg)); in get_log_spacemap_refcount()
1512 actual_refcount = get_dtl_refcount(spa->spa_root_vdev); in verify_spacemap_refcounts()
1513 actual_refcount += get_metaslab_refcount(spa->spa_root_vdev); in verify_spacemap_refcounts()
1514 actual_refcount += get_obsolete_refcount(spa->spa_root_vdev); in verify_spacemap_refcounts()
1516 actual_refcount += get_checkpoint_refcount(spa->spa_root_vdev); in verify_spacemap_refcounts()
1539 (longlong_t)sm->sm_object); in dump_spacemap()
1541 (longlong_t)sm->sm_phys->smp_length); in dump_spacemap()
1543 (longlong_t)sm->sm_phys->smp_alloc); in dump_spacemap()
1551 uint8_t mapshift = sm->sm_shift; in dump_spacemap()
1587 sm->sm_start; in dump_spacemap()
1591 /* it is a two-word entry so we read another word */ in dump_spacemap()
1607 mapshift) + sm->sm_start; in dump_spacemap()
1612 " %010llx-%010llx size: %06llx vdev: %06llu words: %u\n", in dump_spacemap()
1622 alloc -= entry_run; in dump_spacemap()
1636 range_tree_t *rt = msp->ms_allocatable; in dump_metaslab_stats()
1637 zfs_btree_t *t = &msp->ms_allocatable_by_size; in dump_metaslab_stats()
1638 int free_pct = range_tree_space(rt) * 100 / msp->ms_size; in dump_metaslab_stats()
1648 (void) printf("\tIn-memory histogram:\n"); in dump_metaslab_stats()
1649 dump_histogram(rt->rt_histogram, RANGE_TREE_HISTOGRAM_SIZE, 0); in dump_metaslab_stats()
1655 vdev_t *vd = msp->ms_group->mg_vd; in dump_metaslab()
1656 spa_t *spa = vd->vdev_spa; in dump_metaslab()
1657 space_map_t *sm = msp->ms_sm; in dump_metaslab()
1660 zdb_nicenum(msp->ms_size - space_map_allocated(sm), freebuf, in dump_metaslab()
1665 (u_longlong_t)msp->ms_id, (u_longlong_t)msp->ms_start, in dump_metaslab()
1669 mutex_enter(&msp->ms_lock); in dump_metaslab()
1671 range_tree_stat_verify(msp->ms_allocatable); in dump_metaslab()
1674 mutex_exit(&msp->ms_lock); in dump_metaslab()
1683 (void) printf("\tOn-disk histogram:\t\tfragmentation %llu\n", in dump_metaslab()
1684 (u_longlong_t)msp->ms_fragmentation); in dump_metaslab()
1685 dump_histogram(sm->sm_phys->smp_histogram, in dump_metaslab()
1686 SPACE_MAP_HISTOGRAM_SIZE, sm->sm_shift); in dump_metaslab()
1689 if (vd->vdev_ops == &vdev_draid_ops) in dump_metaslab()
1690 ASSERT3U(msp->ms_size, <=, 1ULL << vd->vdev_ms_shift); in dump_metaslab()
1692 ASSERT3U(msp->ms_size, ==, 1ULL << vd->vdev_ms_shift); in dump_metaslab()
1694 dump_spacemap(spa->spa_meta_objset, msp->ms_sm); in dump_metaslab()
1705 vdev_alloc_bias_t alloc_bias = vd->vdev_alloc_bias; in print_vdev_metaslab_header()
1707 if (alloc_bias == VDEV_BIAS_LOG || vd->vdev_islog) { in print_vdev_metaslab_header()
1716 if (vd->vdev_top_zap != 0) { in print_vdev_metaslab_header()
1717 int error = zap_lookup(spa_meta_objset(vd->vdev_spa), in print_vdev_metaslab_header()
1718 vd->vdev_top_zap, VDEV_TOP_ZAP_MS_UNFLUSHED_PHYS_TXGS, in print_vdev_metaslab_header()
1726 (u_longlong_t)vd->vdev_id, bias_str); in print_vdev_metaslab_header()
1733 (void) printf("\n\t%-10s%5llu %-19s %-15s %-12s\n", in print_vdev_metaslab_header()
1734 "metaslabs", (u_longlong_t)vd->vdev_ms_count, in print_vdev_metaslab_header()
1737 "---------------", "-------------------", in print_vdev_metaslab_header()
1738 "---------------", "------------"); in print_vdev_metaslab_header()
1744 vdev_t *rvd = spa->spa_root_vdev; in dump_metaslab_groups()
1751 for (unsigned c = 0; c < rvd->vdev_children; c++) { in dump_metaslab_groups()
1752 vdev_t *tvd = rvd->vdev_child[c]; in dump_metaslab_groups()
1753 metaslab_group_t *mg = tvd->vdev_mg; in dump_metaslab_groups()
1755 if (mg == NULL || (mg->mg_class != mc && in dump_metaslab_groups()
1756 (!show_special || mg->mg_class != smc))) in dump_metaslab_groups()
1760 mg->mg_fragmentation = metaslab_group_fragmentation(mg); in dump_metaslab_groups()
1764 (u_longlong_t)tvd->vdev_id, in dump_metaslab_groups()
1765 (u_longlong_t)tvd->vdev_ms_count); in dump_metaslab_groups()
1766 if (mg->mg_fragmentation == ZFS_FRAG_INVALID) { in dump_metaslab_groups()
1767 (void) printf("%3s\n", "-"); in dump_metaslab_groups()
1770 (u_longlong_t)mg->mg_fragmentation); in dump_metaslab_groups()
1772 dump_histogram(mg->mg_histogram, RANGE_TREE_HISTOGRAM_SIZE, 0); in dump_metaslab_groups()
1778 (void) printf("\t%3s\n", "-"); in dump_metaslab_groups()
1781 dump_histogram(mc->mc_histogram, RANGE_TREE_HISTOGRAM_SIZE, 0); in dump_metaslab_groups()
1787 vdev_indirect_config_t *vic = &vd->vdev_indirect_config; in print_vdev_indirect()
1788 vdev_indirect_mapping_t *vim = vd->vdev_indirect_mapping; in print_vdev_indirect()
1789 vdev_indirect_births_t *vib = vd->vdev_indirect_births; in print_vdev_indirect()
1797 vic->vic_mapping_object); in print_vdev_indirect()
1799 vic->vic_births_object); in print_vdev_indirect()
1802 (longlong_t)vic->vic_births_object); in print_vdev_indirect()
1807 &vib->vib_entries[i]; in print_vdev_indirect()
1808 (void) printf("\toffset %llx -> txg %llu\n", in print_vdev_indirect()
1809 (longlong_t)cur_vibe->vibe_offset, in print_vdev_indirect()
1810 (longlong_t)cur_vibe->vibe_phys_birth_txg); in print_vdev_indirect()
1815 (longlong_t)vic->vic_mapping_object); in print_vdev_indirect()
1830 &vim->vim_entries[i]; in print_vdev_indirect()
1831 (void) printf("\t<%llx:%llx:%llx> -> " in print_vdev_indirect()
1833 (longlong_t)vd->vdev_id, in print_vdev_indirect()
1835 (longlong_t)DVA_GET_ASIZE(&vimep->vimep_dst), in print_vdev_indirect()
1836 (longlong_t)DVA_GET_VDEV(&vimep->vimep_dst), in print_vdev_indirect()
1837 (longlong_t)DVA_GET_OFFSET(&vimep->vimep_dst), in print_vdev_indirect()
1838 (longlong_t)DVA_GET_ASIZE(&vimep->vimep_dst), in print_vdev_indirect()
1846 objset_t *mos = vd->vdev_spa->spa_meta_objset; in print_vdev_indirect()
1849 ASSERT(vd->vdev_obsolete_sm != NULL); in print_vdev_indirect()
1850 ASSERT3U(space_map_object(vd->vdev_obsolete_sm), ==, in print_vdev_indirect()
1852 dump_spacemap(mos, vd->vdev_obsolete_sm); in print_vdev_indirect()
1860 vdev_t *vd, *rvd = spa->spa_root_vdev; in dump_metaslabs()
1861 uint64_t m, c = 0, children = rvd->vdev_children; in dump_metaslabs()
1872 vd = rvd->vdev_child[c]; in dump_metaslabs()
1876 if (zopt_metaslab[m] < vd->vdev_ms_count) in dump_metaslabs()
1878 vd->vdev_ms[zopt_metaslab[m]]); in dump_metaslabs()
1890 vd = rvd->vdev_child[c]; in dump_metaslabs()
1895 for (m = 0; m < vd->vdev_ms_count; m++) in dump_metaslabs()
1896 dump_metaslab(vd->vdev_ms[m]); in dump_metaslabs()
1908 for (spa_log_sm_t *sls = avl_first(&spa->spa_sm_logs_by_txg); in dump_log_spacemaps()
1909 sls; sls = AVL_NEXT(&spa->spa_sm_logs_by_txg, sls)) { in dump_log_spacemaps()
1912 sls->sls_sm_obj, 0, UINT64_MAX, SPA_MINBLOCKSHIFT)); in dump_log_spacemaps()
1915 (u_longlong_t)sls->sls_sm_obj, (u_longlong_t)sls->sls_txg); in dump_log_spacemaps()
1916 dump_spacemap(spa->spa_meta_objset, sm); in dump_log_spacemaps()
1926 const ddt_key_t *ddk = &ddlwe->ddlwe_key; in dump_ddt_entry()
1932 const ddt_univ_phys_t *ddp = &ddlwe->ddlwe_phys; in dump_ddt_entry()
1937 ddt_bp_create(ddt->ddt_checksum, ddk, ddp, v, &blk); in dump_ddt_entry()
1950 if (dds->dds_blocks == 0) in dump_dedup_ratio()
1953 rL = (double)dds->dds_ref_lsize; in dump_dedup_ratio()
1954 rP = (double)dds->dds_ref_psize; in dump_dedup_ratio()
1955 rD = (double)dds->dds_ref_dsize; in dump_dedup_ratio()
1956 D = (double)dds->dds_dsize; in dump_dedup_ratio()
1970 if (ddt->ddt_version != DDT_VERSION_FDT || in dump_ddt_log()
1971 !(ddt->ddt_flags & DDT_FLAG_LOG)) in dump_ddt_log()
1975 ddt_log_t *ddl = &ddt->ddt_log[n]; in dump_ddt_log()
1978 if (ddl->ddl_flags > 0) { in dump_ddt_log()
1981 if (ddl->ddl_flags & DDL_FLAG_FLUSHING) in dump_ddt_log()
1983 sizeof (flagstr) - c); in dump_ddt_log()
1984 if (ddl->ddl_flags & DDL_FLAG_CHECKPOINT) in dump_ddt_log()
1986 sizeof (flagstr) - c); in dump_ddt_log()
1987 if (ddl->ddl_flags & in dump_ddt_log()
1990 sizeof (flagstr) - c); in dump_ddt_log()
1995 uint64_t count = avl_numnodes(&ddl->ddl_tree); in dump_ddt_log()
1999 zio_checksum_table[ddt->ddt_checksum].ci_name, n, in dump_ddt_log()
2000 ddl->ddl_flags, flagstr, in dump_ddt_log()
2001 (u_longlong_t)ddl->ddl_object, in dump_ddt_log()
2002 (u_longlong_t)ddl->ddl_length, in dump_ddt_log()
2003 (u_longlong_t)ddl->ddl_first_txg, (u_longlong_t)count); in dump_ddt_log()
2005 if (ddl->ddl_flags & DDL_FLAG_CHECKPOINT) { in dump_ddt_log()
2006 const ddt_key_t *ddk = &ddl->ddl_checkpoint; in dump_ddt_log()
2009 (u_longlong_t)ddk->ddk_cksum.zc_word[0], in dump_ddt_log()
2010 (u_longlong_t)ddk->ddk_cksum.zc_word[1], in dump_ddt_log()
2011 (u_longlong_t)ddk->ddk_cksum.zc_word[2], in dump_ddt_log()
2012 (u_longlong_t)ddk->ddk_cksum.zc_word[3], in dump_ddt_log()
2013 (u_longlong_t)ddk->ddk_prop); in dump_ddt_log()
2021 for (ddt_log_entry_t *ddle = avl_first(&ddl->ddl_tree); in dump_ddt_log()
2022 ddle; ddle = AVL_NEXT(&ddl->ddl_tree, ddle)) { in dump_ddt_log()
2061 zpool_dump_ddt(NULL, &ddt->ddt_histogram[type][class]); in dump_ddt_object()
2082 if (!ddt || ddt->ddt_version == DDT_VERSION_UNCONFIGURED) in dump_ddt()
2086 if (ddt->ddt_flags > 0) { in dump_ddt()
2089 if (ddt->ddt_flags & DDT_FLAG_FLAT) in dump_ddt()
2091 sizeof (flagstr) - c); in dump_ddt()
2092 if (ddt->ddt_flags & DDT_FLAG_LOG) in dump_ddt()
2094 sizeof (flagstr) - c); in dump_ddt()
2095 if (ddt->ddt_flags & ~DDT_FLAG_MASK) in dump_ddt()
2097 sizeof (flagstr) - c); in dump_ddt()
2102 printf("DDT-%s: version=%llu [%s]; flags=0x%02llx%s; rootobj=%llu\n", in dump_ddt()
2103 zio_checksum_table[ddt->ddt_checksum].ci_name, in dump_ddt()
2104 (u_longlong_t)ddt->ddt_version, in dump_ddt()
2105 (ddt->ddt_version == 0) ? "LEGACY" : in dump_ddt()
2106 (ddt->ddt_version == 1) ? "FDT" : "UNKNOWN", in dump_ddt()
2107 (u_longlong_t)ddt->ddt_flags, flagstr, in dump_ddt()
2108 (u_longlong_t)ddt->ddt_dir_object); in dump_ddt()
2124 dump_ddt(spa->spa_ddt[c]); in dump_all_ddts()
2159 "-----", "---------", "----"); in dump_all_ddts()
2193 for (uint64_t vdevid = 0; vdevid < spa->spa_brt_nvdevs; vdevid++) { in dump_brt()
2194 brt_vdev_t *brtvd = spa->spa_brt_vdevs[vdevid]; in dump_brt()
2195 if (!brtvd->bv_initiated) { in dump_brt()
2200 zdb_nicenum(brtvd->bv_totalcount, count, sizeof (count)); in dump_brt()
2201 zdb_nicebytes(brtvd->bv_usedspace, used, sizeof (used)); in dump_brt()
2202 zdb_nicebytes(brtvd->bv_savedspace, saved, sizeof (saved)); in dump_brt()
2210 /* -TTT shows per-vdev histograms; -TTTT shows all entries */ in dump_brt()
2216 printf("\n%-16s %-10s\n", "DVA", "REFCNT"); in dump_brt()
2218 for (uint64_t vdevid = 0; vdevid < spa->spa_brt_nvdevs; vdevid++) { in dump_brt()
2219 brt_vdev_t *brtvd = spa->spa_brt_vdevs[vdevid]; in dump_brt()
2220 if (!brtvd->bv_initiated) in dump_brt()
2227 for (zap_cursor_init(&zc, spa->spa_meta_objset, in dump_brt()
2228 brtvd->bv_mos_entries); in dump_brt()
2232 VERIFY0(zap_lookup_uint64(spa->spa_meta_objset, in dump_brt()
2233 brtvd->bv_mos_entries, in dump_brt()
2234 (const uint64_t *)za->za_name, 1, in dump_brt()
2235 za->za_integer_length, za->za_num_integers, in dump_brt()
2242 *(const uint64_t *)za->za_name; in dump_brt()
2246 printf("%-16s %-10llu\n", dva, in dump_brt()
2276 spa_t *spa = vd->vdev_spa; in dump_dtl()
2290 vd->vdev_path ? vd->vdev_path : in dump_dtl()
2291 vd->vdev_parent ? vd->vdev_ops->vdev_op_type : spa_name(spa), in dump_dtl()
2292 required ? "DTL-required" : "DTL-expendable"); in dump_dtl()
2295 range_tree_t *rt = vd->vdev_dtl[t]; in dump_dtl()
2301 if (dump_opt['d'] > 5 && vd->vdev_children == 0) in dump_dtl()
2302 dump_spacemap(spa->spa_meta_objset, in dump_dtl()
2303 vd->vdev_dtl_sm); in dump_dtl()
2306 for (unsigned c = 0; c < vd->vdev_children; c++) in dump_dtl()
2307 dump_dtl(vd->vdev_child[c], indent + 4); in dump_dtl()
2339 off -= resid; in dump_history()
2440 ASSERT(zb->zb_level < 0); in blkid2offset()
2441 if (zb->zb_object == 0) in blkid2offset()
2442 return (zb->zb_blkid); in blkid2offset()
2443 return (zb->zb_blkid * BP_GET_LSIZE(bp)); in blkid2offset()
2446 ASSERT(zb->zb_level >= 0); in blkid2offset()
2448 return ((zb->zb_blkid << in blkid2offset()
2449 (zb->zb_level * (dnp->dn_indblkshift - SPA_BLKPTRSHIFT))) * in blkid2offset()
2450 dnp->dn_datablkszsec << SPA_MINBLOCKSHIFT); in blkid2offset()
2481 buflen - strlen(blkbuf), in snprintf_zstd_header()
2507 buflen - strlen(blkbuf), in snprintf_zstd_header()
2519 const dva_t *dva = bp->blk_dva; in snprintf_blkptr_compact()
2527 buflen - strlen(blkbuf), " %s", "FREE"); in snprintf_blkptr_compact()
2546 buflen - strlen(blkbuf), "%llu:%llx:%llx ", in snprintf_blkptr_compact()
2553 buflen - strlen(blkbuf), in snprintf_blkptr_compact()
2559 buflen - strlen(blkbuf), in snprintf_blkptr_compact()
2568 buflen - strlen(blkbuf), " %s", "FREE"); in snprintf_blkptr_compact()
2570 buflen - strlen(blkbuf), in snprintf_blkptr_compact()
2572 (u_longlong_t)bp->blk_cksum.zc_word[0], in snprintf_blkptr_compact()
2573 (u_longlong_t)bp->blk_cksum.zc_word[1], in snprintf_blkptr_compact()
2574 (u_longlong_t)bp->blk_cksum.zc_word[2], in snprintf_blkptr_compact()
2575 (u_longlong_t)bp->blk_cksum.zc_word[3]); in snprintf_blkptr_compact()
2587 ASSERT3U(BP_GET_TYPE(bp), ==, dnp->dn_type); in print_indirect()
2588 ASSERT3U(BP_GET_LEVEL(bp), ==, zb->zb_level); in print_indirect()
2593 ASSERT(zb->zb_level >= 0); in print_indirect()
2595 for (l = dnp->dn_nlevels - 1; l >= -1; l--) { in print_indirect()
2596 if (l == zb->zb_level) { in print_indirect()
2597 (void) printf("L%llx", (u_longlong_t)zb->zb_level); in print_indirect()
2633 ASSERT(buf->b_data); in visit_indirect()
2636 cbp = buf->b_data; in visit_indirect()
2640 SET_BOOKMARK(&czb, zb->zb_objset, zb->zb_object, in visit_indirect()
2641 zb->zb_level - 1, in visit_indirect()
2642 zb->zb_blkid * epb + i); in visit_indirect()
2659 dnode_phys_t *dnp = dn->dn_phys; in dump_indirect()
2664 SET_BOOKMARK(&czb, dmu_objset_id(dn->dn_objset), in dump_indirect()
2665 dn->dn_object, dnp->dn_nlevels - 1, 0); in dump_indirect()
2666 for (int j = 0; j < dnp->dn_nblkptr; j++) { in dump_indirect()
2668 (void) visit_indirect(dmu_objset_spa(dn->dn_objset), dnp, in dump_indirect()
2669 &dnp->dn_blkptr[j], &czb); in dump_indirect()
2691 crtime = dd->dd_creation_time; in dump_dsl_dir()
2694 (u_longlong_t)dd->dd_head_dataset_obj); in dump_dsl_dir()
2696 (u_longlong_t)dd->dd_parent_obj); in dump_dsl_dir()
2698 (u_longlong_t)dd->dd_origin_obj); in dump_dsl_dir()
2700 (u_longlong_t)dd->dd_child_dir_zapobj); in dump_dsl_dir()
2701 zdb_nicenum(dd->dd_used_bytes, nice, sizeof (nice)); in dump_dsl_dir()
2703 zdb_nicenum(dd->dd_compressed_bytes, nice, sizeof (nice)); in dump_dsl_dir()
2705 zdb_nicenum(dd->dd_uncompressed_bytes, nice, sizeof (nice)); in dump_dsl_dir()
2707 zdb_nicenum(dd->dd_quota, nice, sizeof (nice)); in dump_dsl_dir()
2709 zdb_nicenum(dd->dd_reserved, nice, sizeof (nice)); in dump_dsl_dir()
2712 (u_longlong_t)dd->dd_props_zapobj); in dump_dsl_dir()
2714 (u_longlong_t)dd->dd_deleg_zapobj); in dump_dsl_dir()
2716 (u_longlong_t)dd->dd_flags); in dump_dsl_dir()
2719 zdb_nicenum(dd->dd_used_breakdown[DD_USED_ ## which], nice, \ in dump_dsl_dir()
2729 (u_longlong_t)dd->dd_clones); in dump_dsl_dir()
2753 crtime = ds->ds_creation_time; in dump_dsl_dataset()
2754 zdb_nicenum(ds->ds_referenced_bytes, used, sizeof (used)); in dump_dsl_dataset()
2755 zdb_nicenum(ds->ds_compressed_bytes, compressed, sizeof (compressed)); in dump_dsl_dataset()
2756 zdb_nicenum(ds->ds_uncompressed_bytes, uncompressed, in dump_dsl_dataset()
2758 zdb_nicenum(ds->ds_unique_bytes, unique, sizeof (unique)); in dump_dsl_dataset()
2759 snprintf_blkptr(blkbuf, sizeof (blkbuf), &ds->ds_bp); in dump_dsl_dataset()
2762 (u_longlong_t)ds->ds_dir_obj); in dump_dsl_dataset()
2764 (u_longlong_t)ds->ds_prev_snap_obj); in dump_dsl_dataset()
2766 (u_longlong_t)ds->ds_prev_snap_txg); in dump_dsl_dataset()
2768 (u_longlong_t)ds->ds_next_snap_obj); in dump_dsl_dataset()
2770 (u_longlong_t)ds->ds_snapnames_zapobj); in dump_dsl_dataset()
2772 (u_longlong_t)ds->ds_num_children); in dump_dsl_dataset()
2774 (u_longlong_t)ds->ds_userrefs_obj); in dump_dsl_dataset()
2777 (u_longlong_t)ds->ds_creation_txg); in dump_dsl_dataset()
2779 (u_longlong_t)ds->ds_deadlist_obj); in dump_dsl_dataset()
2785 (u_longlong_t)ds->ds_fsid_guid); in dump_dsl_dataset()
2787 (u_longlong_t)ds->ds_guid); in dump_dsl_dataset()
2789 (u_longlong_t)ds->ds_flags); in dump_dsl_dataset()
2791 (u_longlong_t)ds->ds_next_clones_obj); in dump_dsl_dataset()
2793 (u_longlong_t)ds->ds_props_obj); in dump_dsl_dataset()
2824 bt = db->db_data; in dump_bptree()
2825 zdb_nicenum(bt->bt_bytes, bytes, sizeof (bytes)); in dump_bptree()
2827 name, (unsigned long long)(bt->bt_end - bt->bt_begin), bytes); in dump_bptree()
2866 zdb_nicenum(bpo->bpo_phys->bpo_bytes, bytes, sizeof (bytes)); in dump_full_bpobj()
2867 if (bpo->bpo_havesubobj && bpo->bpo_phys->bpo_subobjs != 0) { in dump_full_bpobj()
2868 zdb_nicenum(bpo->bpo_phys->bpo_comp, comp, sizeof (comp)); in dump_full_bpobj()
2869 zdb_nicenum(bpo->bpo_phys->bpo_uncomp, uncomp, sizeof (uncomp)); in dump_full_bpobj()
2870 if (bpo->bpo_havefreed) { in dump_full_bpobj()
2875 (u_longlong_t)bpo->bpo_object, in dump_full_bpobj()
2876 (u_longlong_t)bpo->bpo_phys->bpo_num_blkptrs, in dump_full_bpobj()
2877 (u_longlong_t)bpo->bpo_phys->bpo_num_freed, in dump_full_bpobj()
2878 (u_longlong_t)bpo->bpo_phys->bpo_num_subobjs, in dump_full_bpobj()
2879 (u_longlong_t)bpo->bpo_phys->bpo_subobjs, in dump_full_bpobj()
2886 (u_longlong_t)bpo->bpo_object, in dump_full_bpobj()
2887 (u_longlong_t)bpo->bpo_phys->bpo_num_blkptrs, in dump_full_bpobj()
2888 (u_longlong_t)bpo->bpo_phys->bpo_num_subobjs, in dump_full_bpobj()
2889 (u_longlong_t)bpo->bpo_phys->bpo_subobjs, in dump_full_bpobj()
2893 for (i = 0; i < bpo->bpo_phys->bpo_num_subobjs; i++) { in dump_full_bpobj()
2897 VERIFY0(dmu_read(bpo->bpo_os, in dump_full_bpobj()
2898 bpo->bpo_phys->bpo_subobjs, in dump_full_bpobj()
2900 error = bpobj_open(&subbpo, bpo->bpo_os, subobj); in dump_full_bpobj()
2911 if (bpo->bpo_havefreed) { in dump_full_bpobj()
2915 (u_longlong_t)bpo->bpo_object, in dump_full_bpobj()
2916 (u_longlong_t)bpo->bpo_phys->bpo_num_blkptrs, in dump_full_bpobj()
2917 (u_longlong_t)bpo->bpo_phys->bpo_num_freed, in dump_full_bpobj()
2923 (u_longlong_t)bpo->bpo_object, in dump_full_bpobj()
2924 (u_longlong_t)bpo->bpo_phys->bpo_num_blkptrs, in dump_full_bpobj()
2945 objset_t *mos = dp->dp_spa->spa_meta_objset; in dump_bookmark()
2967 redaction_list_phys_t *rlp = rl->rl_phys; in dump_bookmark()
2969 if (rlp->rlp_last_object != UINT64_MAX || in dump_bookmark()
2970 rlp->rlp_last_blkid != UINT64_MAX) { in dump_bookmark()
2972 (u_longlong_t)rlp->rlp_last_object, in dump_bookmark()
2973 (u_longlong_t)rlp->rlp_last_blkid); in dump_bookmark()
2978 for (unsigned int i = 0; i < rlp->rlp_num_snaps; i++) { in dump_bookmark()
2982 (u_longlong_t)rlp->rlp_snaps[i]); in dump_bookmark()
2985 (u_longlong_t)rlp->rlp_num_entries); in dump_bookmark()
2992 if (rlp->rlp_num_entries == 0) { in dump_bookmark()
3021 for (size_t i = 1; i < rlp->rlp_num_entries; i++) { in dump_bookmark()
3041 dsl_pool_t *dp = spa_get_dsl(os->os_spa); in dump_bookmarks()
3042 objset_t *mos = os->os_spa->spa_meta_objset; in dump_bookmarks()
3048 for (zap_cursor_init(&zc, mos, ds->ds_bookmarks_obj); in dump_bookmarks()
3056 attrp->za_name); in dump_bookmarks()
3068 mos_obj_refd(bpo->bpo_object); in bpobj_count_refd()
3070 if (bpo->bpo_havesubobj && bpo->bpo_phys->bpo_subobjs != 0) { in bpobj_count_refd()
3071 mos_obj_refd(bpo->bpo_phys->bpo_subobjs); in bpobj_count_refd()
3072 for (uint64_t i = 0; i < bpo->bpo_phys->bpo_num_subobjs; i++) { in bpobj_count_refd()
3076 VERIFY0(dmu_read(bpo->bpo_os, in bpobj_count_refd()
3077 bpo->bpo_phys->bpo_subobjs, in bpobj_count_refd()
3079 error = bpobj_open(&subbpo, bpo->bpo_os, subobj); in bpobj_count_refd()
3096 uint64_t empty_bpobj = spa->spa_dsl_pool->dp_empty_bpobj; in dsl_deadlist_entry_count_refd()
3097 if (dle->dle_bpobj.bpo_object != empty_bpobj) in dsl_deadlist_entry_count_refd()
3098 bpobj_count_refd(&dle->dle_bpobj); in dsl_deadlist_entry_count_refd()
3109 "mintxg %llu -> obj %llu", in dsl_deadlist_entry_dump()
3110 (longlong_t)dle->dle_mintxg, in dsl_deadlist_entry_dump()
3111 (longlong_t)dle->dle_bpobj.bpo_object); in dsl_deadlist_entry_dump()
3113 dump_full_bpobj(&dle->dle_bpobj, buf, 0); in dsl_deadlist_entry_dump()
3115 (void) printf("mintxg %llu -> obj %llu\n", in dsl_deadlist_entry_dump()
3116 (longlong_t)dle->dle_mintxg, in dsl_deadlist_entry_dump()
3117 (longlong_t)dle->dle_bpobj.bpo_object); in dsl_deadlist_entry_dump()
3129 spa_t *spa = dmu_objset_spa(dl->dl_os); in dump_blkptr_list()
3130 uint64_t empty_bpobj = spa->spa_dsl_pool->dp_empty_bpobj; in dump_blkptr_list()
3132 if (dl->dl_oldfmt) { in dump_blkptr_list()
3133 if (dl->dl_bpobj.bpo_object != empty_bpobj) in dump_blkptr_list()
3134 bpobj_count_refd(&dl->dl_bpobj); in dump_blkptr_list()
3136 mos_obj_refd(dl->dl_object); in dump_blkptr_list()
3149 if (dl->dl_oldfmt) { in dump_blkptr_list()
3150 dump_full_bpobj(&dl->dl_bpobj, "old-format deadlist", 0); in dump_blkptr_list()
3154 zdb_nicenum(dl->dl_phys->dl_used, bytes, sizeof (bytes)); in dump_blkptr_list()
3155 zdb_nicenum(dl->dl_phys->dl_comp, comp, sizeof (comp)); in dump_blkptr_list()
3156 zdb_nicenum(dl->dl_phys->dl_uncomp, uncomp, sizeof (uncomp)); in dump_blkptr_list()
3157 zdb_nicenum(avl_numnodes(&dl->dl_tree), entries, sizeof (entries)); in dump_blkptr_list()
3173 dsl_pool_t *dp = spa_get_dsl(os->os_spa); in verify_dd_livelist()
3174 dsl_dir_t *dd = os->os_dsl_dataset->ds_dir; in verify_dd_livelist()
3177 if (!dsl_deadlist_is_open(&dd->dd_livelist)) in verify_dd_livelist()
3181 dsl_deadlist_iterate(&dd->dd_livelist, sublivelist_verify_lightweight, in verify_dd_livelist()
3185 dsl_deadlist_space(&dd->dd_livelist, &ll_used, in verify_dd_livelist()
3191 dsl_dir_phys(dd)->dd_origin_obj, FTAG, &origin_ds)); in verify_dd_livelist()
3192 VERIFY0(dsl_dataset_space_written(origin_ds, os->os_dsl_dataset, in verify_dd_livelist()
3227 VERIFY0(zap_lookup(dd->dd_pool->dp_meta_objset, dd->dd_crypto_obj, in zdb_derive_key()
3244 VERIFY0(zap_lookup(dd->dd_pool->dp_meta_objset, in zdb_derive_key()
3245 dd->dd_crypto_obj, zfs_prop_to_name(ZFS_PROP_PBKDF2_SALT), in zdb_derive_key()
3247 VERIFY0(zap_lookup(dd->dd_pool->dp_meta_objset, in zdb_derive_key()
3248 dd->dd_crypto_obj, zfs_prop_to_name(ZFS_PROP_PBKDF2_ITERS), in zdb_derive_key()
3278 dp = spa_get_dsl(os->os_spa); in zdb_load_key()
3279 dd = os->os_dsl_dataset->ds_dir; in zdb_load_key()
3282 VERIFY0(zap_lookup(dd->dd_pool->dp_meta_objset, dd->dd_crypto_obj, in zdb_load_key()
3284 VERIFY0(dsl_dir_hold_obj(dd->dd_pool, rddobj, NULL, FTAG, &rdd)); in zdb_load_key()
3383 (key_loaded || !(*osp)->os_encrypted)) { in open_objset()
3410 if (os->os_sa != NULL) in close_objset()
3445 ddt_t *ddt = spa->spa_ddt[c]; in zdb_ddt_cleanup()
3451 ddt_entry_t *dde = avl_first(&ddt->ddt_tree), *next; in zdb_ddt_cleanup()
3453 next = AVL_NEXT(&ddt->ddt_tree, dde); in zdb_ddt_cleanup()
3454 dde->dde_io = NULL; in zdb_ddt_cleanup()
3487 * the domain-rid string.
3495 (void) printf("\t%s %llx [%s-%d]\n", id_type, in print_idstr()
3806 * set, the user combined the all-types flag (A) with in match_object_type()
3807 * a negated flag to exclude some types (e.g. A-f to in match_object_type()
3854 * Encrypted datasets will have sensitive bonus buffers in dump_object()
3862 if (!key_loaded && os->os_encrypted && in dump_object()
3873 bonus = db->db_data; in dump_object()
3874 bsize = db->db_size; in dump_object()
3902 (void) snprintf(aux + strlen(aux), sizeof (aux) - strlen(aux), in dump_object()
3907 ZIO_COMPRESS_HASLEVEL(os->os_compress) && verbosity >= 6) { in dump_object()
3910 ZIO_COMPRESS_RAW(os->os_compress, os->os_complevel), in dump_object()
3913 sizeof (aux) - strlen(aux), " (Z=inherit=%s)", in dump_object()
3917 sizeof (aux) - strlen(aux), in dump_object()
3918 " (Z=inherit=%s-unknown)", in dump_object()
3919 ZDB_COMPRESS_NAME(os->os_compress)); in dump_object()
3922 (void) snprintf(aux + strlen(aux), sizeof (aux) - strlen(aux), in dump_object()
3923 " (Z=inherit=%s)", ZDB_COMPRESS_NAME(os->os_compress)); in dump_object()
3925 (void) snprintf(aux + strlen(aux), sizeof (aux) - strlen(aux), in dump_object()
3941 (dn->dn_phys->dn_flags & DNODE_FLAG_USED_BYTES) ? in dump_object()
3943 (dn->dn_phys->dn_flags & DNODE_FLAG_USERUSED_ACCOUNTED) ? in dump_object()
3945 (dn->dn_phys->dn_flags & DNODE_FLAG_USEROBJUSED_ACCOUNTED) ? in dump_object()
3947 (dn->dn_phys->dn_flags & DNODE_FLAG_SPILL_BLKPTR) ? in dump_object()
3950 (longlong_t)dn->dn_phys->dn_maxblkid); in dump_object()
3960 (!os->os_encrypted || !DMU_OT_IS_ENCRYPTED(doi.doi_type))) { in dump_object()
3971 if (dn->dn_phys->dn_flags & DNODE_FLAG_SPILL_BLKPTR) { in dump_object()
3974 DN_SPILL_BLKPTR(dn->dn_phys), B_FALSE); in dump_object()
3989 if (dn->dn_type == DMU_OT_DNODE) { in dump_object()
4006 zdb_nicenum(end - start, segsize, sizeof (segsize)); in dump_object()
4026 mos_obj_refd(dd->dd_object); in count_dir_mos_objects()
4027 mos_obj_refd(dsl_dir_phys(dd)->dd_child_dir_zapobj); in count_dir_mos_objects()
4028 mos_obj_refd(dsl_dir_phys(dd)->dd_deleg_zapobj); in count_dir_mos_objects()
4029 mos_obj_refd(dsl_dir_phys(dd)->dd_props_zapobj); in count_dir_mos_objects()
4030 mos_obj_refd(dsl_dir_phys(dd)->dd_clones); in count_dir_mos_objects()
4036 mos_obj_refd_multiple(dd->dd_crypto_obj); in count_dir_mos_objects()
4042 mos_obj_refd(ds->ds_object); in count_ds_mos_objects()
4043 mos_obj_refd(dsl_dataset_phys(ds)->ds_next_clones_obj); in count_ds_mos_objects()
4044 mos_obj_refd(dsl_dataset_phys(ds)->ds_props_obj); in count_ds_mos_objects()
4045 mos_obj_refd(dsl_dataset_phys(ds)->ds_userrefs_obj); in count_ds_mos_objects()
4046 mos_obj_refd(dsl_dataset_phys(ds)->ds_snapnames_zapobj); in count_ds_mos_objects()
4047 mos_obj_refd(ds->ds_bookmarks_obj); in count_ds_mos_objects()
4050 count_dir_mos_objects(ds->ds_dir); in count_ds_mos_objects()
4073 zor->zor_obj_start = strtoull(range, &p, 0); in parse_object_range()
4078 zor->zor_obj_start = ZDB_MAP_OBJECT_ID(zor->zor_obj_start); in parse_object_range()
4079 zor->zor_obj_end = zor->zor_obj_start; in parse_object_range()
4090 if (range[len - 1] == ':') { in parse_object_range()
4098 zor->zor_obj_start = strtoull(s, &p, 0); in parse_object_range()
4107 zor->zor_obj_end = strtoull(s, &p, 0); in parse_object_range()
4115 if (zor->zor_obj_start > zor->zor_obj_end) { in parse_object_range()
4123 zor->zor_flags = ZOR_FLAG_ALL_TYPES; in parse_object_range()
4126 *msg = "Invalid colon-delimited field after flags"; in parse_object_range()
4134 boolean_t negation = (flagstr[i] == '-'); in parse_object_range()
4155 zor->zor_flags = flags; in parse_object_range()
4157 zor->zor_obj_start = ZDB_MAP_OBJECT_ID(zor->zor_obj_start); in parse_object_range()
4158 zor->zor_obj_end = ZDB_MAP_OBJECT_ID(zor->zor_obj_end); in parse_object_range()
4200 usedobjs = BP_GET_FILL(os->os_rootbp); in dump_objset()
4201 refdbytes = dsl_dir_phys(os->os_spa->spa_dsl_pool->dp_mos_dir)-> in dump_objset()
4207 ASSERT3U(usedobjs, ==, BP_GET_FILL(os->os_rootbp)); in dump_objset()
4214 sizeof (blkbuf) - strlen(blkbuf), os->os_rootbp); in dump_objset()
4238 object--; in dump_objset()
4257 dump_blkptr_list(&ds->ds_deadlist, "Deadlist"); in dump_objset()
4258 if (dsl_deadlist_is_open(&ds->ds_dir->dd_livelist) && in dump_objset()
4260 dump_blkptr_list(&ds->ds_dir->dd_livelist, "Livelist"); in dump_objset()
4267 dump_blkptr_list(&ds->ds_remap_deadlist, "Deadlist"); in dump_objset()
4278 if (BP_IS_HOLE(os->os_rootbp)) in dump_objset()
4284 DMU_USERUSED_DNODE(os)->dn_type != 0) { in dump_objset()
4292 DMU_PROJECTUSED_DNODE(os)->dn_type != 0) in dump_objset()
4302 max_slot_used = object + dnode_slots - 1; in dump_objset()
4313 (double)(max_slot_used - total_slots_used)*100 / in dump_objset()
4334 time_t timestamp = ub->ub_timestamp; in dump_uberblock()
4337 (void) printf("\tmagic = %016llx\n", (u_longlong_t)ub->ub_magic); in dump_uberblock()
4338 (void) printf("\tversion = %llu\n", (u_longlong_t)ub->ub_version); in dump_uberblock()
4339 (void) printf("\ttxg = %llu\n", (u_longlong_t)ub->ub_txg); in dump_uberblock()
4340 (void) printf("\tguid_sum = %llu\n", (u_longlong_t)ub->ub_guid_sum); in dump_uberblock()
4342 (u_longlong_t)ub->ub_timestamp, ctime(&timestamp)); in dump_uberblock()
4345 snprintf_blkptr(blkbuf, sizeof (blkbuf), &ub->ub_rootbp); in dump_uberblock()
4349 (u_longlong_t)ub->ub_mmp_magic); in dump_uberblock()
4352 (u_longlong_t)ub->ub_mmp_delay); in dump_uberblock()
4364 (unsigned int) ub->ub_mmp_config & 0xFF); in dump_uberblock()
4369 snprintf_blkptr(blkbuf, sizeof (blkbuf), &ub->ub_rootbp); in dump_uberblock()
4373 (u_longlong_t)ub->ub_checkpoint_txg); in dump_uberblock()
4390 error = dmu_bonus_hold(spa->spa_meta_objset, in dump_config()
4391 spa->spa_config_object, FTAG, &db); in dump_config()
4394 nvsize = *(uint64_t *)db->db_data; in dump_config()
4398 dump_packed_nvlist(spa->spa_meta_objset, in dump_config()
4399 spa->spa_config_object, (void *)&nvsize, 1); in dump_config()
4402 (u_longlong_t)spa->spa_config_object, error); in dump_config()
4473 stats->zns_list_count++; in collect_nvlist_stats()
4480 fnvlist_add_string(stats->zns_string, name, in collect_nvlist_stats()
4484 fnvlist_add_uint64(stats->zns_uint64, name, in collect_nvlist_stats()
4488 fnvlist_add_boolean(stats->zns_boolean, name); in collect_nvlist_stats()
4507 stats->zns_leaf_total += size; in collect_nvlist_stats()
4508 if (size > stats->zns_leaf_largest) in collect_nvlist_stats()
4509 stats->zns_leaf_largest = size; in collect_nvlist_stats()
4510 stats->zns_leaf_count++; in collect_nvlist_stats()
4525 size_t noise; in dump_nvlist_stats() local
4527 /* requires nvlist with non-unique names for stat collection */ in dump_nvlist_stats()
4531 VERIFY0(nvlist_size(stats.zns_boolean, &noise, NV_ENCODE_XDR)); in dump_nvlist_stats()
4537 (int)total, (int)(cap - total), 100.0 * total / cap); in dump_nvlist_stats()
4542 size -= noise; in dump_nvlist_stats()
4549 size -= noise; in dump_nvlist_stats()
4556 size -= noise; in dump_nvlist_stats()
4562 size = total - sum; /* treat remainder as nvlist overhead */ in dump_nvlist_stats()
4576 (int)((cap - total) / average)); in dump_nvlist_stats()
4596 int arraysize = ARRAY_SIZE(l->cksum.zc_word); in cksum_record_compare()
4600 difference = TREE_CMP(l->cksum.zc_word[i], r->cksum.zc_word[i]); in cksum_record_compare()
4614 rec->cksum = *cksum; in cksum_record_alloc()
4615 rec->labels[l] = B_TRUE; in cksum_record_alloc()
4636 rec->labels[l] = B_TRUE; in cksum_record_insert()
4649 if (rec->labels[i]) in first_label()
4652 return (-1); in first_label()
4660 if (rec->labels[i] == B_TRUE) in print_label_numbers()
4685 if (label->header_printed == B_TRUE) in print_label_header()
4688 (void) printf("------------------------------------\n"); in print_label_header()
4690 label->cksum_valid ? "" : "(Bad label cksum)"); in print_label_header()
4691 (void) printf("------------------------------------\n"); in print_label_header()
4693 label->header_printed = B_TRUE; in print_label_header()
4699 (void) printf("------------------------------------\n"); in print_l2arc_header()
4701 (void) printf("------------------------------------\n"); in print_l2arc_header()
4707 (void) printf("------------------------------------\n"); in print_l2arc_log_blocks()
4709 (void) printf("------------------------------------\n"); in print_l2arc_log_blocks()
4727 (u_longlong_t)L2BLK_GET_LSIZE((&le[j])->le_prop)); in dump_l2arc_log_entries()
4729 (u_longlong_t)L2BLK_GET_PSIZE((&le[j])->le_prop)); in dump_l2arc_log_entries()
4731 (u_longlong_t)L2BLK_GET_COMPRESS((&le[j])->le_prop)); in dump_l2arc_log_entries()
4733 (u_longlong_t)(&le[j])->le_complevel); in dump_l2arc_log_entries()
4735 (u_longlong_t)L2BLK_GET_TYPE((&le[j])->le_prop)); in dump_l2arc_log_entries()
4737 (u_longlong_t)L2BLK_GET_PROTECTED((&le[j])->le_prop)); in dump_l2arc_log_entries()
4739 (u_longlong_t)L2BLK_GET_PREFETCH((&le[j])->le_prop)); in dump_l2arc_log_entries()
4743 (u_longlong_t)L2BLK_GET_STATE((&le[j])->le_prop)); in dump_l2arc_log_entries()
4752 (void) printf("|\t\tdaddr: %llu\n", (u_longlong_t)lbps->lbp_daddr); in dump_l2arc_log_blkptr()
4754 (u_longlong_t)lbps->lbp_payload_asize); in dump_l2arc_log_blkptr()
4756 (u_longlong_t)lbps->lbp_payload_start); in dump_l2arc_log_blkptr()
4758 (u_longlong_t)L2BLK_GET_LSIZE(lbps->lbp_prop)); in dump_l2arc_log_blkptr()
4760 (u_longlong_t)L2BLK_GET_PSIZE(lbps->lbp_prop)); in dump_l2arc_log_blkptr()
4762 (u_longlong_t)L2BLK_GET_COMPRESS(lbps->lbp_prop)); in dump_l2arc_log_blkptr()
4764 (u_longlong_t)L2BLK_GET_CHECKSUM(lbps->lbp_prop)); in dump_l2arc_log_blkptr()
4781 memcpy(lbps, l2dhdr->dh_start_lbps, sizeof (lbps)); in dump_l2arc_log_blocks()
4783 dev.l2ad_evict = l2dhdr->dh_evict; in dump_l2arc_log_blocks()
4784 dev.l2ad_start = l2dhdr->dh_start; in dump_l2arc_log_blocks()
4785 dev.l2ad_end = l2dhdr->dh_end; in dump_l2arc_log_blocks()
4787 if (l2dhdr->dh_start_lbps[0].lbp_daddr == 0) { in dump_l2arc_log_blocks()
4796 L2BLK_GET_PSIZE((&lbps[0])->lbp_prop); in dump_l2arc_log_blocks()
4799 dev.l2ad_first = !!(l2dhdr->dh_flags & L2ARC_DEV_HDR_EVICT_FIRST); in dump_l2arc_log_blocks()
4806 asize = L2BLK_GET_PSIZE((&lbps[0])->lbp_prop); in dump_l2arc_log_blocks()
4825 switch (L2BLK_GET_COMPRESS((&lbps[0])->lbp_prop)) { in dump_l2arc_log_blocks()
4835 (&lbps[0])->lbp_prop), abd, &dabd, in dump_l2arc_log_blocks()
4856 rebuild->dh_lb_count++; in dump_l2arc_log_blocks()
4857 rebuild->dh_lb_asize += asize; in dump_l2arc_log_blocks()
4860 (u_longlong_t)rebuild->dh_lb_count, in dump_l2arc_log_blocks()
4866 dump_l2arc_log_entries(l2dhdr->dh_log_entries, in dump_l2arc_log_blocks()
4868 rebuild->dh_lb_count); in dump_l2arc_log_blocks()
4881 (u_longlong_t)rebuild->dh_lb_count); in dump_l2arc_log_blocks()
4884 (u_longlong_t)rebuild->dh_lb_asize); in dump_l2arc_log_blocks()
4972 if ((dump_opt['l'] < 3) && (first_label(label->config) != l)) in dump_config_from_label()
4976 dump_nvlist(label->config_nv, 4); in dump_config_from_label()
4977 print_label_numbers(" labels = ", label->config); in dump_config_from_label()
4980 dump_nvlist_stats(label->config_nv, buflen); in dump_config_from_label()
4997 uberblock_t *ub = (void *)((char *)&label->label + uoff); in dump_label_uberblocks()
4998 cksum_record_t *rec = label->uberblocks[i]; in dump_label_uberblocks()
5012 (ub->ub_mmp_magic == MMP_MAGIC) && ub->ub_mmp_delay && in dump_label_uberblocks()
5013 (i >= VDEV_UBERBLOCK_COUNT(&vd) - MMP_BLOCKS_PER_LABEL)) in dump_label_uberblocks()
5093 (void) fprintf(stderr, "object %llu has non-file/directory " in dump_path_impl()
5148 len -= nwritten; in dump_backup_bytes()
5235 if (fd == -1) in zdb_copy_object()
5239 * allocation failures and nigh-infinite printing if the in zdb_copy_object()
5251 readsize = MIN(size - offset, 1 << 20); in zdb_copy_object()
5298 eck = (zio_eck_t *)((char *)(data) + VDEV_PHYS_SIZE) - 1; in label_cksum_valid()
5303 byteswap = (eck->zec_magic == BSWAP_64(ZEC_MAGIC)); in label_cksum_valid()
5307 expected_cksum = eck->zec_cksum; in label_cksum_valid()
5308 eck->zec_cksum = verifier; in label_cksum_valid()
5311 ci->ci_func[byteswap](abd, VDEV_PHYS_SIZE, NULL, &actual_cksum); in label_cksum_valid()
5349 if (zfs_append_partition(path, MAXPATHLEN) == -1) in dump_label()
5393 char *buf = label->label.vl_vdev_phys.vp_nvlist; in dump_label()
5394 size_t buflen = sizeof (label->label.vl_vdev_phys.vp_nvlist); in dump_label()
5400 label->label_offset = vdev_label_offset(psize, l, 0); in dump_label()
5402 if (pread64(fd, &label->label, sizeof (label->label), in dump_label()
5403 label->label_offset) != sizeof (label->label)) { in dump_label()
5406 label->read_failed = B_TRUE; in dump_label()
5411 label->read_failed = B_FALSE; in dump_label()
5412 label->cksum_valid = label_cksum_valid(&label->label, in dump_label()
5413 label->label_offset); in dump_label()
5440 label->config = rec; in dump_label()
5441 label->config_nv = config; in dump_label()
5460 label->uberblocks[i] = rec; in dump_label()
5469 size_t buflen = sizeof (label->label.vl_vdev_phys.vp_nvlist); in dump_label()
5471 if (label->read_failed == B_TRUE) in dump_label()
5474 if (label->config_nv) { in dump_label()
5484 nvlist_free(label->config_nv); in dump_label()
5539 avl_first(&dmu_objset_ds(os)->ds_bookmarks); dbn != NULL; in dump_one_objset()
5540 dbn = AVL_NEXT(&dmu_objset_ds(os)->ds_bookmarks, dbn)) { in dump_one_objset()
5541 mos_obj_refd(dbn->dbn_phys.zbm_redaction_obj); in dump_one_objset()
5542 if (dbn->dbn_phys.zbm_redaction_obj != 0) { in dump_one_objset()
5545 objset_t *mos = os->os_spa->spa_meta_objset; in dump_one_objset()
5548 dbn->dbn_phys.zbm_redaction_obj, FTAG, &rl)); in dump_one_objset()
5549 if (rl->dn_have_spill) { in dump_one_objset()
5554 if (dbn->dbn_phys.zbm_flags & ZBM_FLAG_HAS_FBN) in dump_one_objset()
5558 if (dsl_deadlist_is_open(&dmu_objset_ds(os)->ds_dir->dd_livelist) && in dump_one_objset()
5585 * Extended object types to report deferred frees and dedup auto-ditto blocks.
5645 uint64_t ms_shift = vd->vdev_ms_shift; in same_metaslab()
5667 * histograms showing by blocksize of 512 - 2^ SPA_MAX_FOR_16M
5674 * if the '-P' parameter is specified then the full raw number (parseable)
5703 parm_histo[0].count = zcb->zcb_psize_count; in dump_size_histograms()
5704 parm_histo[0].len = zcb->zcb_psize_len; in dump_size_histograms()
5708 parm_histo[1].count = zcb->zcb_lsize_count; in dump_size_histograms()
5709 parm_histo[1].len = zcb->zcb_lsize_len; in dump_size_histograms()
5713 parm_histo[2].count = zcb->zcb_asize_count; in dump_size_histograms()
5714 parm_histo[2].len = zcb->zcb_asize_len; in dump_size_histograms()
5729 if (j < NUM_HISTO - 1) { in dump_size_histograms()
5736 if (j < NUM_HISTO - 1) { in dump_size_histograms()
5738 (void) printf("%-7s ", in dump_size_histograms()
5837 spa_config_enter(zcb->zcb_spa, SCL_CONFIG, FTAG, RW_READER); in zdb_count_block()
5856 ddt_t *ddt = ddt_select(zcb->zcb_spa, bp); in zdb_count_block()
5891 boolean_t seen = !!(((uintptr_t)dde->dde_io) & (1 << v)); in zdb_count_block()
5893 dde->dde_io = in zdb_count_block()
5894 (void *)(((uintptr_t)dde->dde_io) | (1 << v)); in zdb_count_block()
5897 if (ddt_phys_total_refcnt(ddt, dde->dde_phys) > 0) in zdb_count_block()
5898 ddt_phys_decref(dde->dde_phys, v); in zdb_count_block()
5907 * claimed it as-is, then we would miss the claim on some in zdb_count_block()
5919 ddt_phys_birth(dde->dde_phys, v)); in zdb_count_block()
5921 ddt_bp_fill(dde->dde_phys, v, &tempbp, in zdb_count_block()
5931 zcb->zcb_dedup_asize += BP_GET_ASIZE(bp); in zdb_count_block()
5932 zcb->zcb_dedup_blocks++; in zdb_count_block()
5939 } else if (zcb->zcb_brt_is_active && in zdb_count_block()
5940 brt_maybe_exists(zcb->zcb_spa, bp)) { in zdb_count_block()
5946 * To do this, we keep our own in-memory BRT. For each block in zdb_count_block()
5955 zbre_search.zbre_dva = bp->blk_dva[0]; in zdb_count_block()
5956 zbre = avl_find(&zcb->zcb_brt, &zbre_search, &where); in zdb_count_block()
5960 brt_entry_get_refcount(zcb->zcb_spa, bp); in zdb_count_block()
5964 zbre->zbre_dva = bp->blk_dva[0]; in zdb_count_block()
5965 zbre->zbre_refcount = refcnt; in zdb_count_block()
5966 avl_insert(&zcb->zcb_brt, zbre, where); in zdb_count_block()
5973 zcb->zcb_clone_asize += BP_GET_ASIZE(bp); in zdb_count_block()
5974 zcb->zcb_clone_blocks++; in zdb_count_block()
5976 zbre->zbre_refcount--; in zdb_count_block()
5977 if (zbre->zbre_refcount == 0) { in zdb_count_block()
5978 avl_remove(&zcb->zcb_brt, zbre); in zdb_count_block()
5992 zdb_blkstats_t *zb = &zcb->zcb_type[l][t]; in zdb_count_block()
5994 zb->zb_asize += BP_GET_ASIZE(bp); in zdb_count_block()
5995 zb->zb_lsize += BP_GET_LSIZE(bp); in zdb_count_block()
5996 zb->zb_psize += BP_GET_PSIZE(bp); in zdb_count_block()
5997 zb->zb_count++; in zdb_count_block()
6006 zb->zb_psize_histogram[idx]++; in zdb_count_block()
6008 zb->zb_gangs += BP_COUNT_GANG(bp); in zdb_count_block()
6012 if (DVA_GET_VDEV(&bp->blk_dva[0]) == in zdb_count_block()
6013 DVA_GET_VDEV(&bp->blk_dva[1])) { in zdb_count_block()
6014 zb->zb_ditto_samevdev++; in zdb_count_block()
6016 if (same_metaslab(zcb->zcb_spa, in zdb_count_block()
6017 DVA_GET_VDEV(&bp->blk_dva[0]), in zdb_count_block()
6018 DVA_GET_OFFSET(&bp->blk_dva[0]), in zdb_count_block()
6019 DVA_GET_OFFSET(&bp->blk_dva[1]))) in zdb_count_block()
6020 zb->zb_ditto_same_ms++; in zdb_count_block()
6024 equal = (DVA_GET_VDEV(&bp->blk_dva[0]) == in zdb_count_block()
6025 DVA_GET_VDEV(&bp->blk_dva[1])) + in zdb_count_block()
6026 (DVA_GET_VDEV(&bp->blk_dva[0]) == in zdb_count_block()
6027 DVA_GET_VDEV(&bp->blk_dva[2])) + in zdb_count_block()
6028 (DVA_GET_VDEV(&bp->blk_dva[1]) == in zdb_count_block()
6029 DVA_GET_VDEV(&bp->blk_dva[2])); in zdb_count_block()
6031 zb->zb_ditto_samevdev++; in zdb_count_block()
6033 if (DVA_GET_VDEV(&bp->blk_dva[0]) == in zdb_count_block()
6034 DVA_GET_VDEV(&bp->blk_dva[1]) && in zdb_count_block()
6035 same_metaslab(zcb->zcb_spa, in zdb_count_block()
6036 DVA_GET_VDEV(&bp->blk_dva[0]), in zdb_count_block()
6037 DVA_GET_OFFSET(&bp->blk_dva[0]), in zdb_count_block()
6038 DVA_GET_OFFSET(&bp->blk_dva[1]))) in zdb_count_block()
6039 zb->zb_ditto_same_ms++; in zdb_count_block()
6040 else if (DVA_GET_VDEV(&bp->blk_dva[0]) == in zdb_count_block()
6041 DVA_GET_VDEV(&bp->blk_dva[2]) && in zdb_count_block()
6042 same_metaslab(zcb->zcb_spa, in zdb_count_block()
6043 DVA_GET_VDEV(&bp->blk_dva[0]), in zdb_count_block()
6044 DVA_GET_OFFSET(&bp->blk_dva[0]), in zdb_count_block()
6045 DVA_GET_OFFSET(&bp->blk_dva[2]))) in zdb_count_block()
6046 zb->zb_ditto_same_ms++; in zdb_count_block()
6047 else if (DVA_GET_VDEV(&bp->blk_dva[1]) == in zdb_count_block()
6048 DVA_GET_VDEV(&bp->blk_dva[2]) && in zdb_count_block()
6049 same_metaslab(zcb->zcb_spa, in zdb_count_block()
6050 DVA_GET_VDEV(&bp->blk_dva[1]), in zdb_count_block()
6051 DVA_GET_OFFSET(&bp->blk_dva[1]), in zdb_count_block()
6052 DVA_GET_OFFSET(&bp->blk_dva[2]))) in zdb_count_block()
6053 zb->zb_ditto_same_ms++; in zdb_count_block()
6059 spa_config_exit(zcb->zcb_spa, SCL_CONFIG, FTAG); in zdb_count_block()
6062 zcb->zcb_embedded_blocks[BPE_GET_ETYPE(bp)]++; in zdb_count_block()
6063 zcb->zcb_embedded_histogram[BPE_GET_ETYPE(bp)] in zdb_count_block()
6072 int bin = highbit64(BP_GET_PSIZE(bp)) - 1; in zdb_count_block()
6074 zcb->zcb_psize_count[bin]++; in zdb_count_block()
6075 zcb->zcb_psize_len[bin] += BP_GET_PSIZE(bp); in zdb_count_block()
6076 zcb->zcb_psize_total += BP_GET_PSIZE(bp); in zdb_count_block()
6078 bin = highbit64(BP_GET_LSIZE(bp)) - 1; in zdb_count_block()
6080 zcb->zcb_lsize_count[bin]++; in zdb_count_block()
6081 zcb->zcb_lsize_len[bin] += BP_GET_LSIZE(bp); in zdb_count_block()
6082 zcb->zcb_lsize_total += BP_GET_LSIZE(bp); in zdb_count_block()
6084 bin = highbit64(BP_GET_ASIZE(bp)) - 1; in zdb_count_block()
6086 zcb->zcb_asize_count[bin]++; in zdb_count_block()
6087 zcb->zcb_asize_len[bin] += BP_GET_ASIZE(bp); in zdb_count_block()
6088 zcb->zcb_asize_total += BP_GET_ASIZE(bp); in zdb_count_block()
6093 VERIFY0(zio_wait(zio_claim(NULL, zcb->zcb_spa, in zdb_count_block()
6094 spa_min_claim_txg(zcb->zcb_spa), bp, NULL, NULL, in zdb_count_block()
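The comment in the zdb_count_block() hits above describes how cloned blocks are handled: keep a private in-memory BRT keyed by the block's first DVA, record the on-disk refcount the first time a block is seen, and count each later sighting as a clone until that refcount drains to zero. A toy version of the same bookkeeping, using a linked list and an invented fake_brt_refcount() in place of the AVL tree and brt_entry_get_refcount():

    #include <stdint.h>
    #include <stdlib.h>
    #include <stdio.h>

    /* simplified stand-in for the zdb_brt_entry_t tracking in zdb */
    typedef struct clone_ent {
        uint64_t ce_vdev;
        uint64_t ce_offset;
        uint64_t ce_refcount;
        struct clone_ent *ce_next;
    } clone_ent_t;

    static clone_ent_t *clone_list;
    static uint64_t clone_blocks, clone_asize;

    /* hypothetical stand-in for the on-disk BRT refcount lookup */
    static uint64_t
    fake_brt_refcount(uint64_t vdev, uint64_t offset)
    {
        (void) vdev; (void) offset;
        return (2);    /* pretend every tracked block has two extra refs */
    }

    static void
    count_maybe_cloned(uint64_t vdev, uint64_t offset, uint64_t asize)
    {
        clone_ent_t **cpp, *ce;

        for (cpp = &clone_list; (ce = *cpp) != NULL; cpp = &ce->ce_next)
            if (ce->ce_vdev == vdev && ce->ce_offset == offset)
                break;

        if (ce == NULL) {
            /* first sighting: remember the refcount for this DVA */
            ce = calloc(1, sizeof (*ce));
            ce->ce_vdev = vdev;
            ce->ce_offset = offset;
            ce->ce_refcount = fake_brt_refcount(vdev, offset);
            ce->ce_next = clone_list;
            clone_list = ce;
        } else {
            /* later sightings are clones; drop the entry at zero */
            clone_blocks++;
            clone_asize += asize;
            if (--ce->ce_refcount == 0) {
                *cpp = ce->ce_next;
                free(ce);
            }
        }
    }

    int
    main(void)
    {
        for (int i = 0; i < 3; i++)     /* same DVA seen three times */
            count_maybe_cloned(0, 0x4000, 4096);
        printf("cloned blocks %llu, asize %llu\n",
            (unsigned long long)clone_blocks,
            (unsigned long long)clone_asize);
        return (0);
    }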
6101 spa_t *spa = zio->io_spa; in zdb_blkptr_done()
6102 blkptr_t *bp = zio->io_bp; in zdb_blkptr_done()
6103 int ioerr = zio->io_error; in zdb_blkptr_done()
6104 zdb_cb_t *zcb = zio->io_private; in zdb_blkptr_done()
6105 zbookmark_phys_t *zb = &zio->io_bookmark; in zdb_blkptr_done()
6107 mutex_enter(&spa->spa_scrub_lock); in zdb_blkptr_done()
6108 spa->spa_load_verify_bytes -= BP_GET_PSIZE(bp); in zdb_blkptr_done()
6109 cv_broadcast(&spa->spa_scrub_io_cv); in zdb_blkptr_done()
6111 if (ioerr && !(zio->io_flags & ZIO_FLAG_SPECULATIVE)) { in zdb_blkptr_done()
6114 zcb->zcb_haderrors = 1; in zdb_blkptr_done()
6115 zcb->zcb_errors[ioerr]++; in zdb_blkptr_done()
6124 "<%llu, %llu, %lld, %llx> %s -- skipping\n", in zdb_blkptr_done()
6126 (u_longlong_t)zb->zb_objset, in zdb_blkptr_done()
6127 (u_longlong_t)zb->zb_object, in zdb_blkptr_done()
6128 (u_longlong_t)zb->zb_level, in zdb_blkptr_done()
6129 (u_longlong_t)zb->zb_blkid, in zdb_blkptr_done()
6132 mutex_exit(&spa->spa_scrub_lock); in zdb_blkptr_done()
6134 abd_free(zio->io_abd); in zdb_blkptr_done()
6145 if (zb->zb_level == ZB_DNODE_LEVEL) in zdb_blkptr_cb()
6153 (u_longlong_t)zb->zb_objset, in zdb_blkptr_cb()
6154 (u_longlong_t)zb->zb_object, in zdb_blkptr_cb()
6155 (longlong_t)zb->zb_level, in zdb_blkptr_cb()
6177 if (zb->zb_level == ZB_ZIL_LEVEL) in zdb_blkptr_cb()
6180 mutex_enter(&spa->spa_scrub_lock); in zdb_blkptr_cb()
6181 while (spa->spa_load_verify_bytes > max_inflight_bytes) in zdb_blkptr_cb()
6182 cv_wait(&spa->spa_scrub_io_cv, &spa->spa_scrub_lock); in zdb_blkptr_cb()
6183 spa->spa_load_verify_bytes += size; in zdb_blkptr_cb()
6184 mutex_exit(&spa->spa_scrub_lock); in zdb_blkptr_cb()
6190 zcb->zcb_readfails = 0; in zdb_blkptr_cb()
6199 if (dump_opt['b'] < 5 && gethrtime() > zcb->zcb_lastprint + NANOSEC) { in zdb_blkptr_cb()
6202 uint64_t bytes = zcb->zcb_type[ZB_TOTAL][ZDB_OT_TOTAL].zb_asize; in zdb_blkptr_cb()
6204 1 + bytes / (1 + ((now - zcb->zcb_start) / 1000 / 1000)); in zdb_blkptr_cb()
6206 (zcb->zcb_totalasize - bytes) / 1024 / kb_per_sec; in zdb_blkptr_cb()
6221 zcb->zcb_lastprint = now; in zdb_blkptr_cb()
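zdb_blkptr_cb() and zdb_blkptr_done() above throttle the verification reads by tracking outstanding bytes under spa_scrub_lock: the issuer waits on spa_scrub_io_cv while more than max_inflight_bytes are in flight, and each completion subtracts its bytes and broadcasts. The same pattern as a self-contained pthread sketch (the limit value below is arbitrary):

    #include <pthread.h>
    #include <stdint.h>
    #include <stdio.h>

    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
    static pthread_cond_t cv = PTHREAD_COND_INITIALIZER;
    static uint64_t inflight_bytes;
    static const uint64_t max_inflight_bytes = 16 * 1024 * 1024;

    static void
    io_issue(uint64_t size)
    {
        pthread_mutex_lock(&lock);
        while (inflight_bytes > max_inflight_bytes)
            pthread_cond_wait(&cv, &lock);
        inflight_bytes += size;
        pthread_mutex_unlock(&lock);
        /* ... hand the read off to an async pipeline here ... */
    }

    static void
    io_done(uint64_t size)
    {
        pthread_mutex_lock(&lock);
        inflight_bytes -= size;
        pthread_cond_broadcast(&cv);
        pthread_mutex_unlock(&lock);
    }

    int
    main(void)
    {
        io_issue(128 * 1024);
        io_done(128 * 1024);
        printf("inflight now %llu\n", (unsigned long long)inflight_bytes);
        return (0);
    }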
6233 (u_longlong_t)vd->vdev_id, (u_longlong_t)start, (u_longlong_t)size); in zdb_leak()
6246 uint64_t offset = sme->sme_offset; in load_unflushed_svr_segs_cb()
6247 uint64_t size = sme->sme_run; in load_unflushed_svr_segs_cb()
6250 if (sme->sme_vdev != svr->svr_vdev_id) in load_unflushed_svr_segs_cb()
6253 vdev_t *vd = vdev_lookup_top(spa, sme->sme_vdev); in load_unflushed_svr_segs_cb()
6254 metaslab_t *ms = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in load_unflushed_svr_segs_cb()
6255 ASSERT(sme->sme_type == SM_ALLOC || sme->sme_type == SM_FREE); in load_unflushed_svr_segs_cb()
6260 if (sme->sme_type == SM_ALLOC) in load_unflushed_svr_segs_cb()
6261 range_tree_add(svr->svr_allocd_segs, offset, size); in load_unflushed_svr_segs_cb()
6263 range_tree_remove(svr->svr_allocd_segs, offset, size); in load_unflushed_svr_segs_cb()
6283 spa_min_claim_txg(vd->vdev_spa))); in claim_segment_impl_cb()
6308 if (spa->spa_vdev_removal == NULL) in zdb_claim_removing()
6313 spa_vdev_removal_t *svr = spa->spa_vdev_removal; in zdb_claim_removing()
6314 vdev_t *vd = vdev_lookup_top(spa, svr->svr_vdev_id); in zdb_claim_removing()
6315 vdev_indirect_mapping_t *vim = vd->vdev_indirect_mapping; in zdb_claim_removing()
6317 ASSERT0(range_tree_space(svr->svr_allocd_segs)); in zdb_claim_removing()
6320 for (uint64_t msi = 0; msi < vd->vdev_ms_count; msi++) { in zdb_claim_removing()
6321 metaslab_t *msp = vd->vdev_ms[msi]; in zdb_claim_removing()
6324 if (msp->ms_sm != NULL) in zdb_claim_removing()
6325 VERIFY0(space_map_load(msp->ms_sm, allocs, SM_ALLOC)); in zdb_claim_removing()
6326 range_tree_vacate(allocs, range_tree_add, svr->svr_allocd_segs); in zdb_claim_removing()
6337 range_tree_clear(svr->svr_allocd_segs, in zdb_claim_removing()
6339 vd->vdev_asize - vdev_indirect_mapping_max_offset(vim)); in zdb_claim_removing()
6341 zcb->zcb_removing_size += range_tree_space(svr->svr_allocd_segs); in zdb_claim_removing()
6342 range_tree_vacate(svr->svr_allocd_segs, claim_segment_cb, vd); in zdb_claim_removing()
6353 spa_t *spa = zcb->zcb_spa; in increment_indirect_mapping_cb()
6355 const dva_t *dva = &bp->blk_dva[0]; in increment_indirect_mapping_cb()
6362 vd = vdev_lookup_top(zcb->zcb_spa, DVA_GET_VDEV(dva)); in increment_indirect_mapping_cb()
6366 ASSERT(vd->vdev_indirect_config.vic_mapping_object != 0); in increment_indirect_mapping_cb()
6367 ASSERT3P(zcb->zcb_vd_obsolete_counts[vd->vdev_id], !=, NULL); in increment_indirect_mapping_cb()
6370 vd->vdev_indirect_mapping, in increment_indirect_mapping_cb()
6372 zcb->zcb_vd_obsolete_counts[vd->vdev_id]); in increment_indirect_mapping_cb()
6380 vdev_indirect_mapping_t *vim = vd->vdev_indirect_mapping; in zdb_load_obsolete_counts()
6381 spa_t *spa = vd->vdev_spa; in zdb_load_obsolete_counts()
6383 &spa->spa_condensing_indirect_phys; in zdb_load_obsolete_counts()
6388 EQUIV(obsolete_sm_object != 0, vd->vdev_obsolete_sm != NULL); in zdb_load_obsolete_counts()
6390 if (vd->vdev_obsolete_sm != NULL) { in zdb_load_obsolete_counts()
6392 vd->vdev_obsolete_sm); in zdb_load_obsolete_counts()
6394 if (scip->scip_vdev == vd->vdev_id && in zdb_load_obsolete_counts()
6395 scip->scip_prev_obsolete_sm_object != 0) { in zdb_load_obsolete_counts()
6397 VERIFY0(space_map_open(&prev_obsolete_sm, spa->spa_meta_objset, in zdb_load_obsolete_counts()
6398 scip->scip_prev_obsolete_sm_object, 0, vd->vdev_asize, 0)); in zdb_load_obsolete_counts()
6415 vdev_t *vd = cseea->cseea_vd; in checkpoint_sm_exclude_entry_cb()
6416 metaslab_t *ms = vd->vdev_ms[sme->sme_offset >> vd->vdev_ms_shift]; in checkpoint_sm_exclude_entry_cb()
6417 uint64_t end = sme->sme_offset + sme->sme_run; in checkpoint_sm_exclude_entry_cb()
6419 ASSERT(sme->sme_type == SM_FREE); in checkpoint_sm_exclude_entry_cb()
6435 * that handles metaslab-crossing segments in the future. in checkpoint_sm_exclude_entry_cb()
6437 VERIFY3U(sme->sme_offset, >=, ms->ms_start); in checkpoint_sm_exclude_entry_cb()
6438 VERIFY3U(end, <=, ms->ms_start + ms->ms_size); in checkpoint_sm_exclude_entry_cb()
6444 mutex_enter(&ms->ms_lock); in checkpoint_sm_exclude_entry_cb()
6445 range_tree_remove(ms->ms_allocatable, sme->sme_offset, sme->sme_run); in checkpoint_sm_exclude_entry_cb()
6446 mutex_exit(&ms->ms_lock); in checkpoint_sm_exclude_entry_cb()
6448 cseea->cseea_checkpoint_size += sme->sme_run; in checkpoint_sm_exclude_entry_cb()
6455 spa_t *spa = vd->vdev_spa; in zdb_leak_init_vdev_exclude_checkpoint()
6463 if (vd->vdev_top_zap == 0) in zdb_leak_init_vdev_exclude_checkpoint()
6478 if (zap_contains(spa_meta_objset(spa), vd->vdev_top_zap, in zdb_leak_init_vdev_exclude_checkpoint()
6482 VERIFY0(zap_lookup(spa_meta_objset(spa), vd->vdev_top_zap, in zdb_leak_init_vdev_exclude_checkpoint()
6491 checkpoint_sm_obj, 0, vd->vdev_asize, vd->vdev_ashift)); in zdb_leak_init_vdev_exclude_checkpoint()
6498 zcb->zcb_checkpoint_size += cseea.cseea_checkpoint_size; in zdb_leak_init_vdev_exclude_checkpoint()
6506 vdev_t *rvd = spa->spa_root_vdev; in zdb_leak_init_exclude_checkpoint()
6507 for (uint64_t c = 0; c < rvd->vdev_children; c++) { in zdb_leak_init_exclude_checkpoint()
6508 ASSERT3U(c, ==, rvd->vdev_child[c]->vdev_id); in zdb_leak_init_exclude_checkpoint()
6509 zdb_leak_init_vdev_exclude_checkpoint(rvd->vdev_child[c], zcb); in zdb_leak_init_exclude_checkpoint()
6519 uint64_t offset = sme->sme_offset; in count_unflushed_space_cb()
6520 uint64_t vdev_id = sme->sme_vdev; in count_unflushed_space_cb()
6526 metaslab_t *ms = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in count_unflushed_space_cb()
6527 ASSERT(sme->sme_type == SM_ALLOC || sme->sme_type == SM_FREE); in count_unflushed_space_cb()
6532 if (sme->sme_type == SM_ALLOC) in count_unflushed_space_cb()
6533 *ualloc_space += sme->sme_run; in count_unflushed_space_cb()
6535 *ualloc_space -= sme->sme_run; in count_unflushed_space_cb()
6557 uint64_t offset = sme->sme_offset; in load_unflushed_cb()
6558 uint64_t size = sme->sme_run; in load_unflushed_cb()
6559 uint64_t vdev_id = sme->sme_vdev; in load_unflushed_cb()
6567 metaslab_t *ms = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in load_unflushed_cb()
6569 ASSERT(sme->sme_type == SM_ALLOC || sme->sme_type == SM_FREE); in load_unflushed_cb()
6575 if (*uic_maptype == sme->sme_type) in load_unflushed_cb()
6576 range_tree_add(ms->ms_allocatable, offset, size); in load_unflushed_cb()
6578 range_tree_remove(ms->ms_allocatable, offset, size); in load_unflushed_cb()
6592 vdev_t *rvd = spa->spa_root_vdev; in load_concrete_ms_allocatable_trees()
6593 for (uint64_t i = 0; i < rvd->vdev_children; i++) { in load_concrete_ms_allocatable_trees()
6594 vdev_t *vd = rvd->vdev_child[i]; in load_concrete_ms_allocatable_trees()
6596 ASSERT3U(i, ==, vd->vdev_id); in load_concrete_ms_allocatable_trees()
6598 if (vd->vdev_ops == &vdev_indirect_ops) in load_concrete_ms_allocatable_trees()
6601 for (uint64_t m = 0; m < vd->vdev_ms_count; m++) { in load_concrete_ms_allocatable_trees()
6602 metaslab_t *msp = vd->vdev_ms[m]; in load_concrete_ms_allocatable_trees()
6607 (longlong_t)vd->vdev_id, in load_concrete_ms_allocatable_trees()
6608 (longlong_t)msp->ms_id, in load_concrete_ms_allocatable_trees()
6609 (longlong_t)vd->vdev_ms_count); in load_concrete_ms_allocatable_trees()
6611 mutex_enter(&msp->ms_lock); in load_concrete_ms_allocatable_trees()
6612 range_tree_vacate(msp->ms_allocatable, NULL, NULL); in load_concrete_ms_allocatable_trees()
6616 * size-ordered tree, so clear the range_tree ops. in load_concrete_ms_allocatable_trees()
6618 msp->ms_allocatable->rt_ops = NULL; in load_concrete_ms_allocatable_trees()
6620 if (msp->ms_sm != NULL) { in load_concrete_ms_allocatable_trees()
6621 VERIFY0(space_map_load(msp->ms_sm, in load_concrete_ms_allocatable_trees()
6622 msp->ms_allocatable, maptype)); in load_concrete_ms_allocatable_trees()
6624 if (!msp->ms_loaded) in load_concrete_ms_allocatable_trees()
6625 msp->ms_loaded = B_TRUE; in load_concrete_ms_allocatable_trees()
6626 mutex_exit(&msp->ms_lock); in load_concrete_ms_allocatable_trees()
6634  * vim_idxp is an in-out parameter which (for indirect vdevs) is the
6642 vdev_indirect_mapping_t *vim = vd->vdev_indirect_mapping; in load_indirect_ms_allocatable_tree()
6644 mutex_enter(&msp->ms_lock); in load_indirect_ms_allocatable_tree()
6645 range_tree_vacate(msp->ms_allocatable, NULL, NULL); in load_indirect_ms_allocatable_tree()
6649 * size-ordered tree, so clear the range_tree ops. in load_indirect_ms_allocatable_tree()
6651 msp->ms_allocatable->rt_ops = NULL; in load_indirect_ms_allocatable_tree()
6656 &vim->vim_entries[*vim_idxp]; in load_indirect_ms_allocatable_tree()
6658 uint64_t ent_len = DVA_GET_ASIZE(&vimep->vimep_dst); in load_indirect_ms_allocatable_tree()
6659 ASSERT3U(ent_offset, >=, msp->ms_start); in load_indirect_ms_allocatable_tree()
6660 if (ent_offset >= msp->ms_start + msp->ms_size) in load_indirect_ms_allocatable_tree()
6668 msp->ms_start + msp->ms_size); in load_indirect_ms_allocatable_tree()
6669 range_tree_add(msp->ms_allocatable, ent_offset, ent_len); in load_indirect_ms_allocatable_tree()
6672 if (!msp->ms_loaded) in load_indirect_ms_allocatable_tree()
6673 msp->ms_loaded = B_TRUE; in load_indirect_ms_allocatable_tree()
6674 mutex_exit(&msp->ms_lock); in load_indirect_ms_allocatable_tree()
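load_indirect_ms_allocatable_tree() walks vim_entries with the in/out index described in the comment above: the entries are sorted by offset, so each metaslab consumes its slice and leaves the index pointing at the first entry of the next metaslab. A compact illustration of that pattern with a made-up entry array:

    #include <stdint.h>
    #include <stdio.h>

    typedef struct { uint64_t off; uint64_t len; } entry_t;

    static void
    walk_metaslab(const entry_t *ent, uint64_t nent, uint64_t *idxp,
        uint64_t ms_start, uint64_t ms_size)
    {
        for (; *idxp < nent; (*idxp)++) {
            if (ent[*idxp].off >= ms_start + ms_size)
                break;          /* belongs to a later metaslab */
            printf("  ms@%llx: entry %llx/%llx\n",
                (unsigned long long)ms_start,
                (unsigned long long)ent[*idxp].off,
                (unsigned long long)ent[*idxp].len);
        }
    }

    int
    main(void)
    {
        entry_t ent[] = { {0x1000, 0x800}, {0x5000, 0x400}, {0x12000, 0x200} };
        uint64_t idx = 0;

        walk_metaslab(ent, 3, &idx, 0x0, 0x10000);      /* first metaslab */
        walk_metaslab(ent, 3, &idx, 0x10000, 0x10000);  /* resumes at idx 2 */
        return (0);
    }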
6682 vdev_t *rvd = spa->spa_root_vdev; in zdb_leak_init_prepare_indirect_vdevs()
6683 for (uint64_t c = 0; c < rvd->vdev_children; c++) { in zdb_leak_init_prepare_indirect_vdevs()
6684 vdev_t *vd = rvd->vdev_child[c]; in zdb_leak_init_prepare_indirect_vdevs()
6686 ASSERT3U(c, ==, vd->vdev_id); in zdb_leak_init_prepare_indirect_vdevs()
6688 if (vd->vdev_ops != &vdev_indirect_ops) in zdb_leak_init_prepare_indirect_vdevs()
6696 zcb->zcb_vd_obsolete_counts[c] = zdb_load_obsolete_counts(vd); in zdb_leak_init_prepare_indirect_vdevs()
6707 vd->vdev_indirect_mapping; in zdb_leak_init_prepare_indirect_vdevs()
6709 for (uint64_t m = 0; m < vd->vdev_ms_count; m++) { in zdb_leak_init_prepare_indirect_vdevs()
6714 (longlong_t)vd->vdev_id, in zdb_leak_init_prepare_indirect_vdevs()
6715 (longlong_t)vd->vdev_ms[m]->ms_id, in zdb_leak_init_prepare_indirect_vdevs()
6716 (longlong_t)vd->vdev_ms_count); in zdb_leak_init_prepare_indirect_vdevs()
6718 load_indirect_ms_allocatable_tree(vd, vd->vdev_ms[m], in zdb_leak_init_prepare_indirect_vdevs()
6728 zcb->zcb_spa = spa; in zdb_leak_init()
6733 dsl_pool_t *dp = spa->spa_dsl_pool; in zdb_leak_init()
6734 vdev_t *rvd = spa->spa_root_vdev; in zdb_leak_init()
6741 spa->spa_normal_class->mc_ops = &zdb_metaslab_ops; in zdb_leak_init()
6742 spa->spa_log_class->mc_ops = &zdb_metaslab_ops; in zdb_leak_init()
6743 spa->spa_embedded_log_class->mc_ops = &zdb_metaslab_ops; in zdb_leak_init()
6745 zcb->zcb_vd_obsolete_counts = in zdb_leak_init()
6746 umem_zalloc(rvd->vdev_children * sizeof (uint32_t *), in zdb_leak_init()
6765 * order to avoid false-positives during leak-detection, we in zdb_leak_init()
6778 ASSERT3U(zcb->zcb_checkpoint_size, ==, spa_get_checkpoint_space(spa)); in zdb_leak_init()
6783 if (bpobj_is_open(&dp->dp_obsolete_bpobj)) { in zdb_leak_init()
6786 (void) bpobj_iterate_nofree(&dp->dp_obsolete_bpobj, in zdb_leak_init()
6795 vdev_indirect_mapping_t *vim = vd->vdev_indirect_mapping; in zdb_check_for_obsolete_leaks()
6803 &vim->vim_entries[i]; in zdb_check_for_obsolete_leaks()
6806 metaslab_t *msp = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in zdb_check_for_obsolete_leaks()
6813 inner_offset < DVA_GET_ASIZE(&vimep->vimep_dst); in zdb_check_for_obsolete_leaks()
6814 inner_offset += 1ULL << vd->vdev_ashift) { in zdb_check_for_obsolete_leaks()
6815 if (range_tree_contains(msp->ms_allocatable, in zdb_check_for_obsolete_leaks()
6816 offset + inner_offset, 1ULL << vd->vdev_ashift)) { in zdb_check_for_obsolete_leaks()
6817 obsolete_bytes += 1ULL << vd->vdev_ashift; in zdb_check_for_obsolete_leaks()
6821 int64_t bytes_leaked = obsolete_bytes - in zdb_check_for_obsolete_leaks()
6822 zcb->zcb_vd_obsolete_counts[vd->vdev_id][i]; in zdb_check_for_obsolete_leaks()
6823 ASSERT3U(DVA_GET_ASIZE(&vimep->vimep_dst), >=, in zdb_check_for_obsolete_leaks()
6824 zcb->zcb_vd_obsolete_counts[vd->vdev_id][i]); in zdb_check_for_obsolete_leaks()
6830 (u_longlong_t)vd->vdev_id, in zdb_check_for_obsolete_leaks()
6832 (u_longlong_t)DVA_GET_ASIZE(&vimep->vimep_dst), in zdb_check_for_obsolete_leaks()
6846 (u_longlong_t)vd->vdev_id, pct_leaked, in zdb_check_for_obsolete_leaks()
6850 "for vdev %llu -- %llx total bytes mismatched\n", in zdb_check_for_obsolete_leaks()
6851 (u_longlong_t)vd->vdev_id, in zdb_check_for_obsolete_leaks()
6857 zcb->zcb_vd_obsolete_counts[vd->vdev_id]); in zdb_check_for_obsolete_leaks()
6858 zcb->zcb_vd_obsolete_counts[vd->vdev_id] = NULL; in zdb_check_for_obsolete_leaks()
6870 vdev_t *rvd = spa->spa_root_vdev; in zdb_leak_fini()
6871 for (unsigned c = 0; c < rvd->vdev_children; c++) { in zdb_leak_fini()
6872 vdev_t *vd = rvd->vdev_child[c]; in zdb_leak_fini()
6874 if (zcb->zcb_vd_obsolete_counts[c] != NULL) { in zdb_leak_fini()
6878 for (uint64_t m = 0; m < vd->vdev_ms_count; m++) { in zdb_leak_fini()
6879 metaslab_t *msp = vd->vdev_ms[m]; in zdb_leak_fini()
6880 ASSERT3P(msp->ms_group, ==, (msp->ms_group->mg_class == in zdb_leak_fini()
6882 vd->vdev_log_mg : vd->vdev_mg); in zdb_leak_fini()
6897 if (vd->vdev_ops == &vdev_indirect_ops) { in zdb_leak_fini()
6898 range_tree_vacate(msp->ms_allocatable, in zdb_leak_fini()
6901 range_tree_vacate(msp->ms_allocatable, in zdb_leak_fini()
6904 if (msp->ms_loaded) { in zdb_leak_fini()
6905 msp->ms_loaded = B_FALSE; in zdb_leak_fini()
6910 umem_free(zcb->zcb_vd_obsolete_counts, in zdb_leak_fini()
6911 rvd->vdev_children * sizeof (uint32_t *)); in zdb_leak_fini()
6912 zcb->zcb_vd_obsolete_counts = NULL; in zdb_leak_fini()
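Taken together, zdb_leak_init() and zdb_leak_fini() implement the leak check: every segment the space maps call allocated is loaded into the per-metaslab trees, every block the traversal actually reaches is removed again, and whatever survives is handed to zdb_leak() for reporting. A toy version of that set difference, with a small bitmap of "sectors" standing in for the range trees:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define NSECTORS 16

    int
    main(void)
    {
        uint8_t allocated[NSECTORS];
        memset(allocated, 0, sizeof (allocated));

        /* space maps say sectors 2-5 and 9 are allocated */
        for (int s = 2; s <= 5; s++)
            allocated[s] = 1;
        allocated[9] = 1;

        /* the traversal only found references to sectors 2-5 */
        for (int s = 2; s <= 5; s++)
            allocated[s] = 0;

        for (int s = 0; s < NSECTORS; s++)
            if (allocated[s])
                printf("leaked sector %d\n", s);   /* reports sector 9 */
        return (0);
    }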
6940 objset_t *mos = spa->spa_meta_objset; in iterate_deleted_livelists()
6956 VERIFY0(dsl_deadlist_open(&ll, mos, attrp->za_first_integer)); in iterate_deleted_livelists()
6979 VERIFY0(dsl_process_sub_livelist(&dle->dle_bpobj, &blks, NULL, NULL)); in livelist_entry_count_blocks_cb()
7019 objset_t *mos = spa->spa_meta_objset; in deleted_livelists_dump_mos()
7031 const dva_t *dva1 = &((const zdb_brt_entry_t *)zcn1)->zbre_dva; in zdb_brt_entry_compare()
7032 const dva_t *dva2 = &((const zdb_brt_entry_t *)zcn2)->zbre_dva; in zdb_brt_entry_compare()
7059 avl_create(&zcb->zcb_brt, zdb_brt_entry_compare, in dump_block_stats()
7062 zcb->zcb_brt_is_active = B_TRUE; in dump_block_stats()
7080 * When leak detection is disabled (-L option) we still traverse the in dump_block_stats()
7087 * If there's a deferred-free bplist, process that first. in dump_block_stats()
7089 (void) bpobj_iterate_nofree(&spa->spa_deferred_bpobj, in dump_block_stats()
7093 (void) bpobj_iterate_nofree(&spa->spa_dsl_pool->dp_free_bpobj, in dump_block_stats()
7100 VERIFY3U(0, ==, bptree_iterate(spa->spa_meta_objset, in dump_block_stats()
7101 spa->spa_dsl_pool->dp_bptree_obj, B_FALSE, count_block_cb, in dump_block_stats()
7110 zcb->zcb_totalasize = metaslab_class_get_alloc(spa_normal_class(spa)); in dump_block_stats()
7111 zcb->zcb_totalasize += metaslab_class_get_alloc(spa_special_class(spa)); in dump_block_stats()
7112 zcb->zcb_totalasize += metaslab_class_get_alloc(spa_dedup_class(spa)); in dump_block_stats()
7113 zcb->zcb_totalasize += in dump_block_stats()
7115 zcb->zcb_start = zcb->zcb_lastprint = gethrtime(); in dump_block_stats()
7125 (void) zio_wait(spa->spa_async_zio_root[c]); in dump_block_stats()
7126 spa->spa_async_zio_root[c] = zio_root(spa, NULL, NULL, in dump_block_stats()
7131 ASSERT0(spa->spa_load_verify_bytes); in dump_block_stats()
7137 zcb->zcb_haderrors |= err; in dump_block_stats()
7139 if (zcb->zcb_haderrors) { in dump_block_stats()
7143 if (zcb->zcb_errors[e] != 0) { in dump_block_stats()
7145 e, (u_longlong_t)zcb->zcb_errors[e]); in dump_block_stats()
7155 tzb = &zcb->zcb_type[ZB_TOTAL][ZDB_OT_TOTAL]; in dump_block_stats()
7167 tzb->zb_asize - zcb->zcb_dedup_asize - zcb->zcb_clone_asize + in dump_block_stats()
7168 zcb->zcb_removing_size + zcb->zcb_checkpoint_size; in dump_block_stats()
7179 (longlong_t)(total_alloc - total_found)); in dump_block_stats()
7182 if (tzb->zb_count == 0) { in dump_block_stats()
7188 (void) printf("\t%-16s %14llu\n", "bp count:", in dump_block_stats()
7189 (u_longlong_t)tzb->zb_count); in dump_block_stats()
7190 (void) printf("\t%-16s %14llu\n", "ganged count:", in dump_block_stats()
7191 (longlong_t)tzb->zb_gangs); in dump_block_stats()
7192 (void) printf("\t%-16s %14llu avg: %6llu\n", "bp logical:", in dump_block_stats()
7193 (u_longlong_t)tzb->zb_lsize, in dump_block_stats()
7194 (u_longlong_t)(tzb->zb_lsize / tzb->zb_count)); in dump_block_stats()
7195 (void) printf("\t%-16s %14llu avg: %6llu compression: %6.2f\n", in dump_block_stats()
7196 "bp physical:", (u_longlong_t)tzb->zb_psize, in dump_block_stats()
7197 (u_longlong_t)(tzb->zb_psize / tzb->zb_count), in dump_block_stats()
7198 (double)tzb->zb_lsize / tzb->zb_psize); in dump_block_stats()
7199 (void) printf("\t%-16s %14llu avg: %6llu compression: %6.2f\n", in dump_block_stats()
7200 "bp allocated:", (u_longlong_t)tzb->zb_asize, in dump_block_stats()
7201 (u_longlong_t)(tzb->zb_asize / tzb->zb_count), in dump_block_stats()
7202 (double)tzb->zb_lsize / tzb->zb_asize); in dump_block_stats()
7203 (void) printf("\t%-16s %14llu ref>1: %6llu deduplication: %6.2f\n", in dump_block_stats()
7204 "bp deduped:", (u_longlong_t)zcb->zcb_dedup_asize, in dump_block_stats()
7205 (u_longlong_t)zcb->zcb_dedup_blocks, in dump_block_stats()
7206 (double)zcb->zcb_dedup_asize / tzb->zb_asize + 1.0); in dump_block_stats()
7207 (void) printf("\t%-16s %14llu count: %6llu\n", in dump_block_stats()
7208 "bp cloned:", (u_longlong_t)zcb->zcb_clone_asize, in dump_block_stats()
7209 (u_longlong_t)zcb->zcb_clone_blocks); in dump_block_stats()
7210 (void) printf("\t%-16s %14llu used: %5.2f%%\n", "Normal class:", in dump_block_stats()
7213 if (spa_special_class(spa)->mc_allocator[0].mca_rotor != NULL) { in dump_block_stats()
7219 (void) printf("\t%-16s %14llu used: %5.2f%%\n", in dump_block_stats()
7224 if (spa_dedup_class(spa)->mc_allocator[0].mca_rotor != NULL) { in dump_block_stats()
7230 (void) printf("\t%-16s %14llu used: %5.2f%%\n", in dump_block_stats()
7235 if (spa_embedded_log_class(spa)->mc_allocator[0].mca_rotor != NULL) { in dump_block_stats()
7241 (void) printf("\t%-16s %14llu used: %5.2f%%\n", in dump_block_stats()
7247 if (zcb->zcb_embedded_blocks[i] == 0) in dump_block_stats()
7250 (void) printf("\tadditional, non-pointer bps of type %u: " in dump_block_stats()
7252 i, (u_longlong_t)zcb->zcb_embedded_blocks[i]); in dump_block_stats()
7257 dump_histogram(zcb->zcb_embedded_histogram[i], in dump_block_stats()
7258 sizeof (zcb->zcb_embedded_histogram[i]) / in dump_block_stats()
7259 sizeof (zcb->zcb_embedded_histogram[i][0]), 0); in dump_block_stats()
7263 if (tzb->zb_ditto_samevdev != 0) { in dump_block_stats()
7265 (longlong_t)tzb->zb_ditto_samevdev); in dump_block_stats()
7267 if (tzb->zb_ditto_same_ms != 0) { in dump_block_stats()
7269 (longlong_t)tzb->zb_ditto_same_ms); in dump_block_stats()
7272 for (uint64_t v = 0; v < spa->spa_root_vdev->vdev_children; v++) { in dump_block_stats()
7273 vdev_t *vd = spa->spa_root_vdev->vdev_child[v]; in dump_block_stats()
7274 vdev_indirect_mapping_t *vim = vd->vdev_indirect_mapping; in dump_block_stats()
7286 (longlong_t)vd->vdev_id, in dump_block_stats()
7320 typename = zdb_ot_extname[t - DMU_OT_NUMTYPES]; in dump_block_stats()
7322 if (zcb->zcb_type[ZB_TOTAL][t].zb_asize == 0) { in dump_block_stats()
7325 "-", in dump_block_stats()
7326 "-", in dump_block_stats()
7327 "-", in dump_block_stats()
7328 "-", in dump_block_stats()
7329 "-", in dump_block_stats()
7330 "-", in dump_block_stats()
7331 "-", in dump_block_stats()
7336 for (l = ZB_TOTAL - 1; l >= -1; l--) { in dump_block_stats()
7337 level = (l == -1 ? ZB_TOTAL : l); in dump_block_stats()
7338 zb = &zcb->zcb_type[level][t]; in dump_block_stats()
7340 if (zb->zb_asize == 0) in dump_block_stats()
7345 mdstats->zb_count += zb->zb_count; in dump_block_stats()
7346 mdstats->zb_lsize += zb->zb_lsize; in dump_block_stats()
7347 mdstats->zb_psize += zb->zb_psize; in dump_block_stats()
7348 mdstats->zb_asize += zb->zb_asize; in dump_block_stats()
7349 mdstats->zb_gangs += zb->zb_gangs; in dump_block_stats()
7355 if (level == 0 && zb->zb_asize == in dump_block_stats()
7356 zcb->zcb_type[ZB_TOTAL][t].zb_asize) in dump_block_stats()
7359 zdb_nicenum(zb->zb_count, csize, in dump_block_stats()
7361 zdb_nicenum(zb->zb_lsize, lsize, in dump_block_stats()
7363 zdb_nicenum(zb->zb_psize, psize, in dump_block_stats()
7365 zdb_nicenum(zb->zb_asize, asize, in dump_block_stats()
7367 zdb_nicenum(zb->zb_asize / zb->zb_count, avg, in dump_block_stats()
7369 zdb_nicenum(zb->zb_gangs, gang, sizeof (gang)); in dump_block_stats()
7374 (double)zb->zb_lsize / zb->zb_psize, in dump_block_stats()
7375 100.0 * zb->zb_asize / tzb->zb_asize); in dump_block_stats()
7383 if (dump_opt['b'] >= 3 && zb->zb_gangs > 0) { in dump_block_stats()
7390 "(in 512-byte sectors): " in dump_block_stats()
7392 dump_histogram(zb->zb_psize_histogram, in dump_block_stats()
7397 zdb_nicenum(mdstats->zb_count, csize, in dump_block_stats()
7399 zdb_nicenum(mdstats->zb_lsize, lsize, in dump_block_stats()
7401 zdb_nicenum(mdstats->zb_psize, psize, in dump_block_stats()
7403 zdb_nicenum(mdstats->zb_asize, asize, in dump_block_stats()
7405 zdb_nicenum(mdstats->zb_asize / mdstats->zb_count, avg, in dump_block_stats()
7407 zdb_nicenum(mdstats->zb_gangs, gang, sizeof (gang)); in dump_block_stats()
7412 (double)mdstats->zb_lsize / mdstats->zb_psize, in dump_block_stats()
7413 100.0 * mdstats->zb_asize / tzb->zb_asize); in dump_block_stats()
7431 if (zcb->zcb_haderrors) { in dump_block_stats()
7459 if (zb->zb_level == ZB_DNODE_LEVEL || BP_IS_HOLE(bp) || in zdb_ddt_add_cb()
7463 if (dump_opt['S'] > 1 && zb->zb_level == ZB_ROOT_LEVEL) { in zdb_ddt_add_cb()
7466 (u_longlong_t)zb->zb_objset, in zdb_ddt_add_cb()
7481 zdde->zdde_key = zdde_search.zdde_key; in zdb_ddt_add_cb()
7485 zdde->zdde_ref_blocks += 1; in zdb_ddt_add_cb()
7486 zdde->zdde_ref_lsize += BP_GET_LSIZE(bp); in zdb_ddt_add_cb()
7487 zdde->zdde_ref_psize += BP_GET_PSIZE(bp); in zdb_ddt_add_cb()
7488 zdde->zdde_ref_dsize += bp_get_dsize_sync(spa, bp); in zdb_ddt_add_cb()
7513 uint64_t refcnt = zdde->zdde_ref_blocks; in dump_simulated_ddt()
7516 ddt_stat_t *dds = &ddh_total.ddh_stat[highbit64(refcnt) - 1]; in dump_simulated_ddt()
7518 dds->dds_blocks += zdde->zdde_ref_blocks / refcnt; in dump_simulated_ddt()
7519 dds->dds_lsize += zdde->zdde_ref_lsize / refcnt; in dump_simulated_ddt()
7520 dds->dds_psize += zdde->zdde_ref_psize / refcnt; in dump_simulated_ddt()
7521 dds->dds_dsize += zdde->zdde_ref_dsize / refcnt; in dump_simulated_ddt()
7523 dds->dds_ref_blocks += zdde->zdde_ref_blocks; in dump_simulated_ddt()
7524 dds->dds_ref_lsize += zdde->zdde_ref_lsize; in dump_simulated_ddt()
7525 dds->dds_ref_psize += zdde->zdde_ref_psize; in dump_simulated_ddt()
7526 dds->dds_ref_dsize += zdde->zdde_ref_dsize; in dump_simulated_ddt()
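dump_simulated_ddt() rolls the simulated table up by reference count: each unique block contributes its per-copy sizes once (the ref totals divided by refcnt) plus its full referenced totals, and the dedup ratio can then be derived as referenced over unique. A minimal sketch of that arithmetic on invented entries, tracking only lsize:

    #include <stdint.h>
    #include <stdio.h>

    typedef struct {
        uint64_t ref_blocks;   /* number of references to this block */
        uint64_t ref_lsize;    /* total logical size of those references */
    } sim_entry_t;

    int
    main(void)
    {
        sim_entry_t tab[] = {
            { 1, 128 * 1024 },          /* not deduped */
            { 3, 3 * 128 * 1024 },      /* referenced three times */
        };
        uint64_t uniq_lsize = 0, ref_lsize = 0;

        for (size_t i = 0; i < sizeof (tab) / sizeof (tab[0]); i++) {
            uint64_t refcnt = tab[i].ref_blocks;

            uniq_lsize += tab[i].ref_lsize / refcnt;
            ref_lsize += tab[i].ref_lsize;
        }
        printf("dedup ratio = %.2f\n", (double)ref_lsize / uniq_lsize);
        return (0);
    }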
7557 &spa->spa_condensing_indirect_phys; in verify_device_removal_feature_counts()
7558 if (scip->scip_next_mapping_object != 0) { in verify_device_removal_feature_counts()
7559 vdev_t *vd = spa->spa_root_vdev->vdev_child[scip->scip_vdev]; in verify_device_removal_feature_counts()
7560 ASSERT(scip->scip_prev_obsolete_sm_object != 0); in verify_device_removal_feature_counts()
7561 ASSERT3P(vd->vdev_ops, ==, &vdev_indirect_ops); in verify_device_removal_feature_counts()
7565 (u_longlong_t)scip->scip_vdev, in verify_device_removal_feature_counts()
7566 (u_longlong_t)scip->scip_next_mapping_object, in verify_device_removal_feature_counts()
7567 (u_longlong_t)scip->scip_prev_obsolete_sm_object); in verify_device_removal_feature_counts()
7568 if (scip->scip_prev_obsolete_sm_object != 0) { in verify_device_removal_feature_counts()
7571 spa->spa_meta_objset, in verify_device_removal_feature_counts()
7572 scip->scip_prev_obsolete_sm_object, in verify_device_removal_feature_counts()
7573 0, vd->vdev_asize, 0)); in verify_device_removal_feature_counts()
7574 dump_spacemap(spa->spa_meta_objset, prev_obsolete_sm); in verify_device_removal_feature_counts()
7582 for (uint64_t i = 0; i < spa->spa_root_vdev->vdev_children; i++) { in verify_device_removal_feature_counts()
7583 vdev_t *vd = spa->spa_root_vdev->vdev_child[i]; in verify_device_removal_feature_counts()
7584 vdev_indirect_config_t *vic = &vd->vdev_indirect_config; in verify_device_removal_feature_counts()
7586 if (vic->vic_mapping_object != 0) { in verify_device_removal_feature_counts()
7587 ASSERT(vd->vdev_ops == &vdev_indirect_ops || in verify_device_removal_feature_counts()
7588 vd->vdev_removing); in verify_device_removal_feature_counts()
7591 if (vd->vdev_indirect_mapping->vim_havecounts) { in verify_device_removal_feature_counts()
7599 ASSERT(vic->vic_mapping_object != 0); in verify_device_removal_feature_counts()
7606 ASSERT(vic->vic_mapping_object != 0); in verify_device_removal_feature_counts()
7676 spa->spa_import_flags |= ZFS_IMPORT_SKIP_MMP; in zdb_set_skip_mmp()
7694 * The function returns a newly-allocated copy of the name of the
7709 size_t poolname_len = path_start - target; in import_checkpointed_state()
7726 if (asprintf(&bogus_name, "%s%s", poolname, BOGUS_SUFFIX) == -1) { in import_checkpointed_state()
7744 if (asprintf(new_path, "%s%s", bogus_name, path_start) == -1) { in import_checkpointed_state()
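import_checkpointed_state() splits the target at the first '/' to recover the pool name, appends a suffix to form the name the checkpointed state is imported under, and glues the dataset path back on with asprintf(). A stripped-down sketch of that string handling; the suffix literal below merely mirrors the BOGUS_SUFFIX idea and the sample target is invented:

    #define _GNU_SOURCE    /* for asprintf() on glibc */
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define SUFFIX "_CHECKPOINTED_UNIVERSE"   /* placeholder suffix */

    int
    main(void)
    {
        const char *target = "tank/fs/data";
        const char *path_start = strchr(target, '/');   /* assumed present */
        size_t poolname_len = path_start - target;
        char poolname[256];
        char *bogus_name, *new_path;

        (void) snprintf(poolname, sizeof (poolname), "%.*s",
            (int)poolname_len, target);
        if (asprintf(&bogus_name, "%s%s", poolname, SUFFIX) == -1)
            return (1);
        if (asprintf(&new_path, "%s%s", bogus_name, path_start) == -1)
            return (1);

        printf("import as: %s\nrewritten path: %s\n", bogus_name, new_path);
        free(bogus_name);
        free(new_path);
        return (0);
    }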
7772 vdev_t *vd = vcsec->vcsec_vd; in verify_checkpoint_sm_entry_cb()
7773 metaslab_t *ms = vd->vdev_ms[sme->sme_offset >> vd->vdev_ms_shift]; in verify_checkpoint_sm_entry_cb()
7774 uint64_t end = sme->sme_offset + sme->sme_run; in verify_checkpoint_sm_entry_cb()
7776 ASSERT(sme->sme_type == SM_FREE); in verify_checkpoint_sm_entry_cb()
7778 if ((vcsec->vcsec_entryid % ENTRIES_PER_PROGRESS_UPDATE) == 0) { in verify_checkpoint_sm_entry_cb()
7781 (longlong_t)vd->vdev_id, in verify_checkpoint_sm_entry_cb()
7782 (longlong_t)vcsec->vcsec_entryid, in verify_checkpoint_sm_entry_cb()
7783 (longlong_t)vcsec->vcsec_num_entries); in verify_checkpoint_sm_entry_cb()
7785 vcsec->vcsec_entryid++; in verify_checkpoint_sm_entry_cb()
7790 VERIFY3U(sme->sme_offset, >=, ms->ms_start); in verify_checkpoint_sm_entry_cb()
7791 VERIFY3U(end, <=, ms->ms_start + ms->ms_size); in verify_checkpoint_sm_entry_cb()
7798 mutex_enter(&ms->ms_lock); in verify_checkpoint_sm_entry_cb()
7799 range_tree_verify_not_present(ms->ms_allocatable, in verify_checkpoint_sm_entry_cb()
7800 sme->sme_offset, sme->sme_run); in verify_checkpoint_sm_entry_cb()
7801 mutex_exit(&ms->ms_lock); in verify_checkpoint_sm_entry_cb()
7823 vdev_t *ckpoint_rvd = checkpoint->spa_root_vdev; in verify_checkpoint_vdev_spacemaps()
7824 vdev_t *current_rvd = current->spa_root_vdev; in verify_checkpoint_vdev_spacemaps()
7828 for (uint64_t c = 0; c < ckpoint_rvd->vdev_children; c++) { in verify_checkpoint_vdev_spacemaps()
7829 vdev_t *ckpoint_vd = ckpoint_rvd->vdev_child[c]; in verify_checkpoint_vdev_spacemaps()
7830 vdev_t *current_vd = current_rvd->vdev_child[c]; in verify_checkpoint_vdev_spacemaps()
7835 if (ckpoint_vd->vdev_ops == &vdev_indirect_ops) { in verify_checkpoint_vdev_spacemaps()
7842 ASSERT3P(current_vd->vdev_ops, ==, &vdev_indirect_ops); in verify_checkpoint_vdev_spacemaps()
7850 if (current_vd->vdev_top_zap == 0 || in verify_checkpoint_vdev_spacemaps()
7852 current_vd->vdev_top_zap, in verify_checkpoint_vdev_spacemaps()
7857 current_vd->vdev_top_zap, VDEV_TOP_ZAP_POOL_CHECKPOINT_SM, in verify_checkpoint_vdev_spacemaps()
7861 checkpoint_sm_obj, 0, current_vd->vdev_asize, in verify_checkpoint_vdev_spacemaps()
7862 current_vd->vdev_ashift)); in verify_checkpoint_vdev_spacemaps()
7873 dump_spacemap(current->spa_meta_objset, checkpoint_sm); in verify_checkpoint_vdev_spacemaps()
7881 if (ckpoint_rvd->vdev_children < current_rvd->vdev_children) { in verify_checkpoint_vdev_spacemaps()
7882 for (uint64_t c = ckpoint_rvd->vdev_children; in verify_checkpoint_vdev_spacemaps()
7883 c < current_rvd->vdev_children; c++) { in verify_checkpoint_vdev_spacemaps()
7884 vdev_t *current_vd = current_rvd->vdev_child[c]; in verify_checkpoint_vdev_spacemaps()
7885 VERIFY3P(current_vd->vdev_checkpoint_sm, ==, NULL); in verify_checkpoint_vdev_spacemaps()
7908 vdev_t *ckpoint_rvd = checkpoint->spa_root_vdev; in verify_checkpoint_ms_spacemaps()
7909 vdev_t *current_rvd = current->spa_root_vdev; in verify_checkpoint_ms_spacemaps()
7914 for (uint64_t i = 0; i < ckpoint_rvd->vdev_children; i++) { in verify_checkpoint_ms_spacemaps()
7915 vdev_t *ckpoint_vd = ckpoint_rvd->vdev_child[i]; in verify_checkpoint_ms_spacemaps()
7916 vdev_t *current_vd = current_rvd->vdev_child[i]; in verify_checkpoint_ms_spacemaps()
7918 if (ckpoint_vd->vdev_ops == &vdev_indirect_ops) { in verify_checkpoint_ms_spacemaps()
7922 ASSERT3P(current_vd->vdev_ops, ==, &vdev_indirect_ops); in verify_checkpoint_ms_spacemaps()
7926 for (uint64_t m = 0; m < ckpoint_vd->vdev_ms_count; m++) { in verify_checkpoint_ms_spacemaps()
7927 metaslab_t *ckpoint_msp = ckpoint_vd->vdev_ms[m]; in verify_checkpoint_ms_spacemaps()
7928 metaslab_t *current_msp = current_vd->vdev_ms[m]; in verify_checkpoint_ms_spacemaps()
7933 (longlong_t)current_vd->vdev_id, in verify_checkpoint_ms_spacemaps()
7934 (longlong_t)current_rvd->vdev_children, in verify_checkpoint_ms_spacemaps()
7935 (longlong_t)current_vd->vdev_ms[m]->ms_id, in verify_checkpoint_ms_spacemaps()
7936 (longlong_t)current_vd->vdev_ms_count); in verify_checkpoint_ms_spacemaps()
7950 range_tree_walk(ckpoint_msp->ms_allocatable, in verify_checkpoint_ms_spacemaps()
7952 current_msp->ms_allocatable); in verify_checkpoint_ms_spacemaps()
7974 checkpoint_pool = import_checkpointed_state(spa->spa_name, NULL, in verify_checkpoint_blocks()
7976 ASSERT(strcmp(spa->spa_name, checkpoint_pool) != 0); in verify_checkpoint_blocks()
8008 vdev_t *rvd = spa->spa_root_vdev; in dump_leftover_checkpoint_blocks()
8010 for (uint64_t i = 0; i < rvd->vdev_children; i++) { in dump_leftover_checkpoint_blocks()
8011 vdev_t *vd = rvd->vdev_child[i]; in dump_leftover_checkpoint_blocks()
8016 if (vd->vdev_top_zap == 0) in dump_leftover_checkpoint_blocks()
8019 if (zap_contains(spa_meta_objset(spa), vd->vdev_top_zap, in dump_leftover_checkpoint_blocks()
8023 VERIFY0(zap_lookup(spa_meta_objset(spa), vd->vdev_top_zap, in dump_leftover_checkpoint_blocks()
8028 checkpoint_sm_obj, 0, vd->vdev_asize, vd->vdev_ashift)); in dump_leftover_checkpoint_blocks()
8029 dump_spacemap(spa->spa_meta_objset, checkpoint_sm); in dump_leftover_checkpoint_blocks()
8043 error = zap_lookup(spa->spa_meta_objset, DMU_POOL_DIRECTORY_OBJECT, in verify_checkpoint()
8109 int error = zap_lookup(spa_meta_objset(vd->vdev_spa), in mos_leak_vdev_top_zap()
8110 vd->vdev_top_zap, VDEV_TOP_ZAP_MS_UNFLUSHED_PHYS_TXGS, in mos_leak_vdev_top_zap()
8122 mos_obj_refd(vd->vdev_dtl_object); in mos_leak_vdev()
8123 mos_obj_refd(vd->vdev_ms_array); in mos_leak_vdev()
8124 mos_obj_refd(vd->vdev_indirect_config.vic_births_object); in mos_leak_vdev()
8125 mos_obj_refd(vd->vdev_indirect_config.vic_mapping_object); in mos_leak_vdev()
8126 mos_obj_refd(vd->vdev_leaf_zap); in mos_leak_vdev()
8127 if (vd->vdev_checkpoint_sm != NULL) in mos_leak_vdev()
8128 mos_obj_refd(vd->vdev_checkpoint_sm->sm_object); in mos_leak_vdev()
8129 if (vd->vdev_indirect_mapping != NULL) { in mos_leak_vdev()
8130 mos_obj_refd(vd->vdev_indirect_mapping-> in mos_leak_vdev()
8131 vim_phys->vimp_counts_object); in mos_leak_vdev()
8133 if (vd->vdev_obsolete_sm != NULL) in mos_leak_vdev()
8134 mos_obj_refd(vd->vdev_obsolete_sm->sm_object); in mos_leak_vdev()
8136 for (uint64_t m = 0; m < vd->vdev_ms_count; m++) { in mos_leak_vdev()
8137 metaslab_t *ms = vd->vdev_ms[m]; in mos_leak_vdev()
8138 mos_obj_refd(space_map_object(ms->ms_sm)); in mos_leak_vdev()
8141 if (vd->vdev_root_zap != 0) in mos_leak_vdev()
8142 mos_obj_refd(vd->vdev_root_zap); in mos_leak_vdev()
8144 if (vd->vdev_top_zap != 0) { in mos_leak_vdev()
8145 mos_obj_refd(vd->vdev_top_zap); in mos_leak_vdev()
8149 for (uint64_t c = 0; c < vd->vdev_children; c++) { in mos_leak_vdev()
8150 mos_leak_vdev(vd->vdev_child[c]); in mos_leak_vdev()
8166 for (spa_log_sm_t *sls = avl_first(&spa->spa_sm_logs_by_txg); in mos_leak_log_spacemaps()
8167 sls; sls = AVL_NEXT(&spa->spa_sm_logs_by_txg, sls)) in mos_leak_log_spacemaps()
8168 mos_obj_refd(sls->sls_sm_obj); in mos_leak_log_spacemaps()
8179 mos_obj_refd(za->za_first_integer); in errorlog_count_refd()
8189 objset_t *mos = spa->spa_meta_objset; in dump_mos_leaks()
8190 dsl_pool_t *dp = spa->spa_dsl_pool; in dump_mos_leaks()
8195 mos_obj_refd(spa->spa_pool_props_object); in dump_mos_leaks()
8196 mos_obj_refd(spa->spa_config_object); in dump_mos_leaks()
8197 mos_obj_refd(spa->spa_ddt_stat_object); in dump_mos_leaks()
8198 mos_obj_refd(spa->spa_feat_desc_obj); in dump_mos_leaks()
8199 mos_obj_refd(spa->spa_feat_enabled_txg_obj); in dump_mos_leaks()
8200 mos_obj_refd(spa->spa_feat_for_read_obj); in dump_mos_leaks()
8201 mos_obj_refd(spa->spa_feat_for_write_obj); in dump_mos_leaks()
8202 mos_obj_refd(spa->spa_history); in dump_mos_leaks()
8203 mos_obj_refd(spa->spa_errlog_last); in dump_mos_leaks()
8204 mos_obj_refd(spa->spa_errlog_scrub); in dump_mos_leaks()
8207 errorlog_count_refd(mos, spa->spa_errlog_last); in dump_mos_leaks()
8208 errorlog_count_refd(mos, spa->spa_errlog_scrub); in dump_mos_leaks()
8211 mos_obj_refd(spa->spa_all_vdev_zaps); in dump_mos_leaks()
8212 mos_obj_refd(spa->spa_dsl_pool->dp_bptree_obj); in dump_mos_leaks()
8213 mos_obj_refd(spa->spa_dsl_pool->dp_tmp_userrefs_obj); in dump_mos_leaks()
8214 mos_obj_refd(spa->spa_dsl_pool->dp_scan->scn_phys.scn_queue_obj); in dump_mos_leaks()
8215 bpobj_count_refd(&spa->spa_deferred_bpobj); in dump_mos_leaks()
8216 mos_obj_refd(dp->dp_empty_bpobj); in dump_mos_leaks()
8217 bpobj_count_refd(&dp->dp_obsolete_bpobj); in dump_mos_leaks()
8218 bpobj_count_refd(&dp->dp_free_bpobj); in dump_mos_leaks()
8219 mos_obj_refd(spa->spa_l2cache.sav_object); in dump_mos_leaks()
8220 mos_obj_refd(spa->spa_spares.sav_object); in dump_mos_leaks()
8222 if (spa->spa_syncing_log_sm != NULL) in dump_mos_leaks()
8223 mos_obj_refd(spa->spa_syncing_log_sm->sm_object); in dump_mos_leaks()
8226 mos_obj_refd(spa->spa_condensing_indirect_phys. in dump_mos_leaks()
8228 mos_obj_refd(spa->spa_condensing_indirect_phys. in dump_mos_leaks()
8230 if (spa->spa_condensing_indirect_phys.scip_next_mapping_object != 0) { in dump_mos_leaks()
8233 spa->spa_condensing_indirect_phys.scip_next_mapping_object); in dump_mos_leaks()
8234 mos_obj_refd(vim->vim_phys->vimp_counts_object); in dump_mos_leaks()
8239 if (dp->dp_origin_snap != NULL) { in dump_mos_leaks()
8244 dsl_dataset_phys(dp->dp_origin_snap)->ds_next_snap_obj, in dump_mos_leaks()
8247 dump_blkptr_list(&ds->ds_deadlist, "Deadlist"); in dump_mos_leaks()
8251 count_ds_mos_objects(dp->dp_origin_snap); in dump_mos_leaks()
8252 dump_blkptr_list(&dp->dp_origin_snap->ds_deadlist, "Deadlist"); in dump_mos_leaks()
8254 count_dir_mos_objects(dp->dp_mos_dir); in dump_mos_leaks()
8255 if (dp->dp_free_dir != NULL) in dump_mos_leaks()
8256 count_dir_mos_objects(dp->dp_free_dir); in dump_mos_leaks()
8257 if (dp->dp_leak_dir != NULL) in dump_mos_leaks()
8258 count_dir_mos_objects(dp->dp_leak_dir); in dump_mos_leaks()
8260 mos_leak_vdev(spa->spa_root_vdev); in dump_mos_leaks()
8263 ddt_t *ddt = spa->spa_ddt[c]; in dump_mos_leaks()
8264 if (!ddt || ddt->ddt_version == DDT_VERSION_UNCONFIGURED) in dump_mos_leaks()
8271 mos_obj_refd(ddt->ddt_object[type][class]); in dump_mos_leaks()
8276 if (ddt->ddt_version == DDT_VERSION_FDT) in dump_mos_leaks()
8277 mos_obj_refd(ddt->ddt_dir_object); in dump_mos_leaks()
8280 if (ddt->ddt_flags & DDT_FLAG_LOG) { in dump_mos_leaks()
8281 mos_obj_refd(ddt->ddt_log[0].ddl_object); in dump_mos_leaks()
8282 mos_obj_refd(ddt->ddt_log[1].ddl_object); in dump_mos_leaks()
8286 for (uint64_t vdevid = 0; vdevid < spa->spa_brt_nvdevs; vdevid++) { in dump_mos_leaks()
8287 brt_vdev_t *brtvd = spa->spa_brt_vdevs[vdevid]; in dump_mos_leaks()
8288 if (brtvd->bv_initiated) { in dump_mos_leaks()
8289 mos_obj_refd(brtvd->bv_mos_brtvdev); in dump_mos_leaks()
8290 mos_obj_refd(brtvd->bv_mos_entries); in dump_mos_leaks()
8342 uint64_t offset = sme->sme_offset; in log_spacemap_obsolete_stats_cb()
8343 uint64_t vdev_id = sme->sme_vdev; in log_spacemap_obsolete_stats_cb()
8345 if (lsos->lsos_current_txg == 0) { in log_spacemap_obsolete_stats_cb()
8347 lsos->lsos_current_txg = txg; in log_spacemap_obsolete_stats_cb()
8348 } else if (lsos->lsos_current_txg < txg) { in log_spacemap_obsolete_stats_cb()
8349 /* we just moved on to a new log - print stats and reset */ in log_spacemap_obsolete_stats_cb()
8350 (void) printf("%-8llu valid entries out of %-8llu - txg %llu\n", in log_spacemap_obsolete_stats_cb()
8351 (u_longlong_t)lsos->lsos_valid_sm_entries, in log_spacemap_obsolete_stats_cb()
8352 (u_longlong_t)lsos->lsos_sm_entries, in log_spacemap_obsolete_stats_cb()
8353 (u_longlong_t)lsos->lsos_current_txg); in log_spacemap_obsolete_stats_cb()
8354 lsos->lsos_valid_sm_entries = 0; in log_spacemap_obsolete_stats_cb()
8355 lsos->lsos_sm_entries = 0; in log_spacemap_obsolete_stats_cb()
8356 lsos->lsos_current_txg = txg; in log_spacemap_obsolete_stats_cb()
8358 ASSERT3U(lsos->lsos_current_txg, ==, txg); in log_spacemap_obsolete_stats_cb()
8360 lsos->lsos_sm_entries++; in log_spacemap_obsolete_stats_cb()
8361 lsos->lsos_total_entries++; in log_spacemap_obsolete_stats_cb()
8367 metaslab_t *ms = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in log_spacemap_obsolete_stats_cb()
8368 ASSERT(sme->sme_type == SM_ALLOC || sme->sme_type == SM_FREE); in log_spacemap_obsolete_stats_cb()
8372 lsos->lsos_valid_sm_entries++; in log_spacemap_obsolete_stats_cb()
8373 lsos->lsos_valid_entries++; in log_spacemap_obsolete_stats_cb()
8391 (void) printf("%-8llu valid entries out of %-8llu - txg %llu\n", in dump_log_spacemap_obsolete_stats()
8396 (void) printf("%-8llu valid entries out of %-8llu - total\n\n", in dump_log_spacemap_obsolete_stats()
8418 dump_nvlist(spa->spa_config, 8); in dump_zpool()
8425 dump_uberblock(&spa->spa_uberblock, "\nUberblock:\n", "\n"); in dump_zpool()
8446 dump_objset(dp->dp_meta_objset); in dump_zpool()
8449 dsl_pool_t *dp = spa->spa_dsl_pool; in dump_zpool()
8450 dump_full_bpobj(&spa->spa_deferred_bpobj, in dump_zpool()
8453 dump_full_bpobj(&dp->dp_free_bpobj, in dump_zpool()
8456 if (bpobj_is_open(&dp->dp_obsolete_bpobj)) { in dump_zpool()
8459 dump_full_bpobj(&dp->dp_obsolete_bpobj, in dump_zpool()
8465 dump_bptree(spa->spa_meta_objset, in dump_zpool()
8466 dp->dp_bptree_obj, in dump_zpool()
8469 dump_dtl(spa->spa_root_vdev, 0); in dump_zpool()
8631 * leaf_name - For example: c1t0d0 or /tmp/ztest.0a
8632 * child[.child]* - For example: 0.1.1
8636  * RAID-Zs, you can specify either RAID-Z vdev with 0.0 or 0.1. in zdb_vdev_lookup()
8651 if (i >= vdev->vdev_children) in zdb_vdev_lookup()
8654 vdev = vdev->vdev_child[i]; in zdb_vdev_lookup()
8660 for (i = 0; i < vdev->vdev_children; i++) { in zdb_vdev_lookup()
8661 vdev_t *vc = vdev->vdev_child[i]; in zdb_vdev_lookup()
8663 if (vc->vdev_path == NULL) { in zdb_vdev_lookup()
8671 p = strrchr(vc->vdev_path, '/'); in zdb_vdev_lookup()
8672 p = p ? p + 1 : vc->vdev_path; in zdb_vdev_lookup()
8673 q = &vc->vdev_path[strlen(vc->vdev_path) - 2]; in zdb_vdev_lookup()
8675 if (strcmp(vc->vdev_path, path) == 0) in zdb_vdev_lookup()
8679 if (strcmp(q, "s0") == 0 && strncmp(p, path, q - p) == 0) in zdb_vdev_lookup()
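Per the comment above, zdb_vdev_lookup() accepts either a leaf device name (c1t0d0, /tmp/ztest.0a) or a dotted child path such as 0.1.1, where each number selects a child of the current vdev. A toy parser for the dotted form over a hand-built two-level tree; the structure and names are invented for the sketch:

    #include <stdio.h>
    #include <stdlib.h>

    typedef struct toy_vdev {
        const char *name;
        struct toy_vdev *child[4];
        int nchildren;
    } toy_vdev_t;

    static toy_vdev_t *
    lookup(toy_vdev_t *vd, const char *path)
    {
        char *end;

        while (vd != NULL && *path != '\0') {
            unsigned long i = strtoul(path, &end, 10);

            if (end == path || i >= (unsigned long)vd->nchildren)
                return (NULL);
            vd = vd->child[i];
            path = (*end == '.') ? end + 1 : end;
        }
        return (vd);
    }

    int
    main(void)
    {
        toy_vdev_t leaf = { "disk-a", { NULL }, 0 };
        toy_vdev_t raidz = { "raidz-0", { &leaf }, 1 };
        toy_vdev_t root = { "root", { &raidz }, 1 };
        toy_vdev_t *vd = lookup(&root, "0.0");

        printf("0.0 -> %s\n", vd != NULL ? vd->name : "(not found)");
        return (0);
    }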
8691 dsl_pool_config_enter(spa->spa_dsl_pool, FTAG); in name_from_objset_id()
8692 int error = dsl_dataset_hold_obj(spa->spa_dsl_pool, objset_id, in name_from_objset_id()
8697 dsl_pool_config_exit(spa->spa_dsl_pool, FTAG); in name_from_objset_id()
8702 dsl_pool_config_exit(spa->spa_dsl_pool, FTAG); in name_from_objset_id()
8731 "Trying %05llx -> %05llx (%s)\n", in try_decompress_block()
8842 return (lsize > maxlsize ? -1 : lsize); in zdb_decompress_block()
8851 * pool - The name of the pool you wish to read from
8852 * vdev_specifier - Which vdev (see comment for zdb_vdev_lookup)
8853 * offset - offset, in hex, in bytes
8854 * size - Amount of data to read, in hex, in bytes
8855 * flags - A string of characters specifying options
8870 dva_t *dva = bp->blk_dva; in zdb_read_block()
8898 (void) printf("Invalid block specifier: %s - %s\n", in zdb_read_block()
8956 vd = zdb_vdev_lookup(spa->spa_root_vdev, vdev); in zdb_read_block()
8961 if (vd->vdev_path) in zdb_read_block()
8963 vd->vdev_path); in zdb_read_block()
8966 vd->vdev_ops->vdev_op_type); in zdb_read_block()
8974 DVA_SET_VDEV(&dva[0], vd->vdev_id); in zdb_read_block()
8993 if (vd == vd->vdev_top) { in zdb_read_block()
9024 if (lsize == -1) { in zdb_read_block()
9049 if (lsize == -1 || zfs_blkptr_verify(spa, b, in zdb_read_block()
9090 if (vd == vd->vdev_top) { in zdb_read_block()
9110 ck_zio->io_offset = in zdb_read_block()
9111 DVA_GET_OFFSET(&bp->blk_dva[0]); in zdb_read_block()
9112 ck_zio->io_bp = bp; in zdb_read_block()
9118 (u_longlong_t)bp->blk_cksum.zc_word[0], in zdb_read_block()
9119 (u_longlong_t)bp->blk_cksum.zc_word[1], in zdb_read_block()
9120 (u_longlong_t)bp->blk_cksum.zc_word[2], in zdb_read_block()
9121 (u_longlong_t)bp->blk_cksum.zc_word[3]); in zdb_read_block()
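The comment above documents the pieces of a read-block request: the pool, a vdev specifier, a hex offset, a hex size, and an option-flag string. As a rough illustration only (the real parser also understands an lsize/psize form, and exactly how the pool name is combined with the rest is not visible in these hits), here is one way such a colon-separated "vdev:offset:size:flags" string could be split; the sample spec is invented:

    #include <inttypes.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    int
    main(void)
    {
        char spec[] = "0.0:2000:1000:r";    /* vdev:offset:size:flags, hex */
        char *save = NULL;
        const char *vdev = strtok_r(spec, ":", &save);
        uint64_t offset = strtoull(strtok_r(NULL, ":", &save), NULL, 16);
        uint64_t size = strtoull(strtok_r(NULL, ":", &save), NULL, 16);
        const char *flags = strtok_r(NULL, ":", &save);

        printf("vdev %s offset 0x%" PRIx64 " size 0x%" PRIx64 " flags %s\n",
            vdev, offset, size, flags != NULL ? flags : "(none)");
        return (0);
    }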
9217 int64_t objset_id = -1; in main()
9232 * Set up signal handlers, so if we crash due to bad on-disk data we in main()
9250 * default spa_config_path setting. If -U flag is specified it will in main()
9265 {"ignore-assertions", no_argument, NULL, 'A'}, in main()
9266 {"block-stats", no_argument, NULL, 'b'}, in main()
9271 {"dedup-stats", no_argument, NULL, 'D'}, in main()
9273 {"embedded-block-pointer", no_argument, NULL, 'E'}, in main()
9274 {"automatic-rewind", no_argument, NULL, 'F'}, in main()
9275 {"dump-debug-msg", no_argument, NULL, 'G'}, in main()
9277 {"intent-logs", no_argument, NULL, 'i'}, in main()
9279 {"checkpointed-state", no_argument, NULL, 'k'}, in main()
9282 {"disable-leak-tracking", no_argument, NULL, 'L'}, in main()
9284 {"metaslab-groups", no_argument, NULL, 'M'}, in main()
9287 {"object-lookups", no_argument, NULL, 'O'}, in main()
9290 {"skip-label", no_argument, NULL, 'q'}, in main()
9291 {"copy-object", no_argument, NULL, 'r'}, in main()
9292 {"read-block", no_argument, NULL, 'R'}, in main()
9293 {"io-stats", no_argument, NULL, 's'}, in main()
9294 {"simulate-dedup", no_argument, NULL, 'S'}, in main()
9296 {"brt-stats", no_argument, NULL, 'T'}, in main()
9301 {"dump-blocks", required_argument, NULL, 'x'}, in main()
9302 {"extreme-rewind", no_argument, NULL, 'X'}, in main()
9303 {"all-reconstruction", no_argument, NULL, 'Y'}, in main()
9305 {"zstd-headers", no_argument, NULL, 'Z'}, in main()
9311 long_options, NULL)) != -1) { in main()
9423 (void) fprintf(stderr, "-p option requires use of -e\n"); in main()
9428 * ZDB does not typically re-read blocks; therefore limit the ARC in main()
9436 * "zdb -c" uses checksum-verifying scrub i/os which are async reads. in main()
9437 * "zdb -b" uses traversal prefetch which uses async reads. in main()
9449 * to load non-idle pools. in main()
9471 argc -= optind; in main()
9497 value = "-"; in main()
9506 } else if ((strcmp(pbuf, "-") == 0 && in main()
9550 /* -N implies -d */ in main()
9569 if (targetlen && target[targetlen - 1] == '/') in main()
9570 target[targetlen - 1] = '\0'; in main()
9573 * See if an objset ID was supplied (-d <pool>/<objset ID>). in main()
9575 * if -N was given, otherwise 100 is an objsetID iff in main()
9593 objset_id = -1; in main()
9597 printf("Supply a numeric objset ID with -N\n"); in main()
9655  * We need to make sure to process the -O option or call in main()
9656 * dump_path after the -e option has been processed, in main()
9682 * it always after the -e option has been processed, which in main()
9729 spa->spa_log_state == SPA_LOG_MISSING) { in main()
9730 spa->spa_log_state = SPA_LOG_CLEAR; in main()
9761 * If -N was supplied, the user has indicated that in main()
9762 * zdb -d <pool>/<objsetID> is in effect. Otherwise in main()
9814 spa->spa_failmode = ZIO_FAILURE_MODE_PANIC; in main()
9817 argc--; in main()
9859 dump_objset(spa->spa_meta_objset); in main()
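main() maps every long option in the table above onto its single-letter equivalent before handing the array to getopt_long(), so both spellings resolve to the same switch case. A minimal compilable sketch of that pattern, reproducing just two of the listed options:

    #include <getopt.h>
    #include <stdio.h>

    int
    main(int argc, char **argv)
    {
        static struct option long_options[] = {
            {"ignore-assertions",   no_argument,    NULL, 'A'},
            {"block-stats",         no_argument,    NULL, 'b'},
            {0, 0, 0, 0}
        };
        int c;

        while ((c = getopt_long(argc, argv, "Ab", long_options, NULL)) != -1) {
            switch (c) {
            case 'A':
                printf("assertions ignored\n");
                break;
            case 'b':
                printf("block stats requested\n");
                break;
            default:
                fprintf(stderr, "usage: %s [-Ab]\n", argv[0]);
                return (2);
            }
        }
        return (0);
    }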