Lines matching refs: vd (every source line that references the vd argument/local in the vdev code, tagged with the enclosing function)

174 vdev_dbgmsg(vdev_t *vd, const char *fmt, ...)  in vdev_dbgmsg()  argument
183 if (vd->vdev_path != NULL) { in vdev_dbgmsg()
184 zfs_dbgmsg("%s vdev '%s': %s", vd->vdev_ops->vdev_op_type, in vdev_dbgmsg()
185 vd->vdev_path, buf); in vdev_dbgmsg()
188 vd->vdev_ops->vdev_op_type, in vdev_dbgmsg()
189 (u_longlong_t)vd->vdev_id, in vdev_dbgmsg()
190 (u_longlong_t)vd->vdev_guid, buf); in vdev_dbgmsg()
195 vdev_dbgmsg_print_tree(vdev_t *vd, int indent) in vdev_dbgmsg_print_tree() argument
199 if (vd->vdev_ishole || vd->vdev_ops == &vdev_missing_ops) { in vdev_dbgmsg_print_tree()
201 (u_longlong_t)vd->vdev_id, in vdev_dbgmsg_print_tree()
202 vd->vdev_ops->vdev_op_type); in vdev_dbgmsg_print_tree()
206 switch (vd->vdev_state) { in vdev_dbgmsg_print_tree()
233 (uint_t)vd->vdev_state); in vdev_dbgmsg_print_tree()
237 "", (int)vd->vdev_id, vd->vdev_ops->vdev_op_type, in vdev_dbgmsg_print_tree()
238 vd->vdev_islog ? " (log)" : "", in vdev_dbgmsg_print_tree()
239 (u_longlong_t)vd->vdev_guid, in vdev_dbgmsg_print_tree()
240 vd->vdev_path ? vd->vdev_path : "N/A", state); in vdev_dbgmsg_print_tree()
242 for (uint64_t i = 0; i < vd->vdev_children; i++) in vdev_dbgmsg_print_tree()
243 vdev_dbgmsg_print_tree(vd->vdev_child[i], indent + 2); in vdev_dbgmsg_print_tree()
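The two debug helpers listed above share a simple shape: vdev_dbgmsg() prints one line naming the vdev by path when it has one, otherwise by id and guid, and vdev_dbgmsg_print_tree() walks vdev_child[] recursively, growing the indent by two per level. A minimal userland sketch of that indent-and-recurse pattern, using a hypothetical node_t rather than the real vdev_t:

#include <stdio.h>

/* Hypothetical stand-in for vdev_t: just an id, a type/state, and children. */
typedef struct node {
	int		  id;
	const char	 *type;
	const char	 *state;
	int		  nchildren;
	struct node	**child;
} node_t;

/* Print "<indent>vdev <id>: <type> (<state>)", then recurse with indent + 2. */
static void
print_tree(const node_t *n, int indent)
{
	printf("%*svdev %d: %s (%s)\n", indent, "", n->id, n->type, n->state);
	for (int c = 0; c < n->nchildren; c++)
		print_tree(n->child[c], indent + 2);
}

int
main(void)
{
	node_t d0 = { 0, "disk", "healthy", 0, NULL };
	node_t d1 = { 1, "disk", "healthy", 0, NULL };
	node_t *kids[] = { &d0, &d1 };
	node_t root = { 0, "mirror", "healthy", 2, kids };

	print_tree(&root, 0);
	return (0);
}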
288 vdev_get_mg(vdev_t *vd, metaslab_class_t *mc) in vdev_get_mg() argument
290 if (mc == spa_embedded_log_class(vd->vdev_spa) && in vdev_get_mg()
291 vd->vdev_log_mg != NULL) in vdev_get_mg()
292 return (vd->vdev_log_mg); in vdev_get_mg()
294 return (vd->vdev_mg); in vdev_get_mg()
298 vdev_default_xlate(vdev_t *vd, const zfs_range_seg64_t *logical_rs, in vdev_default_xlate() argument
301 (void) vd, (void) remain_rs; in vdev_default_xlate()
327 vdev_default_psize(vdev_t *vd, uint64_t asize, uint64_t txg) in vdev_default_psize() argument
329 ASSERT0(asize % (1ULL << vd->vdev_top->vdev_ashift)); in vdev_default_psize()
331 for (int c = 0; c < vd->vdev_children; c++) { in vdev_default_psize()
332 csize = vdev_asize_to_psize_txg(vd->vdev_child[c], asize, txg); in vdev_default_psize()
344 vdev_default_asize(vdev_t *vd, uint64_t psize, uint64_t txg) in vdev_default_asize() argument
346 uint64_t asize = P2ROUNDUP(psize, 1ULL << vd->vdev_top->vdev_ashift); in vdev_default_asize()
349 for (int c = 0; c < vd->vdev_children; c++) { in vdev_default_asize()
350 csize = vdev_psize_to_asize_txg(vd->vdev_child[c], psize, txg); in vdev_default_asize()
358 vdev_default_min_asize(vdev_t *vd) in vdev_default_min_asize() argument
360 return (vd->vdev_min_asize); in vdev_default_min_asize()
370 vdev_get_min_asize(vdev_t *vd) in vdev_get_min_asize() argument
372 vdev_t *pvd = vd->vdev_parent; in vdev_get_min_asize()
379 return (vd->vdev_asize); in vdev_get_min_asize()
385 if (vd == vd->vdev_top) in vdev_get_min_asize()
386 return (P2ALIGN_TYPED(vd->vdev_asize, 1ULL << vd->vdev_ms_shift, in vdev_get_min_asize()
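The size helpers above rely on power-of-two arithmetic: vdev_default_asize() rounds psize up to the top-level vdev's sector size with P2ROUNDUP(psize, 1ULL << ashift), and vdev_get_min_asize() rounds the top-level asize down to a whole number of metaslabs with P2ALIGN_TYPED(asize, 1ULL << ms_shift, ...). A self-contained sketch of that arithmetic; the macro bodies are restated locally in their usual sysmacros.h form (an assumption, not quoted from this listing), and the untyped P2ALIGN stands in for P2ALIGN_TYPED:

#include <stdio.h>
#include <stdint.h>

/* Local restatement of the usual helpers; align must be a power of two. */
#define	P2ROUNDUP(x, align)	((((x) - 1) | ((align) - 1)) + 1)
#define	P2ALIGN(x, align)	((x) & -(align))

int
main(void)
{
	uint64_t ashift = 12;			/* 4 KiB sectors */
	uint64_t psize = 5000;			/* requested size in bytes */

	/* Round up to a whole sector, as vdev_default_asize() does. */
	uint64_t asize = P2ROUNDUP(psize, 1ULL << ashift);

	/* Round a device size down to whole 512 MiB metaslabs. */
	uint64_t ms_shift = 29;
	uint64_t dev_asize = (3ULL << 29) + 123456;
	uint64_t usable = P2ALIGN(dev_asize, 1ULL << ms_shift);

	printf("psize %llu -> asize %llu\n",
	    (unsigned long long)psize, (unsigned long long)asize);
	printf("asize %llu -> usable %llu (%llu metaslabs)\n",
	    (unsigned long long)dev_asize, (unsigned long long)usable,
	    (unsigned long long)(usable >> ms_shift));
	return (0);
}

With these inputs the rounded asize is 8192 bytes and the aligned size covers exactly three 512 MiB metaslabs.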
393 vdev_set_min_asize(vdev_t *vd) in vdev_set_min_asize() argument
395 vd->vdev_min_asize = vdev_get_min_asize(vd); in vdev_set_min_asize()
397 for (int c = 0; c < vd->vdev_children; c++) in vdev_set_min_asize()
398 vdev_set_min_asize(vd->vdev_child[c]); in vdev_set_min_asize()
405 vdev_get_min_alloc(vdev_t *vd) in vdev_get_min_alloc() argument
407 uint64_t min_alloc = 1ULL << vd->vdev_ashift; in vdev_get_min_alloc()
409 if (vd->vdev_ops->vdev_op_min_alloc != NULL) in vdev_get_min_alloc()
410 min_alloc = vd->vdev_ops->vdev_op_min_alloc(vd); in vdev_get_min_alloc()
419 vdev_get_nparity(vdev_t *vd) in vdev_get_nparity() argument
423 if (vd->vdev_ops->vdev_op_nparity != NULL) in vdev_get_nparity()
424 nparity = vd->vdev_ops->vdev_op_nparity(vd); in vdev_get_nparity()
430 vdev_prop_get_int(vdev_t *vd, vdev_prop_t prop, uint64_t *value) in vdev_prop_get_int() argument
432 spa_t *spa = vd->vdev_spa; in vdev_prop_get_int()
437 if (vd->vdev_root_zap != 0) { in vdev_prop_get_int()
438 objid = vd->vdev_root_zap; in vdev_prop_get_int()
439 } else if (vd->vdev_top_zap != 0) { in vdev_prop_get_int()
440 objid = vd->vdev_top_zap; in vdev_prop_get_int()
441 } else if (vd->vdev_leaf_zap != 0) { in vdev_prop_get_int()
442 objid = vd->vdev_leaf_zap; in vdev_prop_get_int()
460 vdev_get_ndisks(vdev_t *vd) in vdev_get_ndisks() argument
464 if (vd->vdev_ops->vdev_op_ndisks != NULL) in vdev_get_ndisks()
465 ndisks = vd->vdev_ops->vdev_op_ndisks(vd); in vdev_get_ndisks()
486 vdev_lookup_by_guid(vdev_t *vd, uint64_t guid) in vdev_lookup_by_guid() argument
490 if (vd->vdev_guid == guid) in vdev_lookup_by_guid()
491 return (vd); in vdev_lookup_by_guid()
493 for (int c = 0; c < vd->vdev_children; c++) in vdev_lookup_by_guid()
494 if ((mvd = vdev_lookup_by_guid(vd->vdev_child[c], guid)) != in vdev_lookup_by_guid()
502 vdev_count_leaves_impl(vdev_t *vd) in vdev_count_leaves_impl() argument
506 if (vd->vdev_ops->vdev_op_leaf) in vdev_count_leaves_impl()
509 for (int c = 0; c < vd->vdev_children; c++) in vdev_count_leaves_impl()
510 n += vdev_count_leaves_impl(vd->vdev_child[c]); in vdev_count_leaves_impl()
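vdev_lookup_by_guid() and vdev_count_leaves_impl() are both plain depth-first walks over vdev_child[]: the first returns as soon as a guid matches, the second adds one per leaf. The same two shapes on a hypothetical tree type (illustrative only, not the kernel interfaces):

#include <stddef.h>
#include <stdint.h>

typedef struct tnode {
	uint64_t	  guid;
	int		  is_leaf;
	int		  nchildren;
	struct tnode	**child;
} tnode_t;

/* Depth-first search: return the first node whose guid matches, else NULL. */
static tnode_t *
lookup_by_guid(tnode_t *n, uint64_t guid)
{
	if (n->guid == guid)
		return (n);
	for (int c = 0; c < n->nchildren; c++) {
		tnode_t *m = lookup_by_guid(n->child[c], guid);
		if (m != NULL)
			return (m);
	}
	return (NULL);
}

/* Depth-first count of leaves under (and including) n. */
static int
count_leaves(const tnode_t *n)
{
	if (n->is_leaf)
		return (1);

	int sum = 0;
	for (int c = 0; c < n->nchildren; c++)
		sum += count_leaves(n->child[c]);
	return (sum);
}

int
main(void)
{
	tnode_t d0 = { .guid = 101, .is_leaf = 1 };
	tnode_t d1 = { .guid = 102, .is_leaf = 1 };
	tnode_t *kids[] = { &d0, &d1 };
	tnode_t root = { .guid = 100, .nchildren = 2, .child = kids };

	return (lookup_by_guid(&root, 102) != NULL &&
	    count_leaves(&root) == 2 ? 0 : 1);
}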
655 vdev_t *vd; in vdev_alloc_common() local
658 vd = kmem_zalloc(sizeof (vdev_t), KM_SLEEP); in vdev_alloc_common()
659 vic = &vd->vdev_indirect_config; in vdev_alloc_common()
663 spa->spa_root_vdev = vd; in vdev_alloc_common()
668 if (spa->spa_root_vdev == vd) { in vdev_alloc_common()
683 vd->vdev_spa = spa; in vdev_alloc_common()
684 vd->vdev_id = id; in vdev_alloc_common()
685 vd->vdev_guid = guid; in vdev_alloc_common()
686 vd->vdev_guid_sum = guid; in vdev_alloc_common()
687 vd->vdev_ops = ops; in vdev_alloc_common()
688 vd->vdev_state = VDEV_STATE_CLOSED; in vdev_alloc_common()
689 vd->vdev_ishole = (ops == &vdev_hole_ops); in vdev_alloc_common()
692 rw_init(&vd->vdev_indirect_rwlock, NULL, RW_DEFAULT, NULL); in vdev_alloc_common()
693 mutex_init(&vd->vdev_obsolete_lock, NULL, MUTEX_DEFAULT, NULL); in vdev_alloc_common()
694 vd->vdev_obsolete_segments = zfs_range_tree_create(NULL, in vdev_alloc_common()
702 zfs_ratelimit_init(&vd->vdev_delay_rl, &zfs_slow_io_events_per_second, in vdev_alloc_common()
704 zfs_ratelimit_init(&vd->vdev_deadman_rl, &zfs_deadman_events_per_second, in vdev_alloc_common()
706 zfs_ratelimit_init(&vd->vdev_dio_verify_rl, in vdev_alloc_common()
708 zfs_ratelimit_init(&vd->vdev_checksum_rl, in vdev_alloc_common()
714 vd->vdev_checksum_n = vdev_prop_default_numeric(VDEV_PROP_CHECKSUM_N); in vdev_alloc_common()
715 vd->vdev_checksum_t = vdev_prop_default_numeric(VDEV_PROP_CHECKSUM_T); in vdev_alloc_common()
716 vd->vdev_io_n = vdev_prop_default_numeric(VDEV_PROP_IO_N); in vdev_alloc_common()
717 vd->vdev_io_t = vdev_prop_default_numeric(VDEV_PROP_IO_T); in vdev_alloc_common()
718 vd->vdev_slow_io_n = vdev_prop_default_numeric(VDEV_PROP_SLOW_IO_N); in vdev_alloc_common()
719 vd->vdev_slow_io_t = vdev_prop_default_numeric(VDEV_PROP_SLOW_IO_T); in vdev_alloc_common()
721 list_link_init(&vd->vdev_config_dirty_node); in vdev_alloc_common()
722 list_link_init(&vd->vdev_state_dirty_node); in vdev_alloc_common()
723 list_link_init(&vd->vdev_initialize_node); in vdev_alloc_common()
724 list_link_init(&vd->vdev_leaf_node); in vdev_alloc_common()
725 list_link_init(&vd->vdev_trim_node); in vdev_alloc_common()
727 mutex_init(&vd->vdev_dtl_lock, NULL, MUTEX_NOLOCKDEP, NULL); in vdev_alloc_common()
728 mutex_init(&vd->vdev_stat_lock, NULL, MUTEX_DEFAULT, NULL); in vdev_alloc_common()
729 mutex_init(&vd->vdev_probe_lock, NULL, MUTEX_DEFAULT, NULL); in vdev_alloc_common()
730 mutex_init(&vd->vdev_scan_io_queue_lock, NULL, MUTEX_DEFAULT, NULL); in vdev_alloc_common()
732 mutex_init(&vd->vdev_initialize_lock, NULL, MUTEX_DEFAULT, NULL); in vdev_alloc_common()
733 mutex_init(&vd->vdev_initialize_io_lock, NULL, MUTEX_DEFAULT, NULL); in vdev_alloc_common()
734 cv_init(&vd->vdev_initialize_cv, NULL, CV_DEFAULT, NULL); in vdev_alloc_common()
735 cv_init(&vd->vdev_initialize_io_cv, NULL, CV_DEFAULT, NULL); in vdev_alloc_common()
737 mutex_init(&vd->vdev_trim_lock, NULL, MUTEX_DEFAULT, NULL); in vdev_alloc_common()
738 mutex_init(&vd->vdev_autotrim_lock, NULL, MUTEX_DEFAULT, NULL); in vdev_alloc_common()
739 mutex_init(&vd->vdev_trim_io_lock, NULL, MUTEX_DEFAULT, NULL); in vdev_alloc_common()
740 cv_init(&vd->vdev_trim_cv, NULL, CV_DEFAULT, NULL); in vdev_alloc_common()
741 cv_init(&vd->vdev_autotrim_cv, NULL, CV_DEFAULT, NULL); in vdev_alloc_common()
742 cv_init(&vd->vdev_autotrim_kick_cv, NULL, CV_DEFAULT, NULL); in vdev_alloc_common()
743 cv_init(&vd->vdev_trim_io_cv, NULL, CV_DEFAULT, NULL); in vdev_alloc_common()
745 mutex_init(&vd->vdev_rebuild_lock, NULL, MUTEX_DEFAULT, NULL); in vdev_alloc_common()
746 cv_init(&vd->vdev_rebuild_cv, NULL, CV_DEFAULT, NULL); in vdev_alloc_common()
749 vd->vdev_dtl[t] = zfs_range_tree_create(NULL, ZFS_RANGE_SEG64, in vdev_alloc_common()
753 txg_list_create(&vd->vdev_ms_list, spa, in vdev_alloc_common()
755 txg_list_create(&vd->vdev_dtl_list, spa, in vdev_alloc_common()
757 vd->vdev_stat.vs_timestamp = gethrtime(); in vdev_alloc_common()
758 vdev_queue_init(vd); in vdev_alloc_common()
760 return (vd); in vdev_alloc_common()
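vdev_alloc_common() zeroes the new vdev_t with kmem_zalloc() and then initializes every lock, condition variable, list link, rate limiter, and range tree that vdev_free() later destroys one-for-one, keeping construction and teardown symmetric. A small userland analogy of that pairing, with pthreads and malloc standing in for the kernel primitives and error handling omitted (the names here are hypothetical):

#include <pthread.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical object with the same construct/teardown symmetry. */
typedef struct obj {
	pthread_mutex_t	 lock;
	pthread_cond_t	 cv;
	char		*path;		/* optional; may stay NULL */
} obj_t;

static obj_t *
obj_alloc(const char *path)
{
	obj_t *o = calloc(1, sizeof (*o));	/* like kmem_zalloc(KM_SLEEP) */

	pthread_mutex_init(&o->lock, NULL);	/* like mutex_init() */
	pthread_cond_init(&o->cv, NULL);	/* like cv_init() */
	if (path != NULL)
		o->path = strdup(path);		/* like spa_strdup() */
	return (o);
}

static void
obj_free(obj_t *o)
{
	/* Everything obj_alloc() set up is torn down exactly once. */
	free(o->path);
	pthread_cond_destroy(&o->cv);
	pthread_mutex_destroy(&o->lock);
	free(o);
}

int
main(void)
{
	obj_t *o = obj_alloc("/dev/hypothetical");

	obj_free(o);
	return (0);
}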
775 vdev_t *vd; in vdev_alloc() local
871 vd = vdev_alloc_common(spa, id, guid, ops); in vdev_alloc()
872 vd->vdev_tsd = tsd; in vdev_alloc()
873 vd->vdev_islog = islog; in vdev_alloc()
876 vd->vdev_alloc_bias = alloc_bias; in vdev_alloc()
879 vd->vdev_path = spa_strdup(tmp); in vdev_alloc()
888 vd->vdev_stat.vs_aux = VDEV_AUX_EXTERNAL; in vdev_alloc()
889 vd->vdev_faulted = 1; in vdev_alloc()
890 vd->vdev_label_aux = VDEV_AUX_EXTERNAL; in vdev_alloc()
894 vd->vdev_devid = spa_strdup(tmp); in vdev_alloc()
896 vd->vdev_physpath = spa_strdup(tmp); in vdev_alloc()
900 vd->vdev_enc_sysfs_path = spa_strdup(tmp); in vdev_alloc()
903 vd->vdev_fru = spa_strdup(tmp); in vdev_alloc()
910 &vd->vdev_wholedisk) != 0) in vdev_alloc()
911 vd->vdev_wholedisk = -1ULL; in vdev_alloc()
913 vic = &vd->vdev_indirect_config; in vdev_alloc()
930 &vd->vdev_not_present); in vdev_alloc()
938 &vd->vdev_ashift); in vdev_alloc()
940 vd->vdev_attaching = B_TRUE; in vdev_alloc()
947 &vd->vdev_crtxg); in vdev_alloc()
949 if (vd->vdev_ops == &vdev_root_ops && in vdev_alloc()
954 &vd->vdev_root_zap); in vdev_alloc()
963 &vd->vdev_ms_array); in vdev_alloc()
965 &vd->vdev_ms_shift); in vdev_alloc()
967 &vd->vdev_asize); in vdev_alloc()
969 &vd->vdev_noalloc); in vdev_alloc()
971 &vd->vdev_removing); in vdev_alloc()
973 &vd->vdev_top_zap); in vdev_alloc()
974 vd->vdev_rz_expanding = nvlist_exists(nv, in vdev_alloc()
977 ASSERT0(vd->vdev_top_zap); in vdev_alloc()
988 if (vd->vdev_ops->vdev_op_leaf && in vdev_alloc()
991 ZPOOL_CONFIG_VDEV_LEAF_ZAP, &vd->vdev_leaf_zap); in vdev_alloc()
993 ASSERT0(vd->vdev_leaf_zap); in vdev_alloc()
1000 if (vd->vdev_ops->vdev_op_leaf && in vdev_alloc()
1005 &vd->vdev_dtl_object); in vdev_alloc()
1007 &vd->vdev_unspare); in vdev_alloc()
1015 spa_spare_add(vd); in vdev_alloc()
1019 &vd->vdev_offline); in vdev_alloc()
1022 &vd->vdev_resilver_txg); in vdev_alloc()
1025 &vd->vdev_rebuild_txg); in vdev_alloc()
1028 vdev_defer_resilver(vd); in vdev_alloc()
1043 &vd->vdev_faulted); in vdev_alloc()
1045 &vd->vdev_degraded); in vdev_alloc()
1047 &vd->vdev_removed); in vdev_alloc()
1049 if (vd->vdev_faulted || vd->vdev_degraded) { in vdev_alloc()
1052 vd->vdev_label_aux = in vdev_alloc()
1057 vd->vdev_label_aux = VDEV_AUX_EXTERNAL; in vdev_alloc()
1059 vd->vdev_faulted = 0ULL; in vdev_alloc()
1067 vdev_add_child(parent, vd); in vdev_alloc()
1069 *vdp = vd; in vdev_alloc()
1075 vdev_free(vdev_t *vd) in vdev_free() argument
1077 spa_t *spa = vd->vdev_spa; in vdev_free()
1079 ASSERT3P(vd->vdev_initialize_thread, ==, NULL); in vdev_free()
1080 ASSERT3P(vd->vdev_trim_thread, ==, NULL); in vdev_free()
1081 ASSERT3P(vd->vdev_autotrim_thread, ==, NULL); in vdev_free()
1082 ASSERT3P(vd->vdev_rebuild_thread, ==, NULL); in vdev_free()
1089 if (vd->vdev_scan_io_queue != NULL) { in vdev_free()
1090 mutex_enter(&vd->vdev_scan_io_queue_lock); in vdev_free()
1091 dsl_scan_io_queue_destroy(vd->vdev_scan_io_queue); in vdev_free()
1092 vd->vdev_scan_io_queue = NULL; in vdev_free()
1093 mutex_exit(&vd->vdev_scan_io_queue_lock); in vdev_free()
1100 vdev_close(vd); in vdev_free()
1102 ASSERT(!list_link_active(&vd->vdev_config_dirty_node)); in vdev_free()
1103 ASSERT(!list_link_active(&vd->vdev_state_dirty_node)); in vdev_free()
1108 for (int c = 0; c < vd->vdev_children; c++) in vdev_free()
1109 vdev_free(vd->vdev_child[c]); in vdev_free()
1111 ASSERT(vd->vdev_child == NULL); in vdev_free()
1112 ASSERT(vd->vdev_guid_sum == vd->vdev_guid); in vdev_free()
1114 if (vd->vdev_ops->vdev_op_fini != NULL) in vdev_free()
1115 vd->vdev_ops->vdev_op_fini(vd); in vdev_free()
1120 if (vd->vdev_mg != NULL) { in vdev_free()
1121 vdev_metaslab_fini(vd); in vdev_free()
1122 metaslab_group_destroy(vd->vdev_mg); in vdev_free()
1123 vd->vdev_mg = NULL; in vdev_free()
1125 if (vd->vdev_log_mg != NULL) { in vdev_free()
1126 ASSERT0(vd->vdev_ms_count); in vdev_free()
1127 metaslab_group_destroy(vd->vdev_log_mg); in vdev_free()
1128 vd->vdev_log_mg = NULL; in vdev_free()
1131 ASSERT0(vd->vdev_stat.vs_space); in vdev_free()
1132 ASSERT0(vd->vdev_stat.vs_dspace); in vdev_free()
1133 ASSERT0(vd->vdev_stat.vs_alloc); in vdev_free()
1138 vdev_remove_child(vd->vdev_parent, vd); in vdev_free()
1140 ASSERT(vd->vdev_parent == NULL); in vdev_free()
1141 ASSERT(!list_link_active(&vd->vdev_leaf_node)); in vdev_free()
1146 vdev_queue_fini(vd); in vdev_free()
1148 if (vd->vdev_path) in vdev_free()
1149 spa_strfree(vd->vdev_path); in vdev_free()
1150 if (vd->vdev_devid) in vdev_free()
1151 spa_strfree(vd->vdev_devid); in vdev_free()
1152 if (vd->vdev_physpath) in vdev_free()
1153 spa_strfree(vd->vdev_physpath); in vdev_free()
1155 if (vd->vdev_enc_sysfs_path) in vdev_free()
1156 spa_strfree(vd->vdev_enc_sysfs_path); in vdev_free()
1158 if (vd->vdev_fru) in vdev_free()
1159 spa_strfree(vd->vdev_fru); in vdev_free()
1161 if (vd->vdev_isspare) in vdev_free()
1162 spa_spare_remove(vd); in vdev_free()
1163 if (vd->vdev_isl2cache) in vdev_free()
1164 spa_l2cache_remove(vd); in vdev_free()
1166 txg_list_destroy(&vd->vdev_ms_list); in vdev_free()
1167 txg_list_destroy(&vd->vdev_dtl_list); in vdev_free()
1169 mutex_enter(&vd->vdev_dtl_lock); in vdev_free()
1170 space_map_close(vd->vdev_dtl_sm); in vdev_free()
1172 zfs_range_tree_vacate(vd->vdev_dtl[t], NULL, NULL); in vdev_free()
1173 zfs_range_tree_destroy(vd->vdev_dtl[t]); in vdev_free()
1175 mutex_exit(&vd->vdev_dtl_lock); in vdev_free()
1177 EQUIV(vd->vdev_indirect_births != NULL, in vdev_free()
1178 vd->vdev_indirect_mapping != NULL); in vdev_free()
1179 if (vd->vdev_indirect_births != NULL) { in vdev_free()
1180 vdev_indirect_mapping_close(vd->vdev_indirect_mapping); in vdev_free()
1181 vdev_indirect_births_close(vd->vdev_indirect_births); in vdev_free()
1184 if (vd->vdev_obsolete_sm != NULL) { in vdev_free()
1185 ASSERT(vd->vdev_removing || in vdev_free()
1186 vd->vdev_ops == &vdev_indirect_ops); in vdev_free()
1187 space_map_close(vd->vdev_obsolete_sm); in vdev_free()
1188 vd->vdev_obsolete_sm = NULL; in vdev_free()
1190 zfs_range_tree_destroy(vd->vdev_obsolete_segments); in vdev_free()
1191 rw_destroy(&vd->vdev_indirect_rwlock); in vdev_free()
1192 mutex_destroy(&vd->vdev_obsolete_lock); in vdev_free()
1194 mutex_destroy(&vd->vdev_dtl_lock); in vdev_free()
1195 mutex_destroy(&vd->vdev_stat_lock); in vdev_free()
1196 mutex_destroy(&vd->vdev_probe_lock); in vdev_free()
1197 mutex_destroy(&vd->vdev_scan_io_queue_lock); in vdev_free()
1199 mutex_destroy(&vd->vdev_initialize_lock); in vdev_free()
1200 mutex_destroy(&vd->vdev_initialize_io_lock); in vdev_free()
1201 cv_destroy(&vd->vdev_initialize_io_cv); in vdev_free()
1202 cv_destroy(&vd->vdev_initialize_cv); in vdev_free()
1204 mutex_destroy(&vd->vdev_trim_lock); in vdev_free()
1205 mutex_destroy(&vd->vdev_autotrim_lock); in vdev_free()
1206 mutex_destroy(&vd->vdev_trim_io_lock); in vdev_free()
1207 cv_destroy(&vd->vdev_trim_cv); in vdev_free()
1208 cv_destroy(&vd->vdev_autotrim_cv); in vdev_free()
1209 cv_destroy(&vd->vdev_autotrim_kick_cv); in vdev_free()
1210 cv_destroy(&vd->vdev_trim_io_cv); in vdev_free()
1212 mutex_destroy(&vd->vdev_rebuild_lock); in vdev_free()
1213 cv_destroy(&vd->vdev_rebuild_cv); in vdev_free()
1215 zfs_ratelimit_fini(&vd->vdev_delay_rl); in vdev_free()
1216 zfs_ratelimit_fini(&vd->vdev_deadman_rl); in vdev_free()
1217 zfs_ratelimit_fini(&vd->vdev_dio_verify_rl); in vdev_free()
1218 zfs_ratelimit_fini(&vd->vdev_checksum_rl); in vdev_free()
1220 if (vd == spa->spa_root_vdev) in vdev_free()
1223 kmem_free(vd, sizeof (vdev_t)); in vdev_free()
1234 vdev_t *vd; in vdev_top_transfer() local
1316 while ((vd = txg_list_remove(&svd->vdev_dtl_list, t)) != NULL) in vdev_top_transfer()
1317 (void) txg_list_add(&tvd->vdev_dtl_list, vd, t); in vdev_top_transfer()
1342 vdev_top_update(vdev_t *tvd, vdev_t *vd) in vdev_top_update() argument
1344 if (vd == NULL) in vdev_top_update()
1347 vd->vdev_top = tvd; in vdev_top_update()
1349 for (int c = 0; c < vd->vdev_children; c++) in vdev_top_update()
1350 vdev_top_update(tvd, vd->vdev_child[c]); in vdev_top_update()
1476 vdev_metaslab_group_create(vdev_t *vd) in vdev_metaslab_group_create() argument
1478 spa_t *spa = vd->vdev_spa; in vdev_metaslab_group_create()
1483 if (vd->vdev_mg == NULL) { in vdev_metaslab_group_create()
1486 if (vd->vdev_islog && vd->vdev_alloc_bias == VDEV_BIAS_NONE) in vdev_metaslab_group_create()
1487 vd->vdev_alloc_bias = VDEV_BIAS_LOG; in vdev_metaslab_group_create()
1489 ASSERT3U(vd->vdev_islog, ==, in vdev_metaslab_group_create()
1490 (vd->vdev_alloc_bias == VDEV_BIAS_LOG)); in vdev_metaslab_group_create()
1492 switch (vd->vdev_alloc_bias) { in vdev_metaslab_group_create()
1506 vd->vdev_mg = metaslab_group_create(mc, vd); in vdev_metaslab_group_create()
1508 if (!vd->vdev_islog) { in vdev_metaslab_group_create()
1509 vd->vdev_log_mg = metaslab_group_create( in vdev_metaslab_group_create()
1510 spa_embedded_log_class(spa), vd); in vdev_metaslab_group_create()
1518 if (vd->vdev_top == vd && vd->vdev_ashift != 0 && in vdev_metaslab_group_create()
1519 mc == spa_normal_class(spa) && vd->vdev_aux == NULL) { in vdev_metaslab_group_create()
1520 if (vd->vdev_ashift > spa->spa_max_ashift) in vdev_metaslab_group_create()
1521 spa->spa_max_ashift = vd->vdev_ashift; in vdev_metaslab_group_create()
1522 if (vd->vdev_ashift < spa->spa_min_ashift) in vdev_metaslab_group_create()
1523 spa->spa_min_ashift = vd->vdev_ashift; in vdev_metaslab_group_create()
1525 uint64_t min_alloc = vdev_get_min_alloc(vd); in vdev_metaslab_group_create()
1532 vdev_update_nonallocating_space(vdev_t *vd, boolean_t add) in vdev_update_nonallocating_space() argument
1534 spa_t *spa = vd->vdev_spa; in vdev_update_nonallocating_space()
1536 if (vd->vdev_mg->mg_class != spa_normal_class(spa)) in vdev_update_nonallocating_space()
1539 uint64_t raw_space = metaslab_group_get_space(vd->vdev_mg); in vdev_update_nonallocating_space()
1541 vdev_deflated_space(vd, raw_space) : raw_space; in vdev_update_nonallocating_space()
1551 vdev_metaslab_init(vdev_t *vd, uint64_t txg) in vdev_metaslab_init() argument
1553 spa_t *spa = vd->vdev_spa; in vdev_metaslab_init()
1554 uint64_t oldc = vd->vdev_ms_count; in vdev_metaslab_init()
1555 uint64_t newc = vd->vdev_asize >> vd->vdev_ms_shift; in vdev_metaslab_init()
1565 if (vd->vdev_ms_shift == 0) in vdev_metaslab_init()
1568 ASSERT(!vd->vdev_ishole); in vdev_metaslab_init()
1575 memcpy(mspp, vd->vdev_ms, oldc * sizeof (*mspp)); in vdev_metaslab_init()
1576 vmem_free(vd->vdev_ms, oldc * sizeof (*mspp)); in vdev_metaslab_init()
1579 vd->vdev_ms = mspp; in vdev_metaslab_init()
1580 vd->vdev_ms_count = newc; in vdev_metaslab_init()
1589 if (txg == 0 && vd->vdev_ms_array != 0) { in vdev_metaslab_init()
1591 vd->vdev_ms_array, in vdev_metaslab_init()
1595 vdev_dbgmsg(vd, "unable to read the metaslab " in vdev_metaslab_init()
1601 error = metaslab_init(vd->vdev_mg, m, object, txg, in vdev_metaslab_init()
1602 &(vd->vdev_ms[m])); in vdev_metaslab_init()
1604 vdev_dbgmsg(vd, "metaslab_init failed [error=%d]", in vdev_metaslab_init()
1615 if (vd->vdev_mg->mg_class == spa_normal_class(spa) && in vdev_metaslab_init()
1616 vd->vdev_ms_count > zfs_embedded_slog_min_ms && in vdev_metaslab_init()
1617 avl_is_empty(&vd->vdev_log_mg->mg_metaslab_tree)) { in vdev_metaslab_init()
1629 space_map_allocated(vd->vdev_ms[m]->ms_sm); in vdev_metaslab_init()
1635 metaslab_t *slog_ms = vd->vdev_ms[slog_msid]; in vdev_metaslab_init()
1642 (void) txg_list_remove_this(&vd->vdev_ms_list, in vdev_metaslab_init()
1647 VERIFY0(metaslab_init(vd->vdev_log_mg, slog_msid, sm_obj, txg, in vdev_metaslab_init()
1648 &vd->vdev_ms[slog_msid])); in vdev_metaslab_init()
1659 if (vd->vdev_noalloc) { in vdev_metaslab_init()
1661 vdev_update_nonallocating_space(vd, B_TRUE); in vdev_metaslab_init()
1663 metaslab_group_activate(vd->vdev_mg); in vdev_metaslab_init()
1664 if (vd->vdev_log_mg != NULL) in vdev_metaslab_init()
1665 metaslab_group_activate(vd->vdev_log_mg); in vdev_metaslab_init()
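vdev_metaslab_init() computes the new metaslab count as newc = asize >> ms_shift, allocates a larger pointer array, copies the old oldc entries across, frees the old array, and then runs metaslab_init() only for the new tail. A stripped-down sketch of that grow step, with calloc/free standing in for the vmem allocator that the listing shows being freed:

#include <stdlib.h>
#include <string.h>
#include <stdint.h>

typedef struct metaslab metaslab_t;	/* opaque for this sketch */

/*
 * Grow an array of metaslab pointers from oldc to newc entries, keeping
 * the existing pointers in place.
 */
static metaslab_t **
grow_ms_array(metaslab_t **old, uint64_t oldc, uint64_t newc)
{
	metaslab_t **mspp = calloc(newc, sizeof (*mspp));

	if (oldc != 0) {
		memcpy(mspp, old, oldc * sizeof (*mspp));
		free(old);
	}
	/* Slots [oldc, newc) are NULL and still need metaslab_init(). */
	return (mspp);
}

int
main(void)
{
	uint64_t asize = 10ULL << 30;		/* 10 GiB device */
	uint64_t ms_shift = 29;			/* 512 MiB metaslabs */
	uint64_t newc = asize >> ms_shift;	/* 20 metaslabs */

	metaslab_t **ms = grow_ms_array(NULL, 0, newc);
	free(ms);
	return (0);
}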
1675 vdev_metaslab_fini(vdev_t *vd) in vdev_metaslab_fini() argument
1677 if (vd->vdev_checkpoint_sm != NULL) { in vdev_metaslab_fini()
1678 ASSERT(spa_feature_is_active(vd->vdev_spa, in vdev_metaslab_fini()
1680 space_map_close(vd->vdev_checkpoint_sm); in vdev_metaslab_fini()
1689 vd->vdev_checkpoint_sm = NULL; in vdev_metaslab_fini()
1692 if (vd->vdev_ms != NULL) { in vdev_metaslab_fini()
1693 metaslab_group_t *mg = vd->vdev_mg; in vdev_metaslab_fini()
1696 if (vd->vdev_log_mg != NULL) { in vdev_metaslab_fini()
1697 ASSERT(!vd->vdev_islog); in vdev_metaslab_fini()
1698 metaslab_group_passivate(vd->vdev_log_mg); in vdev_metaslab_fini()
1701 uint64_t count = vd->vdev_ms_count; in vdev_metaslab_fini()
1703 metaslab_t *msp = vd->vdev_ms[m]; in vdev_metaslab_fini()
1707 vmem_free(vd->vdev_ms, count * sizeof (metaslab_t *)); in vdev_metaslab_fini()
1708 vd->vdev_ms = NULL; in vdev_metaslab_fini()
1709 vd->vdev_ms_count = 0; in vdev_metaslab_fini()
1713 if (vd->vdev_log_mg != NULL) in vdev_metaslab_fini()
1714 ASSERT0(vd->vdev_log_mg->mg_histogram[i]); in vdev_metaslab_fini()
1717 ASSERT0(vd->vdev_ms_count); in vdev_metaslab_fini()
1731 vdev_t *vd = zio->io_vd; in vdev_probe_done() local
1734 ASSERT(vd->vdev_probe_zio != NULL); in vdev_probe_done()
1740 zio_nowait(zio_write_phys(vd->vdev_probe_zio, vd, in vdev_probe_done()
1755 vd->vdev_cant_read |= !vps->vps_readable; in vdev_probe_done()
1756 vd->vdev_cant_write |= !vps->vps_writeable; in vdev_probe_done()
1757 vdev_dbgmsg(vd, "probe done, cant_read=%u cant_write=%u", in vdev_probe_done()
1758 vd->vdev_cant_read, vd->vdev_cant_write); in vdev_probe_done()
1760 if (vdev_readable(vd) && in vdev_probe_done()
1761 (vdev_writeable(vd) || !spa_writeable(spa))) { in vdev_probe_done()
1765 vdev_dbgmsg(vd, "failed probe"); in vdev_probe_done()
1767 spa, vd, NULL, NULL, 0); in vdev_probe_done()
1777 vd->vdev_fault_wanted = B_TRUE; in vdev_probe_done()
1782 mutex_enter(&vd->vdev_probe_lock); in vdev_probe_done()
1783 ASSERT(vd->vdev_probe_zio == zio); in vdev_probe_done()
1784 vd->vdev_probe_zio = NULL; in vdev_probe_done()
1785 mutex_exit(&vd->vdev_probe_lock); in vdev_probe_done()
1789 if (!vdev_accessible(vd, pio)) in vdev_probe_done()
1804 vdev_probe(vdev_t *vd, zio_t *zio) in vdev_probe() argument
1806 spa_t *spa = vd->vdev_spa; in vdev_probe()
1810 ASSERT(vd->vdev_ops->vdev_op_leaf); in vdev_probe()
1823 mutex_enter(&vd->vdev_probe_lock); in vdev_probe()
1825 if ((pio = vd->vdev_probe_zio) == NULL) { in vdev_probe()
1850 vd->vdev_cant_read = B_FALSE; in vdev_probe()
1851 vd->vdev_cant_write = B_FALSE; in vdev_probe()
1854 vd->vdev_probe_zio = pio = zio_null(NULL, spa, vd, in vdev_probe()
1862 mutex_exit(&vd->vdev_probe_lock); in vdev_probe()
1870 zio_nowait(zio_read_phys(pio, vd, in vdev_probe()
1871 vdev_label_offset(vd->vdev_psize, l, in vdev_probe()
1888 vdev_t *vd = arg; in vdev_load_child() local
1890 vd->vdev_load_error = vdev_load(vd); in vdev_load_child()
1896 vdev_t *vd = arg; in vdev_open_child() local
1898 vd->vdev_open_thread = curthread; in vdev_open_child()
1899 vd->vdev_open_error = vdev_open(vd); in vdev_open_child()
1900 vd->vdev_open_thread = NULL; in vdev_open_child()
1904 vdev_uses_zvols(vdev_t *vd) in vdev_uses_zvols() argument
1907 if (zvol_is_zvol(vd->vdev_path)) in vdev_uses_zvols()
1911 for (int c = 0; c < vd->vdev_children; c++) in vdev_uses_zvols()
1912 if (vdev_uses_zvols(vd->vdev_child[c])) in vdev_uses_zvols()
1922 vdev_default_open_children_func(vdev_t *vd) in vdev_default_open_children_func() argument
1924 (void) vd; in vdev_default_open_children_func()
1934 vdev_open_children_impl(vdev_t *vd, vdev_open_children_func_t *open_func) in vdev_open_children_impl() argument
1936 int children = vd->vdev_children; in vdev_open_children_impl()
1940 vd->vdev_nonrot = B_TRUE; in vdev_open_children_impl()
1943 vdev_t *cvd = vd->vdev_child[c]; in vdev_open_children_impl()
1948 if (tq == NULL || vdev_uses_zvols(vd)) { in vdev_open_children_impl()
1959 vdev_t *cvd = vd->vdev_child[c]; in vdev_open_children_impl()
1960 vd->vdev_nonrot &= cvd->vdev_nonrot; in vdev_open_children_impl()
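vdev_open_children_impl() seeds vd->vdev_nonrot with B_TRUE and then ANDs in each child's flag once the child opens, so a parent counts as non-rotational only when every child does. The aggregation reduces to an all-of fold:

#include <stdbool.h>
#include <stdio.h>

/* A parent is non-rotational only if every child is. */
static bool
aggregate_nonrot(const bool *child_nonrot, int children)
{
	bool nonrot = true;

	for (int c = 0; c < children; c++)
		nonrot &= child_nonrot[c];
	return (nonrot);
}

int
main(void)
{
	bool children[] = { true, true, false };	/* one spinning child */

	printf("parent nonrot = %d\n", aggregate_nonrot(children, 3));
	return (0);
}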
1971 vdev_open_children(vdev_t *vd) in vdev_open_children() argument
1973 vdev_open_children_impl(vd, vdev_default_open_children_func); in vdev_open_children()
1980 vdev_open_children_subset(vdev_t *vd, vdev_open_children_func_t *open_func) in vdev_open_children_subset() argument
1982 vdev_open_children_impl(vd, open_func); in vdev_open_children_subset()
1994 vdev_set_deflate_ratio(vdev_t *vd) in vdev_set_deflate_ratio() argument
1996 if (vd == vd->vdev_top && !vd->vdev_ishole && vd->vdev_ashift != 0) { in vdev_set_deflate_ratio()
1997 vd->vdev_deflate_ratio = (1 << 17) / in vdev_set_deflate_ratio()
1998 (vdev_psize_to_asize_txg(vd, 1 << 17, 0) >> in vdev_set_deflate_ratio()
2028 vdev_ashift_optimize(vdev_t *vd) in vdev_ashift_optimize() argument
2030 ASSERT(vd == vd->vdev_top); in vdev_ashift_optimize()
2032 if (vd->vdev_ashift < vd->vdev_physical_ashift && in vdev_ashift_optimize()
2033 vd->vdev_physical_ashift <= zfs_vdev_max_auto_ashift) { in vdev_ashift_optimize()
2034 vd->vdev_ashift = MIN( in vdev_ashift_optimize()
2035 MAX(zfs_vdev_max_auto_ashift, vd->vdev_ashift), in vdev_ashift_optimize()
2037 vd->vdev_physical_ashift)); in vdev_ashift_optimize()
2049 vd->vdev_ashift = MAX(zfs_vdev_min_auto_ashift, in vdev_ashift_optimize()
2050 vd->vdev_ashift); in vdev_ashift_optimize()
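vdev_ashift_optimize() clamps a top-level vdev's ashift between the min/max auto-ashift tunables and the reported physical sector size. The operand elided from this listing (the line that does not mention vd) is, in current OpenZFS sources, MAX(zfs_vdev_min_auto_ashift, vd->vdev_physical_ashift); treat that detail and the default tunable values below as assumptions. A worked sketch of the clamp for a 512-byte-logical / 4 KiB-physical disk:

#include <stdio.h>
#include <stdint.h>

#define	MAX(a, b)	((a) > (b) ? (a) : (b))
#define	MIN(a, b)	((a) < (b) ? (a) : (b))

int
main(void)
{
	/* Assumed defaults for the auto-ashift tunables (9..14). */
	uint64_t min_auto = 9, max_auto = 14;

	/* A disk reporting 512 B logical, 4 KiB physical sectors. */
	uint64_t ashift = 9;		/* current / logical ashift */
	uint64_t physical = 12;		/* physical sector ashift */

	if (ashift < physical && physical <= max_auto) {
		/* Assumed reconstruction of the full clamp (see above). */
		ashift = MIN(MAX(max_auto, ashift), MAX(min_auto, physical));
	} else {
		ashift = MAX(min_auto, ashift);
	}
	printf("optimized ashift = %llu\n", (unsigned long long)ashift);
	return (0);
}

With these inputs the result is 12, i.e. the 4 KiB physical sector size wins once it falls inside the auto-ashift window.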
2058 vdev_open(vdev_t *vd) in vdev_open() argument
2060 spa_t *spa = vd->vdev_spa; in vdev_open()
2068 ASSERT(vd->vdev_open_thread == curthread || in vdev_open()
2070 ASSERT(vd->vdev_state == VDEV_STATE_CLOSED || in vdev_open()
2071 vd->vdev_state == VDEV_STATE_CANT_OPEN || in vdev_open()
2072 vd->vdev_state == VDEV_STATE_OFFLINE); in vdev_open()
2074 vd->vdev_stat.vs_aux = VDEV_AUX_NONE; in vdev_open()
2075 vd->vdev_cant_read = B_FALSE; in vdev_open()
2076 vd->vdev_cant_write = B_FALSE; in vdev_open()
2077 vd->vdev_fault_wanted = B_FALSE; in vdev_open()
2078 vd->vdev_remove_wanted = B_FALSE; in vdev_open()
2079 vd->vdev_min_asize = vdev_get_min_asize(vd); in vdev_open()
2085 if (!vd->vdev_removed && vd->vdev_faulted) { in vdev_open()
2086 ASSERT(vd->vdev_children == 0); in vdev_open()
2087 ASSERT(vd->vdev_label_aux == VDEV_AUX_ERR_EXCEEDED || in vdev_open()
2088 vd->vdev_label_aux == VDEV_AUX_EXTERNAL); in vdev_open()
2089 vdev_set_state(vd, B_TRUE, VDEV_STATE_FAULTED, in vdev_open()
2090 vd->vdev_label_aux); in vdev_open()
2092 } else if (vd->vdev_offline) { in vdev_open()
2093 ASSERT(vd->vdev_children == 0); in vdev_open()
2094 vdev_set_state(vd, B_TRUE, VDEV_STATE_OFFLINE, VDEV_AUX_NONE); in vdev_open()
2098 error = vd->vdev_ops->vdev_op_open(vd, &osize, &max_osize, in vdev_open()
2102 if (error == ENOENT && vd->vdev_removed) { in vdev_open()
2103 vdev_set_state(vd, B_TRUE, VDEV_STATE_REMOVED, in vdev_open()
2114 vdev_set_state(vd, B_TRUE, VDEV_STATE_CANT_OPEN, in vdev_open()
2123 vd->vdev_reopening = B_FALSE; in vdev_open()
2125 error = zio_handle_device_injection(vd, NULL, SET_ERROR(ENXIO)); in vdev_open()
2128 if (vd->vdev_removed && in vdev_open()
2129 vd->vdev_stat.vs_aux != VDEV_AUX_OPEN_FAILED) in vdev_open()
2130 vd->vdev_removed = B_FALSE; in vdev_open()
2132 if (vd->vdev_stat.vs_aux == VDEV_AUX_CHILDREN_OFFLINE) { in vdev_open()
2133 vdev_set_state(vd, B_TRUE, VDEV_STATE_OFFLINE, in vdev_open()
2134 vd->vdev_stat.vs_aux); in vdev_open()
2136 vdev_set_state(vd, B_TRUE, VDEV_STATE_CANT_OPEN, in vdev_open()
2137 vd->vdev_stat.vs_aux); in vdev_open()
2142 vd->vdev_removed = B_FALSE; in vdev_open()
2148 if (vd->vdev_faulted) { in vdev_open()
2149 ASSERT(vd->vdev_children == 0); in vdev_open()
2150 ASSERT(vd->vdev_label_aux == VDEV_AUX_ERR_EXCEEDED || in vdev_open()
2151 vd->vdev_label_aux == VDEV_AUX_EXTERNAL); in vdev_open()
2152 vdev_set_state(vd, B_TRUE, VDEV_STATE_FAULTED, in vdev_open()
2153 vd->vdev_label_aux); in vdev_open()
2157 if (vd->vdev_degraded) { in vdev_open()
2158 ASSERT(vd->vdev_children == 0); in vdev_open()
2159 vdev_set_state(vd, B_TRUE, VDEV_STATE_DEGRADED, in vdev_open()
2162 vdev_set_state(vd, B_TRUE, VDEV_STATE_HEALTHY, 0); in vdev_open()
2168 if (vd->vdev_ishole || vd->vdev_ops == &vdev_missing_ops) in vdev_open()
2171 for (int c = 0; c < vd->vdev_children; c++) { in vdev_open()
2172 if (vd->vdev_child[c]->vdev_state != VDEV_STATE_HEALTHY) { in vdev_open()
2173 vdev_set_state(vd, B_TRUE, VDEV_STATE_DEGRADED, in vdev_open()
2182 if (vd->vdev_children == 0) { in vdev_open()
2184 vdev_set_state(vd, B_TRUE, VDEV_STATE_CANT_OPEN, in vdev_open()
2193 if (vd->vdev_parent != NULL && osize < SPA_MINDEVSIZE - in vdev_open()
2195 vdev_set_state(vd, B_TRUE, VDEV_STATE_CANT_OPEN, in vdev_open()
2208 if ((psize > vd->vdev_psize) && (vd->vdev_psize != 0)) in vdev_open()
2209 vd->vdev_copy_uberblocks = B_TRUE; in vdev_open()
2211 vd->vdev_psize = psize; in vdev_open()
2216 if (asize < vd->vdev_min_asize) { in vdev_open()
2217 vdev_set_state(vd, B_TRUE, VDEV_STATE_CANT_OPEN, in vdev_open()
2229 vd->vdev_physical_ashift = in vdev_open()
2230 MAX(physical_ashift, vd->vdev_physical_ashift); in vdev_open()
2231 vd->vdev_logical_ashift = MAX(logical_ashift, in vdev_open()
2232 vd->vdev_logical_ashift); in vdev_open()
2234 if (vd->vdev_asize == 0) { in vdev_open()
2239 vd->vdev_asize = asize; in vdev_open()
2240 vd->vdev_max_asize = max_asize; in vdev_open()
2247 if (vd->vdev_ashift < vd->vdev_logical_ashift) { in vdev_open()
2248 vd->vdev_ashift = vd->vdev_logical_ashift; in vdev_open()
2250 if (vd->vdev_logical_ashift > ASHIFT_MAX) { in vdev_open()
2251 vdev_set_state(vd, B_TRUE, VDEV_STATE_CANT_OPEN, in vdev_open()
2256 if (vd->vdev_top == vd && vd->vdev_attaching == B_FALSE) in vdev_open()
2257 vdev_ashift_optimize(vd); in vdev_open()
2258 vd->vdev_attaching = B_FALSE; in vdev_open()
2260 if (vd->vdev_ashift != 0 && (vd->vdev_ashift < ASHIFT_MIN || in vdev_open()
2261 vd->vdev_ashift > ASHIFT_MAX)) { in vdev_open()
2262 vdev_set_state(vd, B_TRUE, VDEV_STATE_CANT_OPEN, in vdev_open()
2270 if (vd->vdev_ashift > vd->vdev_top->vdev_ashift && in vdev_open()
2271 vd->vdev_ops->vdev_op_leaf) { in vdev_open()
2274 spa, vd, NULL, NULL, 0); in vdev_open()
2275 vdev_set_state(vd, B_TRUE, VDEV_STATE_CANT_OPEN, in vdev_open()
2279 vd->vdev_max_asize = max_asize; in vdev_open()
2294 if (vd->vdev_state == VDEV_STATE_HEALTHY && in vdev_open()
2295 ((asize > vd->vdev_asize && in vdev_open()
2296 (vd->vdev_expanding || spa->spa_autoexpand)) || in vdev_open()
2297 (asize < vd->vdev_asize))) in vdev_open()
2298 vd->vdev_asize = asize; in vdev_open()
2300 vdev_set_min_asize(vd); in vdev_open()
2306 if (vd->vdev_ops->vdev_op_leaf && in vdev_open()
2307 (error = zio_wait(vdev_probe(vd, NULL))) != 0) { in vdev_open()
2308 vdev_set_state(vd, B_TRUE, VDEV_STATE_FAULTED, in vdev_open()
2316 if (vd->vdev_top == vd && vd->vdev_ashift != 0 && in vdev_open()
2317 vd->vdev_islog == 0 && vd->vdev_aux == NULL) { in vdev_open()
2318 uint64_t min_alloc = vdev_get_min_alloc(vd); in vdev_open()
2327 if (vd->vdev_ops->vdev_op_leaf && !spa->spa_scrub_reopen) in vdev_open()
2328 dsl_scan_assess_vdev(spa->spa_dsl_pool, vd); in vdev_open()
2336 vdev_t *vd = arg; in vdev_validate_child() local
2338 vd->vdev_validate_thread = curthread; in vdev_validate_child()
2339 vd->vdev_validate_error = vdev_validate(vd); in vdev_validate_child()
2340 vd->vdev_validate_thread = NULL; in vdev_validate_child()
2354 vdev_validate(vdev_t *vd) in vdev_validate() argument
2356 spa_t *spa = vd->vdev_spa; in vdev_validate()
2363 int children = vd->vdev_children; in vdev_validate()
2374 vdev_t *cvd = vd->vdev_child[c]; in vdev_validate()
2388 int error = vd->vdev_child[c]->vdev_validate_error; in vdev_validate()
2400 if (!vd->vdev_ops->vdev_op_leaf || !vdev_readable(vd)) in vdev_validate()
2415 if ((label = vdev_label_read_config(vd, txg)) == NULL) { in vdev_validate()
2416 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_validate()
2418 vdev_dbgmsg(vd, "vdev_validate: failed reading config for " in vdev_validate()
2429 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_validate()
2432 vdev_dbgmsg(vd, "vdev_validate: vdev split into other pool"); in vdev_validate()
2437 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_validate()
2440 vdev_dbgmsg(vd, "vdev_validate: '%s' missing from label", in vdev_validate()
2453 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_validate()
2456 vdev_dbgmsg(vd, "vdev_validate: vdev label pool_guid doesn't " in vdev_validate()
2468 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_validate()
2471 vdev_dbgmsg(vd, "vdev_validate: '%s' missing from label", in vdev_validate()
2478 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_validate()
2481 vdev_dbgmsg(vd, "vdev_validate: '%s' missing from label", in vdev_validate()
2501 if (vd->vdev_guid != guid && vd->vdev_guid != aux_guid) { in vdev_validate()
2504 if (vd != vd->vdev_top || vd->vdev_guid != top_guid) in vdev_validate()
2507 if (vd->vdev_guid != top_guid && in vdev_validate()
2508 vd->vdev_top->vdev_guid != guid) in vdev_validate()
2513 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_validate()
2516 vdev_dbgmsg(vd, "vdev_validate: config guid " in vdev_validate()
2518 vdev_dbgmsg(vd, "CONFIG: guid %llu, top_guid %llu", in vdev_validate()
2519 (u_longlong_t)vd->vdev_guid, in vdev_validate()
2520 (u_longlong_t)vd->vdev_top->vdev_guid); in vdev_validate()
2521 vdev_dbgmsg(vd, "LABEL: guid %llu, top_guid %llu, " in vdev_validate()
2530 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_validate()
2533 vdev_dbgmsg(vd, "vdev_validate: '%s' missing from label", in vdev_validate()
2547 vdev_dbgmsg(vd, "vdev_validate: invalid pool state (%llu) " in vdev_validate()
2557 if (vd->vdev_not_present) in vdev_validate()
2558 vd->vdev_not_present = 0; in vdev_validate()
2683 vdev_t *vd = vdev_lookup_by_guid(stvd, dvd->vdev_guid); in vdev_copy_path_search() local
2685 if (vd == NULL || vd->vdev_ops != dvd->vdev_ops) in vdev_copy_path_search()
2688 ASSERT(vd->vdev_ops->vdev_op_leaf); in vdev_copy_path_search()
2690 vdev_copy_path_impl(vd, dvd); in vdev_copy_path_search()
2716 vdev_close(vdev_t *vd) in vdev_close() argument
2718 vdev_t *pvd = vd->vdev_parent; in vdev_close()
2719 spa_t *spa __maybe_unused = vd->vdev_spa; in vdev_close()
2721 ASSERT(vd != NULL); in vdev_close()
2722 ASSERT(vd->vdev_open_thread == curthread || in vdev_close()
2730 vd->vdev_reopening = (pvd->vdev_reopening && !vd->vdev_offline); in vdev_close()
2732 vd->vdev_ops->vdev_op_close(vd); in vdev_close()
2739 vd->vdev_prevstate = vd->vdev_state; in vdev_close()
2741 if (vd->vdev_offline) in vdev_close()
2742 vd->vdev_state = VDEV_STATE_OFFLINE; in vdev_close()
2744 vd->vdev_state = VDEV_STATE_CLOSED; in vdev_close()
2745 vd->vdev_stat.vs_aux = VDEV_AUX_NONE; in vdev_close()
2749 vdev_hold(vdev_t *vd) in vdev_hold() argument
2751 spa_t *spa = vd->vdev_spa; in vdev_hold()
2757 for (int c = 0; c < vd->vdev_children; c++) in vdev_hold()
2758 vdev_hold(vd->vdev_child[c]); in vdev_hold()
2760 if (vd->vdev_ops->vdev_op_leaf && vd->vdev_ops->vdev_op_hold != NULL) in vdev_hold()
2761 vd->vdev_ops->vdev_op_hold(vd); in vdev_hold()
2765 vdev_rele(vdev_t *vd) in vdev_rele() argument
2767 ASSERT(spa_is_root(vd->vdev_spa)); in vdev_rele()
2768 for (int c = 0; c < vd->vdev_children; c++) in vdev_rele()
2769 vdev_rele(vd->vdev_child[c]); in vdev_rele()
2771 if (vd->vdev_ops->vdev_op_leaf && vd->vdev_ops->vdev_op_rele != NULL) in vdev_rele()
2772 vd->vdev_ops->vdev_op_rele(vd); in vdev_rele()
2782 vdev_reopen(vdev_t *vd) in vdev_reopen() argument
2784 spa_t *spa = vd->vdev_spa; in vdev_reopen()
2789 vd->vdev_reopening = !vd->vdev_offline; in vdev_reopen()
2790 vdev_close(vd); in vdev_reopen()
2791 (void) vdev_open(vd); in vdev_reopen()
2798 if (vd->vdev_aux) { in vdev_reopen()
2799 (void) vdev_validate_aux(vd); in vdev_reopen()
2800 if (vdev_readable(vd) && vdev_writeable(vd) && in vdev_reopen()
2801 vd->vdev_aux == &spa->spa_l2cache) { in vdev_reopen()
2807 if (l2arc_vdev_present(vd)) { in vdev_reopen()
2808 l2arc_rebuild_vdev(vd, B_TRUE); in vdev_reopen()
2810 l2arc_add_vdev(spa, vd); in vdev_reopen()
2816 (void) vdev_validate(vd); in vdev_reopen()
2833 vdev_propagate_state(vd); in vdev_reopen()
2837 vdev_create(vdev_t *vd, uint64_t txg, boolean_t isreplacing) in vdev_create() argument
2846 error = vdev_open(vd); in vdev_create()
2848 if (error || vd->vdev_state != VDEV_STATE_HEALTHY) { in vdev_create()
2849 vdev_close(vd); in vdev_create()
2856 if ((error = vdev_dtl_load(vd)) != 0 || in vdev_create()
2857 (error = vdev_label_init(vd, txg, isreplacing ? in vdev_create()
2859 vdev_close(vd); in vdev_create()
2867 vdev_metaslab_set_size(vdev_t *vd) in vdev_metaslab_set_size() argument
2869 uint64_t asize = vd->vdev_asize; in vdev_metaslab_set_size()
2933 vd->vdev_ms_shift = ms_shift; in vdev_metaslab_set_size()
2934 ASSERT3U(vd->vdev_ms_shift, >=, SPA_MAXBLOCKSHIFT); in vdev_metaslab_set_size()
2938 vdev_dirty(vdev_t *vd, int flags, void *arg, uint64_t txg) in vdev_dirty() argument
2940 ASSERT(vd == vd->vdev_top); in vdev_dirty()
2942 ASSERT(vdev_is_concrete(vd) || flags == 0); in vdev_dirty()
2944 ASSERT(spa_writeable(vd->vdev_spa)); in vdev_dirty()
2947 (void) txg_list_add(&vd->vdev_ms_list, arg, txg); in vdev_dirty()
2950 (void) txg_list_add(&vd->vdev_dtl_list, arg, txg); in vdev_dirty()
2952 (void) txg_list_add(&vd->vdev_spa->spa_vdev_txg_list, vd, txg); in vdev_dirty()
2956 vdev_dirty_leaves(vdev_t *vd, int flags, uint64_t txg) in vdev_dirty_leaves() argument
2958 for (int c = 0; c < vd->vdev_children; c++) in vdev_dirty_leaves()
2959 vdev_dirty_leaves(vd->vdev_child[c], flags, txg); in vdev_dirty_leaves()
2961 if (vd->vdev_ops->vdev_op_leaf) in vdev_dirty_leaves()
2962 vdev_dirty(vd->vdev_top, flags, vd, txg); in vdev_dirty_leaves()
3004 vdev_dtl_dirty(vdev_t *vd, vdev_dtl_type_t t, uint64_t txg, uint64_t size) in vdev_dtl_dirty() argument
3006 zfs_range_tree_t *rt = vd->vdev_dtl[t]; in vdev_dtl_dirty()
3009 ASSERT(vd != vd->vdev_spa->spa_root_vdev); in vdev_dtl_dirty()
3010 ASSERT(spa_writeable(vd->vdev_spa)); in vdev_dtl_dirty()
3012 mutex_enter(&vd->vdev_dtl_lock); in vdev_dtl_dirty()
3015 mutex_exit(&vd->vdev_dtl_lock); in vdev_dtl_dirty()
3019 vdev_dtl_contains(vdev_t *vd, vdev_dtl_type_t t, uint64_t txg, uint64_t size) in vdev_dtl_contains() argument
3021 zfs_range_tree_t *rt = vd->vdev_dtl[t]; in vdev_dtl_contains()
3025 ASSERT(vd != vd->vdev_spa->spa_root_vdev); in vdev_dtl_contains()
3035 mutex_enter(&vd->vdev_dtl_lock); in vdev_dtl_contains()
3038 mutex_exit(&vd->vdev_dtl_lock); in vdev_dtl_contains()
3044 vdev_dtl_empty(vdev_t *vd, vdev_dtl_type_t t) in vdev_dtl_empty() argument
3046 zfs_range_tree_t *rt = vd->vdev_dtl[t]; in vdev_dtl_empty()
3049 mutex_enter(&vd->vdev_dtl_lock); in vdev_dtl_empty()
3051 mutex_exit(&vd->vdev_dtl_lock); in vdev_dtl_empty()
3061 vdev_default_need_resilver(vdev_t *vd, const dva_t *dva, size_t psize, in vdev_default_need_resilver() argument
3070 return (vdev_dtl_contains(vd, DTL_PARTIAL, phys_birth, 1)); in vdev_default_need_resilver()
3077 vdev_dtl_need_resilver(vdev_t *vd, const dva_t *dva, size_t psize, in vdev_dtl_need_resilver() argument
3080 ASSERT(vd != vd->vdev_spa->spa_root_vdev); in vdev_dtl_need_resilver()
3082 if (vd->vdev_ops->vdev_op_need_resilver == NULL || in vdev_dtl_need_resilver()
3083 vd->vdev_ops->vdev_op_leaf) in vdev_dtl_need_resilver()
3086 return (vd->vdev_ops->vdev_op_need_resilver(vd, dva, psize, in vdev_dtl_need_resilver()
3094 vdev_dtl_min(vdev_t *vd) in vdev_dtl_min() argument
3096 ASSERT(MUTEX_HELD(&vd->vdev_dtl_lock)); in vdev_dtl_min()
3097 ASSERT3U(zfs_range_tree_space(vd->vdev_dtl[DTL_MISSING]), !=, 0); in vdev_dtl_min()
3098 ASSERT0(vd->vdev_children); in vdev_dtl_min()
3100 return (zfs_range_tree_min(vd->vdev_dtl[DTL_MISSING]) - 1); in vdev_dtl_min()
3107 vdev_dtl_max(vdev_t *vd) in vdev_dtl_max() argument
3109 ASSERT(MUTEX_HELD(&vd->vdev_dtl_lock)); in vdev_dtl_max()
3110 ASSERT3U(zfs_range_tree_space(vd->vdev_dtl[DTL_MISSING]), !=, 0); in vdev_dtl_max()
3111 ASSERT0(vd->vdev_children); in vdev_dtl_max()
3113 return (zfs_range_tree_max(vd->vdev_dtl[DTL_MISSING])); in vdev_dtl_max()
3125 vdev_dtl_should_excise(vdev_t *vd, boolean_t rebuild_done) in vdev_dtl_should_excise() argument
3127 ASSERT0(vd->vdev_children); in vdev_dtl_should_excise()
3129 if (vd->vdev_state < VDEV_STATE_DEGRADED) in vdev_dtl_should_excise()
3132 if (vd->vdev_resilver_deferred) in vdev_dtl_should_excise()
3135 if (zfs_range_tree_is_empty(vd->vdev_dtl[DTL_MISSING])) in vdev_dtl_should_excise()
3139 vdev_rebuild_t *vr = &vd->vdev_top->vdev_rebuild_config; in vdev_dtl_should_excise()
3143 if (vd->vdev_rebuild_txg == 0) in vdev_dtl_should_excise()
3152 vdev_dtl_max(vd) <= vrp->vrp_max_txg) { in vdev_dtl_should_excise()
3153 ASSERT3U(vrp->vrp_min_txg, <=, vdev_dtl_min(vd)); in vdev_dtl_should_excise()
3154 ASSERT3U(vrp->vrp_min_txg, <, vd->vdev_rebuild_txg); in vdev_dtl_should_excise()
3155 ASSERT3U(vd->vdev_rebuild_txg, <=, vrp->vrp_max_txg); in vdev_dtl_should_excise()
3159 dsl_scan_t *scn = vd->vdev_spa->spa_dsl_pool->dp_scan; in vdev_dtl_should_excise()
3163 if (vd->vdev_resilver_txg == 0) in vdev_dtl_should_excise()
3173 if (vdev_dtl_max(vd) <= scn->scn_phys.scn_max_txg) { in vdev_dtl_should_excise()
3174 ASSERT3U(scnp->scn_min_txg, <=, vdev_dtl_min(vd)); in vdev_dtl_should_excise()
3175 ASSERT3U(scnp->scn_min_txg, <, vd->vdev_resilver_txg); in vdev_dtl_should_excise()
3176 ASSERT3U(vd->vdev_resilver_txg, <=, scnp->scn_max_txg); in vdev_dtl_should_excise()
3189 vdev_dtl_reassess_impl(vdev_t *vd, uint64_t txg, uint64_t scrub_txg, in vdev_dtl_reassess_impl() argument
3192 spa_t *spa = vd->vdev_spa; in vdev_dtl_reassess_impl()
3198 for (int c = 0; c < vd->vdev_children; c++) in vdev_dtl_reassess_impl()
3199 vdev_dtl_reassess_impl(vd->vdev_child[c], txg, in vdev_dtl_reassess_impl()
3202 if (vd == spa->spa_root_vdev || !vdev_is_concrete(vd) || vd->vdev_aux) in vdev_dtl_reassess_impl()
3205 if (vd->vdev_ops->vdev_op_leaf) { in vdev_dtl_reassess_impl()
3207 vdev_rebuild_t *vr = &vd->vdev_top->vdev_rebuild_config; in vdev_dtl_reassess_impl()
3211 mutex_enter(&vd->vdev_dtl_lock); in vdev_dtl_reassess_impl()
3224 !zfs_range_tree_is_empty(vd->vdev_dtl[DTL_MISSING])) { in vdev_dtl_reassess_impl()
3228 (u_longlong_t)vd->vdev_guid, (u_longlong_t)txg, in vdev_dtl_reassess_impl()
3230 (u_longlong_t)vdev_dtl_min(vd), in vdev_dtl_reassess_impl()
3231 (u_longlong_t)vdev_dtl_max(vd), in vdev_dtl_reassess_impl()
3252 vdev_dtl_should_excise(vd, rebuild_done)) { in vdev_dtl_reassess_impl()
3272 vd->vdev_dtl[DTL_MISSING], 1); in vdev_dtl_reassess_impl()
3275 vd->vdev_dtl[DTL_SCRUB], 2); in vdev_dtl_reassess_impl()
3277 vd->vdev_dtl[DTL_MISSING], 1); in vdev_dtl_reassess_impl()
3281 vd->vdev_dtl[DTL_MISSING])) { in vdev_dtl_reassess_impl()
3283 (u_longlong_t)vdev_dtl_min(vd), in vdev_dtl_reassess_impl()
3284 (u_longlong_t)vdev_dtl_max(vd)); in vdev_dtl_reassess_impl()
3289 zfs_range_tree_vacate(vd->vdev_dtl[DTL_PARTIAL], NULL, NULL); in vdev_dtl_reassess_impl()
3290 zfs_range_tree_walk(vd->vdev_dtl[DTL_MISSING], in vdev_dtl_reassess_impl()
3291 zfs_range_tree_add, vd->vdev_dtl[DTL_PARTIAL]); in vdev_dtl_reassess_impl()
3293 zfs_range_tree_vacate(vd->vdev_dtl[DTL_SCRUB], NULL, in vdev_dtl_reassess_impl()
3295 zfs_range_tree_vacate(vd->vdev_dtl[DTL_OUTAGE], NULL, NULL); in vdev_dtl_reassess_impl()
3303 if (!vdev_readable(vd) || in vdev_dtl_reassess_impl()
3304 (faulting && vd->vdev_parent != NULL && in vdev_dtl_reassess_impl()
3305 vd->vdev_parent->vdev_ops == &vdev_replacing_ops)) { in vdev_dtl_reassess_impl()
3306 zfs_range_tree_add(vd->vdev_dtl[DTL_OUTAGE], 0, -1ULL); in vdev_dtl_reassess_impl()
3308 zfs_range_tree_walk(vd->vdev_dtl[DTL_MISSING], in vdev_dtl_reassess_impl()
3309 zfs_range_tree_add, vd->vdev_dtl[DTL_OUTAGE]); in vdev_dtl_reassess_impl()
3318 zfs_range_tree_is_empty(vd->vdev_dtl[DTL_MISSING]) && in vdev_dtl_reassess_impl()
3319 zfs_range_tree_is_empty(vd->vdev_dtl[DTL_OUTAGE])) { in vdev_dtl_reassess_impl()
3320 if (vd->vdev_rebuild_txg != 0) { in vdev_dtl_reassess_impl()
3321 vd->vdev_rebuild_txg = 0; in vdev_dtl_reassess_impl()
3322 vdev_config_dirty(vd->vdev_top); in vdev_dtl_reassess_impl()
3323 } else if (vd->vdev_resilver_txg != 0) { in vdev_dtl_reassess_impl()
3324 vd->vdev_resilver_txg = 0; in vdev_dtl_reassess_impl()
3325 vdev_config_dirty(vd->vdev_top); in vdev_dtl_reassess_impl()
3329 mutex_exit(&vd->vdev_dtl_lock); in vdev_dtl_reassess_impl()
3332 vdev_dirty(vd->vdev_top, VDD_DTL, vd, txg); in vdev_dtl_reassess_impl()
3334 mutex_enter(&vd->vdev_dtl_lock); in vdev_dtl_reassess_impl()
3345 } else if (vdev_get_nparity(vd) != 0) { in vdev_dtl_reassess_impl()
3347 minref = vdev_get_nparity(vd) + 1; in vdev_dtl_reassess_impl()
3350 minref = vd->vdev_children; in vdev_dtl_reassess_impl()
3353 for (int c = 0; c < vd->vdev_children; c++) { in vdev_dtl_reassess_impl()
3354 vdev_t *cvd = vd->vdev_child[c]; in vdev_dtl_reassess_impl()
3361 vd->vdev_dtl[t], minref); in vdev_dtl_reassess_impl()
3364 mutex_exit(&vd->vdev_dtl_lock); in vdev_dtl_reassess_impl()
3367 if (vd->vdev_top->vdev_ops == &vdev_raidz_ops) { in vdev_dtl_reassess_impl()
3368 raidz_dtl_reassessed(vd); in vdev_dtl_reassess_impl()
3373 vdev_dtl_reassess(vdev_t *vd, uint64_t txg, uint64_t scrub_txg, in vdev_dtl_reassess() argument
3376 return (vdev_dtl_reassess_impl(vd, txg, scrub_txg, scrub_done, in vdev_dtl_reassess()
3384 vdev_post_kobj_evt(vdev_t *vd) in vdev_post_kobj_evt() argument
3386 if (vd->vdev_ops->vdev_op_kobj_evt_post && in vdev_post_kobj_evt()
3387 vd->vdev_kobj_flag == B_FALSE) { in vdev_post_kobj_evt()
3388 vd->vdev_kobj_flag = B_TRUE; in vdev_post_kobj_evt()
3389 vd->vdev_ops->vdev_op_kobj_evt_post(vd); in vdev_post_kobj_evt()
3392 for (int c = 0; c < vd->vdev_children; c++) in vdev_post_kobj_evt()
3393 vdev_post_kobj_evt(vd->vdev_child[c]); in vdev_post_kobj_evt()
3400 vdev_clear_kobj_evt(vdev_t *vd) in vdev_clear_kobj_evt() argument
3402 vd->vdev_kobj_flag = B_FALSE; in vdev_clear_kobj_evt()
3404 for (int c = 0; c < vd->vdev_children; c++) in vdev_clear_kobj_evt()
3405 vdev_clear_kobj_evt(vd->vdev_child[c]); in vdev_clear_kobj_evt()
3409 vdev_dtl_load(vdev_t *vd) in vdev_dtl_load() argument
3411 spa_t *spa = vd->vdev_spa; in vdev_dtl_load()
3416 if (vd->vdev_ops->vdev_op_leaf && vd->vdev_dtl_object != 0) { in vdev_dtl_load()
3417 ASSERT(vdev_is_concrete(vd)); in vdev_dtl_load()
3425 error = space_map_open(&vd->vdev_dtl_sm, mos, in vdev_dtl_load()
3426 vd->vdev_dtl_object, 0, -1ULL, 0); in vdev_dtl_load()
3429 ASSERT(vd->vdev_dtl_sm != NULL); in vdev_dtl_load()
3432 error = space_map_load(vd->vdev_dtl_sm, rt, SM_ALLOC); in vdev_dtl_load()
3434 mutex_enter(&vd->vdev_dtl_lock); in vdev_dtl_load()
3436 vd->vdev_dtl[DTL_MISSING]); in vdev_dtl_load()
3437 mutex_exit(&vd->vdev_dtl_lock); in vdev_dtl_load()
3446 for (int c = 0; c < vd->vdev_children; c++) { in vdev_dtl_load()
3447 error = vdev_dtl_load(vd->vdev_child[c]); in vdev_dtl_load()
3456 vdev_zap_allocation_data(vdev_t *vd, dmu_tx_t *tx) in vdev_zap_allocation_data() argument
3458 spa_t *spa = vd->vdev_spa; in vdev_zap_allocation_data()
3460 vdev_alloc_bias_t alloc_bias = vd->vdev_alloc_bias; in vdev_zap_allocation_data()
3471 VERIFY0(zap_add(mos, vd->vdev_top_zap, VDEV_TOP_ZAP_ALLOCATION_BIAS, in vdev_zap_allocation_data()
3480 vdev_destroy_unlink_zap(vdev_t *vd, uint64_t zapobj, dmu_tx_t *tx) in vdev_destroy_unlink_zap() argument
3482 spa_t *spa = vd->vdev_spa; in vdev_destroy_unlink_zap()
3490 vdev_create_link_zap(vdev_t *vd, dmu_tx_t *tx) in vdev_create_link_zap() argument
3492 spa_t *spa = vd->vdev_spa; in vdev_create_link_zap()
3504 vdev_construct_zaps(vdev_t *vd, dmu_tx_t *tx) in vdev_construct_zaps() argument
3506 if (vd->vdev_ops != &vdev_hole_ops && in vdev_construct_zaps()
3507 vd->vdev_ops != &vdev_missing_ops && in vdev_construct_zaps()
3508 vd->vdev_ops != &vdev_root_ops && in vdev_construct_zaps()
3509 !vd->vdev_top->vdev_removing) { in vdev_construct_zaps()
3510 if (vd->vdev_ops->vdev_op_leaf && vd->vdev_leaf_zap == 0) { in vdev_construct_zaps()
3511 vd->vdev_leaf_zap = vdev_create_link_zap(vd, tx); in vdev_construct_zaps()
3513 if (vd == vd->vdev_top && vd->vdev_top_zap == 0) { in vdev_construct_zaps()
3514 vd->vdev_top_zap = vdev_create_link_zap(vd, tx); in vdev_construct_zaps()
3515 if (vd->vdev_alloc_bias != VDEV_BIAS_NONE) in vdev_construct_zaps()
3516 vdev_zap_allocation_data(vd, tx); in vdev_construct_zaps()
3519 if (vd->vdev_ops == &vdev_root_ops && vd->vdev_root_zap == 0 && in vdev_construct_zaps()
3520 spa_feature_is_enabled(vd->vdev_spa, SPA_FEATURE_AVZ_V2)) { in vdev_construct_zaps()
3521 if (!spa_feature_is_active(vd->vdev_spa, SPA_FEATURE_AVZ_V2)) in vdev_construct_zaps()
3522 spa_feature_incr(vd->vdev_spa, SPA_FEATURE_AVZ_V2, tx); in vdev_construct_zaps()
3523 vd->vdev_root_zap = vdev_create_link_zap(vd, tx); in vdev_construct_zaps()
3526 for (uint64_t i = 0; i < vd->vdev_children; i++) { in vdev_construct_zaps()
3527 vdev_construct_zaps(vd->vdev_child[i], tx); in vdev_construct_zaps()
3532 vdev_dtl_sync(vdev_t *vd, uint64_t txg) in vdev_dtl_sync() argument
3534 spa_t *spa = vd->vdev_spa; in vdev_dtl_sync()
3535 zfs_range_tree_t *rt = vd->vdev_dtl[DTL_MISSING]; in vdev_dtl_sync()
3539 uint64_t object = space_map_object(vd->vdev_dtl_sm); in vdev_dtl_sync()
3541 ASSERT(vdev_is_concrete(vd)); in vdev_dtl_sync()
3542 ASSERT(vd->vdev_ops->vdev_op_leaf); in vdev_dtl_sync()
3546 if (vd->vdev_detached || vd->vdev_top->vdev_removing) { in vdev_dtl_sync()
3547 mutex_enter(&vd->vdev_dtl_lock); in vdev_dtl_sync()
3548 space_map_free(vd->vdev_dtl_sm, tx); in vdev_dtl_sync()
3549 space_map_close(vd->vdev_dtl_sm); in vdev_dtl_sync()
3550 vd->vdev_dtl_sm = NULL; in vdev_dtl_sync()
3551 mutex_exit(&vd->vdev_dtl_lock); in vdev_dtl_sync()
3558 if (vd->vdev_leaf_zap != 0 && (vd->vdev_detached || in vdev_dtl_sync()
3559 vd->vdev_top->vdev_islog)) { in vdev_dtl_sync()
3560 vdev_destroy_unlink_zap(vd, vd->vdev_leaf_zap, tx); in vdev_dtl_sync()
3561 vd->vdev_leaf_zap = 0; in vdev_dtl_sync()
3568 if (vd->vdev_dtl_sm == NULL) { in vdev_dtl_sync()
3574 VERIFY0(space_map_open(&vd->vdev_dtl_sm, mos, new_object, in vdev_dtl_sync()
3576 ASSERT(vd->vdev_dtl_sm != NULL); in vdev_dtl_sync()
3581 mutex_enter(&vd->vdev_dtl_lock); in vdev_dtl_sync()
3583 mutex_exit(&vd->vdev_dtl_lock); in vdev_dtl_sync()
3585 space_map_truncate(vd->vdev_dtl_sm, zfs_vdev_dtl_sm_blksz, tx); in vdev_dtl_sync()
3586 space_map_write(vd->vdev_dtl_sm, rtsync, SM_ALLOC, SM_NO_VDEVID, tx); in vdev_dtl_sync()
3595 if (object != space_map_object(vd->vdev_dtl_sm)) { in vdev_dtl_sync()
3596 vdev_dbgmsg(vd, "txg %llu, spa %s, DTL old object %llu, " in vdev_dtl_sync()
3599 (u_longlong_t)space_map_object(vd->vdev_dtl_sm)); in vdev_dtl_sync()
3600 vdev_config_dirty(vd->vdev_top); in vdev_dtl_sync()
3615 vdev_dtl_required(vdev_t *vd) in vdev_dtl_required() argument
3617 spa_t *spa = vd->vdev_spa; in vdev_dtl_required()
3618 vdev_t *tvd = vd->vdev_top; in vdev_dtl_required()
3619 uint8_t cant_read = vd->vdev_cant_read; in vdev_dtl_required()
3621 boolean_t faulting = vd->vdev_state == VDEV_STATE_FAULTED; in vdev_dtl_required()
3625 if (vd == spa->spa_root_vdev || vd == tvd) in vdev_dtl_required()
3633 vd->vdev_cant_read = B_TRUE; in vdev_dtl_required()
3636 vd->vdev_cant_read = cant_read; in vdev_dtl_required()
3640 required = !!zio_handle_device_injection(vd, NULL, in vdev_dtl_required()
3651 vdev_resilver_needed(vdev_t *vd, uint64_t *minp, uint64_t *maxp) in vdev_resilver_needed() argument
3657 if (vd->vdev_children == 0) { in vdev_resilver_needed()
3658 mutex_enter(&vd->vdev_dtl_lock); in vdev_resilver_needed()
3659 if (!zfs_range_tree_is_empty(vd->vdev_dtl[DTL_MISSING]) && in vdev_resilver_needed()
3660 vdev_writeable(vd)) { in vdev_resilver_needed()
3662 thismin = vdev_dtl_min(vd); in vdev_resilver_needed()
3663 thismax = vdev_dtl_max(vd); in vdev_resilver_needed()
3666 mutex_exit(&vd->vdev_dtl_lock); in vdev_resilver_needed()
3668 for (int c = 0; c < vd->vdev_children; c++) { in vdev_resilver_needed()
3669 vdev_t *cvd = vd->vdev_child[c]; in vdev_resilver_needed()
3693 vdev_checkpoint_sm_object(vdev_t *vd, uint64_t *sm_obj) in vdev_checkpoint_sm_object() argument
3695 ASSERT0(spa_config_held(vd->vdev_spa, SCL_ALL, RW_WRITER)); in vdev_checkpoint_sm_object()
3697 if (vd->vdev_top_zap == 0) { in vdev_checkpoint_sm_object()
3702 int error = zap_lookup(spa_meta_objset(vd->vdev_spa), vd->vdev_top_zap, in vdev_checkpoint_sm_object()
3713 vdev_load(vdev_t *vd) in vdev_load() argument
3715 int children = vd->vdev_children; in vdev_load()
3724 if (vd->vdev_ops == &vdev_root_ops && vd->vdev_children > 0) { in vdev_load()
3732 for (int c = 0; c < vd->vdev_children; c++) { in vdev_load()
3733 vdev_t *cvd = vd->vdev_child[c]; in vdev_load()
3748 for (int c = 0; c < vd->vdev_children; c++) { in vdev_load()
3749 int error = vd->vdev_child[c]->vdev_load_error; in vdev_load()
3755 vdev_set_deflate_ratio(vd); in vdev_load()
3757 if (vd->vdev_ops == &vdev_raidz_ops) { in vdev_load()
3758 error = vdev_raidz_load(vd); in vdev_load()
3766 if (vd == vd->vdev_top && vd->vdev_top_zap != 0) { in vdev_load()
3767 spa_t *spa = vd->vdev_spa; in vdev_load()
3770 error = zap_lookup(spa->spa_meta_objset, vd->vdev_top_zap, in vdev_load()
3774 ASSERT(vd->vdev_alloc_bias == VDEV_BIAS_NONE); in vdev_load()
3775 vd->vdev_alloc_bias = vdev_derive_alloc_bias(bias_str); in vdev_load()
3777 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_load()
3779 vdev_dbgmsg(vd, "vdev_load: zap_lookup(top_zap=%llu) " in vdev_load()
3781 (u_longlong_t)vd->vdev_top_zap, error); in vdev_load()
3786 if (vd == vd->vdev_top && vd->vdev_top_zap != 0) { in vdev_load()
3787 spa_t *spa = vd->vdev_spa; in vdev_load()
3790 error = zap_lookup(spa->spa_meta_objset, vd->vdev_top_zap, in vdev_load()
3794 vd->vdev_failfast = failfast & 1; in vdev_load()
3796 vd->vdev_failfast = vdev_prop_default_numeric( in vdev_load()
3799 vdev_dbgmsg(vd, in vdev_load()
3802 (u_longlong_t)vd->vdev_top_zap, error); in vdev_load()
3809 if (vd == vd->vdev_top && vd->vdev_top_zap != 0) { in vdev_load()
3810 error = vdev_rebuild_load(vd); in vdev_load()
3812 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_load()
3814 vdev_dbgmsg(vd, "vdev_load: vdev_rebuild_load " in vdev_load()
3820 if (vd->vdev_top_zap != 0 || vd->vdev_leaf_zap != 0) { in vdev_load()
3823 if (vd->vdev_top_zap != 0) in vdev_load()
3824 zapobj = vd->vdev_top_zap; in vdev_load()
3826 zapobj = vd->vdev_leaf_zap; in vdev_load()
3828 error = vdev_prop_get_int(vd, VDEV_PROP_CHECKSUM_N, in vdev_load()
3829 &vd->vdev_checksum_n); in vdev_load()
3831 vdev_dbgmsg(vd, "vdev_load: zap_lookup(zap=%llu) " in vdev_load()
3834 error = vdev_prop_get_int(vd, VDEV_PROP_CHECKSUM_T, in vdev_load()
3835 &vd->vdev_checksum_t); in vdev_load()
3837 vdev_dbgmsg(vd, "vdev_load: zap_lookup(zap=%llu) " in vdev_load()
3840 error = vdev_prop_get_int(vd, VDEV_PROP_IO_N, in vdev_load()
3841 &vd->vdev_io_n); in vdev_load()
3843 vdev_dbgmsg(vd, "vdev_load: zap_lookup(zap=%llu) " in vdev_load()
3846 error = vdev_prop_get_int(vd, VDEV_PROP_IO_T, in vdev_load()
3847 &vd->vdev_io_t); in vdev_load()
3849 vdev_dbgmsg(vd, "vdev_load: zap_lookup(zap=%llu) " in vdev_load()
3852 error = vdev_prop_get_int(vd, VDEV_PROP_SLOW_IO_N, in vdev_load()
3853 &vd->vdev_slow_io_n); in vdev_load()
3855 vdev_dbgmsg(vd, "vdev_load: zap_lookup(zap=%llu) " in vdev_load()
3858 error = vdev_prop_get_int(vd, VDEV_PROP_SLOW_IO_T, in vdev_load()
3859 &vd->vdev_slow_io_t); in vdev_load()
3861 vdev_dbgmsg(vd, "vdev_load: zap_lookup(zap=%llu) " in vdev_load()
3868 if (vd == vd->vdev_top && vdev_is_concrete(vd)) { in vdev_load()
3869 vdev_metaslab_group_create(vd); in vdev_load()
3871 if (vd->vdev_ashift == 0 || vd->vdev_asize == 0) { in vdev_load()
3872 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_load()
3874 vdev_dbgmsg(vd, "vdev_load: invalid size. ashift=%llu, " in vdev_load()
3875 "asize=%llu", (u_longlong_t)vd->vdev_ashift, in vdev_load()
3876 (u_longlong_t)vd->vdev_asize); in vdev_load()
3880 error = vdev_metaslab_init(vd, 0); in vdev_load()
3882 vdev_dbgmsg(vd, "vdev_load: metaslab_init failed " in vdev_load()
3884 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_load()
3890 error = vdev_checkpoint_sm_object(vd, &checkpoint_sm_obj); in vdev_load()
3892 objset_t *mos = spa_meta_objset(vd->vdev_spa); in vdev_load()
3893 ASSERT(vd->vdev_asize != 0); in vdev_load()
3894 ASSERT3P(vd->vdev_checkpoint_sm, ==, NULL); in vdev_load()
3896 error = space_map_open(&vd->vdev_checkpoint_sm, in vdev_load()
3897 mos, checkpoint_sm_obj, 0, vd->vdev_asize, in vdev_load()
3898 vd->vdev_ashift); in vdev_load()
3900 vdev_dbgmsg(vd, "vdev_load: space_map_open " in vdev_load()
3906 ASSERT3P(vd->vdev_checkpoint_sm, !=, NULL); in vdev_load()
3914 vd->vdev_stat.vs_checkpoint_space = in vdev_load()
3915 -space_map_allocated(vd->vdev_checkpoint_sm); in vdev_load()
3916 vd->vdev_spa->spa_checkpoint_info.sci_dspace += in vdev_load()
3917 vd->vdev_stat.vs_checkpoint_space; in vdev_load()
3919 vdev_dbgmsg(vd, "vdev_load: failed to retrieve " in vdev_load()
3929 if (vd->vdev_ops->vdev_op_leaf && (error = vdev_dtl_load(vd)) != 0) { in vdev_load()
3930 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_load()
3932 vdev_dbgmsg(vd, "vdev_load: vdev_dtl_load failed " in vdev_load()
3938 error = vdev_obsolete_sm_object(vd, &obsolete_sm_object); in vdev_load()
3940 objset_t *mos = vd->vdev_spa->spa_meta_objset; in vdev_load()
3941 ASSERT(vd->vdev_asize != 0); in vdev_load()
3942 ASSERT3P(vd->vdev_obsolete_sm, ==, NULL); in vdev_load()
3944 if ((error = space_map_open(&vd->vdev_obsolete_sm, mos, in vdev_load()
3945 obsolete_sm_object, 0, vd->vdev_asize, 0))) { in vdev_load()
3946 vdev_set_state(vd, B_FALSE, VDEV_STATE_CANT_OPEN, in vdev_load()
3948 vdev_dbgmsg(vd, "vdev_load: space_map_open failed for " in vdev_load()
3954 vdev_dbgmsg(vd, "vdev_load: failed to retrieve obsolete " in vdev_load()
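The vdev_load() fragments above show the load order: every child is loaded first (the vdev_root_ops branch dispatches the top-level children), then each child's vdev_load_error is checked before the vdev loads its own state (alloc bias, failfast, rebuild status, checksum/IO tunables, metaslabs, checkpoint and obsolete space maps, DTLs). A minimal sketch of that descend-then-collect pattern, using a simplified stand-in struct rather than the real vdev_t:

    #include <stddef.h>

    /* Simplified stand-in for a vdev node; field names mirror the listing. */
    struct node {
        int           vdev_load_error;
        size_t        vdev_children;
        struct node **vdev_child;
    };

    /* Load all children first, then fail if any child recorded an error;
     * only after that would the node load its own on-disk state. */
    static int node_load(struct node *vd)
    {
        for (size_t c = 0; c < vd->vdev_children; c++) {
            struct node *cvd = vd->vdev_child[c];
            cvd->vdev_load_error = node_load(cvd);
        }
        for (size_t c = 0; c < vd->vdev_children; c++) {
            int error = vd->vdev_child[c]->vdev_load_error;
            if (error != 0)
                return (error);
        }
        return (0);
    }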
3970 vdev_validate_aux(vdev_t *vd) in vdev_validate_aux() argument
3976 if (!vdev_readable(vd)) in vdev_validate_aux()
3979 if ((label = vdev_label_read_config(vd, -1ULL)) == NULL) { in vdev_validate_aux()
3980 vdev_set_state(vd, B_TRUE, VDEV_STATE_CANT_OPEN, in vdev_validate_aux()
3988 guid != vd->vdev_guid || in vdev_validate_aux()
3990 vdev_set_state(vd, B_TRUE, VDEV_STATE_CANT_OPEN, in vdev_validate_aux()
4005 vdev_destroy_ms_flush_data(vdev_t *vd, dmu_tx_t *tx) in vdev_destroy_ms_flush_data() argument
4007 objset_t *mos = spa_meta_objset(vd->vdev_spa); in vdev_destroy_ms_flush_data()
4009 if (vd->vdev_top_zap == 0) in vdev_destroy_ms_flush_data()
4013 int err = zap_lookup(mos, vd->vdev_top_zap, in vdev_destroy_ms_flush_data()
4020 VERIFY0(zap_remove(mos, vd->vdev_top_zap, in vdev_destroy_ms_flush_data()
4029 vdev_destroy_spacemaps(vdev_t *vd, dmu_tx_t *tx) in vdev_destroy_spacemaps() argument
4031 if (vd->vdev_ms_array == 0) in vdev_destroy_spacemaps()
4034 objset_t *mos = vd->vdev_spa->spa_meta_objset; in vdev_destroy_spacemaps()
4035 uint64_t array_count = vd->vdev_asize >> vd->vdev_ms_shift; in vdev_destroy_spacemaps()
4038 VERIFY0(dmu_read(mos, vd->vdev_ms_array, 0, in vdev_destroy_spacemaps()
4050 VERIFY0(dmu_object_free(mos, vd->vdev_ms_array, tx)); in vdev_destroy_spacemaps()
4051 vdev_destroy_ms_flush_data(vd, tx); in vdev_destroy_spacemaps()
4052 vd->vdev_ms_array = 0; in vdev_destroy_spacemaps()
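vdev_destroy_spacemaps() derives how many per-metaslab space maps to free from the top-level vdev's size, array_count = vdev_asize >> vdev_ms_shift, then reads the object array with dmu_read() and frees the objects plus the array itself. A worked example of that shift arithmetic (sizes chosen for illustration, not defaults):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* Illustrative values only: a 1 TiB top-level vdev, 16 GiB metaslabs. */
        uint64_t vdev_asize    = 1ULL << 40;   /* 1 TiB               */
        uint64_t vdev_ms_shift = 34;           /* 16 GiB per metaslab */
        uint64_t array_count   = vdev_asize >> vdev_ms_shift;

        printf("%llu metaslab space maps to walk and free\n",
            (unsigned long long)array_count);  /* prints 64 */
        return (0);
    }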
4056 vdev_remove_empty_log(vdev_t *vd, uint64_t txg) in vdev_remove_empty_log() argument
4058 spa_t *spa = vd->vdev_spa; in vdev_remove_empty_log()
4060 ASSERT(vd->vdev_islog); in vdev_remove_empty_log()
4061 ASSERT(vd == vd->vdev_top); in vdev_remove_empty_log()
4066 vdev_destroy_spacemaps(vd, tx); in vdev_remove_empty_log()
4067 if (vd->vdev_top_zap != 0) { in vdev_remove_empty_log()
4068 vdev_destroy_unlink_zap(vd, vd->vdev_top_zap, tx); in vdev_remove_empty_log()
4069 vd->vdev_top_zap = 0; in vdev_remove_empty_log()
4076 vdev_sync_done(vdev_t *vd, uint64_t txg) in vdev_sync_done() argument
4079 boolean_t reassess = !txg_list_empty(&vd->vdev_ms_list, TXG_CLEAN(txg)); in vdev_sync_done()
4081 ASSERT(vdev_is_concrete(vd)); in vdev_sync_done()
4083 while ((msp = txg_list_remove(&vd->vdev_ms_list, TXG_CLEAN(txg))) in vdev_sync_done()
4088 metaslab_sync_reassess(vd->vdev_mg); in vdev_sync_done()
4089 if (vd->vdev_log_mg != NULL) in vdev_sync_done()
4090 metaslab_sync_reassess(vd->vdev_log_mg); in vdev_sync_done()
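vdev_sync_done() drains the vdev's metaslab list for the cleaned txg and only reassesses the metaslab group(s) when that list was non-empty; the embedded log group, when present, is reassessed as well. A hedged sketch of the drain-then-reassess shape, with the kernel's txg_list and metaslab types replaced by trivial stand-ins:

    #include <stddef.h>
    #include <stdint.h>

    /* Stand-in types: the per-txg lists are collapsed to one list here. */
    struct ms_model { struct ms_model *next; };
    struct vd_model { struct ms_model *ms_clean; void *mg; void *log_mg; };

    static void ms_sync_done(struct ms_model *msp, uint64_t txg) { (void)msp; (void)txg; }
    static void mg_reassess(void *mg) { (void)mg; }

    static void sync_done_model(struct vd_model *vd, uint64_t txg)
    {
        int reassess = (vd->ms_clean != NULL);
        struct ms_model *msp;

        while ((msp = vd->ms_clean) != NULL) {
            vd->ms_clean = msp->next;
            ms_sync_done(msp, txg);
        }
        if (reassess) {
            mg_reassess(vd->mg);
            if (vd->log_mg != NULL)
                mg_reassess(vd->log_mg);
        }
    }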
4095 vdev_sync(vdev_t *vd, uint64_t txg) in vdev_sync() argument
4097 spa_t *spa = vd->vdev_spa; in vdev_sync()
4103 if (zfs_range_tree_space(vd->vdev_obsolete_segments) > 0) { in vdev_sync()
4104 ASSERT(vd->vdev_removing || in vdev_sync()
4105 vd->vdev_ops == &vdev_indirect_ops); in vdev_sync()
4107 vdev_indirect_sync_obsolete(vd, tx); in vdev_sync()
4113 if (vd->vdev_ops == &vdev_indirect_ops) { in vdev_sync()
4114 ASSERT(txg_list_empty(&vd->vdev_ms_list, txg)); in vdev_sync()
4115 ASSERT(txg_list_empty(&vd->vdev_dtl_list, txg)); in vdev_sync()
4121 ASSERT(vdev_is_concrete(vd)); in vdev_sync()
4123 if (vd->vdev_ms_array == 0 && vd->vdev_ms_shift != 0 && in vdev_sync()
4124 !vd->vdev_removing) { in vdev_sync()
4125 ASSERT(vd == vd->vdev_top); in vdev_sync()
4126 ASSERT0(vd->vdev_indirect_config.vic_mapping_object); in vdev_sync()
4127 vd->vdev_ms_array = dmu_object_alloc(spa->spa_meta_objset, in vdev_sync()
4129 ASSERT(vd->vdev_ms_array != 0); in vdev_sync()
4130 vdev_config_dirty(vd); in vdev_sync()
4133 while ((msp = txg_list_remove(&vd->vdev_ms_list, txg)) != NULL) { in vdev_sync()
4135 (void) txg_list_add(&vd->vdev_ms_list, msp, TXG_CLEAN(txg)); in vdev_sync()
4138 while ((lvd = txg_list_remove(&vd->vdev_dtl_list, txg)) != NULL) in vdev_sync()
4145 if (vd->vdev_islog && vd->vdev_stat.vs_alloc == 0 && vd->vdev_removing) in vdev_sync()
4146 vdev_remove_empty_log(vd, txg); in vdev_sync()
4148 (void) txg_list_add(&spa->spa_vdev_txg_list, vd, TXG_CLEAN(txg)); in vdev_sync()
4152 vdev_asize_to_psize_txg(vdev_t *vd, uint64_t asize, uint64_t txg) in vdev_asize_to_psize_txg() argument
4154 return (vd->vdev_ops->vdev_op_asize_to_psize(vd, asize, txg)); in vdev_asize_to_psize_txg()
4164 vdev_psize_to_asize_txg(vdev_t *vd, uint64_t psize, uint64_t txg) in vdev_psize_to_asize_txg() argument
4166 return (vd->vdev_ops->vdev_op_psize_to_asize(vd, psize, txg)); in vdev_psize_to_asize_txg()
4170 vdev_psize_to_asize(vdev_t *vd, uint64_t psize) in vdev_psize_to_asize() argument
4172 return (vdev_psize_to_asize_txg(vd, psize, 0)); in vdev_psize_to_asize()
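vdev_psize_to_asize() is the txg-less convenience wrapper around vdev_psize_to_asize_txg(), which dispatches to the vdev op; the reverse direction goes through vdev_op_asize_to_psize the same way. For a plain single-disk vdev the conversion is essentially alignment round-up to 1 << ashift (raidz/draid ops add parity overhead on top), so here is a worked example of that round-up under that simplifying assumption:

    #include <stdint.h>
    #include <stdio.h>

    /* Round x up to a power-of-two boundary (the usual P2ROUNDUP idiom). */
    static uint64_t roundup_p2(uint64_t x, uint64_t align)
    {
        return ((x + align - 1) & ~(align - 1));
    }

    int main(void)
    {
        uint64_t ashift = 12;                       /* 4 KiB sectors      */
        uint64_t psize  = 6144;                     /* 6 KiB logical size */
        uint64_t asize  = roundup_p2(psize, 1ULL << ashift);

        printf("psize=%llu -> asize=%llu\n",        /* 6144 -> 8192 */
            (unsigned long long)psize, (unsigned long long)asize);
        return (0);
    }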
4182 vdev_t *vd, *tvd; in vdev_fault() local
4186 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_fault()
4189 if (!vd->vdev_ops->vdev_op_leaf) in vdev_fault()
4192 tvd = vd->vdev_top; in vdev_fault()
4213 vd->vdev_stat.vs_aux = VDEV_AUX_EXTERNAL; in vdev_fault()
4214 vd->vdev_tmpoffline = B_FALSE; in vdev_fault()
4217 vd->vdev_tmpoffline = B_TRUE; in vdev_fault()
4225 vd->vdev_label_aux = aux; in vdev_fault()
4230 vd->vdev_delayed_close = B_FALSE; in vdev_fault()
4231 vd->vdev_faulted = 1ULL; in vdev_fault()
4232 vd->vdev_degraded = 0ULL; in vdev_fault()
4233 vdev_set_state(vd, B_FALSE, VDEV_STATE_FAULTED, aux); in vdev_fault()
4239 if (!tvd->vdev_islog && vd->vdev_aux == NULL && vdev_dtl_required(vd)) { in vdev_fault()
4240 vd->vdev_degraded = 1ULL; in vdev_fault()
4241 vd->vdev_faulted = 0ULL; in vdev_fault()
4249 if (vdev_readable(vd)) in vdev_fault()
4250 vdev_set_state(vd, B_FALSE, VDEV_STATE_DEGRADED, aux); in vdev_fault()
4253 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_fault()
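The vdev_fault() fragments show the fault-versus-degrade decision: the leaf is marked faulted and set to the FAULTED state, but if it is not on a log top-level, not an aux device, and vdev_dtl_required() says the pool cannot tolerate losing it, the fault is downgraded to DEGRADED (provided the device is still readable). A minimal sketch of that decision with stand-in flags, not the real vdev_t:

    #include <stdint.h>

    struct fault_model {
        uint64_t faulted, degraded;
        int      is_log_top;     /* top-level vdev is a log device          */
        int      is_aux;         /* spare / l2arc aux vdev                  */
        int      dtl_required;   /* losing this disk would lose data        */
        int      readable;
        int      state;          /* 0 = healthy, 1 = degraded, 2 = faulted  */
    };

    static void fault_model(struct fault_model *vd)
    {
        vd->faulted = 1; vd->degraded = 0; vd->state = 2;

        if (!vd->is_log_top && !vd->is_aux && vd->dtl_required) {
            vd->degraded = 1; vd->faulted = 0;
            if (vd->readable)
                vd->state = 1;
        }
    }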
4264 vdev_t *vd; in vdev_degrade() local
4268 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_degrade()
4271 if (!vd->vdev_ops->vdev_op_leaf) in vdev_degrade()
4277 if (vd->vdev_faulted || vd->vdev_degraded) in vdev_degrade()
4280 vd->vdev_degraded = 1ULL; in vdev_degrade()
4281 if (!vdev_is_dead(vd)) in vdev_degrade()
4282 vdev_set_state(vd, B_FALSE, VDEV_STATE_DEGRADED, in vdev_degrade()
4285 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_degrade()
4291 vdev_t *vd; in vdev_remove_wanted() local
4295 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_remove_wanted()
4302 if (vd->vdev_removed || vd->vdev_expanding) in vdev_remove_wanted()
4308 if (vd->vdev_ops->vdev_op_leaf && !zio_wait(vdev_probe(vd, NULL))) in vdev_remove_wanted()
4311 vd->vdev_remove_wanted = B_TRUE; in vdev_remove_wanted()
4314 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_remove_wanted()
4329 vdev_t *vd, *tvd, *pvd, *rvd = spa->spa_root_vdev; in vdev_online() local
4335 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_online()
4338 wasoffline = (vd->vdev_offline || vd->vdev_tmpoffline); in vdev_online()
4339 oldstate = vd->vdev_state; in vdev_online()
4341 tvd = vd->vdev_top; in vdev_online()
4342 vd->vdev_offline = B_FALSE; in vdev_online()
4343 vd->vdev_tmpoffline = B_FALSE; in vdev_online()
4344 vd->vdev_checkremove = !!(flags & ZFS_ONLINE_CHECKREMOVE); in vdev_online()
4345 vd->vdev_forcefault = !!(flags & ZFS_ONLINE_FORCEFAULT); in vdev_online()
4348 if (!vd->vdev_aux) { in vdev_online()
4349 for (pvd = vd; pvd != rvd; pvd = pvd->vdev_parent) in vdev_online()
4352 vd->vdev_expansion_time = gethrestime_sec(); in vdev_online()
4356 vd->vdev_checkremove = vd->vdev_forcefault = B_FALSE; in vdev_online()
4358 if (!vd->vdev_aux) { in vdev_online()
4359 for (pvd = vd; pvd != rvd; pvd = pvd->vdev_parent) in vdev_online()
4364 *newstate = vd->vdev_state; in vdev_online()
4366 !vdev_is_dead(vd) && vd->vdev_parent && in vdev_online()
4367 vd->vdev_parent->vdev_ops == &vdev_spare_ops && in vdev_online()
4368 vd->vdev_parent->vdev_child[0] == vd) in vdev_online()
4369 vd->vdev_unspare = B_TRUE; in vdev_online()
4374 if (vd->vdev_aux) in vdev_online()
4375 return (spa_vdev_state_exit(spa, vd, ENOTSUP)); in vdev_online()
4381 mutex_enter(&vd->vdev_initialize_lock); in vdev_online()
4382 if (vdev_writeable(vd) && in vdev_online()
4383 vd->vdev_initialize_thread == NULL && in vdev_online()
4384 vd->vdev_initialize_state == VDEV_INITIALIZE_ACTIVE) { in vdev_online()
4385 (void) vdev_initialize(vd); in vdev_online()
4387 mutex_exit(&vd->vdev_initialize_lock); in vdev_online()
4395 mutex_enter(&vd->vdev_trim_lock); in vdev_online()
4396 if (vdev_writeable(vd) && !vd->vdev_isl2cache && in vdev_online()
4397 vd->vdev_trim_thread == NULL && in vdev_online()
4398 vd->vdev_trim_state == VDEV_TRIM_ACTIVE) { in vdev_online()
4399 (void) vdev_trim(vd, vd->vdev_trim_rate, vd->vdev_trim_partial, in vdev_online()
4400 vd->vdev_trim_secure); in vdev_online()
4402 mutex_exit(&vd->vdev_trim_lock); in vdev_online()
4406 vd->vdev_state >= VDEV_STATE_DEGRADED)) { in vdev_online()
4407 spa_event_notify(spa, vd, NULL, ESC_ZFS_VDEV_ONLINE); in vdev_online()
4413 if (vd->vdev_unspare && in vdev_online()
4419 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_online()
4425 vdev_t *vd, *tvd; in vdev_offline_locked() local
4433 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_offline_locked()
4436 if (!vd->vdev_ops->vdev_op_leaf) in vdev_offline_locked()
4439 if (vd->vdev_ops == &vdev_draid_spare_ops) in vdev_offline_locked()
4442 tvd = vd->vdev_top; in vdev_offline_locked()
4449 if (!vd->vdev_offline) { in vdev_offline_locked()
4455 if (!tvd->vdev_islog && vd->vdev_aux == NULL && in vdev_offline_locked()
4456 vdev_dtl_required(vd)) in vdev_offline_locked()
4472 (void) spa_vdev_state_exit(spa, vd, 0); in vdev_offline_locked()
4496 vd, error)); in vdev_offline_locked()
4497 (void) spa_vdev_state_exit(spa, vd, 0); in vdev_offline_locked()
4509 vd->vdev_offline = B_TRUE; in vdev_offline_locked()
4512 if (!tvd->vdev_islog && vd->vdev_aux == NULL && in vdev_offline_locked()
4514 vd->vdev_offline = B_FALSE; in vdev_offline_locked()
4528 vd->vdev_tmpoffline = !!(flags & ZFS_OFFLINE_TEMPORARY); in vdev_offline_locked()
4530 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_offline_locked()
4551 vdev_clear(spa_t *spa, vdev_t *vd) in vdev_clear() argument
4557 if (vd == NULL) in vdev_clear()
4558 vd = rvd; in vdev_clear()
4560 vd->vdev_stat.vs_read_errors = 0; in vdev_clear()
4561 vd->vdev_stat.vs_write_errors = 0; in vdev_clear()
4562 vd->vdev_stat.vs_checksum_errors = 0; in vdev_clear()
4563 vd->vdev_stat.vs_dio_verify_errors = 0; in vdev_clear()
4564 vd->vdev_stat.vs_slow_ios = 0; in vdev_clear()
4566 for (int c = 0; c < vd->vdev_children; c++) in vdev_clear()
4567 vdev_clear(spa, vd->vdev_child[c]); in vdev_clear()
4572 if (!vdev_is_concrete(vd) || vd->vdev_removed) in vdev_clear()
4581 if (vd->vdev_faulted || vd->vdev_degraded || in vdev_clear()
4582 !vdev_readable(vd) || !vdev_writeable(vd)) { in vdev_clear()
4588 vd->vdev_forcefault = B_TRUE; in vdev_clear()
4590 vd->vdev_faulted = vd->vdev_degraded = 0ULL; in vdev_clear()
4591 vd->vdev_cant_read = B_FALSE; in vdev_clear()
4592 vd->vdev_cant_write = B_FALSE; in vdev_clear()
4593 vd->vdev_stat.vs_aux = 0; in vdev_clear()
4595 vdev_reopen(vd == rvd ? rvd : vd->vdev_top); in vdev_clear()
4597 vd->vdev_forcefault = B_FALSE; in vdev_clear()
4599 if (vd != rvd && vdev_writeable(vd->vdev_top)) in vdev_clear()
4600 vdev_state_dirty(vd->vdev_top); in vdev_clear()
4603 if (vd->vdev_aux == NULL && !vdev_is_dead(vd) && in vdev_clear()
4608 spa_event_notify(spa, vd, NULL, ESC_ZFS_VDEV_CLEAR); in vdev_clear()
4616 if (!vdev_is_dead(vd) && vd->vdev_parent != NULL && in vdev_clear()
4617 vd->vdev_parent->vdev_ops == &vdev_spare_ops && in vdev_clear()
4618 vd->vdev_parent->vdev_child[0] == vd) in vdev_clear()
4619 vd->vdev_unspare = B_TRUE; in vdev_clear()
4622 zfs_ereport_clear(spa, vd); in vdev_clear()
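vdev_clear() zeroes the persistent error counters (read, write, checksum, direct-IO verify, slow IOs) on the given vdev and recurses over every child; afterwards it reopens faulted or unreadable devices and posts a CLEAR event. A minimal sketch of the recursive counter reset, again with simplified stand-in types:

    #include <stddef.h>
    #include <stdint.h>

    struct stat_model {
        uint64_t read_errors, write_errors, checksum_errors, slow_ios;
    };
    struct clear_model {
        struct stat_model    stat;
        size_t               children;
        struct clear_model **child;
    };

    static void clear_model(struct clear_model *vd)
    {
        vd->stat.read_errors = 0;
        vd->stat.write_errors = 0;
        vd->stat.checksum_errors = 0;
        vd->stat.slow_ios = 0;

        for (size_t c = 0; c < vd->children; c++)
            clear_model(vd->child[c]);
    }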
4626 vdev_is_dead(vdev_t *vd) in vdev_is_dead() argument
4635 return (vd->vdev_state < VDEV_STATE_DEGRADED || in vdev_is_dead()
4636 vd->vdev_ops == &vdev_hole_ops || in vdev_is_dead()
4637 vd->vdev_ops == &vdev_missing_ops); in vdev_is_dead()
4641 vdev_readable(vdev_t *vd) in vdev_readable() argument
4643 return (!vdev_is_dead(vd) && !vd->vdev_cant_read); in vdev_readable()
4647 vdev_writeable(vdev_t *vd) in vdev_writeable() argument
4649 return (!vdev_is_dead(vd) && !vd->vdev_cant_write && in vdev_writeable()
4650 vdev_is_concrete(vd)); in vdev_writeable()
4654 vdev_allocatable(vdev_t *vd) in vdev_allocatable() argument
4656 uint64_t state = vd->vdev_state; in vdev_allocatable()
4667 !vd->vdev_cant_write && vdev_is_concrete(vd) && in vdev_allocatable()
4668 vd->vdev_mg->mg_initialized); in vdev_allocatable()
4672 vdev_accessible(vdev_t *vd, zio_t *zio) in vdev_accessible() argument
4674 ASSERT(zio->io_vd == vd); in vdev_accessible()
4676 if (vdev_is_dead(vd) || vd->vdev_remove_wanted) in vdev_accessible()
4680 return (!vd->vdev_cant_read); in vdev_accessible()
4683 return (!vd->vdev_cant_write); in vdev_accessible()
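The predicate fragments above compose cleanly: vdev_is_dead() is any state below DEGRADED plus hole/missing placeholder vdevs, vdev_readable() is "not dead and can read", and vdev_writeable() additionally requires the vdev to be concrete. A reconstruction of that composition as a small stand-alone model (state ordering approximated, not the kernel's enum):

    #include <stdbool.h>

    enum { S_CLOSED, S_OFFLINE, S_CANT_OPEN, S_FAULTED, S_DEGRADED, S_HEALTHY };

    struct pred_model {
        int  state;
        bool is_hole_or_missing;    /* hole/missing vdev ops in the listing */
        bool cant_read, cant_write;
        bool is_concrete;
    };

    static bool is_dead(const struct pred_model *v)
    {
        return (v->state < S_DEGRADED || v->is_hole_or_missing);
    }

    static bool readable(const struct pred_model *v)
    {
        return (!is_dead(v) && !v->cant_read);
    }

    static bool writeable(const struct pred_model *v)
    {
        return (!is_dead(v) && !v->cant_write && v->is_concrete);
    }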
4743 vdev_is_spacemap_addressable(vdev_t *vd) in vdev_is_spacemap_addressable() argument
4745 if (spa_feature_is_active(vd->vdev_spa, SPA_FEATURE_SPACEMAP_V2)) in vdev_is_spacemap_addressable()
4755 uint64_t shift = vd->vdev_ashift + SM_OFFSET_BITS; in vdev_is_spacemap_addressable()
4760 return (vd->vdev_asize < (1ULL << shift)); in vdev_is_spacemap_addressable()
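vdev_is_spacemap_addressable() first checks the spacemap_v2 feature (the early return is not part of the listing, but when the feature is active the limit presumably does not apply); otherwise the vdev is addressable only if its asize fits within SM_OFFSET_BITS worth of ashift-sized units. A worked example of that limit, treating SM_OFFSET_BITS = 47 as an assumption about the v1 on-disk entry format:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t SM_OFFSET_BITS = 47;               /* assumed v1 offset width */
        uint64_t ashift = 9;                        /* 512-byte sectors        */
        uint64_t shift  = ashift + SM_OFFSET_BITS;  /* 56                      */
        uint64_t limit  = 1ULL << shift;            /* 64 PiB                  */
        uint64_t asize  = 1ULL << 50;               /* a 1 PiB vdev            */

        printf("addressable without spacemap_v2: %d\n", asize < limit); /* 1 */
        return (0);
    }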
4767 vdev_get_stats_ex_impl(vdev_t *vd, vdev_stat_t *vs, vdev_stat_ex_t *vsx) in vdev_get_stats_ex_impl() argument
4774 if (!vd->vdev_ops->vdev_op_leaf) { in vdev_get_stats_ex_impl()
4782 for (int c = 0; c < vd->vdev_children; c++) { in vdev_get_stats_ex_impl()
4783 vdev_t *cvd = vd->vdev_child[c]; in vdev_get_stats_ex_impl()
4801 memcpy(vsx, &vd->vdev_stat_ex, sizeof (vd->vdev_stat_ex)); in vdev_get_stats_ex_impl()
4804 vsx->vsx_active_queue[t] = vd->vdev_queue.vq_cactive[t]; in vdev_get_stats_ex_impl()
4805 vsx->vsx_pend_queue[t] = vdev_queue_class_length(vd, t); in vdev_get_stats_ex_impl()
4811 vdev_get_stats_ex(vdev_t *vd, vdev_stat_t *vs, vdev_stat_ex_t *vsx) in vdev_get_stats_ex() argument
4813 vdev_t *tvd = vd->vdev_top; in vdev_get_stats_ex()
4814 mutex_enter(&vd->vdev_stat_lock); in vdev_get_stats_ex()
4816 memcpy(vs, &vd->vdev_stat, sizeof (*vs)); in vdev_get_stats_ex()
4818 vs->vs_state = vd->vdev_state; in vdev_get_stats_ex()
4819 vs->vs_rsize = vdev_get_min_asize(vd); in vdev_get_stats_ex()
4821 if (vd->vdev_ops->vdev_op_leaf) { in vdev_get_stats_ex()
4822 vs->vs_pspace = vd->vdev_psize; in vdev_get_stats_ex()
4831 vd->vdev_initialize_bytes_done; in vdev_get_stats_ex()
4833 vd->vdev_initialize_bytes_est; in vdev_get_stats_ex()
4834 vs->vs_initialize_state = vd->vdev_initialize_state; in vdev_get_stats_ex()
4836 vd->vdev_initialize_action_time; in vdev_get_stats_ex()
4843 vs->vs_trim_notsup = !vd->vdev_has_trim; in vdev_get_stats_ex()
4844 vs->vs_trim_bytes_done = vd->vdev_trim_bytes_done; in vdev_get_stats_ex()
4845 vs->vs_trim_bytes_est = vd->vdev_trim_bytes_est; in vdev_get_stats_ex()
4846 vs->vs_trim_state = vd->vdev_trim_state; in vdev_get_stats_ex()
4847 vs->vs_trim_action_time = vd->vdev_trim_action_time; in vdev_get_stats_ex()
4850 vs->vs_resilver_deferred = vd->vdev_resilver_deferred; in vdev_get_stats_ex()
4859 if (vd->vdev_aux == NULL && tvd != NULL) { in vdev_get_stats_ex()
4861 vd->vdev_max_asize - vd->vdev_asize, in vdev_get_stats_ex()
4865 vs->vs_configured_ashift = vd->vdev_top != NULL in vdev_get_stats_ex()
4866 ? vd->vdev_top->vdev_ashift : vd->vdev_ashift; in vdev_get_stats_ex()
4867 vs->vs_logical_ashift = vd->vdev_logical_ashift; in vdev_get_stats_ex()
4868 if (vd->vdev_physical_ashift <= ASHIFT_MAX) in vdev_get_stats_ex()
4869 vs->vs_physical_ashift = vd->vdev_physical_ashift; in vdev_get_stats_ex()
4877 if (vd->vdev_aux == NULL && vd == vd->vdev_top && in vdev_get_stats_ex()
4878 vdev_is_concrete(vd)) { in vdev_get_stats_ex()
4885 vs->vs_fragmentation = (vd->vdev_mg != NULL) ? in vdev_get_stats_ex()
4886 vd->vdev_mg->mg_fragmentation : 0; in vdev_get_stats_ex()
4888 vs->vs_noalloc = MAX(vd->vdev_noalloc, in vdev_get_stats_ex()
4892 vdev_get_stats_ex_impl(vd, vs, vsx); in vdev_get_stats_ex()
4893 mutex_exit(&vd->vdev_stat_lock); in vdev_get_stats_ex()
4897 vdev_get_stats(vdev_t *vd, vdev_stat_t *vs) in vdev_get_stats() argument
4899 return (vdev_get_stats_ex(vd, vs, NULL)); in vdev_get_stats()
4903 vdev_clear_stats(vdev_t *vd) in vdev_clear_stats() argument
4905 mutex_enter(&vd->vdev_stat_lock); in vdev_clear_stats()
4906 vd->vdev_stat.vs_space = 0; in vdev_clear_stats()
4907 vd->vdev_stat.vs_dspace = 0; in vdev_clear_stats()
4908 vd->vdev_stat.vs_alloc = 0; in vdev_clear_stats()
4909 mutex_exit(&vd->vdev_stat_lock); in vdev_clear_stats()
4913 vdev_scan_stat_init(vdev_t *vd) in vdev_scan_stat_init() argument
4915 vdev_stat_t *vs = &vd->vdev_stat; in vdev_scan_stat_init()
4917 for (int c = 0; c < vd->vdev_children; c++) in vdev_scan_stat_init()
4918 vdev_scan_stat_init(vd->vdev_child[c]); in vdev_scan_stat_init()
4920 mutex_enter(&vd->vdev_stat_lock); in vdev_scan_stat_init()
4922 mutex_exit(&vd->vdev_stat_lock); in vdev_scan_stat_init()
4930 vdev_t *vd = zio->io_vd ? zio->io_vd : rvd; in vdev_stat_update() local
4935 vdev_stat_t *vs = vd ? &vd->vdev_stat : NULL; in vdev_stat_update()
4936 vdev_stat_ex_t *vsx = vd ? &vd->vdev_stat_ex : NULL; in vdev_stat_update()
4938 vdev_stat_t *vs = &vd->vdev_stat; in vdev_stat_update()
4939 vdev_stat_ex_t *vsx = &vd->vdev_stat_ex; in vdev_stat_update()
4965 if (vd == rvd) in vdev_stat_update()
4968 ASSERT(vd == zio->io_vd); in vdev_stat_update()
4973 mutex_enter(&vd->vdev_stat_lock); in vdev_stat_update()
4985 if (vd->vdev_ops->vdev_op_leaf) in vdev_stat_update()
4997 vdev_t *tvd = vd->vdev_top; in vdev_stat_update()
5002 if (vd->vdev_ops->vdev_op_leaf && in vdev_stat_update()
5003 vd->vdev_ops != &vdev_draid_spare_ops) { in vdev_stat_update()
5017 if (vd->vdev_ops->vdev_op_leaf && in vdev_stat_update()
5073 mutex_exit(&vd->vdev_stat_lock); in vdev_stat_update()
5117 if (vd->vdev_ops->vdev_op_leaf) { in vdev_stat_update()
5122 vdev_dtl_dirty(vd, DTL_SCRUB, txg, 1); in vdev_stat_update()
5129 if (vdev_dtl_contains(vd, DTL_MISSING, txg, 1)) in vdev_stat_update()
5131 for (pvd = vd; pvd != rvd; pvd = pvd->vdev_parent) in vdev_stat_update()
5133 vdev_dirty(vd->vdev_top, VDD_DTL, vd, commit_txg); in vdev_stat_update()
5135 if (vd != rvd) in vdev_stat_update()
5136 vdev_dtl_dirty(vd, DTL_MISSING, txg, 1); in vdev_stat_update()
5141 vdev_deflated_space(vdev_t *vd, int64_t space) in vdev_deflated_space() argument
5144 ASSERT(vd->vdev_deflate_ratio != 0 || vd->vdev_isl2cache); in vdev_deflated_space()
5146 return ((space >> SPA_MINBLOCKSHIFT) * vd->vdev_deflate_ratio); in vdev_deflated_space()
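vdev_deflated_space() converts a raw space delta into 512-byte units (SPA_MINBLOCKSHIFT) and scales it by the per-vdev deflate ratio, yielding the figure fed into vs_dspace accounting. A worked example with an illustrative ratio value:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        int     SPA_MINBLOCKSHIFT = 9;        /* 512-byte units              */
        int64_t space             = 1 << 20;  /* 1 MiB raw change            */
        int64_t deflate_ratio     = 500;      /* hypothetical per-vdev ratio */

        int64_t dspace = (space >> SPA_MINBLOCKSHIFT) * deflate_ratio;
        printf("raw=%lld deflated=%lld\n", (long long)space, (long long)dspace);
        return (0);
    }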
5154 vdev_space_update(vdev_t *vd, int64_t alloc_delta, int64_t defer_delta, in vdev_space_update() argument
5159 spa_t *spa = vd->vdev_spa; in vdev_space_update()
5162 ASSERT(vd == vd->vdev_top); in vdev_space_update()
5170 dspace_delta = vdev_deflated_space(vd, space_delta); in vdev_space_update()
5172 mutex_enter(&vd->vdev_stat_lock); in vdev_space_update()
5175 ASSERT3U(vd->vdev_stat.vs_alloc, >=, -alloc_delta); in vdev_space_update()
5178 vd->vdev_stat.vs_alloc += alloc_delta; in vdev_space_update()
5179 vd->vdev_stat.vs_space += space_delta; in vdev_space_update()
5180 vd->vdev_stat.vs_dspace += dspace_delta; in vdev_space_update()
5181 mutex_exit(&vd->vdev_stat_lock); in vdev_space_update()
5184 if (vd->vdev_mg != NULL && !vd->vdev_islog) { in vdev_space_update()
5185 ASSERT(!vd->vdev_isl2cache); in vdev_space_update()
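vdev_space_update() applies the alloc/space/dspace deltas together under the vdev's stat lock after deriving dspace_delta via vdev_deflated_space(); the assertions in the listing enforce that it is called on a top-level, non-L2ARC vdev. A hedged sketch of the locked update, using a pthread mutex in place of the kernel mutex:

    #include <pthread.h>
    #include <stdint.h>

    struct space_model {
        pthread_mutex_t stat_lock;
        int64_t vs_alloc, vs_space, vs_dspace;
    };

    static void space_update_model(struct space_model *vd,
        int64_t alloc_delta, int64_t space_delta, int64_t dspace_delta)
    {
        pthread_mutex_lock(&vd->stat_lock);
        vd->vs_alloc  += alloc_delta;
        vd->vs_space  += space_delta;
        vd->vs_dspace += dspace_delta;
        pthread_mutex_unlock(&vd->stat_lock);
    }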
5201 vdev_config_dirty(vdev_t *vd) in vdev_config_dirty() argument
5203 spa_t *spa = vd->vdev_spa; in vdev_config_dirty()
5213 if (vd->vdev_aux != NULL) { in vdev_config_dirty()
5214 spa_aux_vdev_t *sav = vd->vdev_aux; in vdev_config_dirty()
5219 if (sav->sav_vdevs[c] == vd) in vdev_config_dirty()
5246 aux[c] = vdev_config_generate(spa, vd, B_TRUE, 0); in vdev_config_dirty()
5261 if (vd == rvd) { in vdev_config_dirty()
5265 ASSERT(vd == vd->vdev_top); in vdev_config_dirty()
5267 if (!list_link_active(&vd->vdev_config_dirty_node) && in vdev_config_dirty()
5268 vdev_is_concrete(vd)) { in vdev_config_dirty()
5269 list_insert_head(&spa->spa_config_dirty_list, vd); in vdev_config_dirty()
5275 vdev_config_clean(vdev_t *vd) in vdev_config_clean() argument
5277 spa_t *spa = vd->vdev_spa; in vdev_config_clean()
5283 ASSERT(list_link_active(&vd->vdev_config_dirty_node)); in vdev_config_clean()
5284 list_remove(&spa->spa_config_dirty_list, vd); in vdev_config_clean()
5294 vdev_state_dirty(vdev_t *vd) in vdev_state_dirty() argument
5296 spa_t *spa = vd->vdev_spa; in vdev_state_dirty()
5299 ASSERT(vd == vd->vdev_top); in vdev_state_dirty()
5311 if (!list_link_active(&vd->vdev_state_dirty_node) && in vdev_state_dirty()
5312 vdev_is_concrete(vd)) in vdev_state_dirty()
5313 list_insert_head(&spa->spa_state_dirty_list, vd); in vdev_state_dirty()
5317 vdev_state_clean(vdev_t *vd) in vdev_state_clean() argument
5319 spa_t *spa = vd->vdev_spa; in vdev_state_clean()
5325 ASSERT(list_link_active(&vd->vdev_state_dirty_node)); in vdev_state_clean()
5326 list_remove(&spa->spa_state_dirty_list, vd); in vdev_state_clean()
5333 vdev_propagate_state(vdev_t *vd) in vdev_propagate_state() argument
5335 spa_t *spa = vd->vdev_spa; in vdev_propagate_state()
5341 if (vd->vdev_children > 0) { in vdev_propagate_state()
5342 for (int c = 0; c < vd->vdev_children; c++) { in vdev_propagate_state()
5343 child = vd->vdev_child[c]; in vdev_propagate_state()
5359 if (child->vdev_islog && vd == rvd) in vdev_propagate_state()
5371 vd->vdev_ops->vdev_op_state_change(vd, faulted, degraded); in vdev_propagate_state()
5379 if (corrupted && vd == rvd && in vdev_propagate_state()
5385 if (vd->vdev_parent) in vdev_propagate_state()
5386 vdev_propagate_state(vd->vdev_parent); in vdev_propagate_state()
5398 vdev_set_state(vdev_t *vd, boolean_t isopen, vdev_state_t state, vdev_aux_t aux) in vdev_set_state() argument
5401 spa_t *spa = vd->vdev_spa; in vdev_set_state()
5403 if (state == vd->vdev_state) { in vdev_set_state()
5409 if (vd->vdev_ops->vdev_op_leaf && in vdev_set_state()
5411 (vd->vdev_prevstate >= VDEV_STATE_FAULTED)) { in vdev_set_state()
5413 zfs_post_state_change(spa, vd, vd->vdev_prevstate); in vdev_set_state()
5415 vd->vdev_stat.vs_aux = aux; in vdev_set_state()
5419 save_state = vd->vdev_state; in vdev_set_state()
5421 vd->vdev_state = state; in vdev_set_state()
5422 vd->vdev_stat.vs_aux = aux; in vdev_set_state()
5434 if (!vd->vdev_delayed_close && vdev_is_dead(vd) && in vdev_set_state()
5435 vd->vdev_ops->vdev_op_leaf) in vdev_set_state()
5436 vd->vdev_ops->vdev_op_close(vd); in vdev_set_state()
5438 if (vd->vdev_removed && in vdev_set_state()
5440 (aux == VDEV_AUX_OPEN_FAILED || vd->vdev_checkremove)) { in vdev_set_state()
5450 vd->vdev_state = VDEV_STATE_REMOVED; in vdev_set_state()
5451 vd->vdev_stat.vs_aux = VDEV_AUX_NONE; in vdev_set_state()
5453 vd->vdev_removed = B_TRUE; in vdev_set_state()
5463 vd->vdev_ops->vdev_op_leaf) in vdev_set_state()
5464 vd->vdev_not_present = 1; in vdev_set_state()
5481 if ((vd->vdev_prevstate != state || vd->vdev_forcefault) && in vdev_set_state()
5482 !vd->vdev_not_present && !vd->vdev_checkremove && in vdev_set_state()
5483 vd != spa->spa_root_vdev) { in vdev_set_state()
5512 (void) zfs_ereport_post(class, spa, vd, NULL, NULL, in vdev_set_state()
5517 vd->vdev_removed = B_FALSE; in vdev_set_state()
5519 vd->vdev_removed = B_FALSE; in vdev_set_state()
5526 if (vd->vdev_ops->vdev_op_leaf) { in vdev_set_state()
5528 if ((vd->vdev_prevstate != VDEV_STATE_UNKNOWN) && in vdev_set_state()
5529 (vd->vdev_prevstate != vd->vdev_state) && in vdev_set_state()
5531 save_state = vd->vdev_prevstate; in vdev_set_state()
5535 zfs_post_state_change(spa, vd, save_state); in vdev_set_state()
5538 if (!isopen && vd->vdev_parent) in vdev_set_state()
5539 vdev_propagate_state(vd->vdev_parent); in vdev_set_state()
5543 vdev_children_are_offline(vdev_t *vd) in vdev_children_are_offline() argument
5545 ASSERT(!vd->vdev_ops->vdev_op_leaf); in vdev_children_are_offline()
5547 for (uint64_t i = 0; i < vd->vdev_children; i++) { in vdev_children_are_offline()
5548 if (vd->vdev_child[i]->vdev_state != VDEV_STATE_OFFLINE) in vdev_children_are_offline()
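vdev_children_are_offline() asserts it is called on an interior vdev and scans the children; the return statements are not part of the listing, but the shape is evidently "false on the first non-OFFLINE child, true otherwise". A small reconstruction under that reading:

    #include <stdbool.h>
    #include <stddef.h>

    enum { ST_OFFLINE = 1, ST_ONLINE = 2 };          /* stand-in states */

    struct off_model { int state; size_t children; struct off_model **child; };

    static bool children_are_offline(const struct off_model *vd)
    {
        for (size_t i = 0; i < vd->children; i++) {
            if (vd->child[i]->state != ST_OFFLINE)
                return (false);
        }
        return (true);
    }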
5560 vdev_is_bootable(vdev_t *vd) in vdev_is_bootable() argument
5562 if (!vd->vdev_ops->vdev_op_leaf) { in vdev_is_bootable()
5563 const char *vdev_type = vd->vdev_ops->vdev_op_type; in vdev_is_bootable()
5569 for (int c = 0; c < vd->vdev_children; c++) { in vdev_is_bootable()
5570 if (!vdev_is_bootable(vd->vdev_child[c])) in vdev_is_bootable()
5577 vdev_is_concrete(vdev_t *vd) in vdev_is_concrete() argument
5579 vdev_ops_t *ops = vd->vdev_ops; in vdev_is_concrete()
5594 vdev_log_state_valid(vdev_t *vd) in vdev_log_state_valid() argument
5596 if (vd->vdev_ops->vdev_op_leaf && !vd->vdev_faulted && in vdev_log_state_valid()
5597 !vd->vdev_removed) in vdev_log_state_valid()
5600 for (int c = 0; c < vd->vdev_children; c++) in vdev_log_state_valid()
5601 if (vdev_log_state_valid(vd->vdev_child[c])) in vdev_log_state_valid()
5611 vdev_expand(vdev_t *vd, uint64_t txg) in vdev_expand() argument
5613 ASSERT(vd->vdev_top == vd); in vdev_expand()
5614 ASSERT(spa_config_held(vd->vdev_spa, SCL_ALL, RW_WRITER) == SCL_ALL); in vdev_expand()
5615 ASSERT(vdev_is_concrete(vd)); in vdev_expand()
5617 vdev_set_deflate_ratio(vd); in vdev_expand()
5619 if ((vd->vdev_spa->spa_raidz_expand == NULL || in vdev_expand()
5620 vd->vdev_spa->spa_raidz_expand->vre_vdev_id != vd->vdev_id) && in vdev_expand()
5621 (vd->vdev_asize >> vd->vdev_ms_shift) > vd->vdev_ms_count && in vdev_expand()
5622 vdev_is_concrete(vd)) { in vdev_expand()
5623 vdev_metaslab_group_create(vd); in vdev_expand()
5624 VERIFY(vdev_metaslab_init(vd, txg) == 0); in vdev_expand()
5625 vdev_config_dirty(vd); in vdev_expand()
5633 vdev_split(vdev_t *vd) in vdev_split() argument
5635 vdev_t *cvd, *pvd = vd->vdev_parent; in vdev_split()
5639 vdev_remove_child(pvd, vd); in vdev_split()
5653 vdev_deadman(vdev_t *vd, const char *tag) in vdev_deadman() argument
5655 for (int c = 0; c < vd->vdev_children; c++) { in vdev_deadman()
5656 vdev_t *cvd = vd->vdev_child[c]; in vdev_deadman()
5661 if (vd->vdev_ops->vdev_op_leaf) { in vdev_deadman()
5662 vdev_queue_t *vq = &vd->vdev_queue; in vdev_deadman()
5666 spa_t *spa = vd->vdev_spa; in vdev_deadman()
5671 vd->vdev_path, vq->vq_active); in vdev_deadman()
5688 vdev_defer_resilver(vdev_t *vd) in vdev_defer_resilver() argument
5690 ASSERT(vd->vdev_ops->vdev_op_leaf); in vdev_defer_resilver()
5692 vd->vdev_resilver_deferred = B_TRUE; in vdev_defer_resilver()
5693 vd->vdev_spa->spa_resilver_deferred = B_TRUE; in vdev_defer_resilver()
5702 vdev_clear_resilver_deferred(vdev_t *vd, dmu_tx_t *tx) in vdev_clear_resilver_deferred() argument
5705 spa_t *spa = vd->vdev_spa; in vdev_clear_resilver_deferred()
5707 for (int c = 0; c < vd->vdev_children; c++) { in vdev_clear_resilver_deferred()
5708 vdev_t *cvd = vd->vdev_child[c]; in vdev_clear_resilver_deferred()
5712 if (vd == spa->spa_root_vdev && in vdev_clear_resilver_deferred()
5715 vdev_config_dirty(vd); in vdev_clear_resilver_deferred()
5720 if (!vdev_is_concrete(vd) || vd->vdev_aux || in vdev_clear_resilver_deferred()
5721 !vd->vdev_ops->vdev_op_leaf) in vdev_clear_resilver_deferred()
5724 vd->vdev_resilver_deferred = B_FALSE; in vdev_clear_resilver_deferred()
5726 return (!vdev_is_dead(vd) && !vd->vdev_offline && in vdev_clear_resilver_deferred()
5727 vdev_resilver_needed(vd, NULL, NULL)); in vdev_clear_resilver_deferred()
5745 vdev_xlate(vdev_t *vd, const zfs_range_seg64_t *logical_rs, in vdev_xlate() argument
5751 if (vd != vd->vdev_top) { in vdev_xlate()
5752 vdev_xlate(vd->vdev_parent, logical_rs, physical_rs, in vdev_xlate()
5769 vdev_t *pvd = vd->vdev_parent; in vdev_xlate()
5779 pvd->vdev_ops->vdev_op_xlate(vd, physical_rs, &intermediate, remain_rs); in vdev_xlate()
5786 vdev_xlate_walk(vdev_t *vd, const zfs_range_seg64_t *logical_rs, in vdev_xlate_walk() argument
5795 vdev_xlate(vd, &iter_rs, &physical_rs, &remain_rs); in vdev_xlate_walk()
5810 vdev_name(vdev_t *vd, char *buf, int buflen) in vdev_name() argument
5812 if (vd->vdev_path == NULL) { in vdev_name()
5813 if (strcmp(vd->vdev_ops->vdev_op_type, "root") == 0) { in vdev_name()
5814 strlcpy(buf, vd->vdev_spa->spa_name, buflen); in vdev_name()
5815 } else if (!vd->vdev_ops->vdev_op_leaf) { in vdev_name()
5817 vd->vdev_ops->vdev_op_type, in vdev_name()
5818 (u_longlong_t)vd->vdev_id); in vdev_name()
5821 strlcpy(buf, vd->vdev_path, buflen); in vdev_name()
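vdev_name() picks a display name: the pool name for the root vdev, "type-id" for an interior vdev with no path, and the device path otherwise. A simplified model of that fallback order (the root-vdev case is folded into a comment; the names here are stand-ins, not the kernel API):

    #include <stdio.h>

    struct name_model {
        const char        *path;      /* device path, may be NULL       */
        const char        *op_type;   /* e.g. "mirror", "raidz", "disk" */
        unsigned long long id;        /* position under the parent      */
        int                is_leaf;
    };

    static void name_model(const struct name_model *vd, char *buf, size_t buflen)
    {
        /* The real function returns the pool name for the root vdev. */
        if (vd->path == NULL && !vd->is_leaf)
            snprintf(buf, buflen, "%s-%llu", vd->op_type, vd->id);
        else if (vd->path != NULL)
            snprintf(buf, buflen, "%s", vd->path);
        else
            snprintf(buf, buflen, "%s", vd->op_type);
    }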
5879 vdev_t *vd; in vdev_props_set_sync() local
5890 vd = spa_lookup_by_guid(spa, vdev_guid, B_TRUE); in vdev_props_set_sync()
5893 if (vd == NULL) in vdev_props_set_sync()
5899 if (vd->vdev_root_zap != 0) { in vdev_props_set_sync()
5900 objid = vd->vdev_root_zap; in vdev_props_set_sync()
5901 } else if (vd->vdev_top_zap != 0) { in vdev_props_set_sync()
5902 objid = vd->vdev_top_zap; in vdev_props_set_sync()
5903 } else if (vd->vdev_leaf_zap != 0) { in vdev_props_set_sync()
5904 objid = vd->vdev_leaf_zap; in vdev_props_set_sync()
5976 vdev_prop_set(vdev_t *vd, nvlist_t *innvl, nvlist_t *outnvl) in vdev_prop_set() argument
5978 spa_t *spa = vd->vdev_spa; in vdev_prop_set()
5984 ASSERT(vd != NULL); in vdev_prop_set()
5987 if (vd->vdev_root_zap == 0 && in vdev_prop_set()
5988 vd->vdev_top_zap == 0 && in vdev_prop_set()
5989 vd->vdev_leaf_zap == 0) in vdev_prop_set()
6000 if ((vd = spa_lookup_by_guid(spa, vdev_guid, B_TRUE)) == NULL) in vdev_prop_set()
6022 if (vd->vdev_path == NULL) { in vdev_prop_set()
6042 if (intval != vd->vdev_noalloc) in vdev_prop_set()
6054 vd->vdev_failfast = intval & 1; in vdev_prop_set()
6061 vd->vdev_checksum_n = intval; in vdev_prop_set()
6068 vd->vdev_checksum_t = intval; in vdev_prop_set()
6075 vd->vdev_io_n = intval; in vdev_prop_set()
6082 vd->vdev_io_t = intval; in vdev_prop_set()
6089 vd->vdev_slow_io_n = intval; in vdev_prop_set()
6096 vd->vdev_slow_io_t = intval; in vdev_prop_set()
6115 vdev_prop_get(vdev_t *vd, nvlist_t *innvl, nvlist_t *outnvl) in vdev_prop_get() argument
6117 spa_t *spa = vd->vdev_spa; in vdev_prop_get()
6129 ASSERT(vd != NULL); in vdev_prop_get()
6138 if (vd->vdev_root_zap != 0) { in vdev_prop_get()
6139 objid = vd->vdev_root_zap; in vdev_prop_get()
6140 } else if (vd->vdev_top_zap != 0) { in vdev_prop_get()
6141 objid = vd->vdev_top_zap; in vdev_prop_get()
6142 } else if (vd->vdev_leaf_zap != 0) { in vdev_prop_get()
6143 objid = vd->vdev_leaf_zap; in vdev_prop_get()
6165 strval = vdev_name(vd, namebuf, in vdev_prop_get()
6174 intval = (vd->vdev_stat.vs_dspace == 0) ? 0 : in vdev_prop_get()
6175 (vd->vdev_stat.vs_alloc * 100 / in vdev_prop_get()
6176 vd->vdev_stat.vs_dspace); in vdev_prop_get()
6182 vd->vdev_state, ZPROP_SRC_NONE); in vdev_prop_get()
6186 vd->vdev_guid, ZPROP_SRC_NONE); in vdev_prop_get()
6190 vd->vdev_asize, ZPROP_SRC_NONE); in vdev_prop_get()
6194 vd->vdev_psize, ZPROP_SRC_NONE); in vdev_prop_get()
6198 vd->vdev_ashift, ZPROP_SRC_NONE); in vdev_prop_get()
6202 vd->vdev_stat.vs_dspace, ZPROP_SRC_NONE); in vdev_prop_get()
6206 vd->vdev_stat.vs_dspace - in vdev_prop_get()
6207 vd->vdev_stat.vs_alloc, ZPROP_SRC_NONE); in vdev_prop_get()
6211 vd->vdev_stat.vs_alloc, ZPROP_SRC_NONE); in vdev_prop_get()
6215 vd->vdev_stat.vs_esize, ZPROP_SRC_NONE); in vdev_prop_get()
6219 vd->vdev_stat.vs_fragmentation, in vdev_prop_get()
6224 vdev_get_nparity(vd), ZPROP_SRC_NONE); in vdev_prop_get()
6227 if (vd->vdev_path == NULL) in vdev_prop_get()
6230 vd->vdev_path, 0, ZPROP_SRC_NONE); in vdev_prop_get()
6233 if (vd->vdev_devid == NULL) in vdev_prop_get()
6236 vd->vdev_devid, 0, ZPROP_SRC_NONE); in vdev_prop_get()
6239 if (vd->vdev_physpath == NULL) in vdev_prop_get()
6242 vd->vdev_physpath, 0, ZPROP_SRC_NONE); in vdev_prop_get()
6245 if (vd->vdev_enc_sysfs_path == NULL) in vdev_prop_get()
6248 vd->vdev_enc_sysfs_path, 0, ZPROP_SRC_NONE); in vdev_prop_get()
6251 if (vd->vdev_fru == NULL) in vdev_prop_get()
6254 vd->vdev_fru, 0, ZPROP_SRC_NONE); in vdev_prop_get()
6257 if (vd->vdev_parent != NULL) { in vdev_prop_get()
6258 strval = vdev_name(vd->vdev_parent, in vdev_prop_get()
6265 if (vd->vdev_children > 0) in vdev_prop_get()
6268 for (uint64_t i = 0; i < vd->vdev_children; in vdev_prop_get()
6272 vname = vdev_name(vd->vdev_child[i], in vdev_prop_get()
6289 vd->vdev_children, ZPROP_SRC_NONE); in vdev_prop_get()
6293 vd->vdev_stat.vs_read_errors, in vdev_prop_get()
6298 vd->vdev_stat.vs_write_errors, in vdev_prop_get()
6303 vd->vdev_stat.vs_checksum_errors, in vdev_prop_get()
6308 vd->vdev_stat.vs_initialize_errors, in vdev_prop_get()
6313 vd->vdev_stat.vs_trim_errors, in vdev_prop_get()
6318 vd->vdev_stat.vs_slow_ios, in vdev_prop_get()
6323 vd->vdev_stat.vs_ops[ZIO_TYPE_NULL], in vdev_prop_get()
6328 vd->vdev_stat.vs_ops[ZIO_TYPE_READ], in vdev_prop_get()
6333 vd->vdev_stat.vs_ops[ZIO_TYPE_WRITE], in vdev_prop_get()
6338 vd->vdev_stat.vs_ops[ZIO_TYPE_FREE], in vdev_prop_get()
6343 vd->vdev_stat.vs_ops[ZIO_TYPE_CLAIM], in vdev_prop_get()
6354 vd->vdev_stat.vs_ops[ZIO_TYPE_FLUSH], in vdev_prop_get()
6359 vd->vdev_stat.vs_bytes[ZIO_TYPE_NULL], in vdev_prop_get()
6364 vd->vdev_stat.vs_bytes[ZIO_TYPE_READ], in vdev_prop_get()
6369 vd->vdev_stat.vs_bytes[ZIO_TYPE_WRITE], in vdev_prop_get()
6374 vd->vdev_stat.vs_bytes[ZIO_TYPE_FREE], in vdev_prop_get()
6379 vd->vdev_stat.vs_bytes[ZIO_TYPE_CLAIM], in vdev_prop_get()
6390 vd->vdev_stat.vs_bytes[ZIO_TYPE_FLUSH], in vdev_prop_get()
6395 vd->vdev_removing, ZPROP_SRC_NONE); in vdev_prop_get()
6399 if (vd->vdev_ops == &vdev_raidz_ops) { in vdev_prop_get()
6401 NULL, vd->vdev_rz_expanding, in vdev_prop_get()
6407 if (vd->vdev_ops->vdev_op_leaf) { in vdev_prop_get()
6409 NULL, vd->vdev_has_trim, in vdev_prop_get()
6416 if (vd->vdev_mg == NULL && in vdev_prop_get()
6417 vd->vdev_top != NULL) { in vdev_prop_get()
6421 err = vdev_prop_get_int(vd, prop, in vdev_prop_get()
6461 err = vdev_prop_get_int(vd, prop, &intval); in vdev_prop_get()
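Both vdev_load() (the checksum_n/checksum_t/io_n/io_t/slow_io_* lookups earlier in the listing) and vdev_prop_get() lean on vdev_prop_get_int(), and the failfast fragment shows the companion pattern: use the value stored in the vdev's ZAP when present, otherwise fall back to vdev_prop_default_numeric(). A minimal model of that lookup-with-default pattern; the stubs here are hypothetical stand-ins, not the ZAP API:

    #include <stdint.h>

    #define ENOENT_MODEL 2   /* stand-in for "property not stored" */

    /* Hypothetical lookup: fills *val when the property exists in the ZAP. */
    static int zap_lookup_model(int present, uint64_t stored, uint64_t *val)
    {
        if (!present)
            return (ENOENT_MODEL);
        *val = stored;
        return (0);
    }

    static uint64_t prop_or_default(int present, uint64_t stored, uint64_t def)
    {
        uint64_t val;

        if (zap_lookup_model(present, stored, &val) == 0)
            return (val);
        return (def);    /* e.g. vdev_prop_default_numeric() in the listing */
    }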