Lines Matching refs:vdrz — every reference to the vdev_raidz_t private data pointer (vdrz, loaded from vd->vdev_tsd), listed with its source line number and enclosing function.
2142 vdev_raidz_t *vdrz = vd->vdev_tsd; in vdev_raidz_open() local
2143 uint64_t nparity = vdrz->vd_nparity; in vdev_raidz_open()
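Taken together, the matches in this listing touch a consistent set of fields on the vdev_raidz_t hanging off vd->vdev_tsd. A reconstructed sketch of that private data follows; the field names are exactly those that appear in the matches, while the types, the vdev_raidz_expand_t and reflow_node_t layouts, and the re_txg/re_link names are assumptions.

    /* Reconstructed sketch from the matches, not the verbatim header. */
    typedef struct vdev_raidz_expand {
        uint64_t        vre_vdev_id;            /* which top-level vdev is expanding */
        uint64_t        vre_offset;             /* reflow copy cursor */
        uint64_t        vre_failed_offset;      /* UINT64_MAX while nothing has failed */
        kmutex_t        vre_lock;
        kcondvar_t      vre_cv;
        zfs_rangelock_t vre_rangelock;          /* serializes I/O against the copy */
        dsl_scan_state_t vre_state;             /* DSS_SCANNING while reflowing */
        uint64_t        vre_start_time;
        uint64_t        vre_end_time;
        uint64_t        vre_bytes_copied;
    } vdev_raidz_expand_t;

    typedef struct reflow_node {
        uint64_t        re_txg;                 /* assumed key, ordered by vdev_raidz_reflow_compare */
        uint64_t        re_logical_width;       /* width after that expansion (3798, 5024) */
        avl_node_t      re_link;                /* assumed AVL linkage name */
    } reflow_node_t;

    typedef struct vdev_raidz {
        uint64_t        vd_nparity;             /* parity columns */
        uint64_t        vd_original_width;      /* width before any expansion */
        uint64_t        vd_physical_width;      /* current number of children */
        kmutex_t        vd_expand_lock;         /* protects vd_expand_txgs */
        avl_tree_t      vd_expand_txgs;         /* one reflow_node_t per completed expansion */
        vdev_raidz_expand_t vn_vre;             /* in-flight expansion state */
    } vdev_raidz_t;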
2216 vdev_raidz_get_logical_width(vdev_raidz_t *vdrz, uint64_t txg) in vdev_raidz_get_logical_width() argument
2224 mutex_enter(&vdrz->vd_expand_lock); in vdev_raidz_get_logical_width()
2225 reflow_node_t *re = avl_find(&vdrz->vd_expand_txgs, &lookup, &where); in vdev_raidz_get_logical_width()
2229 re = avl_nearest(&vdrz->vd_expand_txgs, where, AVL_BEFORE); in vdev_raidz_get_logical_width()
2233 width = vdrz->vd_original_width; in vdev_raidz_get_logical_width()
2235 mutex_exit(&vdrz->vd_expand_lock); in vdev_raidz_get_logical_width()
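A minimal sketch of the lookup these matches outline: under vd_expand_lock, find the expansion record for the given txg, or the nearest earlier one, and take its logical width, falling back to vd_original_width when no expansion precedes it. Only re_logical_width is visible in the listing; re_txg as the lookup key is an assumption.

    static uint64_t
    logical_width_sketch(vdev_raidz_t *vdrz, uint64_t txg)
    {
        reflow_node_t lookup = { .re_txg = txg };       /* assumed key field */
        reflow_node_t *re;
        avl_index_t where;
        uint64_t width;

        mutex_enter(&vdrz->vd_expand_lock);
        re = avl_find(&vdrz->vd_expand_txgs, &lookup, &where);
        if (re == NULL) {
                /*
                 * No expansion completed in exactly this txg; take the
                 * closest one that completed before it.
                 */
                re = avl_nearest(&vdrz->vd_expand_txgs, where, AVL_BEFORE);
        }
        width = (re != NULL) ? re->re_logical_width : vdrz->vd_original_width;
        mutex_exit(&vdrz->vd_expand_lock);

        return (width);
    }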
2249 vdev_raidz_t *vdrz = vd->vdev_tsd; in vdev_raidz_asize_to_psize() local
2252 uint64_t cols = vdrz->vd_original_width; in vdev_raidz_asize_to_psize()
2253 uint64_t nparity = vdrz->vd_nparity; in vdev_raidz_asize_to_psize()
2255 cols = vdev_raidz_get_logical_width(vdrz, txg); in vdev_raidz_asize_to_psize()
2285 vdev_raidz_t *vdrz = vd->vdev_tsd; in vdev_raidz_psize_to_asize() local
2288 uint64_t cols = vdrz->vd_original_width; in vdev_raidz_psize_to_asize()
2289 uint64_t nparity = vdrz->vd_nparity; in vdev_raidz_psize_to_asize()
2291 cols = vdev_raidz_get_logical_width(vdrz, txg); in vdev_raidz_psize_to_asize()
2299 uint64_t ncols_new = vdrz->vd_physical_width; in vdev_raidz_psize_to_asize()
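Both conversion routines key off cols (the logical width for the block's txg) and nparity. Below is a self-contained sketch of the classic single-width raidz psize-to-asize arithmetic with a worked example; it approximates what the matched lines feed into and ignores the two-width handling hinted at by ncols_new (2299).

    #include <stdio.h>
    #include <stdint.h>

    /*
     * Classic raidz psize -> asize (sketch): count data sectors, add one
     * parity sector per (cols - nparity) data sectors, then round the total
     * up to a multiple of (nparity + 1) sectors so no unallocatable
     * single-sector holes are left behind.
     */
    static uint64_t
    psize_to_asize_sketch(uint64_t psize, uint64_t ashift, uint64_t cols,
        uint64_t nparity)
    {
        uint64_t asize = ((psize - 1) >> ashift) + 1;   /* data sectors */
        asize += nparity * ((asize + cols - nparity - 1) / (cols - nparity));
        asize = ((asize + nparity) / (nparity + 1)) * (nparity + 1);
        return (asize << ashift);
    }

    int
    main(void)
    {
        /*
         * 16 KiB on a 5-wide raidz1 with 4 KiB sectors: 4 data + 1 parity
         * = 5 sectors, rounded up to 6, i.e. 24576 bytes.
         */
        printf("%llu\n", (unsigned long long)
            psize_to_asize_sketch(16384, 12, 5, 1));
        return (0);
    }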
2567 vdev_raidz_t *vdrz = vd->vdev_tsd; in vdev_raidz_io_start() local
2570 uint64_t logical_width = vdev_raidz_get_logical_width(vdrz, in vdev_raidz_io_start()
2572 if (logical_width != vdrz->vd_physical_width) { in vdev_raidz_io_start()
2589 if (vdrz->vn_vre.vre_state == DSS_SCANNING) { in vdev_raidz_io_start()
2591 &vdrz->vn_vre); in vdev_raidz_io_start()
2592 lr = zfs_rangelock_enter(&vdrz->vn_vre.vre_rangelock, in vdev_raidz_io_start()
2599 next_offset = vdrz->vn_vre.vre_offset; in vdev_raidz_io_start()
2622 tvd->vdev_ashift, vdrz->vd_physical_width, in vdev_raidz_io_start()
2623 logical_width, vdrz->vd_nparity, in vdev_raidz_io_start()
2628 tvd->vdev_ashift, logical_width, vdrz->vd_nparity); in vdev_raidz_io_start()
2630 rm->rm_original_width = vdrz->vd_original_width; in vdev_raidz_io_start()
2639 if (logical_width == vdrz->vd_physical_width) { in vdev_raidz_io_start()
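The I/O-start matches split on whether the block's logical width still equals the vdev's physical width. A decision sketch follows; vdev_raidz_map_alloc() is the allocator visible at 2628, while build_expanded_map() is a hypothetical stand-in for the two-width constructor called at 2622-2623, whose name is not shown in this listing.

    /* Hypothetical placeholder for the two-width map constructor. */
    extern raidz_map_t *build_expanded_map(zio_t *, uint64_t, uint64_t,
        uint64_t, uint64_t);

    static raidz_map_t *
    map_for_zio_sketch(zio_t *zio, vdev_t *tvd, vdev_raidz_t *vdrz,
        uint64_t logical_width)
    {
        if (logical_width != vdrz->vd_physical_width) {
                /*
                 * The block predates the expansion (or the reflow is still
                 * running, per the DSS_SCANNING check at 2589), so the map
                 * needs both geometries; while scanning, the matched code
                 * also takes vre_rangelock (2592) so the copy cannot move
                 * these sectors underneath the I/O.
                 */
                return (build_expanded_map(zio, tvd->vdev_ashift,
                    vdrz->vd_physical_width, logical_width,
                    vdrz->vd_nparity));
        }

        /* Steady state: a single width, ordinary raidz map (2628). */
        return (vdev_raidz_map_alloc(zio, tvd->vdev_ashift, logical_width,
            vdrz->vd_nparity));
    }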
3624 vdev_raidz_t *vdrz = vd->vdev_tsd; in vdev_raidz_state_change() local
3625 if (faulted > vdrz->vd_nparity) in vdev_raidz_state_change()
3644 vdev_raidz_t *vdrz = vd->vdev_tsd; in vdev_raidz_need_resilver() local
3650 if (vdrz->vn_vre.vre_state == DSS_SCANNING) in vdev_raidz_need_resilver()
3654 uint64_t nparity = vdrz->vd_nparity; in vdev_raidz_need_resilver()
3697 vdev_raidz_t *vdrz = raidvd->vdev_tsd; in vdev_raidz_xlate() local
3699 if (vdrz->vn_vre.vre_state == DSS_SCANNING) { in vdev_raidz_xlate()
3714 uint64_t width = vdrz->vd_physical_width; in vdev_raidz_xlate()
3791 vdev_raidz_t *vdrz = raidvd->vdev_tsd; in raidz_reflow_complete_sync() local
3798 re->re_logical_width = vdrz->vd_physical_width; in raidz_reflow_complete_sync()
3799 mutex_enter(&vdrz->vd_expand_lock); in raidz_reflow_complete_sync()
3800 avl_add(&vdrz->vd_expand_txgs, re); in raidz_reflow_complete_sync()
3801 mutex_exit(&vdrz->vd_expand_lock); in raidz_reflow_complete_sync()
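When the reflow finishes, these matches add a node to vd_expand_txgs under vd_expand_lock so later reads can map older blocks to their pre-expansion width. A sketch of that bookkeeping, with the completion-txg field name assumed:

    static void
    record_expansion_sketch(vdev_raidz_t *vdrz, uint64_t txg, uint64_t new_width)
    {
        reflow_node_t *re = kmem_zalloc(sizeof (*re), KM_SLEEP);

        re->re_txg = txg;                       /* assumed field name */
        re->re_logical_width = new_width;       /* vd_physical_width, per 3798 */

        mutex_enter(&vdrz->vd_expand_lock);
        avl_add(&vdrz->vd_expand_txgs, re);
        mutex_exit(&vdrz->vd_expand_lock);
    }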
4804 vdev_raidz_t *vdrz = raidvd->vdev_tsd; in vdev_raidz_attach_sync() local
4807 ASSERT3U(raidvd->vdev_children, >, vdrz->vd_original_width); in vdev_raidz_attach_sync()
4808 ASSERT3U(raidvd->vdev_children, ==, vdrz->vd_physical_width + 1); in vdev_raidz_attach_sync()
4814 vdrz->vd_physical_width++; in vdev_raidz_attach_sync()
4817 vdrz->vn_vre.vre_vdev_id = raidvd->vdev_id; in vdev_raidz_attach_sync()
4818 vdrz->vn_vre.vre_offset = 0; in vdev_raidz_attach_sync()
4819 vdrz->vn_vre.vre_failed_offset = UINT64_MAX; in vdev_raidz_attach_sync()
4820 spa->spa_raidz_expand = &vdrz->vn_vre; in vdev_raidz_attach_sync()
4829 vdrz->vn_vre.vre_start_time = gethrestime_sec(); in vdev_raidz_attach_sync()
4830 vdrz->vn_vre.vre_end_time = 0; in vdev_raidz_attach_sync()
4831 vdrz->vn_vre.vre_state = DSS_SCANNING; in vdev_raidz_attach_sync()
4832 vdrz->vn_vre.vre_bytes_copied = 0; in vdev_raidz_attach_sync()
4834 uint64_t state = vdrz->vn_vre.vre_state; in vdev_raidz_attach_sync()
4839 uint64_t start_time = vdrz->vn_vre.vre_start_time; in vdev_raidz_attach_sync()
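Stitched together, the attach matches read as one initialization sequence; a consolidated sketch (the surrounding dmu_tx/config-sync context is omitted):

    static void
    start_expansion_sketch(spa_t *spa, vdev_t *raidvd, vdev_raidz_t *vdrz)
    {
        ASSERT3U(raidvd->vdev_children, ==, vdrz->vd_physical_width + 1);

        vdrz->vd_physical_width++;              /* new child joins the geometry */

        vdrz->vn_vre.vre_vdev_id = raidvd->vdev_id;
        vdrz->vn_vre.vre_offset = 0;            /* copy starts at the beginning */
        vdrz->vn_vre.vre_failed_offset = UINT64_MAX;
        vdrz->vn_vre.vre_start_time = gethrestime_sec();
        vdrz->vn_vre.vre_end_time = 0;
        vdrz->vn_vre.vre_state = DSS_SCANNING;
        vdrz->vn_vre.vre_bytes_copied = 0;

        spa->spa_raidz_expand = &vdrz->vn_vre;  /* one expansion at a time */
    }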
4858 vdev_raidz_t *vdrz = vd->vdev_tsd; in vdev_raidz_load() local
4896 EQUIV(vdrz->vn_vre.vre_state == DSS_SCANNING, state == DSS_SCANNING); in vdev_raidz_load()
4897 vdrz->vn_vre.vre_state = (dsl_scan_state_t)state; in vdev_raidz_load()
4898 vdrz->vn_vre.vre_start_time = start_time; in vdev_raidz_load()
4899 vdrz->vn_vre.vre_end_time = end_time; in vdev_raidz_load()
4900 vdrz->vn_vre.vre_bytes_copied = bytes_copied; in vdev_raidz_load()
4915 vdev_raidz_t *vdrz = vd->vdev_tsd; in spa_raidz_expand_get_stats() local
4917 if (vdrz->vn_vre.vre_end_time != 0 && in spa_raidz_expand_get_stats()
4919 vdrz->vn_vre.vre_end_time > in spa_raidz_expand_get_stats()
4921 vre = &vdrz->vn_vre; in spa_raidz_expand_get_stats()
4990 vdev_raidz_t *vdrz = kmem_zalloc(sizeof (*vdrz), KM_SLEEP); in vdev_raidz_init() local
4991 vdrz->vn_vre.vre_vdev_id = -1; in vdev_raidz_init()
4992 vdrz->vn_vre.vre_offset = UINT64_MAX; in vdev_raidz_init()
4993 vdrz->vn_vre.vre_failed_offset = UINT64_MAX; in vdev_raidz_init()
4994 mutex_init(&vdrz->vn_vre.vre_lock, NULL, MUTEX_DEFAULT, NULL); in vdev_raidz_init()
4995 cv_init(&vdrz->vn_vre.vre_cv, NULL, CV_DEFAULT, NULL); in vdev_raidz_init()
4996 zfs_rangelock_init(&vdrz->vn_vre.vre_rangelock, NULL, NULL); in vdev_raidz_init()
4997 mutex_init(&vdrz->vd_expand_lock, NULL, MUTEX_DEFAULT, NULL); in vdev_raidz_init()
4998 avl_create(&vdrz->vd_expand_txgs, vdev_raidz_reflow_compare, in vdev_raidz_init()
5001 vdrz->vd_physical_width = children; in vdev_raidz_init()
5002 vdrz->vd_nparity = nparity; in vdev_raidz_init()
5006 &vdrz->vn_vre.vre_vdev_id); in vdev_raidz_init()
5011 spa->spa_raidz_expand = &vdrz->vn_vre; in vdev_raidz_init()
5012 vdrz->vn_vre.vre_state = DSS_SCANNING; in vdev_raidz_init()
5015 vdrz->vd_original_width = children; in vdev_raidz_init()
5024 re->re_logical_width = vdrz->vd_physical_width - i; in vdev_raidz_init()
5029 avl_add(&vdrz->vd_expand_txgs, re); in vdev_raidz_init()
5032 vdrz->vd_original_width = vdrz->vd_physical_width - txgs_size; in vdev_raidz_init()
5035 vdrz->vd_original_width--; in vdev_raidz_init()
5040 *tsd = vdrz; in vdev_raidz_init()
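The least obvious part of the init matches is how vd_original_width is reconstructed: each completed expansion recorded in the persisted txg list accounts for one added child, and an expansion still in flight accounts for one more. A sketch of just that arithmetic, with the nvlist plumbing omitted and the array/field names assumed:

    static void
    rebuild_original_width_sketch(vdev_raidz_t *vdrz, uint64_t *txgs,
        uint_t txgs_size, boolean_t expansion_in_progress)
    {
        for (uint_t i = 0; i < txgs_size; i++) {
                reflow_node_t *re = kmem_zalloc(sizeof (*re), KM_SLEEP);

                /*
                 * The `vd_physical_width - i` pairing is visible at 5024;
                 * which end of txgs[] holds the most recent expansion is an
                 * assumption here.
                 */
                re->re_txg = txgs[i];
                re->re_logical_width = vdrz->vd_physical_width - i;

                avl_add(&vdrz->vd_expand_txgs, re);
        }

        /* Every recorded expansion added exactly one child (5032). */
        vdrz->vd_original_width = vdrz->vd_physical_width - txgs_size;

        /* A child attached mid-expansion is not yet in the old width (5035). */
        if (expansion_in_progress)
                vdrz->vd_original_width--;
    }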
5048 vdev_raidz_t *vdrz = vd->vdev_tsd; in vdev_raidz_fini() local
5049 if (vd->vdev_spa->spa_raidz_expand == &vdrz->vn_vre) in vdev_raidz_fini()
5053 avl_tree_t *tree = &vdrz->vd_expand_txgs; in vdev_raidz_fini()
5056 avl_destroy(&vdrz->vd_expand_txgs); in vdev_raidz_fini()
5057 mutex_destroy(&vdrz->vd_expand_lock); in vdev_raidz_fini()
5058 mutex_destroy(&vdrz->vn_vre.vre_lock); in vdev_raidz_fini()
5059 cv_destroy(&vdrz->vn_vre.vre_cv); in vdev_raidz_fini()
5060 zfs_rangelock_fini(&vdrz->vn_vre.vre_rangelock); in vdev_raidz_fini()
5061 kmem_free(vdrz, sizeof (*vdrz)); in vdev_raidz_fini()
5071 vdev_raidz_t *vdrz = vd->vdev_tsd; in vdev_raidz_config_generate() local
5077 ASSERT(vdrz->vd_nparity == 1 || in vdev_raidz_config_generate()
5078 (vdrz->vd_nparity <= 2 && in vdev_raidz_config_generate()
5080 (vdrz->vd_nparity <= 3 && in vdev_raidz_config_generate()
5088 fnvlist_add_uint64(nv, ZPOOL_CONFIG_NPARITY, vdrz->vd_nparity); in vdev_raidz_config_generate()
5090 if (vdrz->vn_vre.vre_state == DSS_SCANNING) { in vdev_raidz_config_generate()
5094 mutex_enter(&vdrz->vd_expand_lock); in vdev_raidz_config_generate()
5095 if (!avl_is_empty(&vdrz->vd_expand_txgs)) { in vdev_raidz_config_generate()
5096 uint64_t count = avl_numnodes(&vdrz->vd_expand_txgs); in vdev_raidz_config_generate()
5101 for (reflow_node_t *re = avl_first(&vdrz->vd_expand_txgs); in vdev_raidz_config_generate()
5102 re != NULL; re = AVL_NEXT(&vdrz->vd_expand_txgs, re)) { in vdev_raidz_config_generate()
5111 mutex_exit(&vdrz->vd_expand_lock); in vdev_raidz_config_generate()
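Generating the config persists two things visible here: the parity level and, under vd_expand_lock, the list of completed-expansion txgs. In the sketch below, ZPOOL_CONFIG_NPARITY is the real key shown at 5088; CONFIG_EXPAND_TXGS_KEY and re_txg are hypothetical names for the array's key and the node field.

    #define CONFIG_EXPAND_TXGS_KEY  "expand_txgs"   /* hypothetical key name */

    static void
    persist_expansion_state_sketch(vdev_raidz_t *vdrz, nvlist_t *nv)
    {
        fnvlist_add_uint64(nv, ZPOOL_CONFIG_NPARITY, vdrz->vd_nparity);

        mutex_enter(&vdrz->vd_expand_lock);
        if (!avl_is_empty(&vdrz->vd_expand_txgs)) {
                uint_t count = avl_numnodes(&vdrz->vd_expand_txgs);
                uint64_t *txgs = kmem_alloc(count * sizeof (uint64_t),
                    KM_SLEEP);
                uint_t i = 0;

                for (reflow_node_t *re = avl_first(&vdrz->vd_expand_txgs);
                    re != NULL; re = AVL_NEXT(&vdrz->vd_expand_txgs, re))
                        txgs[i++] = re->re_txg;

                fnvlist_add_uint64_array(nv, CONFIG_EXPAND_TXGS_KEY,
                    txgs, count);
                kmem_free(txgs, count * sizeof (uint64_t));
        }
        mutex_exit(&vdrz->vd_expand_lock);
    }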
5117 vdev_raidz_t *vdrz = vd->vdev_tsd; in vdev_raidz_nparity() local
5118 return (vdrz->vd_nparity); in vdev_raidz_nparity()