Lines matching refs:spa. Each hit shows the source line number, the matching line, and its enclosing function; a trailing "argument" or "local" tag marks lines where spa is declared (as a function parameter or a local variable) rather than only referenced.

152 vdev_lookup_top(spa_t *spa, uint64_t vdev)  in vdev_lookup_top()  argument
154 vdev_t *rvd = spa->spa_root_vdev; in vdev_lookup_top()
156 ASSERT(spa_config_held(spa, SCL_ALL, RW_READER) != 0); in vdev_lookup_top()
197 vdev_count_leaves(spa_t *spa) in vdev_count_leaves() argument
199 return (vdev_count_leaves_impl(spa->spa_root_vdev)); in vdev_count_leaves()
208 spa_t *spa = cvd->vdev_spa; in vdev_add_child() local
210 ASSERT(spa_config_held(spa, SCL_ALL, RW_WRITER) == SCL_ALL); in vdev_add_child()
311 vdev_alloc_common(spa_t *spa, uint_t id, uint64_t guid, vdev_ops_t *ops) in vdev_alloc_common() argument
317 if (spa->spa_root_vdev == NULL) { in vdev_alloc_common()
319 spa->spa_root_vdev = vd; in vdev_alloc_common()
320 spa->spa_load_guid = spa_generate_guid(NULL); in vdev_alloc_common()
324 if (spa->spa_root_vdev == vd) { in vdev_alloc_common()
334 guid = spa_generate_guid(spa); in vdev_alloc_common()
336 ASSERT(!spa_guid_exists(spa_guid(spa), guid)); in vdev_alloc_common()
339 vd->vdev_spa = spa; in vdev_alloc_common()
371 vdev_alloc(spa_t *spa, vdev_t **vdp, nvlist_t *nv, vdev_t *parent, uint_t id, in vdev_alloc() argument
379 ASSERT(spa_config_held(spa, SCL_ALL, RW_WRITER) == SCL_ALL); in vdev_alloc()
414 if (ops != &vdev_root_ops && spa->spa_root_vdev == NULL) in vdev_alloc()
422 if (islog && spa_version(spa) < SPA_VERSION_SLOGS) in vdev_alloc()
425 if (ops == &vdev_hole_ops && spa_version(spa) < SPA_VERSION_HOLES) in vdev_alloc()
442 spa_version(spa) < SPA_VERSION_RAIDZ2) in vdev_alloc()
445 spa_version(spa) < SPA_VERSION_RAIDZ3) in vdev_alloc()
452 if (spa_version(spa) >= SPA_VERSION_RAIDZ2) in vdev_alloc()
464 vd = vdev_alloc_common(spa, id, guid, ops); in vdev_alloc()
526 spa_log_class(spa) : spa_normal_class(spa), vd); in vdev_alloc()
562 if (spa_load_state(spa) == SPA_LOAD_OPEN) { in vdev_alloc()
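Several of the vdev_alloc() hits above gate vdev types on the pool's on-disk version: slog, hole, raidz2 and raidz3 vdevs are refused when spa_version() predates SPA_VERSION_SLOGS, SPA_VERSION_HOLES, or SPA_VERSION_RAIDZ2/3. A minimal sketch of one such guard, assuming the in-tree headers; the helper name and the is_log flag are placeholders for the nvlist parsing the real function performs:

#include <sys/zfs_context.h>
#include <sys/spa.h>

/*
 * Sketch of the on-disk-version gating seen in the vdev_alloc()
 * matches: refuse a vdev type the pool's version cannot represent.
 */
static int
vdev_version_gate(spa_t *spa, boolean_t is_log)
{
        /* Separate log devices require at least SPA_VERSION_SLOGS. */
        if (is_log && spa_version(spa) < SPA_VERSION_SLOGS)
                return (ENOTSUP);

        return (0);
}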
596 spa_t *spa = vd->vdev_spa; in vdev_free() local
670 if (vd == spa->spa_root_vdev) in vdev_free()
671 spa->spa_root_vdev = NULL; in vdev_free()
682 spa_t *spa = svd->vdev_spa; in vdev_top_transfer() local
721 if (txg_list_remove_this(&spa->spa_vdev_txg_list, svd, t)) in vdev_top_transfer()
722 (void) txg_list_add(&spa->spa_vdev_txg_list, tvd, t); in vdev_top_transfer()
760 spa_t *spa = cvd->vdev_spa; in vdev_add_parent() local
764 ASSERT(spa_config_held(spa, SCL_ALL, RW_WRITER) == SCL_ALL); in vdev_add_parent()
766 mvd = vdev_alloc_common(spa, cvd->vdev_id, 0, ops); in vdev_add_parent()
833 spa_t *spa = vd->vdev_spa; in vdev_metaslab_init() local
834 objset_t *mos = spa->spa_meta_objset; in vdev_metaslab_init()
841 ASSERT(txg == 0 || spa_config_held(spa, SCL_ALLOC, RW_WRITER)); in vdev_metaslab_init()
890 spa_config_enter(spa, SCL_ALLOC, FTAG, RW_WRITER); in vdev_metaslab_init()
901 spa_config_exit(spa, SCL_ALLOC, FTAG); in vdev_metaslab_init()
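The vdev_metaslab_init() hits above show this file's usual configuration-lock bracketing: a caller passing a nonzero txg must already hold SCL_ALLOC as writer, while a txg of 0 (pool open/load) makes the function take and drop the lock itself. A minimal sketch of that discipline, assuming the in-tree headers; the function name is hypothetical and the metaslab work itself is elided:

#include <sys/zfs_context.h>
#include <sys/spa.h>
#include <sys/vdev_impl.h>

/*
 * Sketch of the SCL_ALLOC bracketing seen in the vdev_metaslab_init()
 * matches; only the locking discipline is illustrated.
 */
static void
vdev_metaslab_work(vdev_t *vd, uint64_t txg)
{
        spa_t *spa = vd->vdev_spa;

        /* Callers with a nonzero txg must already hold SCL_ALLOC as writer. */
        ASSERT(txg == 0 || spa_config_held(spa, SCL_ALLOC, RW_WRITER));

        if (txg == 0)
                spa_config_enter(spa, SCL_ALLOC, FTAG, RW_WRITER);

        /* ... create or load metaslabs for this top-level vdev ... */

        if (txg == 0)
                spa_config_exit(spa, SCL_ALLOC, FTAG);
}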
934 spa_t *spa = zio->io_spa; in vdev_probe_done() local
943 if (zio->io_error == 0 && spa_writeable(spa)) { in vdev_probe_done()
962 (vdev_writeable(vd) || !spa_writeable(spa))) { in vdev_probe_done()
967 spa, vd, NULL, 0, 0); in vdev_probe_done()
994 spa_t *spa = vd->vdev_spa; in vdev_probe() local
1020 if (spa_config_held(spa, SCL_ZIO, RW_WRITER)) { in vdev_probe()
1042 vd->vdev_probe_zio = pio = zio_null(NULL, spa, vd, in vdev_probe()
1052 spa_async_request(spa, SPA_ASYNC_PROBE); in vdev_probe()
1137 spa_t *spa = vd->vdev_spa; in vdev_open() local
1145 spa_config_held(spa, SCL_STATE_ALL, RW_WRITER) == SCL_STATE_ALL); in vdev_open()
1295 (vd->vdev_expanding || spa->spa_autoexpand)) in vdev_open()
1316 if (vd->vdev_ashift > spa->spa_max_ashift) in vdev_open()
1317 spa->spa_max_ashift = vd->vdev_ashift; in vdev_open()
1318 if (vd->vdev_ashift < spa->spa_min_ashift) in vdev_open()
1319 spa->spa_min_ashift = vd->vdev_ashift; in vdev_open()
1327 if (vd->vdev_ops->vdev_op_leaf && !spa->spa_scrub_reopen && in vdev_open()
1329 spa_async_request(spa, SPA_ASYNC_RESILVER); in vdev_open()
1352 spa_t *spa = vd->vdev_spa; in vdev_validate() local
1369 uint64_t txg = spa_last_synced_txg(spa) != 0 ? in vdev_validate()
1370 spa_last_synced_txg(spa) : -1ULL; in vdev_validate()
1383 &aux_guid) == 0 && aux_guid == spa_guid(spa)) { in vdev_validate()
1392 guid != spa_guid(spa))) { in vdev_validate()
1442 if (!(spa->spa_import_flags & ZFS_IMPORT_VERBATIM) && in vdev_validate()
1443 spa_load_state(spa) == SPA_LOAD_OPEN && in vdev_validate()
1465 spa_t *spa = vd->vdev_spa; in vdev_close() local
1468 ASSERT(spa_config_held(spa, SCL_STATE_ALL, RW_WRITER) == SCL_STATE_ALL); in vdev_close()
1498 spa_t *spa = vd->vdev_spa; in vdev_hold() local
1500 ASSERT(spa_is_root(spa)); in vdev_hold()
1501 if (spa->spa_state == POOL_STATE_UNINITIALIZED) in vdev_hold()
1514 spa_t *spa = vd->vdev_spa; in vdev_rele() local
1516 ASSERT(spa_is_root(spa)); in vdev_rele()
1533 spa_t *spa = vd->vdev_spa; in vdev_reopen() local
1535 ASSERT(spa_config_held(spa, SCL_STATE_ALL, RW_WRITER) == SCL_STATE_ALL); in vdev_reopen()
1550 vd->vdev_aux == &spa->spa_l2cache && in vdev_reopen()
1557 l2arc_add_vdev(spa, vd, B_TRUE); in vdev_reopen()
1762 spa_t *spa = vd->vdev_spa; in vdev_dtl_should_excise() local
1763 dsl_scan_t *scn = spa->spa_dsl_pool->dp_scan; in vdev_dtl_should_excise()
1794 spa_t *spa = vd->vdev_spa; in vdev_dtl_reassess() local
1798 ASSERT(spa_config_held(spa, SCL_ALL, RW_READER) != 0); in vdev_dtl_reassess()
1804 if (vd == spa->spa_root_vdev || vd->vdev_ishole || vd->vdev_aux) in vdev_dtl_reassess()
1808 dsl_scan_t *scn = spa->spa_dsl_pool->dp_scan; in vdev_dtl_reassess()
1819 (spa->spa_scrub_started || in vdev_dtl_reassess()
1905 spa_t *spa = vd->vdev_spa; in vdev_dtl_load() local
1906 objset_t *mos = spa->spa_meta_objset; in vdev_dtl_load()
1945 spa_t *spa = vd->vdev_spa; in vdev_dtl_sync() local
1947 objset_t *mos = spa->spa_meta_objset; in vdev_dtl_sync()
1956 tx = dmu_tx_create_assigned(spa->spa_dsl_pool, txg); in vdev_dtl_sync()
2004 "new object %llu", txg, spa_name(spa), object, in vdev_dtl_sync()
2023 spa_t *spa = vd->vdev_spa; in vdev_dtl_required() local
2028 ASSERT(spa_config_held(spa, SCL_STATE_ALL, RW_WRITER) == SCL_STATE_ALL); in vdev_dtl_required()
2030 if (vd == spa->spa_root_vdev || vd == tvd) in vdev_dtl_required()
2161 spa_t *spa = vd->vdev_spa; in vdev_remove() local
2162 objset_t *mos = spa->spa_meta_objset; in vdev_remove()
2165 tx = dmu_tx_create_assigned(spa_get_dsl(spa), txg); in vdev_remove()
2228 spa_t *spa = vd->vdev_spa; in vdev_sync() local
2237 tx = dmu_tx_create_assigned(spa->spa_dsl_pool, txg); in vdev_sync()
2238 vd->vdev_ms_array = dmu_object_alloc(spa->spa_meta_objset, in vdev_sync()
2259 (void) txg_list_add(&spa->spa_vdev_txg_list, vd, TXG_CLEAN(txg)); in vdev_sync()
2273 vdev_fault(spa_t *spa, uint64_t guid, vdev_aux_t aux) in vdev_fault() argument
2277 spa_vdev_state_enter(spa, SCL_NONE); in vdev_fault()
2279 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_fault()
2280 return (spa_vdev_state_exit(spa, NULL, ENODEV)); in vdev_fault()
2283 return (spa_vdev_state_exit(spa, NULL, ENOTSUP)); in vdev_fault()
2320 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_fault()
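vdev_fault() above, together with vdev_degrade(), vdev_online(), and vdev_offline_locked() below, follows one control-flow discipline: enter with spa_vdev_state_enter(), look the device up by GUID, and leave through spa_vdev_state_exit() on every path so the lock is never leaked. A reduced sketch of that shape, assuming the in-tree headers; vdev_example_state_op() is a hypothetical name and the actual state change is elided:

#include <sys/zfs_context.h>
#include <sys/spa.h>
#include <sys/vdev_impl.h>

/*
 * Sketch of the enter/lookup/exit pattern shared by vdev_fault(),
 * vdev_degrade(), vdev_online() and vdev_offline_locked().
 */
static int
vdev_example_state_op(spa_t *spa, uint64_t guid)
{
        vdev_t *vd;

        spa_vdev_state_enter(spa, SCL_NONE);

        /* Unknown GUID: drop the lock and report ENODEV. */
        if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL)
                return (spa_vdev_state_exit(spa, NULL, ENODEV));

        /* These operations only make sense on leaf vdevs. */
        if (!vd->vdev_ops->vdev_op_leaf)
                return (spa_vdev_state_exit(spa, NULL, ENOTSUP));

        /* ... fault, degrade, online, or offline the leaf here ... */

        /* Passing vd back lets spa_vdev_state_exit() record the change. */
        return (spa_vdev_state_exit(spa, vd, 0));
}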
2329 vdev_degrade(spa_t *spa, uint64_t guid, vdev_aux_t aux) in vdev_degrade() argument
2333 spa_vdev_state_enter(spa, SCL_NONE); in vdev_degrade()
2335 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_degrade()
2336 return (spa_vdev_state_exit(spa, NULL, ENODEV)); in vdev_degrade()
2339 return (spa_vdev_state_exit(spa, NULL, ENOTSUP)); in vdev_degrade()
2345 return (spa_vdev_state_exit(spa, NULL, 0)); in vdev_degrade()
2352 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_degrade()
2364 vdev_online(spa_t *spa, uint64_t guid, uint64_t flags, vdev_state_t *newstate) in vdev_online() argument
2366 vdev_t *vd, *tvd, *pvd, *rvd = spa->spa_root_vdev; in vdev_online()
2369 spa_vdev_state_enter(spa, SCL_NONE); in vdev_online()
2371 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_online()
2372 return (spa_vdev_state_exit(spa, NULL, ENODEV)); in vdev_online()
2375 return (spa_vdev_state_exit(spa, NULL, ENOTSUP)); in vdev_online()
2409 if ((flags & ZFS_ONLINE_EXPAND) || spa->spa_autoexpand) { in vdev_online()
2413 return (spa_vdev_state_exit(spa, vd, ENOTSUP)); in vdev_online()
2414 spa_async_request(spa, SPA_ASYNC_CONFIG_UPDATE); in vdev_online()
2418 spa_event_notify(spa, vd, ESC_ZFS_VDEV_ONLINE); in vdev_online()
2420 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_online()
2424 vdev_offline_locked(spa_t *spa, uint64_t guid, uint64_t flags) in vdev_offline_locked() argument
2432 spa_vdev_state_enter(spa, SCL_ALLOC); in vdev_offline_locked()
2434 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_offline_locked()
2435 return (spa_vdev_state_exit(spa, NULL, ENODEV)); in vdev_offline_locked()
2438 return (spa_vdev_state_exit(spa, NULL, ENOTSUP)); in vdev_offline_locked()
2442 generation = spa->spa_config_generation + 1; in vdev_offline_locked()
2455 return (spa_vdev_state_exit(spa, NULL, EBUSY)); in vdev_offline_locked()
2468 (void) spa_vdev_state_exit(spa, vd, 0); in vdev_offline_locked()
2470 error = spa_offline_log(spa); in vdev_offline_locked()
2472 spa_vdev_state_enter(spa, SCL_ALLOC); in vdev_offline_locked()
2477 if (error || generation != spa->spa_config_generation) { in vdev_offline_locked()
2480 return (spa_vdev_state_exit(spa, in vdev_offline_locked()
2482 (void) spa_vdev_state_exit(spa, vd, 0); in vdev_offline_locked()
2501 return (spa_vdev_state_exit(spa, NULL, EBUSY)); in vdev_offline_locked()
2514 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_offline_locked()
2518 vdev_offline(spa_t *spa, uint64_t guid, uint64_t flags) in vdev_offline() argument
2522 mutex_enter(&spa->spa_vdev_top_lock); in vdev_offline()
2523 error = vdev_offline_locked(spa, guid, flags); in vdev_offline()
2524 mutex_exit(&spa->spa_vdev_top_lock); in vdev_offline()
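The vdev_offline() hits show the wrapper convention for operations that can reshape a top-level vdev: hold spa_vdev_top_lock around the _locked helper so such operations are serialized. Sketched below; only the mutex_enter/call/mutex_exit lines are taken from the matches, the rest is the obvious completion:

#include <sys/zfs_context.h>
#include <sys/spa_impl.h>

/* Prototype for the helper shown above (static in the real file). */
extern int vdev_offline_locked(spa_t *, uint64_t, uint64_t);

/*
 * Sketch of the wrapper shape in the vdev_offline() matches:
 * spa_vdev_top_lock serializes whole-top-level-vdev operations and
 * the real work lives in the _locked helper.
 */
int
vdev_offline_sketch(spa_t *spa, uint64_t guid, uint64_t flags)
{
        int error;

        mutex_enter(&spa->spa_vdev_top_lock);
        error = vdev_offline_locked(spa, guid, flags);
        mutex_exit(&spa->spa_vdev_top_lock);

        return (error);
}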
2535 vdev_clear(spa_t *spa, vdev_t *vd) in vdev_clear() argument
2537 vdev_t *rvd = spa->spa_root_vdev; in vdev_clear()
2539 ASSERT(spa_config_held(spa, SCL_STATE_ALL, RW_WRITER) == SCL_STATE_ALL); in vdev_clear()
2549 vdev_clear(spa, vd->vdev_child[c]); in vdev_clear()
2579 spa_async_request(spa, SPA_ASYNC_RESILVER); in vdev_clear()
2581 spa_event_notify(spa, vd, ESC_ZFS_VDEV_CLEAR); in vdev_clear()
2661 spa_t *spa = vd->vdev_spa; in vdev_get_stats() local
2662 vdev_t *rvd = spa->spa_root_vdev; in vdev_get_stats()
2664 ASSERT(spa_config_held(spa, SCL_ALL, RW_READER) != 0); in vdev_get_stats()
2723 spa_t *spa = zio->io_spa; in vdev_stat_update() local
2724 vdev_t *rvd = spa->spa_root_vdev; in vdev_stat_update()
2766 &spa->spa_dsl_pool->dp_scan->scn_phys; in vdev_stat_update()
2821 spa->spa_claiming)) { in vdev_stat_update()
2841 ASSERT(spa_sync_pass(spa) == 1); in vdev_stat_update()
2843 commit_txg = spa_syncing_txg(spa); in vdev_stat_update()
2844 } else if (spa->spa_claiming) { in vdev_stat_update()
2846 commit_txg = spa_first_txg(spa); in vdev_stat_update()
2848 ASSERT(commit_txg >= spa_syncing_txg(spa)); in vdev_stat_update()
2869 spa_t *spa = vd->vdev_spa; in vdev_space_update() local
2870 vdev_t *rvd = spa->spa_root_vdev; in vdev_space_update()
2893 if (mc == spa_normal_class(spa)) { in vdev_space_update()
2918 spa_t *spa = vd->vdev_spa; in vdev_config_dirty() local
2919 vdev_t *rvd = spa->spa_root_vdev; in vdev_config_dirty()
2922 ASSERT(spa_writeable(spa)); in vdev_config_dirty()
2961 aux[c] = vdev_config_generate(spa, vd, B_TRUE, 0); in vdev_config_dirty()
2972 ASSERT(spa_config_held(spa, SCL_CONFIG, RW_WRITER) || in vdev_config_dirty()
2973 (dsl_pool_sync_context(spa_get_dsl(spa)) && in vdev_config_dirty()
2974 spa_config_held(spa, SCL_CONFIG, RW_READER))); in vdev_config_dirty()
2984 list_insert_head(&spa->spa_config_dirty_list, vd); in vdev_config_dirty()
2991 spa_t *spa = vd->vdev_spa; in vdev_config_clean() local
2993 ASSERT(spa_config_held(spa, SCL_CONFIG, RW_WRITER) || in vdev_config_clean()
2994 (dsl_pool_sync_context(spa_get_dsl(spa)) && in vdev_config_clean()
2995 spa_config_held(spa, SCL_CONFIG, RW_READER))); in vdev_config_clean()
2998 list_remove(&spa->spa_config_dirty_list, vd); in vdev_config_clean()
3010 spa_t *spa = vd->vdev_spa; in vdev_state_dirty() local
3012 ASSERT(spa_writeable(spa)); in vdev_state_dirty()
3021 ASSERT(spa_config_held(spa, SCL_STATE, RW_WRITER) || in vdev_state_dirty()
3022 (dsl_pool_sync_context(spa_get_dsl(spa)) && in vdev_state_dirty()
3023 spa_config_held(spa, SCL_STATE, RW_READER))); in vdev_state_dirty()
3026 list_insert_head(&spa->spa_state_dirty_list, vd); in vdev_state_dirty()
3032 spa_t *spa = vd->vdev_spa; in vdev_state_clean() local
3034 ASSERT(spa_config_held(spa, SCL_STATE, RW_WRITER) || in vdev_state_clean()
3035 (dsl_pool_sync_context(spa_get_dsl(spa)) && in vdev_state_clean()
3036 spa_config_held(spa, SCL_STATE, RW_READER))); in vdev_state_clean()
3039 list_remove(&spa->spa_state_dirty_list, vd); in vdev_state_clean()
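vdev_config_dirty()/vdev_config_clean() and vdev_state_dirty()/vdev_state_clean() above all assert the same invariant before touching the spa's dirty lists: the corresponding lock (SCL_CONFIG or SCL_STATE) is held as writer, or the caller is in the DSL pool's syncing context and holds it as reader. A compact restatement of that invariant, with the lock level parameterized purely for illustration:

#include <sys/zfs_context.h>
#include <sys/spa.h>
#include <sys/dsl_pool.h>

/*
 * Sketch of the invariant asserted before editing
 * spa_config_dirty_list / spa_state_dirty_list.  'locks' would be
 * SCL_CONFIG or SCL_STATE in the real functions.
 */
static void
vdev_dirty_list_assert(spa_t *spa, int locks)
{
        ASSERT(spa_config_held(spa, locks, RW_WRITER) ||
            (dsl_pool_sync_context(spa_get_dsl(spa)) &&
            spa_config_held(spa, locks, RW_READER)));
}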
3048 spa_t *spa = vd->vdev_spa; in vdev_propagate_state() local
3049 vdev_t *rvd = spa->spa_root_vdev; in vdev_propagate_state()
3065 (!vdev_writeable(child) && spa_writeable(spa))) { in vdev_propagate_state()
3113 spa_t *spa = vd->vdev_spa; in vdev_set_state() local
3150 zfs_post_state_change(spa, vd); in vdev_set_state()
3175 if ((spa_load_state(spa) == SPA_LOAD_IMPORT || in vdev_set_state()
3176 spa_load_state(spa) == SPA_LOAD_RECOVER) && in vdev_set_state()
3197 vd != spa->spa_root_vdev) { in vdev_set_state()
3223 zfs_ereport_post(class, spa, vd, NULL, save_state, 0); in vdev_set_state()
3273 spa_t *spa = nvd->vdev_spa; in vdev_load_log_state() local
3276 ASSERT(spa_config_held(spa, SCL_STATE_ALL, RW_WRITER) == SCL_STATE_ALL); in vdev_load_log_state()
3360 spa_t *spa = vd->vdev_spa; in vdev_deadman() local
3371 if (delta > spa_deadman_synctime(spa)) { in vdev_deadman()
3377 "hung.", spa_name(spa)); in vdev_deadman()
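Finally, the vdev_deadman() hits compare how long the oldest outstanding I/O has been pending against the pool's deadman threshold from spa_deadman_synctime(), and report the pool as hung when the threshold is exceeded. A sketch of that comparison; the oldest timestamp is passed in as a placeholder for the per-vdev queue walk the real function does, and the report is reduced to a console warning:

#include <sys/zfs_context.h>
#include <sys/spa.h>

/*
 * Sketch of the threshold check in the vdev_deadman() matches.  The
 * real code walks each leaf vdev's I/O queue; here the oldest
 * timestamp is simply passed in.
 */
static void
vdev_deadman_check(spa_t *spa, hrtime_t oldest_io_timestamp)
{
        hrtime_t delta = gethrtime() - oldest_io_timestamp;

        if ((uint64_t)delta > spa_deadman_synctime(spa)) {
                cmn_err(CE_WARN, "pool '%s' has I/O outstanding for "
                    "%lluns, past the deadman threshold", spa_name(spa),
                    (u_longlong_t)delta);
        }
}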