Lines matching refs:spa in ZFS vdev.c (references to the spa_t storage-pool handle)

151 vdev_lookup_top(spa_t *spa, uint64_t vdev) in vdev_lookup_top() argument
153 vdev_t *rvd = spa->spa_root_vdev; in vdev_lookup_top()
155 ASSERT(spa_config_held(spa, SCL_ALL, RW_READER) != 0); in vdev_lookup_top()
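
The match at line 155 shows vdev_lookup_top() asserting that its caller already holds at least one SPA config lock as reader. A minimal caller sketch follows; it only compiles inside the illumos/ZFS source tree, the helper name top_vdev_guid() is made up for illustration, and SCL_VDEV is merely one lock choice that satisfies the SCL_ALL/RW_READER assertion.

#include <sys/spa.h>
#include <sys/vdev.h>
#include <sys/vdev_impl.h>

/* Hypothetical helper: resolve a top-level vdev id to its guid. */
static uint64_t
top_vdev_guid(spa_t *spa, uint64_t top_id)
{
    vdev_t *tvd;
    uint64_t guid = 0;

    /* Any config lock held as reader satisfies the assertion at 155. */
    spa_config_enter(spa, SCL_VDEV, FTAG, RW_READER);
    tvd = vdev_lookup_top(spa, top_id);
    if (tvd != NULL)
        guid = tvd->vdev_guid;
    spa_config_exit(spa, SCL_VDEV, FTAG);

    return (guid);
}
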
196 vdev_count_leaves(spa_t *spa) in vdev_count_leaves() argument
198 return (vdev_count_leaves_impl(spa->spa_root_vdev)); in vdev_count_leaves()
207 spa_t *spa = cvd->vdev_spa; in vdev_add_child() local
209 ASSERT(spa_config_held(spa, SCL_ALL, RW_WRITER) == SCL_ALL); in vdev_add_child()
310 vdev_alloc_common(spa_t *spa, uint_t id, uint64_t guid, vdev_ops_t *ops) in vdev_alloc_common() argument
316 if (spa->spa_root_vdev == NULL) { in vdev_alloc_common()
318 spa->spa_root_vdev = vd; in vdev_alloc_common()
319 spa->spa_load_guid = spa_generate_guid(NULL); in vdev_alloc_common()
323 if (spa->spa_root_vdev == vd) { in vdev_alloc_common()
333 guid = spa_generate_guid(spa); in vdev_alloc_common()
335 ASSERT(!spa_guid_exists(spa_guid(spa), guid)); in vdev_alloc_common()
338 vd->vdev_spa = spa; in vdev_alloc_common()
370 vdev_alloc(spa_t *spa, vdev_t **vdp, nvlist_t *nv, vdev_t *parent, uint_t id, in vdev_alloc() argument
378 ASSERT(spa_config_held(spa, SCL_ALL, RW_WRITER) == SCL_ALL); in vdev_alloc()
413 if (ops != &vdev_root_ops && spa->spa_root_vdev == NULL) in vdev_alloc()
421 if (islog && spa_version(spa) < SPA_VERSION_SLOGS) in vdev_alloc()
424 if (ops == &vdev_hole_ops && spa_version(spa) < SPA_VERSION_HOLES) in vdev_alloc()
441 spa_version(spa) < SPA_VERSION_RAIDZ2) in vdev_alloc()
444 spa_version(spa) < SPA_VERSION_RAIDZ3) in vdev_alloc()
451 if (spa_version(spa) >= SPA_VERSION_RAIDZ2) in vdev_alloc()
463 vd = vdev_alloc_common(spa, id, guid, ops); in vdev_alloc()
525 spa_log_class(spa) : spa_normal_class(spa), vd); in vdev_alloc()
561 if (spa_load_state(spa) == SPA_LOAD_OPEN) { in vdev_alloc()
595 spa_t *spa = vd->vdev_spa; in vdev_free() local
669 if (vd == spa->spa_root_vdev) in vdev_free()
670 spa->spa_root_vdev = NULL; in vdev_free()
681 spa_t *spa = svd->vdev_spa; in vdev_top_transfer() local
720 if (txg_list_remove_this(&spa->spa_vdev_txg_list, svd, t)) in vdev_top_transfer()
721 (void) txg_list_add(&spa->spa_vdev_txg_list, tvd, t); in vdev_top_transfer()
759 spa_t *spa = cvd->vdev_spa; in vdev_add_parent() local
763 ASSERT(spa_config_held(spa, SCL_ALL, RW_WRITER) == SCL_ALL); in vdev_add_parent()
765 mvd = vdev_alloc_common(spa, cvd->vdev_id, 0, ops); in vdev_add_parent()
832 spa_t *spa = vd->vdev_spa; in vdev_metaslab_init() local
833 objset_t *mos = spa->spa_meta_objset; in vdev_metaslab_init()
840 ASSERT(txg == 0 || spa_config_held(spa, SCL_ALLOC, RW_WRITER)); in vdev_metaslab_init()
889 spa_config_enter(spa, SCL_ALLOC, FTAG, RW_WRITER); in vdev_metaslab_init()
900 spa_config_exit(spa, SCL_ALLOC, FTAG); in vdev_metaslab_init()
933 spa_t *spa = zio->io_spa; in vdev_probe_done() local
942 if (zio->io_error == 0 && spa_writeable(spa)) { in vdev_probe_done()
961 (vdev_writeable(vd) || !spa_writeable(spa))) { in vdev_probe_done()
966 spa, vd, NULL, 0, 0); in vdev_probe_done()
993 spa_t *spa = vd->vdev_spa; in vdev_probe() local
1019 if (spa_config_held(spa, SCL_ZIO, RW_WRITER)) { in vdev_probe()
1041 vd->vdev_probe_zio = pio = zio_null(NULL, spa, vd, in vdev_probe()
1051 spa_async_request(spa, SPA_ASYNC_PROBE); in vdev_probe()
1136 spa_t *spa = vd->vdev_spa; in vdev_open() local
1144 spa_config_held(spa, SCL_STATE_ALL, RW_WRITER) == SCL_STATE_ALL); in vdev_open()
1294 (vd->vdev_expanding || spa->spa_autoexpand)) in vdev_open()
1315 if (vd->vdev_ashift > spa->spa_max_ashift) in vdev_open()
1316 spa->spa_max_ashift = vd->vdev_ashift; in vdev_open()
1317 if (vd->vdev_ashift < spa->spa_min_ashift) in vdev_open()
1318 spa->spa_min_ashift = vd->vdev_ashift; in vdev_open()
1326 if (vd->vdev_ops->vdev_op_leaf && !spa->spa_scrub_reopen && in vdev_open()
1328 spa_async_request(spa, SPA_ASYNC_RESILVER); in vdev_open()
1351 spa_t *spa = vd->vdev_spa; in vdev_validate() local
1368 uint64_t txg = spa_last_synced_txg(spa) != 0 ? in vdev_validate()
1369 spa_last_synced_txg(spa) : -1ULL; in vdev_validate()
1382 &aux_guid) == 0 && aux_guid == spa_guid(spa)) { in vdev_validate()
1391 guid != spa_guid(spa))) { in vdev_validate()
1441 if (!(spa->spa_import_flags & ZFS_IMPORT_VERBATIM) && in vdev_validate()
1442 spa_load_state(spa) == SPA_LOAD_OPEN && in vdev_validate()
1464 spa_t *spa = vd->vdev_spa; in vdev_close() local
1467 ASSERT(spa_config_held(spa, SCL_STATE_ALL, RW_WRITER) == SCL_STATE_ALL); in vdev_close()
1497 spa_t *spa = vd->vdev_spa; in vdev_hold() local
1499 ASSERT(spa_is_root(spa)); in vdev_hold()
1500 if (spa->spa_state == POOL_STATE_UNINITIALIZED) in vdev_hold()
1513 spa_t *spa = vd->vdev_spa; in vdev_rele() local
1515 ASSERT(spa_is_root(spa)); in vdev_rele()
1532 spa_t *spa = vd->vdev_spa; in vdev_reopen() local
1534 ASSERT(spa_config_held(spa, SCL_STATE_ALL, RW_WRITER) == SCL_STATE_ALL); in vdev_reopen()
1549 vd->vdev_aux == &spa->spa_l2cache && in vdev_reopen()
1556 l2arc_add_vdev(spa, vd, B_TRUE); in vdev_reopen()
1761 spa_t *spa = vd->vdev_spa; in vdev_dtl_should_excise() local
1762 dsl_scan_t *scn = spa->spa_dsl_pool->dp_scan; in vdev_dtl_should_excise()
1793 spa_t *spa = vd->vdev_spa; in vdev_dtl_reassess() local
1797 ASSERT(spa_config_held(spa, SCL_ALL, RW_READER) != 0); in vdev_dtl_reassess()
1803 if (vd == spa->spa_root_vdev || vd->vdev_ishole || vd->vdev_aux) in vdev_dtl_reassess()
1807 dsl_scan_t *scn = spa->spa_dsl_pool->dp_scan; in vdev_dtl_reassess()
1818 (spa->spa_scrub_started || in vdev_dtl_reassess()
1904 spa_t *spa = vd->vdev_spa; in vdev_dtl_load() local
1905 objset_t *mos = spa->spa_meta_objset; in vdev_dtl_load()
1944 spa_t *spa = vd->vdev_spa; in vdev_dtl_sync() local
1946 objset_t *mos = spa->spa_meta_objset; in vdev_dtl_sync()
1955 tx = dmu_tx_create_assigned(spa->spa_dsl_pool, txg); in vdev_dtl_sync()
2003 "new object %llu", txg, spa_name(spa), object, in vdev_dtl_sync()
2022 spa_t *spa = vd->vdev_spa; in vdev_dtl_required() local
2027 ASSERT(spa_config_held(spa, SCL_STATE_ALL, RW_WRITER) == SCL_STATE_ALL); in vdev_dtl_required()
2029 if (vd == spa->spa_root_vdev || vd == tvd) in vdev_dtl_required()
2160 spa_t *spa = vd->vdev_spa; in vdev_remove() local
2161 objset_t *mos = spa->spa_meta_objset; in vdev_remove()
2164 tx = dmu_tx_create_assigned(spa_get_dsl(spa), txg); in vdev_remove()
2227 spa_t *spa = vd->vdev_spa; in vdev_sync() local
2236 tx = dmu_tx_create_assigned(spa->spa_dsl_pool, txg); in vdev_sync()
2237 vd->vdev_ms_array = dmu_object_alloc(spa->spa_meta_objset, in vdev_sync()
2258 (void) txg_list_add(&spa->spa_vdev_txg_list, vd, TXG_CLEAN(txg)); in vdev_sync()
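
The matches at 2236-2237 (and at 1955 and 2164 above) show the syncing-context transaction idiom: the tx is created already assigned to the txg being synced, so there is no dmu_tx_assign()/retry step. A sketch of that idiom, assuming ZFS-tree headers; the helper name is illustrative, while DMU_OT_OBJECT_ARRAY mirrors the object type vdev_sync() itself allocates for vdev_ms_array.

#include <sys/dmu.h>
#include <sys/dmu_tx.h>
#include <sys/spa_impl.h>

/* Illustrative: allocate a MOS object from sync context for txg. */
static uint64_t
alloc_mos_object(spa_t *spa, uint64_t txg)
{
    dmu_tx_t *tx;
    uint64_t obj;

    /* Valid only from sync context, for the currently syncing txg. */
    tx = dmu_tx_create_assigned(spa->spa_dsl_pool, txg);
    obj = dmu_object_alloc(spa->spa_meta_objset, DMU_OT_OBJECT_ARRAY,
        0, DMU_OT_NONE, 0, tx);
    dmu_tx_commit(tx);

    return (obj);
}
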
2272 vdev_fault(spa_t *spa, uint64_t guid, vdev_aux_t aux) in vdev_fault() argument
2276 spa_vdev_state_enter(spa, SCL_NONE); in vdev_fault()
2278 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_fault()
2279 return (spa_vdev_state_exit(spa, NULL, ENODEV)); in vdev_fault()
2282 return (spa_vdev_state_exit(spa, NULL, ENOTSUP)); in vdev_fault()
2319 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_fault()
2328 vdev_degrade(spa_t *spa, uint64_t guid, vdev_aux_t aux) in vdev_degrade() argument
2332 spa_vdev_state_enter(spa, SCL_NONE); in vdev_degrade()
2334 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_degrade()
2335 return (spa_vdev_state_exit(spa, NULL, ENODEV)); in vdev_degrade()
2338 return (spa_vdev_state_exit(spa, NULL, ENOTSUP)); in vdev_degrade()
2344 return (spa_vdev_state_exit(spa, NULL, 0)); in vdev_degrade()
2351 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_degrade()
2363 vdev_online(spa_t *spa, uint64_t guid, uint64_t flags, vdev_state_t *newstate) in vdev_online() argument
2365 vdev_t *vd, *tvd, *pvd, *rvd = spa->spa_root_vdev; in vdev_online()
2367 spa_vdev_state_enter(spa, SCL_NONE); in vdev_online()
2369 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_online()
2370 return (spa_vdev_state_exit(spa, NULL, ENODEV)); in vdev_online()
2373 return (spa_vdev_state_exit(spa, NULL, ENOTSUP)); in vdev_online()
2403 if ((flags & ZFS_ONLINE_EXPAND) || spa->spa_autoexpand) { in vdev_online()
2407 return (spa_vdev_state_exit(spa, vd, ENOTSUP)); in vdev_online()
2408 spa_async_request(spa, SPA_ASYNC_CONFIG_UPDATE); in vdev_online()
2410 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_online()
2414 vdev_offline_locked(spa_t *spa, uint64_t guid, uint64_t flags) in vdev_offline_locked() argument
2422 spa_vdev_state_enter(spa, SCL_ALLOC); in vdev_offline_locked()
2424 if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL) in vdev_offline_locked()
2425 return (spa_vdev_state_exit(spa, NULL, ENODEV)); in vdev_offline_locked()
2428 return (spa_vdev_state_exit(spa, NULL, ENOTSUP)); in vdev_offline_locked()
2432 generation = spa->spa_config_generation + 1; in vdev_offline_locked()
2445 return (spa_vdev_state_exit(spa, NULL, EBUSY)); in vdev_offline_locked()
2458 (void) spa_vdev_state_exit(spa, vd, 0); in vdev_offline_locked()
2460 error = spa_offline_log(spa); in vdev_offline_locked()
2462 spa_vdev_state_enter(spa, SCL_ALLOC); in vdev_offline_locked()
2467 if (error || generation != spa->spa_config_generation) { in vdev_offline_locked()
2470 return (spa_vdev_state_exit(spa, in vdev_offline_locked()
2472 (void) spa_vdev_state_exit(spa, vd, 0); in vdev_offline_locked()
2491 return (spa_vdev_state_exit(spa, NULL, EBUSY)); in vdev_offline_locked()
2504 return (spa_vdev_state_exit(spa, vd, 0)); in vdev_offline_locked()
2508 vdev_offline(spa_t *spa, uint64_t guid, uint64_t flags) in vdev_offline() argument
2512 mutex_enter(&spa->spa_vdev_top_lock); in vdev_offline()
2513 error = vdev_offline_locked(spa, guid, flags); in vdev_offline()
2514 mutex_exit(&spa->spa_vdev_top_lock); in vdev_offline()
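
vdev_fault(), vdev_degrade(), vdev_online() and vdev_offline_locked() above all share one shape: spa_vdev_state_enter(), translate the guid to a vdev with spa_lookup_by_guid(), then leave through spa_vdev_state_exit() on every path (NULL plus an errno on failure, the vdev and 0 on success). A skeleton of that shape, assuming ZFS-tree headers; the function name and the leaf-only check are illustrative, not a real ioctl handler.

#include <sys/spa.h>
#include <sys/spa_impl.h>
#include <sys/vdev_impl.h>

/* Skeleton of a per-vdev state operation keyed by guid. */
static int
example_vdev_state_op(spa_t *spa, uint64_t guid)
{
    vdev_t *vd;

    spa_vdev_state_enter(spa, SCL_NONE);

    if ((vd = spa_lookup_by_guid(spa, guid, B_TRUE)) == NULL)
        return (spa_vdev_state_exit(spa, NULL, ENODEV));

    if (!vd->vdev_ops->vdev_op_leaf)
        return (spa_vdev_state_exit(spa, NULL, ENOTSUP));

    /* ... adjust vd's state or flags here ... */

    /* Hand vd back so the state change is recorded and synced. */
    return (spa_vdev_state_exit(spa, vd, 0));
}
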
2525 vdev_clear(spa_t *spa, vdev_t *vd) in vdev_clear() argument
2527 vdev_t *rvd = spa->spa_root_vdev; in vdev_clear()
2529 ASSERT(spa_config_held(spa, SCL_STATE_ALL, RW_WRITER) == SCL_STATE_ALL); in vdev_clear()
2539 vdev_clear(spa, vd->vdev_child[c]); in vdev_clear()
2569 spa_async_request(spa, SPA_ASYNC_RESILVER); in vdev_clear()
2571 spa_event_notify(spa, vd, ESC_ZFS_VDEV_CLEAR); in vdev_clear()
2651 spa_t *spa = vd->vdev_spa; in vdev_get_stats() local
2652 vdev_t *rvd = spa->spa_root_vdev; in vdev_get_stats()
2654 ASSERT(spa_config_held(spa, SCL_ALL, RW_READER) != 0); in vdev_get_stats()
2713 spa_t *spa = zio->io_spa; in vdev_stat_update() local
2714 vdev_t *rvd = spa->spa_root_vdev; in vdev_stat_update()
2756 &spa->spa_dsl_pool->dp_scan->scn_phys; in vdev_stat_update()
2811 spa->spa_claiming)) { in vdev_stat_update()
2831 ASSERT(spa_sync_pass(spa) == 1); in vdev_stat_update()
2833 commit_txg = spa_syncing_txg(spa); in vdev_stat_update()
2834 } else if (spa->spa_claiming) { in vdev_stat_update()
2836 commit_txg = spa_first_txg(spa); in vdev_stat_update()
2838 ASSERT(commit_txg >= spa_syncing_txg(spa)); in vdev_stat_update()
2859 spa_t *spa = vd->vdev_spa; in vdev_space_update() local
2860 vdev_t *rvd = spa->spa_root_vdev; in vdev_space_update()
2883 if (mc == spa_normal_class(spa)) { in vdev_space_update()
2908 spa_t *spa = vd->vdev_spa; in vdev_config_dirty() local
2909 vdev_t *rvd = spa->spa_root_vdev; in vdev_config_dirty()
2912 ASSERT(spa_writeable(spa)); in vdev_config_dirty()
2951 aux[c] = vdev_config_generate(spa, vd, B_TRUE, 0); in vdev_config_dirty()
2962 ASSERT(spa_config_held(spa, SCL_CONFIG, RW_WRITER) || in vdev_config_dirty()
2963 (dsl_pool_sync_context(spa_get_dsl(spa)) && in vdev_config_dirty()
2964 spa_config_held(spa, SCL_CONFIG, RW_READER))); in vdev_config_dirty()
2974 list_insert_head(&spa->spa_config_dirty_list, vd); in vdev_config_dirty()
2981 spa_t *spa = vd->vdev_spa; in vdev_config_clean() local
2983 ASSERT(spa_config_held(spa, SCL_CONFIG, RW_WRITER) || in vdev_config_clean()
2984 (dsl_pool_sync_context(spa_get_dsl(spa)) && in vdev_config_clean()
2985 spa_config_held(spa, SCL_CONFIG, RW_READER))); in vdev_config_clean()
2988 list_remove(&spa->spa_config_dirty_list, vd); in vdev_config_clean()
3000 spa_t *spa = vd->vdev_spa; in vdev_state_dirty() local
3002 ASSERT(spa_writeable(spa)); in vdev_state_dirty()
3011 ASSERT(spa_config_held(spa, SCL_STATE, RW_WRITER) || in vdev_state_dirty()
3012 (dsl_pool_sync_context(spa_get_dsl(spa)) && in vdev_state_dirty()
3013 spa_config_held(spa, SCL_STATE, RW_READER))); in vdev_state_dirty()
3016 list_insert_head(&spa->spa_state_dirty_list, vd); in vdev_state_dirty()
3022 spa_t *spa = vd->vdev_spa; in vdev_state_clean() local
3024 ASSERT(spa_config_held(spa, SCL_STATE, RW_WRITER) || in vdev_state_clean()
3025 (dsl_pool_sync_context(spa_get_dsl(spa)) && in vdev_state_clean()
3026 spa_config_held(spa, SCL_STATE, RW_READER))); in vdev_state_clean()
3029 list_remove(&spa->spa_state_dirty_list, vd); in vdev_state_clean()
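
The assertions in vdev_config_dirty()/vdev_config_clean() and vdev_state_dirty()/vdev_state_clean() (lines 2962-2964, 2983-2985, 3011-3013, 3024-3026) encode the rule for touching spa_config_dirty_list and spa_state_dirty_list: hold the matching lock as writer, or be in DSL pool sync context holding it as reader. A caller-side sketch under that rule; the helper and the placeholder mutation are illustrative only.

#include <sys/spa.h>
#include <sys/vdev.h>

/* Illustrative: mutate a top-level vdev and dirty the pool config. */
static void
example_dirty_config(spa_t *spa, vdev_t *tvd)
{
    /* SCL_CONFIG as writer satisfies the assertion at 2962. */
    spa_config_enter(spa, SCL_CONFIG, FTAG, RW_WRITER);

    /* ... change something that the vdev config records ... */

    vdev_config_dirty(tvd);
    spa_config_exit(spa, SCL_CONFIG, FTAG);
}
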
3038 spa_t *spa = vd->vdev_spa; in vdev_propagate_state() local
3039 vdev_t *rvd = spa->spa_root_vdev; in vdev_propagate_state()
3055 (!vdev_writeable(child) && spa_writeable(spa))) { in vdev_propagate_state()
3103 spa_t *spa = vd->vdev_spa; in vdev_set_state() local
3140 zfs_post_state_change(spa, vd); in vdev_set_state()
3165 if ((spa_load_state(spa) == SPA_LOAD_IMPORT || in vdev_set_state()
3166 spa_load_state(spa) == SPA_LOAD_RECOVER) && in vdev_set_state()
3187 vd != spa->spa_root_vdev) { in vdev_set_state()
3213 zfs_ereport_post(class, spa, vd, NULL, save_state, 0); in vdev_set_state()
3263 spa_t *spa = nvd->vdev_spa; in vdev_load_log_state() local
3266 ASSERT(spa_config_held(spa, SCL_STATE_ALL, RW_WRITER) == SCL_STATE_ALL); in vdev_load_log_state()
3350 spa_t *spa = vd->vdev_spa; in vdev_deadman() local
3361 if (delta > spa_deadman_synctime(spa)) { in vdev_deadman()
3367 "hung.", spa_name(spa)); in vdev_deadman()