Lines Matching refs:msp
601 metaslab_t *msp = vd->vdev_ms[m]; in metaslab_group_histogram_verify() local
603 if (msp->ms_sm == NULL) in metaslab_group_histogram_verify()
608 msp->ms_sm->sm_phys->smp_histogram[i]; in metaslab_group_histogram_verify()
618 metaslab_group_histogram_add(metaslab_group_t *mg, metaslab_t *msp) in metaslab_group_histogram_add() argument
623 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_group_histogram_add()
624 if (msp->ms_sm == NULL) in metaslab_group_histogram_add()
630 msp->ms_sm->sm_phys->smp_histogram[i]; in metaslab_group_histogram_add()
632 msp->ms_sm->sm_phys->smp_histogram[i]; in metaslab_group_histogram_add()
638 metaslab_group_histogram_remove(metaslab_group_t *mg, metaslab_t *msp) in metaslab_group_histogram_remove() argument
643 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_group_histogram_remove()
644 if (msp->ms_sm == NULL) in metaslab_group_histogram_remove()
650 msp->ms_sm->sm_phys->smp_histogram[i]); in metaslab_group_histogram_remove()
652 msp->ms_sm->sm_phys->smp_histogram[i]); in metaslab_group_histogram_remove()
655 msp->ms_sm->sm_phys->smp_histogram[i]; in metaslab_group_histogram_remove()
657 msp->ms_sm->sm_phys->smp_histogram[i]; in metaslab_group_histogram_remove()
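The fragments above show metaslab_group_histogram_add() and metaslab_group_histogram_remove() folding each metaslab's space-map histogram into, and back out of, the group-wide histogram while ms_lock is held. A standalone sketch of that aggregation pattern (illustrative types and names, not the ZFS code):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define HIST_BUCKETS 32

/* Illustrative stand-ins for the group-wide and per-metaslab histograms. */
typedef struct group { uint64_t g_histogram[HIST_BUCKETS]; } group_t;
typedef struct mslab { uint64_t m_histogram[HIST_BUCKETS]; } mslab_t;

/* Fold one metaslab's histogram into the group's totals. */
static void
group_histogram_add(group_t *g, const mslab_t *m)
{
    for (int i = 0; i < HIST_BUCKETS; i++)
        g->g_histogram[i] += m->m_histogram[i];
}

/* Back a metaslab's contribution out again; counts must never go negative. */
static void
group_histogram_remove(group_t *g, const mslab_t *m)
{
    for (int i = 0; i < HIST_BUCKETS; i++) {
        assert(g->g_histogram[i] >= m->m_histogram[i]);
        g->g_histogram[i] -= m->m_histogram[i];
    }
}

int
main(void)
{
    group_t g = { { 0 } };
    mslab_t m = { { 0 } };

    m.m_histogram[9] = 4;       /* e.g. four free segments in bucket 9 */
    group_histogram_add(&g, &m);
    group_histogram_remove(&g, &m);
    printf("bucket 9 after add+remove: %llu\n",
        (unsigned long long)g.g_histogram[9]);
    return (0);
}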
663 metaslab_group_add(metaslab_group_t *mg, metaslab_t *msp) in metaslab_group_add() argument
665 ASSERT(msp->ms_group == NULL); in metaslab_group_add()
667 msp->ms_group = mg; in metaslab_group_add()
668 msp->ms_weight = 0; in metaslab_group_add()
669 avl_add(&mg->mg_metaslab_tree, msp); in metaslab_group_add()
672 mutex_enter(&msp->ms_lock); in metaslab_group_add()
673 metaslab_group_histogram_add(mg, msp); in metaslab_group_add()
674 mutex_exit(&msp->ms_lock); in metaslab_group_add()
678 metaslab_group_remove(metaslab_group_t *mg, metaslab_t *msp) in metaslab_group_remove() argument
680 mutex_enter(&msp->ms_lock); in metaslab_group_remove()
681 metaslab_group_histogram_remove(mg, msp); in metaslab_group_remove()
682 mutex_exit(&msp->ms_lock); in metaslab_group_remove()
685 ASSERT(msp->ms_group == mg); in metaslab_group_remove()
686 avl_remove(&mg->mg_metaslab_tree, msp); in metaslab_group_remove()
687 msp->ms_group = NULL; in metaslab_group_remove()
692 metaslab_group_sort(metaslab_group_t *mg, metaslab_t *msp, uint64_t weight) in metaslab_group_sort() argument
699 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_group_sort()
702 ASSERT(msp->ms_group == mg); in metaslab_group_sort()
703 avl_remove(&mg->mg_metaslab_tree, msp); in metaslab_group_sort()
704 msp->ms_weight = weight; in metaslab_group_sort()
705 avl_add(&mg->mg_metaslab_tree, msp); in metaslab_group_sort()
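metaslab_group_sort() re-keys a metaslab in the group's weight-ordered AVL tree: remove it, update ms_weight, then re-insert, since the tree cannot be re-keyed in place. A standalone sketch of the same remove/update/re-insert discipline over a sorted array (illustrative only):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Keep 'ids' sorted by descending weight; weights[] is indexed by id. */
static void
resort_entry(int *ids, uint64_t *weights, int n, int id, uint64_t new_weight)
{
    int i, pos = 0;

    /* Remove the entry (analogue of avl_remove()). */
    for (i = 0; i < n; i++)
        if (ids[i] == id) { pos = i; break; }
    memmove(&ids[pos], &ids[pos + 1], (n - pos - 1) * sizeof (int));

    /* Update the key only while the entry is out of the ordering. */
    weights[id] = new_weight;

    /* Re-insert at the position that preserves the order (avl_add()). */
    for (i = 0; i < n - 1 && weights[ids[i]] >= new_weight; i++)
        ;
    memmove(&ids[i + 1], &ids[i], (n - 1 - i) * sizeof (int));
    ids[i] = id;
}

int
main(void)
{
    uint64_t weights[3] = { 50, 30, 10 };
    int ids[3] = { 0, 1, 2 };   /* already sorted by weight */

    resort_entry(ids, weights, 3, 2, 40);
    printf("%d %d %d\n", ids[0], ids[1], ids[2]);   /* prints: 0 2 1 */
    return (0);
}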
724 metaslab_t *msp = vd->vdev_ms[m]; in metaslab_group_fragmentation() local
726 if (msp->ms_fragmentation == ZFS_FRAG_INVALID) in metaslab_group_fragmentation()
730 fragmentation += msp->ms_fragmentation; in metaslab_group_fragmentation()
811 metaslab_t *msp = arg; in metaslab_rt_create() local
813 ASSERT3P(rt->rt_arg, ==, msp); in metaslab_rt_create()
814 ASSERT(msp->ms_tree == NULL); in metaslab_rt_create()
816 avl_create(&msp->ms_size_tree, metaslab_rangesize_compare, in metaslab_rt_create()
826 metaslab_t *msp = arg; in metaslab_rt_destroy() local
828 ASSERT3P(rt->rt_arg, ==, msp); in metaslab_rt_destroy()
829 ASSERT3P(msp->ms_tree, ==, rt); in metaslab_rt_destroy()
830 ASSERT0(avl_numnodes(&msp->ms_size_tree)); in metaslab_rt_destroy()
832 avl_destroy(&msp->ms_size_tree); in metaslab_rt_destroy()
838 metaslab_t *msp = arg; in metaslab_rt_add() local
840 ASSERT3P(rt->rt_arg, ==, msp); in metaslab_rt_add()
841 ASSERT3P(msp->ms_tree, ==, rt); in metaslab_rt_add()
842 VERIFY(!msp->ms_condensing); in metaslab_rt_add()
843 avl_add(&msp->ms_size_tree, rs); in metaslab_rt_add()
849 metaslab_t *msp = arg; in metaslab_rt_remove() local
851 ASSERT3P(rt->rt_arg, ==, msp); in metaslab_rt_remove()
852 ASSERT3P(msp->ms_tree, ==, rt); in metaslab_rt_remove()
853 VERIFY(!msp->ms_condensing); in metaslab_rt_remove()
854 avl_remove(&msp->ms_size_tree, rs); in metaslab_rt_remove()
860 metaslab_t *msp = arg; in metaslab_rt_vacate() local
862 ASSERT3P(rt->rt_arg, ==, msp); in metaslab_rt_vacate()
863 ASSERT3P(msp->ms_tree, ==, rt); in metaslab_rt_vacate()
871 avl_create(&msp->ms_size_tree, metaslab_rangesize_compare, in metaslab_rt_vacate()
893 metaslab_block_maxsize(metaslab_t *msp) in metaslab_block_maxsize() argument
895 avl_tree_t *t = &msp->ms_size_tree; in metaslab_block_maxsize()
905 metaslab_block_alloc(metaslab_t *msp, uint64_t size) in metaslab_block_alloc() argument
908 range_tree_t *rt = msp->ms_tree; in metaslab_block_alloc()
910 VERIFY(!msp->ms_condensing); in metaslab_block_alloc()
912 start = msp->ms_ops->msop_alloc(msp, size); in metaslab_block_alloc()
914 vdev_t *vd = msp->ms_group->mg_vd; in metaslab_block_alloc()
918 VERIFY3U(range_tree_space(rt) - size, <=, msp->ms_size); in metaslab_block_alloc()
976 metaslab_ff_alloc(metaslab_t *msp, uint64_t size) in metaslab_ff_alloc() argument
986 uint64_t *cursor = &msp->ms_lbas[highbit64(align) - 1]; in metaslab_ff_alloc()
987 avl_tree_t *t = &msp->ms_tree->rt_root; in metaslab_ff_alloc()
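metaslab_ff_alloc() is first-fit with a per-alignment cursor (ms_lbas[highbit64(align) - 1]) into the offset-ordered range tree: it scans forward from the cursor and wraps around once before failing. A standalone sketch of a cursor-based first-fit scan over a sorted free-segment array (illustrative; the real code walks the range tree's AVL nodes):

#include <stdint.h>
#include <stdio.h>

typedef struct seg { uint64_t start, end; } seg_t;

/*
 * First-fit: starting at *cursor, find the first free segment that can
 * hold 'size', advance the cursor past the allocation, and return the
 * offset; wrap around once before giving up (-1ULL).
 */
static uint64_t
ff_alloc(const seg_t *segs, int nsegs, uint64_t *cursor, uint64_t size)
{
    for (int pass = 0; pass < 2; pass++) {
        for (int i = 0; i < nsegs; i++) {
            uint64_t off = segs[i].start;

            if (off < *cursor)
                off = *cursor;
            if (off + size <= segs[i].end) {
                *cursor = off + size;
                return (off);
            }
        }
        *cursor = 0;    /* wrap and retry from the beginning */
    }
    return (-1ULL);
}

int
main(void)
{
    seg_t free_segs[] = { { 0, 4096 }, { 8192, 12288 } };
    uint64_t cursor = 0;

    printf("%llu\n", (unsigned long long)ff_alloc(free_segs, 2, &cursor, 4096));
    printf("%llu\n", (unsigned long long)ff_alloc(free_segs, 2, &cursor, 4096));
    return (0);
}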
1005 metaslab_df_alloc(metaslab_t *msp, uint64_t size) in metaslab_df_alloc() argument
1015 uint64_t *cursor = &msp->ms_lbas[highbit64(align) - 1]; in metaslab_df_alloc()
1016 range_tree_t *rt = msp->ms_tree; in metaslab_df_alloc()
1018 uint64_t max_size = metaslab_block_maxsize(msp); in metaslab_df_alloc()
1019 int free_pct = range_tree_space(rt) * 100 / msp->ms_size; in metaslab_df_alloc()
1021 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_df_alloc()
1022 ASSERT3U(avl_numnodes(t), ==, avl_numnodes(&msp->ms_size_tree)); in metaslab_df_alloc()
1033 t = &msp->ms_size_tree; in metaslab_df_alloc()
1054 metaslab_cf_alloc(metaslab_t *msp, uint64_t size) in metaslab_cf_alloc() argument
1056 range_tree_t *rt = msp->ms_tree; in metaslab_cf_alloc()
1057 avl_tree_t *t = &msp->ms_size_tree; in metaslab_cf_alloc()
1058 uint64_t *cursor = &msp->ms_lbas[0]; in metaslab_cf_alloc()
1059 uint64_t *cursor_end = &msp->ms_lbas[1]; in metaslab_cf_alloc()
1062 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_cf_alloc()
1070 rs = avl_last(&msp->ms_size_tree); in metaslab_cf_alloc()
1104 metaslab_ndf_alloc(metaslab_t *msp, uint64_t size) in metaslab_ndf_alloc() argument
1106 avl_tree_t *t = &msp->ms_tree->rt_root; in metaslab_ndf_alloc()
1110 uint64_t *cursor = &msp->ms_lbas[hbit - 1]; in metaslab_ndf_alloc()
1111 uint64_t max_size = metaslab_block_maxsize(msp); in metaslab_ndf_alloc()
1113 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_ndf_alloc()
1114 ASSERT3U(avl_numnodes(t), ==, avl_numnodes(&msp->ms_size_tree)); in metaslab_ndf_alloc()
1124 t = &msp->ms_size_tree; in metaslab_ndf_alloc()
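metaslab_df_alloc(), metaslab_cf_alloc() and metaslab_ndf_alloc() all switch from the offset-ordered tree to the size-ordered ms_size_tree once free space gets low or fragmented, allocating from the largest (or smallest sufficient) segments rather than the first fit. A standalone best-fit sketch over a size-sorted free list (the strategy shown is a generic best-fit; thresholds and layout are illustrative, not ZFS's tunables):

#include <stdint.h>
#include <stdio.h>

typedef struct seg { uint64_t start, end; } seg_t;

/* Best-fit from a size-sorted free list: the smallest segment that
 * still holds 'size' (illustrative, not the ZFS picker). */
static uint64_t
bestfit_alloc(const seg_t *by_size, int nsegs, uint64_t size)
{
    for (int i = 0; i < nsegs; i++) {   /* sorted by ascending length */
        if (by_size[i].end - by_size[i].start >= size)
            return (by_size[i].start);
    }
    return (-1ULL);
}

int
main(void)
{
    /* Free segments sorted by length: 2K, 8K, 64K. */
    seg_t by_size[] = { { 100, 2148 }, { 4096, 12288 }, { 65536, 131072 } };

    printf("%llu\n", (unsigned long long)bestfit_alloc(by_size, 3, 4096));
    return (0);
}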
1158 metaslab_load_wait(metaslab_t *msp) in metaslab_load_wait() argument
1160 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_load_wait()
1162 while (msp->ms_loading) { in metaslab_load_wait()
1163 ASSERT(!msp->ms_loaded); in metaslab_load_wait()
1164 cv_wait(&msp->ms_load_cv, &msp->ms_lock); in metaslab_load_wait()
1169 metaslab_load(metaslab_t *msp) in metaslab_load() argument
1173 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_load()
1174 ASSERT(!msp->ms_loaded); in metaslab_load()
1175 ASSERT(!msp->ms_loading); in metaslab_load()
1177 msp->ms_loading = B_TRUE; in metaslab_load()
1184 if (msp->ms_sm != NULL) in metaslab_load()
1185 error = space_map_load(msp->ms_sm, msp->ms_tree, SM_FREE); in metaslab_load()
1187 range_tree_add(msp->ms_tree, msp->ms_start, msp->ms_size); in metaslab_load()
1189 msp->ms_loaded = (error == 0); in metaslab_load()
1190 msp->ms_loading = B_FALSE; in metaslab_load()
1192 if (msp->ms_loaded) { in metaslab_load()
1194 range_tree_walk(msp->ms_defertree[t], in metaslab_load()
1195 range_tree_remove, msp->ms_tree); in metaslab_load()
1198 cv_broadcast(&msp->ms_load_cv); in metaslab_load()
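metaslab_load_wait() and metaslab_load() coordinate through the ms_loading flag and the ms_load_cv condition variable under ms_lock: waiters block until the loader clears the flag and broadcasts, and a successful load subtracts the defer trees from the freshly populated ms_tree. A standalone sketch of that handshake using pthreads (illustrative; the space-map read and defer-tree subtraction are elided):

#include <pthread.h>
#include <stdio.h>

/* Minimal stand-in for the ms_loading/ms_loaded/ms_load_cv handshake. */
typedef struct {
    pthread_mutex_t lock;
    pthread_cond_t  load_cv;
    int             loading;
    int             loaded;
} slab_t;

/* Wait until any in-flight load finishes (caller holds 'lock'). */
static void
slab_load_wait(slab_t *s)
{
    while (s->loading)
        pthread_cond_wait(&s->load_cv, &s->lock);
}

/* Load the slab's free-space state (caller holds 'lock'). */
static int
slab_load(slab_t *s)
{
    s->loading = 1;
    /* ... read the on-disk state here ... */
    s->loaded = 1;
    s->loading = 0;
    pthread_cond_broadcast(&s->load_cv);
    return (0);
}

int
main(void)
{
    slab_t s = { PTHREAD_MUTEX_INITIALIZER, PTHREAD_COND_INITIALIZER, 0, 0 };

    pthread_mutex_lock(&s.lock);
    slab_load_wait(&s);
    if (!s.loaded)
        (void) slab_load(&s);
    pthread_mutex_unlock(&s.lock);
    printf("loaded=%d\n", s.loaded);
    return (0);
}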
1203 metaslab_unload(metaslab_t *msp) in metaslab_unload() argument
1205 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_unload()
1206 range_tree_vacate(msp->ms_tree, NULL, NULL); in metaslab_unload()
1207 msp->ms_loaded = B_FALSE; in metaslab_unload()
1208 msp->ms_weight &= ~METASLAB_ACTIVE_MASK; in metaslab_unload()
1213 metaslab_t **msp) in metaslab_init() argument
1281 *msp = ms; in metaslab_init()
1287 metaslab_fini(metaslab_t *msp) in metaslab_fini() argument
1289 metaslab_group_t *mg = msp->ms_group; in metaslab_fini()
1291 metaslab_group_remove(mg, msp); in metaslab_fini()
1293 mutex_enter(&msp->ms_lock); in metaslab_fini()
1295 VERIFY(msp->ms_group == NULL); in metaslab_fini()
1296 vdev_space_update(mg->mg_vd, -space_map_allocated(msp->ms_sm), in metaslab_fini()
1297 0, -msp->ms_size); in metaslab_fini()
1298 space_map_close(msp->ms_sm); in metaslab_fini()
1300 metaslab_unload(msp); in metaslab_fini()
1301 range_tree_destroy(msp->ms_tree); in metaslab_fini()
1304 range_tree_destroy(msp->ms_alloctree[t]); in metaslab_fini()
1305 range_tree_destroy(msp->ms_freetree[t]); in metaslab_fini()
1309 range_tree_destroy(msp->ms_defertree[t]); in metaslab_fini()
1312 ASSERT0(msp->ms_deferspace); in metaslab_fini()
1314 mutex_exit(&msp->ms_lock); in metaslab_fini()
1315 cv_destroy(&msp->ms_load_cv); in metaslab_fini()
1316 mutex_destroy(&msp->ms_lock); in metaslab_fini()
1318 kmem_free(msp, sizeof (metaslab_t)); in metaslab_fini()
1368 metaslab_fragmentation(metaslab_t *msp) in metaslab_fragmentation() argument
1370 spa_t *spa = msp->ms_group->mg_vd->vdev_spa; in metaslab_fragmentation()
1383 if (msp->ms_sm == NULL) in metaslab_fragmentation()
1390 if (msp->ms_sm->sm_dbuf->db_size != sizeof (space_map_phys_t)) { in metaslab_fragmentation()
1392 vdev_t *vd = msp->ms_group->mg_vd; in metaslab_fragmentation()
1395 msp->ms_condense_wanted = B_TRUE; in metaslab_fragmentation()
1396 vdev_dirty(vd, VDD_METASLAB, msp, txg + 1); in metaslab_fragmentation()
1398 "msp %p, vd %p", txg, msp, vd); in metaslab_fragmentation()
1405 uint8_t shift = msp->ms_sm->sm_shift; in metaslab_fragmentation()
1409 if (msp->ms_sm->sm_phys->smp_histogram[i] == 0) in metaslab_fragmentation()
1412 space = msp->ms_sm->sm_phys->smp_histogram[i] << (i + shift); in metaslab_fragmentation()
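metaslab_fragmentation() walks the space map's power-of-two histogram, weights the free space in each bucket by a per-bucket fragmentation factor, and divides by total free space; a metaslab without a histogram reports ZFS_FRAG_INVALID and is dirtied (ms_condense_wanted) so the histogram gets rebuilt. A standalone sketch of the weighted average (the factor table below is made up for illustration, not ZFS's zfs_frag_table):

#include <stdint.h>
#include <stdio.h>

#define BUCKETS 8

/*
 * Weighted fragmentation: space sitting in small buckets counts as
 * highly fragmented, space in large buckets as barely fragmented.
 */
static uint64_t
fragmentation_pct(const uint64_t *hist, unsigned shift)
{
    static const int factor[BUCKETS] = { 100, 90, 75, 50, 25, 10, 5, 0 };
    uint64_t space = 0, weighted = 0;

    for (int i = 0; i < BUCKETS; i++) {
        uint64_t bucket_space = hist[i] << (i + shift);

        space += bucket_space;
        weighted += bucket_space * factor[i];
    }
    return (space == 0 ? 0 : weighted / space);
}

int
main(void)
{
    /* Segment counts per power-of-two bucket, smallest bucket first. */
    uint64_t hist[BUCKETS] = { 16, 8, 4, 2, 1, 0, 0, 0 };

    printf("fragmentation: %llu%%\n",
        (unsigned long long)fragmentation_pct(hist, 9));
    return (0);
}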
1431 metaslab_weight(metaslab_t *msp) in metaslab_weight() argument
1433 metaslab_group_t *mg = msp->ms_group; in metaslab_weight()
1437 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_weight()
1444 ASSERT0(space_map_allocated(msp->ms_sm)); in metaslab_weight()
1452 space = msp->ms_size - space_map_allocated(msp->ms_sm); in metaslab_weight()
1454 msp->ms_fragmentation = metaslab_fragmentation(msp); in metaslab_weight()
1456 msp->ms_fragmentation != ZFS_FRAG_INVALID) { in metaslab_weight()
1464 space = (space * (100 - (msp->ms_fragmentation - 1))) / 100; in metaslab_weight()
1488 weight = 2 * weight - (msp->ms_id * weight) / vd->vdev_ms_count; in metaslab_weight()
1498 if (msp->ms_loaded && msp->ms_fragmentation != ZFS_FRAG_INVALID && in metaslab_weight()
1499 msp->ms_fragmentation <= zfs_metaslab_fragmentation_threshold) { in metaslab_weight()
1500 weight |= (msp->ms_weight & METASLAB_ACTIVE_MASK); in metaslab_weight()
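metaslab_weight() starts from the free space, scales it down by the fragmentation percentage when that metric is valid, biases it toward low-numbered (outer, faster) metaslabs with the formula seen above at line 1488, and preserves the activation bits for loaded, lightly fragmented metaslabs. A small numeric sketch of just the LBA bias:

#include <stdint.h>
#include <stdio.h>

/* Bias free-space weight toward low-id metaslabs: metaslab 0 gets
 * roughly 2x its free space, the last metaslab roughly 1x. */
static uint64_t
lba_weight(uint64_t space, uint64_t ms_id, uint64_t ms_count)
{
    return (2 * space - (ms_id * space) / ms_count);
}

int
main(void)
{
    uint64_t space = 1ULL << 30;    /* 1 GiB free */

    for (uint64_t id = 0; id < 4; id++)
        printf("ms %llu -> weight %llu\n", (unsigned long long)id,
            (unsigned long long)lba_weight(space, id, 4));
    return (0);
}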
1507 metaslab_activate(metaslab_t *msp, uint64_t activation_weight) in metaslab_activate() argument
1509 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_activate()
1511 if ((msp->ms_weight & METASLAB_ACTIVE_MASK) == 0) { in metaslab_activate()
1512 metaslab_load_wait(msp); in metaslab_activate()
1513 if (!msp->ms_loaded) { in metaslab_activate()
1514 int error = metaslab_load(msp); in metaslab_activate()
1516 metaslab_group_sort(msp->ms_group, msp, 0); in metaslab_activate()
1521 metaslab_group_sort(msp->ms_group, msp, in metaslab_activate()
1522 msp->ms_weight | activation_weight); in metaslab_activate()
1524 ASSERT(msp->ms_loaded); in metaslab_activate()
1525 ASSERT(msp->ms_weight & METASLAB_ACTIVE_MASK); in metaslab_activate()
1531 metaslab_passivate(metaslab_t *msp, uint64_t size) in metaslab_passivate() argument
1538 ASSERT(size >= SPA_MINBLOCKSIZE || range_tree_space(msp->ms_tree) == 0); in metaslab_passivate()
1539 metaslab_group_sort(msp->ms_group, msp, MIN(msp->ms_weight, size)); in metaslab_passivate()
1540 ASSERT((msp->ms_weight & METASLAB_ACTIVE_MASK) == 0); in metaslab_passivate()
1546 metaslab_t *msp = arg; in metaslab_preload() local
1547 spa_t *spa = msp->ms_group->mg_vd->vdev_spa; in metaslab_preload()
1549 ASSERT(!MUTEX_HELD(&msp->ms_group->mg_lock)); in metaslab_preload()
1551 mutex_enter(&msp->ms_lock); in metaslab_preload()
1552 metaslab_load_wait(msp); in metaslab_preload()
1553 if (!msp->ms_loaded) in metaslab_preload()
1554 (void) metaslab_load(msp); in metaslab_preload()
1559 msp->ms_access_txg = spa_syncing_txg(spa) + metaslab_unload_delay + 1; in metaslab_preload()
1560 mutex_exit(&msp->ms_lock); in metaslab_preload()
1567 metaslab_t *msp; in metaslab_group_preload() local
1580 msp = avl_first(t); in metaslab_group_preload()
1581 while (msp != NULL) { in metaslab_group_preload()
1582 metaslab_t *msp_next = AVL_NEXT(t, msp); in metaslab_group_preload()
1590 if (++m > metaslab_preload_limit && !msp->ms_condense_wanted) { in metaslab_group_preload()
1591 msp = msp_next; in metaslab_group_preload()
1609 msp, TQ_SLEEP) != NULL); in metaslab_group_preload()
1611 msp = msp_next; in metaslab_group_preload()
1648 metaslab_should_condense(metaslab_t *msp) in metaslab_should_condense() argument
1650 space_map_t *sm = msp->ms_sm; in metaslab_should_condense()
1654 uint64_t vdev_blocksize = 1 << msp->ms_group->mg_vd->vdev_ashift; in metaslab_should_condense()
1656 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_should_condense()
1657 ASSERT(msp->ms_loaded); in metaslab_should_condense()
1665 rs = avl_last(&msp->ms_size_tree); in metaslab_should_condense()
1666 if (rs == NULL || msp->ms_condense_wanted) in metaslab_should_condense()
1679 optimal_size = sizeof (uint64_t) * avl_numnodes(&msp->ms_tree->rt_root); in metaslab_should_condense()
1680 object_size = space_map_length(msp->ms_sm); in metaslab_should_condense()
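metaslab_should_condense() asks whether rewriting the loaded ms_tree as a fresh space map would be much smaller than the on-disk map that has accumulated alloc/free churn, estimating the optimal size as one uint64_t entry per range-tree node; a pending ms_condense_wanted request forces the answer to yes. A sketch of that size comparison (the percentage threshold is illustrative; ZFS uses a tunable):

#include <stdint.h>
#include <stdio.h>

/*
 * Condense when the on-disk space map is more than 'pct' percent of the
 * minimal representation (one uint64_t entry per free segment).
 */
static int
should_condense(uint64_t nsegments, uint64_t sm_length, int pct)
{
    uint64_t optimal = nsegments * sizeof (uint64_t);

    return (sm_length > optimal * pct / 100);
}

int
main(void)
{
    /* 1,000 free segments but a 1 MiB space map full of churn. */
    printf("condense? %d\n", should_condense(1000, 1 << 20, 200));
    return (0);
}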
1696 metaslab_condense(metaslab_t *msp, uint64_t txg, dmu_tx_t *tx) in metaslab_condense() argument
1698 spa_t *spa = msp->ms_group->mg_vd->vdev_spa; in metaslab_condense()
1699 range_tree_t *freetree = msp->ms_freetree[txg & TXG_MASK]; in metaslab_condense()
1701 space_map_t *sm = msp->ms_sm; in metaslab_condense()
1703 ASSERT(MUTEX_HELD(&msp->ms_lock)); in metaslab_condense()
1705 ASSERT(msp->ms_loaded); in metaslab_condense()
1710 msp->ms_id, msp, space_map_length(msp->ms_sm), in metaslab_condense()
1711 avl_numnodes(&msp->ms_tree->rt_root), in metaslab_condense()
1712 msp->ms_condense_wanted ? "TRUE" : "FALSE"); in metaslab_condense()
1714 msp->ms_condense_wanted = B_FALSE; in metaslab_condense()
1723 condense_tree = range_tree_create(NULL, NULL, &msp->ms_lock); in metaslab_condense()
1724 range_tree_add(condense_tree, msp->ms_start, msp->ms_size); in metaslab_condense()
1734 range_tree_walk(msp->ms_defertree[t], in metaslab_condense()
1739 range_tree_walk(msp->ms_alloctree[(txg + t) & TXG_MASK], in metaslab_condense()
1752 msp->ms_condensing = B_TRUE; in metaslab_condense()
1754 mutex_exit(&msp->ms_lock); in metaslab_condense()
1756 mutex_enter(&msp->ms_lock); in metaslab_condense()
1772 space_map_write(sm, msp->ms_tree, SM_FREE, tx); in metaslab_condense()
1773 msp->ms_condensing = B_FALSE; in metaslab_condense()
1780 metaslab_sync(metaslab_t *msp, uint64_t txg) in metaslab_sync() argument
1782 metaslab_group_t *mg = msp->ms_group; in metaslab_sync()
1786 range_tree_t *alloctree = msp->ms_alloctree[txg & TXG_MASK]; in metaslab_sync()
1787 range_tree_t **freetree = &msp->ms_freetree[txg & TXG_MASK]; in metaslab_sync()
1789 &msp->ms_freetree[TXG_CLEAN(txg) & TXG_MASK]; in metaslab_sync()
1791 uint64_t object = space_map_object(msp->ms_sm); in metaslab_sync()
1814 !msp->ms_condense_wanted) in metaslab_sync()
1829 if (msp->ms_sm == NULL) { in metaslab_sync()
1835 VERIFY0(space_map_open(&msp->ms_sm, mos, new_object, in metaslab_sync()
1836 msp->ms_start, msp->ms_size, vd->vdev_ashift, in metaslab_sync()
1837 &msp->ms_lock)); in metaslab_sync()
1838 ASSERT(msp->ms_sm != NULL); in metaslab_sync()
1841 mutex_enter(&msp->ms_lock); in metaslab_sync()
1850 metaslab_group_histogram_remove(mg, msp); in metaslab_sync()
1852 if (msp->ms_loaded && spa_sync_pass(spa) == 1 && in metaslab_sync()
1853 metaslab_should_condense(msp)) { in metaslab_sync()
1854 metaslab_condense(msp, txg, tx); in metaslab_sync()
1856 space_map_write(msp->ms_sm, alloctree, SM_ALLOC, tx); in metaslab_sync()
1857 space_map_write(msp->ms_sm, *freetree, SM_FREE, tx); in metaslab_sync()
1860 if (msp->ms_loaded) { in metaslab_sync()
1867 space_map_histogram_clear(msp->ms_sm); in metaslab_sync()
1868 space_map_histogram_add(msp->ms_sm, msp->ms_tree, tx); in metaslab_sync()
1877 space_map_histogram_add(msp->ms_sm, *freetree, tx); in metaslab_sync()
1879 metaslab_group_histogram_add(mg, msp); in metaslab_sync()
1896 ASSERT0(range_tree_space(msp->ms_alloctree[txg & TXG_MASK])); in metaslab_sync()
1897 ASSERT0(range_tree_space(msp->ms_freetree[txg & TXG_MASK])); in metaslab_sync()
1899 mutex_exit(&msp->ms_lock); in metaslab_sync()
1901 if (object != space_map_object(msp->ms_sm)) { in metaslab_sync()
1902 object = space_map_object(msp->ms_sm); in metaslab_sync()
1904 msp->ms_id, sizeof (uint64_t), &object, tx); in metaslab_sync()
1914 metaslab_sync_done(metaslab_t *msp, uint64_t txg) in metaslab_sync_done() argument
1916 metaslab_group_t *mg = msp->ms_group; in metaslab_sync_done()
1924 mutex_enter(&msp->ms_lock); in metaslab_sync_done()
1931 if (msp->ms_freetree[TXG_CLEAN(txg) & TXG_MASK] == NULL) { in metaslab_sync_done()
1933 ASSERT(msp->ms_alloctree[t] == NULL); in metaslab_sync_done()
1934 ASSERT(msp->ms_freetree[t] == NULL); in metaslab_sync_done()
1936 msp->ms_alloctree[t] = range_tree_create(NULL, msp, in metaslab_sync_done()
1937 &msp->ms_lock); in metaslab_sync_done()
1938 msp->ms_freetree[t] = range_tree_create(NULL, msp, in metaslab_sync_done()
1939 &msp->ms_lock); in metaslab_sync_done()
1943 ASSERT(msp->ms_defertree[t] == NULL); in metaslab_sync_done()
1945 msp->ms_defertree[t] = range_tree_create(NULL, msp, in metaslab_sync_done()
1946 &msp->ms_lock); in metaslab_sync_done()
1949 vdev_space_update(vd, 0, 0, msp->ms_size); in metaslab_sync_done()
1952 freed_tree = &msp->ms_freetree[TXG_CLEAN(txg) & TXG_MASK]; in metaslab_sync_done()
1953 defer_tree = &msp->ms_defertree[txg % TXG_DEFER_SIZE]; in metaslab_sync_done()
1955 alloc_delta = space_map_alloc_delta(msp->ms_sm); in metaslab_sync_done()
1961 ASSERT0(range_tree_space(msp->ms_alloctree[txg & TXG_MASK])); in metaslab_sync_done()
1962 ASSERT0(range_tree_space(msp->ms_freetree[txg & TXG_MASK])); in metaslab_sync_done()
1968 metaslab_load_wait(msp); in metaslab_sync_done()
1977 msp->ms_loaded ? range_tree_add : NULL, msp->ms_tree); in metaslab_sync_done()
1980 space_map_update(msp->ms_sm); in metaslab_sync_done()
1982 msp->ms_deferspace += defer_delta; in metaslab_sync_done()
1983 ASSERT3S(msp->ms_deferspace, >=, 0); in metaslab_sync_done()
1984 ASSERT3S(msp->ms_deferspace, <=, msp->ms_size); in metaslab_sync_done()
1985 if (msp->ms_deferspace != 0) { in metaslab_sync_done()
1990 vdev_dirty(vd, VDD_METASLAB, msp, txg + 1); in metaslab_sync_done()
1993 if (msp->ms_loaded && msp->ms_access_txg < txg) { in metaslab_sync_done()
1996 msp->ms_alloctree[(txg + t) & TXG_MASK])); in metaslab_sync_done()
2000 metaslab_unload(msp); in metaslab_sync_done()
2003 metaslab_group_sort(mg, msp, metaslab_weight(msp)); in metaslab_sync_done()
2004 mutex_exit(&msp->ms_lock); in metaslab_sync_done()
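metaslab_sync_done() rotates per-txg state: ms_alloctree/ms_freetree are indexed by txg & TXG_MASK across the ring of in-flight txgs, the tree freed in the just-synced txg by TXG_CLEAN(txg) & TXG_MASK, and ms_defertree by txg % TXG_DEFER_SIZE, so freed space only returns to the allocatable ms_tree after the deferral window. A small sketch of that ring arithmetic (constants per the usual ZFS definitions; verify against your tree):

#include <stdint.h>
#include <stdio.h>

#define TXG_SIZE        4               /* in-flight txg ring */
#define TXG_MASK        (TXG_SIZE - 1)
#define TXG_DEFER_SIZE  2               /* free deferral window */
#define TXG_CLEAN(txg)  ((txg) - 1)     /* txg that just finished syncing */

int
main(void)
{
    for (uint64_t txg = 10; txg < 14; txg++) {
        printf("txg %llu: alloc/free slot %llu, freed slot %llu, "
            "defer slot %llu\n",
            (unsigned long long)txg,
            (unsigned long long)(txg & TXG_MASK),
            (unsigned long long)(TXG_CLEAN(txg) & TXG_MASK),
            (unsigned long long)(txg % TXG_DEFER_SIZE));
    }
    return (0);
}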
2020 metaslab_distance(metaslab_t *msp, dva_t *dva) in metaslab_distance() argument
2022 uint64_t ms_shift = msp->ms_group->mg_vd->vdev_ms_shift; in metaslab_distance()
2024 uint64_t start = msp->ms_id; in metaslab_distance()
2026 if (msp->ms_group->mg_vd->vdev_id != DVA_GET_VDEV(dva)) in metaslab_distance()
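metaslab_distance() reports how far a previously chosen DVA is from this metaslab, in bytes, by comparing metaslab indices on the same vdev; a DVA on a different vdev counts as maximally distant. A standalone sketch of that computation (parameter names are illustrative):

#include <stdint.h>
#include <stdio.h>

/* Distance, in bytes, between a metaslab and a previously allocated
 * offset on the same vdev; other vdevs count as maximally far away. */
static uint64_t
slab_distance(uint64_t ms_id, unsigned ms_shift, uint64_t dva_vdev,
    uint64_t dva_offset, uint64_t my_vdev)
{
    uint64_t other = dva_offset >> ms_shift;

    if (dva_vdev != my_vdev)
        return (1ULL << 63);
    if (other < ms_id)
        return ((ms_id - other) << ms_shift);
    return ((other - ms_id) << ms_shift);
}

int
main(void)
{
    /* 512 MiB metaslabs (shift 29): slabs 3 and 7 are four slabs apart. */
    printf("%llu\n", (unsigned long long)
        slab_distance(3, 29, 0, 7ULL << 29, 0));
    return (0);
}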
2041 metaslab_t *msp = NULL; in metaslab_group_alloc() local
2060 for (msp = avl_first(t); msp; msp = AVL_NEXT(t, msp)) { in metaslab_group_alloc()
2061 if (msp->ms_weight < asize) { in metaslab_group_alloc()
2067 mg, msp, psize, asize, msp->ms_weight); in metaslab_group_alloc()
2075 if (msp->ms_condensing) in metaslab_group_alloc()
2078 was_active = msp->ms_weight & METASLAB_ACTIVE_MASK; in metaslab_group_alloc()
2083 (space_map_allocated(msp->ms_sm) != 0 ? 0 : in metaslab_group_alloc()
2087 if (metaslab_distance(msp, &dva[i]) < in metaslab_group_alloc()
2094 if (msp == NULL) in metaslab_group_alloc()
2097 mutex_enter(&msp->ms_lock); in metaslab_group_alloc()
2105 if (msp->ms_weight < asize || (was_active && in metaslab_group_alloc()
2106 !(msp->ms_weight & METASLAB_ACTIVE_MASK) && in metaslab_group_alloc()
2108 mutex_exit(&msp->ms_lock); in metaslab_group_alloc()
2112 if ((msp->ms_weight & METASLAB_WEIGHT_SECONDARY) && in metaslab_group_alloc()
2114 metaslab_passivate(msp, in metaslab_group_alloc()
2115 msp->ms_weight & ~METASLAB_ACTIVE_MASK); in metaslab_group_alloc()
2116 mutex_exit(&msp->ms_lock); in metaslab_group_alloc()
2120 if (metaslab_activate(msp, activation_weight) != 0) { in metaslab_group_alloc()
2121 mutex_exit(&msp->ms_lock); in metaslab_group_alloc()
2130 if (msp->ms_condensing) { in metaslab_group_alloc()
2131 mutex_exit(&msp->ms_lock); in metaslab_group_alloc()
2135 if ((offset = metaslab_block_alloc(msp, asize)) != -1ULL) in metaslab_group_alloc()
2138 metaslab_passivate(msp, metaslab_block_maxsize(msp)); in metaslab_group_alloc()
2139 mutex_exit(&msp->ms_lock); in metaslab_group_alloc()
2142 if (range_tree_space(msp->ms_alloctree[txg & TXG_MASK]) == 0) in metaslab_group_alloc()
2143 vdev_dirty(mg->mg_vd, VDD_METASLAB, msp, txg); in metaslab_group_alloc()
2145 range_tree_add(msp->ms_alloctree[txg & TXG_MASK], offset, asize); in metaslab_group_alloc()
2146 msp->ms_access_txg = txg + metaslab_unload_delay; in metaslab_group_alloc()
2148 mutex_exit(&msp->ms_lock); in metaslab_group_alloc()
2370 metaslab_t *msp; in metaslab_free_dva() local
2385 msp = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in metaslab_free_dva()
2390 mutex_enter(&msp->ms_lock); in metaslab_free_dva()
2393 range_tree_remove(msp->ms_alloctree[txg & TXG_MASK], in metaslab_free_dva()
2396 VERIFY(!msp->ms_condensing); in metaslab_free_dva()
2397 VERIFY3U(offset, >=, msp->ms_start); in metaslab_free_dva()
2398 VERIFY3U(offset + size, <=, msp->ms_start + msp->ms_size); in metaslab_free_dva()
2399 VERIFY3U(range_tree_space(msp->ms_tree) + size, <=, in metaslab_free_dva()
2400 msp->ms_size); in metaslab_free_dva()
2403 range_tree_add(msp->ms_tree, offset, size); in metaslab_free_dva()
2405 if (range_tree_space(msp->ms_freetree[txg & TXG_MASK]) == 0) in metaslab_free_dva()
2406 vdev_dirty(vd, VDD_METASLAB, msp, txg); in metaslab_free_dva()
2407 range_tree_add(msp->ms_freetree[txg & TXG_MASK], in metaslab_free_dva()
2411 mutex_exit(&msp->ms_lock); in metaslab_free_dva()
2427 metaslab_t *msp; in metaslab_claim_dva() local
2436 msp = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in metaslab_claim_dva()
2441 mutex_enter(&msp->ms_lock); in metaslab_claim_dva()
2443 if ((txg != 0 && spa_writeable(spa)) || !msp->ms_loaded) in metaslab_claim_dva()
2444 error = metaslab_activate(msp, METASLAB_WEIGHT_SECONDARY); in metaslab_claim_dva()
2446 if (error == 0 && !range_tree_contains(msp->ms_tree, offset, size)) in metaslab_claim_dva()
2450 mutex_exit(&msp->ms_lock); in metaslab_claim_dva()
2454 VERIFY(!msp->ms_condensing); in metaslab_claim_dva()
2457 VERIFY3U(range_tree_space(msp->ms_tree) - size, <=, msp->ms_size); in metaslab_claim_dva()
2458 range_tree_remove(msp->ms_tree, offset, size); in metaslab_claim_dva()
2461 if (range_tree_space(msp->ms_alloctree[txg & TXG_MASK]) == 0) in metaslab_claim_dva()
2462 vdev_dirty(vd, VDD_METASLAB, msp, txg); in metaslab_claim_dva()
2463 range_tree_add(msp->ms_alloctree[txg & TXG_MASK], offset, size); in metaslab_claim_dva()
2466 mutex_exit(&msp->ms_lock); in metaslab_claim_dva()
2575 metaslab_t *msp = vd->vdev_ms[offset >> vd->vdev_ms_shift]; in metaslab_check_free() local
2577 if (msp->ms_loaded) in metaslab_check_free()
2578 range_tree_verify(msp->ms_tree, offset, size); in metaslab_check_free()
2581 range_tree_verify(msp->ms_freetree[j], offset, size); in metaslab_check_free()
2583 range_tree_verify(msp->ms_defertree[j], offset, size); in metaslab_check_free()