Searched refs:ms_allocating (Results 1 – 4 of 4) sorted by relevance
Result 1 (struct member declaration):
  373   zfs_range_tree_t *ms_allocating[TXG_SIZE];  member
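The first result is the array declaration itself, presumably in the metaslab header (metaslab_impl.h). For orientation, here is a minimal sketch of that member in context; everything other than the declared line is illustrative and not copied from the real header:

/*
 * Sketch only: the real declaration is the hit at line 373 above.
 * ms_allocating holds one range tree per in-flight transaction group.
 * TXG_SIZE is 4 and TXG_MASK is TXG_SIZE - 1, so the tree for a given
 * txg is always ms_allocating[txg & TXG_MASK], and the slots are
 * reused round-robin as txgs complete.
 */
struct metaslab {
	/* ... many other fields elided ... */
	zfs_range_tree_t	*ms_allocating[TXG_SIZE];  /* segments allocated per in-flight txg */
	/* ... many other fields elided ... */
};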
Result 2:
  2085  zfs_range_tree_space(msp->ms_allocating[(txg + t) &  in metaslab_verify_space()
  2845  ms->ms_allocating[t] = zfs_range_tree_create(NULL, type,  in metaslab_init()
  2968  zfs_range_tree_destroy(msp->ms_allocating[t]);  in metaslab_fini()
  3847  zfs_range_tree_walk(msp->ms_allocating[(txg + t) & TXG_MASK],  in metaslab_condense()
  4140  zfs_range_tree_t *alloctree = msp->ms_allocating[txg & TXG_MASK];  in metaslab_sync()
  4391  ASSERT0(zfs_range_tree_space(msp->ms_allocating[txg & TXG_MASK]));  in metaslab_sync()
  4392  ASSERT0(zfs_range_tree_space(msp->ms_allocating[TXG_CLEAN(txg)  in metaslab_sync()
  4420  msp->ms_allocating[(txg + t) & TXG_MASK]));  in metaslab_evict()
  4556  ASSERT0(zfs_range_tree_space(msp->ms_allocating[txg & TXG_MASK]));  in metaslab_sync_done()
  4747  if (zfs_range_tree_is_empty(msp->ms_allocating[txg & TXG_MASK]))  in metaslab_block_alloc()
  [all …]
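The Result 2 hits look like the core metaslab code (metaslab.c): the trees are created in metaslab_init(), destroyed in metaslab_fini(), and in between each txg records its allocations in its own slot. A hedged sketch of the indexing pattern follows, using only calls that appear in the hits and the usual metaslab_t type; example_allocating_space() is an illustrative name, not a real ZFS function:

/*
 * Illustrative helper: sum the space still recorded as allocating
 * across every in-flight txg slot.  The current txg's tree lives at
 * (txg & TXG_MASK); stepping the index by t and masking visits the
 * remaining slots, mirroring the loops in metaslab_verify_space()
 * and metaslab_condense() above.
 */
static uint64_t
example_allocating_space(metaslab_t *msp, uint64_t txg)
{
	uint64_t space = 0;

	for (int t = 0; t < TXG_SIZE; t++)
		space += zfs_range_tree_space(
		    msp->ms_allocating[(txg + t) & TXG_MASK]);

	return (space);
}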
Result 3:
  853   if (zfs_range_tree_space(msp->ms_allocating[j])) {  in vdev_rebuild_thread()
  875   msp->ms_allocating[i]));  in vdev_rebuild_thread()
Result 4:
  484   ASSERT0(zfs_range_tree_space(ms->ms_allocating[t]));  in vdev_remove_initiate_sync()
  1650  ASSERT0(zfs_range_tree_space(msp->ms_allocating[i]));  in spa_vdev_remove_thread()
  1922  ASSERT0(zfs_range_tree_space(msp->ms_allocating[i]));  in spa_vdev_remove_cancel_sync()
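Results 3 and 4 appear to be the rebuild and device-removal paths (vdev_rebuild.c and vdev_removal.c); both only care that nothing is still pending in any slot before they proceed with the metaslab. A hedged sketch of that check, condensing the per-slot checks and ASSERT0() hits above into one loop; example_assert_nothing_allocating() is an illustrative name:

/*
 * Illustrative check: assert that every per-txg slot of ms_allocating
 * is empty, i.e. the metaslab has no allocations that have not yet
 * been synced out.  This is the invariant the hits in Results 3 and 4
 * verify slot by slot.
 */
static void
example_assert_nothing_allocating(metaslab_t *msp)
{
	for (int t = 0; t < TXG_SIZE; t++)
		ASSERT0(zfs_range_tree_space(msp->ms_allocating[t]));
}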