Lines Matching defs:mg
218 metaslab_group_t *mg;
227 if ((mg = mc->mc_rotor) == NULL)
231 vd = mg->mg_vd;
234 ASSERT3P(mg->mg_class, ==, mc);
236 } while ((mg = mg->mg_next) != mc->mc_rotor);
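All of these hits come from the ZFS metaslab allocator (metaslab.c). This first cluster is metaslab_class_validate() walking the class's active metaslab groups, which live on a circular doubly linked list with no head node: iteration starts at mc_rotor and stops when it wraps back around. A minimal user-space sketch of that traversal, with the structures cut down to just the fields involved (everything else is simplified):

#include <stddef.h>

/* Pared-down stand-ins for metaslab_group_t and metaslab_class_t. */
typedef struct mg {
    struct mg *mg_prev;     /* circular, doubly linked */
    struct mg *mg_next;
} mg_t;

typedef struct mc {
    mg_t *mc_rotor;         /* current position in the ring */
} mc_t;

/* Visit each active group exactly once, as the do/while at 227-236 does. */
static void
mc_ring_walk(mc_t *mc, void (*visit)(mg_t *))
{
    mg_t *mg;

    if ((mg = mc->mc_rotor) == NULL)
        return;             /* no active groups */
    do {
        visit(mg);
    } while ((mg = mg->mg_next) != mc->mc_rotor);
}

The same mg_t/mc_t stand-ins are reused by the ring sketches further down.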
290 metaslab_group_t *mg = tvd->vdev_mg;
297 mg->mg_class != mc) {
302 mc_hist[i] += mg->mg_histogram[i];
328 metaslab_group_t *mg = tvd->vdev_mg;
335 mg->mg_class != mc) {
343 if (mg->mg_fragmentation == ZFS_FRAG_INVALID) {
352 fragmentation += mg->mg_fragmentation *
353 metaslab_group_get_space(mg);
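Lines 343-353 build a space-weighted average: each group's fragmentation percentage is weighted by metaslab_group_get_space(mg), and any group with no metric yet (ZFS_FRAG_INVALID) makes the whole class metric invalid. The arithmetic in isolation, as a hedged user-space helper (the array form and the zero-space guard are mine):

#include <stdint.h>

#define ZFS_FRAG_INVALID    UINT64_MAX  /* "no metric yet" sentinel */

static uint64_t
class_fragmentation(const uint64_t *frag, const uint64_t *space, int n)
{
    uint64_t weighted = 0, total = 0;

    for (int i = 0; i < n; i++) {
        if (frag[i] == ZFS_FRAG_INVALID)
            return (ZFS_FRAG_INVALID);  /* bail out, as at line 343 */
        weighted += frag[i] * space[i]; /* lines 352-353 */
        total += space[i];
    }
    return (total == 0 ? ZFS_FRAG_INVALID : weighted / total);
}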
377 metaslab_group_t *mg = tvd->vdev_mg;
380 mg->mg_class != mc) {
427 metaslab_group_alloc_update(metaslab_group_t *mg)
429 vdev_t *vd = mg->mg_vd;
430 metaslab_class_t *mc = mg->mg_class;
436 mutex_enter(&mg->mg_lock);
437 was_allocatable = mg->mg_allocatable;
439 mg->mg_free_capacity = ((vs->vs_space - vs->vs_alloc) * 100) /
448 mg->mg_allocatable = (mg->mg_free_capacity > zfs_mg_noalloc_threshold &&
449 (mg->mg_fragmentation == ZFS_FRAG_INVALID ||
450 mg->mg_fragmentation <= zfs_mg_fragmentation_threshold));
467 if (was_allocatable && !mg->mg_allocatable)
469 else if (!was_allocatable && mg->mg_allocatable)
472 mutex_exit(&mg->mg_lock);
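metaslab_group_alloc_update() recomputes, under mg_lock, whether the group should accept new allocations: free capacity as a percentage of the vdev's space (line 439) must exceed zfs_mg_noalloc_threshold, and fragmentation must be either unknown or at most zfs_mg_fragmentation_threshold (lines 448-450). The predicate pulled out into a user-space helper; the threshold values shown are this era's upstream defaults, so check your tree:

#include <stdbool.h>
#include <stdint.h>

#define ZFS_FRAG_INVALID    UINT64_MAX

static uint64_t zfs_mg_noalloc_threshold = 0;          /* min % free */
static uint64_t zfs_mg_fragmentation_threshold = 85;   /* max % fragmented */

static bool
mg_allocatable(uint64_t vs_space, uint64_t vs_alloc, uint64_t frag)
{
    /* Line 439; assumes vs_space != 0, true for a healthy vdev. */
    uint64_t free_capacity = ((vs_space - vs_alloc) * 100) / vs_space;

    /* Lines 448-450: both conditions must hold. */
    return (free_capacity > zfs_mg_noalloc_threshold &&
        (frag == ZFS_FRAG_INVALID ||
        frag <= zfs_mg_fragmentation_threshold));
}

Lines 467-469 then adjust the class's count of allocatable groups only on a state transition, which is why was_allocatable is snapshotted at line 437 before the recomputation.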
478 metaslab_group_t *mg;
480 mg = kmem_zalloc(sizeof (metaslab_group_t), KM_SLEEP);
481 mutex_init(&mg->mg_lock, NULL, MUTEX_DEFAULT, NULL);
482 avl_create(&mg->mg_metaslab_tree, metaslab_compare,
484 mg->mg_vd = vd;
485 mg->mg_class = mc;
486 mg->mg_activation_count = 0;
488 mg->mg_taskq = taskq_create("metaslab_group_taskq", metaslab_load_pct,
491 return (mg);
495 metaslab_group_destroy(metaslab_group_t *mg)
497 ASSERT(mg->mg_prev == NULL);
498 ASSERT(mg->mg_next == NULL);
504 ASSERT(mg->mg_activation_count <= 0);
506 taskq_destroy(mg->mg_taskq);
507 avl_destroy(&mg->mg_metaslab_tree);
508 mutex_destroy(&mg->mg_lock);
509 kmem_free(mg, sizeof (metaslab_group_t));
513 metaslab_group_activate(metaslab_group_t *mg)
515 metaslab_class_t *mc = mg->mg_class;
520 ASSERT(mc->mc_rotor != mg);
521 ASSERT(mg->mg_prev == NULL);
522 ASSERT(mg->mg_next == NULL);
523 ASSERT(mg->mg_activation_count <= 0);
525 if (++mg->mg_activation_count <= 0)
528 mg->mg_aliquot = metaslab_aliquot * MAX(1, mg->mg_vd->vdev_children);
529 metaslab_group_alloc_update(mg);
532 mg->mg_prev = mg;
533 mg->mg_next = mg;
536 mg->mg_prev = mgprev;
537 mg->mg_next = mgnext;
538 mgprev->mg_next = mg;
539 mgnext->mg_prev = mg;
541 mc->mc_rotor = mg;
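Line 525 bumps mg_activation_count and bails unless the count goes positive, keeping repeatedly-passivated groups off the ring; line 528 sizes the group's aliquot (its consecutive share of the rotor) by the vdev's child count, so wider top-level vdevs receive proportionally more data. Lines 532-541 then splice the group into the class's ring and point the rotor at it. Hedged insertion sketch, reusing the mg_t/mc_t stand-ins from the first sketch:

/*
 * Splice mg in next to the current rotor, or make it a self-linked
 * singleton if the ring is empty (lines 532-541).
 */
static void
mc_ring_insert(mc_t *mc, mg_t *mg)
{
    mg_t *mgprev, *mgnext;

    if ((mgprev = mc->mc_rotor) == NULL) {
        mg->mg_prev = mg;
        mg->mg_next = mg;
    } else {
        mgnext = mgprev->mg_next;
        mg->mg_prev = mgprev;
        mg->mg_next = mgnext;
        mgprev->mg_next = mg;
        mgnext->mg_prev = mg;
    }
    mc->mc_rotor = mg;
}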
545 metaslab_group_passivate(metaslab_group_t *mg)
547 metaslab_class_t *mc = mg->mg_class;
552 if (--mg->mg_activation_count != 0) {
553 ASSERT(mc->mc_rotor != mg);
554 ASSERT(mg->mg_prev == NULL);
555 ASSERT(mg->mg_next == NULL);
556 ASSERT(mg->mg_activation_count < 0);
560 taskq_wait(mg->mg_taskq);
561 metaslab_group_alloc_update(mg);
563 mgprev = mg->mg_prev;
564 mgnext = mg->mg_next;
566 if (mg == mgnext) {
574 mg->mg_prev = NULL;
575 mg->mg_next = NULL;
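metaslab_group_passivate() is the inverse: line 552 lets the count go negative without touching the ring (the asserts at 553-556 confirming the group is not linked), line 560 drains any in-flight preload tasks, and lines 563-575 unlink the group, advancing the rotor past it. Removal sketch, same stand-in types:

static void
mc_ring_remove(mc_t *mc, mg_t *mg)
{
    mg_t *mgprev = mg->mg_prev;
    mg_t *mgnext = mg->mg_next;

    if (mg == mgnext) {
        mc->mc_rotor = NULL;        /* last member: ring is now empty */
    } else {
        mc->mc_rotor = mgnext;      /* rotor must not dangle */
        mgprev->mg_next = mgnext;
        mgnext->mg_prev = mgprev;
    }
    /* NULL the links so the asserts at 521-522/554-555 can catch reuse. */
    mg->mg_prev = NULL;
    mg->mg_next = NULL;
}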
579 metaslab_group_get_space(metaslab_group_t *mg)
581 return ((1ULL << mg->mg_vd->vdev_ms_shift) * mg->mg_vd->vdev_ms_count);
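Line 581 is the whole function: a top-level vdev is divided into vdev_ms_count metaslabs of 2^vdev_ms_shift bytes each, so the group's raw capacity is one shift and one multiply. For instance, 128 metaslabs at vdev_ms_shift = 33 (8 GiB apiece) is a 1 TiB vdev.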
585 metaslab_group_histogram_verify(metaslab_group_t *mg)
588 vdev_t *vd = mg->mg_vd;
613 VERIFY3U(mg_hist[i], ==, mg->mg_histogram[i]);
619 metaslab_group_histogram_add(metaslab_group_t *mg, metaslab_t *msp)
621 metaslab_class_t *mc = mg->mg_class;
622 uint64_t ashift = mg->mg_vd->vdev_ashift;
628 mutex_enter(&mg->mg_lock);
630 mg->mg_histogram[i + ashift] +=
635 mutex_exit(&mg->mg_lock);
639 metaslab_group_histogram_remove(metaslab_group_t *mg, metaslab_t *msp)
641 metaslab_class_t *mc = mg->mg_class;
642 uint64_t ashift = mg->mg_vd->vdev_ashift;
648 mutex_enter(&mg->mg_lock);
650 ASSERT3U(mg->mg_histogram[i + ashift], >=,
655 mg->mg_histogram[i + ashift] -=
660 mutex_exit(&mg->mg_lock);
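Both hot loops (lines 630 and 655) apply the same index translation: bucket i of a metaslab's space-map histogram counts free segments of roughly 2^(i + ashift) bytes, so its counts land at index [i + ashift] in the group and class histograms; removal is the identical loop with -=. Merge sketch (bucket counts simplified; the caller holds mg_lock, per lines 628/648):

#include <stdint.h>

static void
mg_histogram_add(uint64_t *mg_hist, const uint64_t *sm_hist,
    int sm_buckets, uint64_t ashift)
{
    for (int i = 0; i < sm_buckets; i++)
        mg_hist[i + ashift] += sm_hist[i];  /* line 630 */
}

The ASSERT at line 650 guards the subtraction path: a group histogram bucket must never underflow, which would mean the add and remove calls were unbalanced.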
664 metaslab_group_add(metaslab_group_t *mg, metaslab_t *msp)
667 mutex_enter(&mg->mg_lock);
668 msp->ms_group = mg;
670 avl_add(&mg->mg_metaslab_tree, msp);
671 mutex_exit(&mg->mg_lock);
674 metaslab_group_histogram_add(mg, msp);
679 metaslab_group_remove(metaslab_group_t *mg, metaslab_t *msp)
682 metaslab_group_histogram_remove(mg, msp);
685 mutex_enter(&mg->mg_lock);
686 ASSERT(msp->ms_group == mg);
687 avl_remove(&mg->mg_metaslab_tree, msp);
689 mutex_exit(&mg->mg_lock);
693 metaslab_group_sort(metaslab_group_t *mg, metaslab_t *msp, uint64_t weight)
702 mutex_enter(&mg->mg_lock);
703 ASSERT(msp->ms_group == mg);
704 avl_remove(&mg->mg_metaslab_tree, msp);
706 avl_add(&mg->mg_metaslab_tree, msp);
707 mutex_exit(&mg->mg_lock);
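metaslab_group_sort() is the standard AVL re-key discipline: ms_weight feeds the tree's comparator, so the node must leave mg_metaslab_tree before the key changes and rejoin afterwards, all under mg_lock. A reconstruction of the body, with the one line the defs:mg filter hides (the weight store) restored and the entry asserts elided:

void
metaslab_group_sort(metaslab_group_t *mg, metaslab_t *msp, uint64_t weight)
{
    mutex_enter(&mg->mg_lock);
    ASSERT(msp->ms_group == mg);
    avl_remove(&mg->mg_metaslab_tree, msp);  /* line 704 */
    msp->ms_weight = weight;                 /* safe: node is out of the tree */
    avl_add(&mg->mg_metaslab_tree, msp);     /* line 706 */
    mutex_exit(&mg->mg_lock);
}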
718 metaslab_group_fragmentation(metaslab_group_t *mg)
720 vdev_t *vd = mg->mg_vd;
750 metaslab_group_allocatable(metaslab_group_t *mg)
752 vdev_t *vd = mg->mg_vd;
754 metaslab_class_t *mc = mg->mg_class;
767 return ((mg->mg_free_capacity > zfs_mg_noalloc_threshold &&
768 (mg->mg_fragmentation == ZFS_FRAG_INVALID ||
769 mg->mg_fragmentation <= zfs_mg_fragmentation_threshold)) ||
1213 metaslab_init(metaslab_group_t *mg, uint64_t id, uint64_t object, uint64_t txg,
1216 vdev_t *vd = mg->mg_vd;
1252 metaslab_group_add(mg, ms);
1255 ms->ms_ops = mg->mg_class->mc_ops;
1290 metaslab_group_t *mg = msp->ms_group;
1292 metaslab_group_remove(mg, msp);
1297 vdev_space_update(mg->mg_vd, -space_map_allocated(msp->ms_sm),
1434 metaslab_group_t *mg = msp->ms_group;
1435 vdev_t *vd = mg->mg_vd;
1565 metaslab_group_preload(metaslab_group_t *mg)
1567 spa_t *spa = mg->mg_vd->vdev_spa;
1569 avl_tree_t *t = &mg->mg_metaslab_tree;
1573 taskq_wait(mg->mg_taskq);
1577 mutex_enter(&mg->mg_lock);
1608 mutex_exit(&mg->mg_lock);
1609 VERIFY(taskq_dispatch(mg->mg_taskq, metaslab_preload,
1611 mutex_enter(&mg->mg_lock);
1614 mutex_exit(&mg->mg_lock);
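The mutex_exit/dispatch/mutex_enter sandwich at lines 1608-1611 is a lock-ordering dance: mg_lock protects the tree walk, but the preload task acquires the metaslab's ms_lock, and ms_lock must be taken before mg_lock, so the group lock is dropped around every taskq_dispatch(). Because the tree can be re-sorted while the lock is down, the successor is captured first. Condensed sketch of the loop using the same illumos primitives (the real loop also caps the preload count and checks metaslab weights):

    mutex_enter(&mg->mg_lock);
    msp = avl_first(t);
    while (msp != NULL) {
        metaslab_t *msp_next = AVL_NEXT(t, msp);  /* grab before unlocking */

        mutex_exit(&mg->mg_lock);
        VERIFY(taskq_dispatch(mg->mg_taskq, metaslab_preload,
            msp, TQ_SLEEP) != 0);
        mutex_enter(&mg->mg_lock);
        msp = msp_next;
    }
    mutex_exit(&mg->mg_lock);

The taskq_wait() at line 1573 covers the bail-out path: when preloading is disabled or the pool is shutting down, the function just drains outstanding tasks and returns.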
1784 metaslab_group_t *mg = msp->ms_group;
1785 vdev_t *vd = mg->mg_vd;
1850 metaslab_group_histogram_verify(mg);
1851 metaslab_class_histogram_verify(mg->mg_class);
1852 metaslab_group_histogram_remove(mg, msp);
1881 metaslab_group_histogram_add(mg, msp);
1882 metaslab_group_histogram_verify(mg);
1883 metaslab_class_histogram_verify(mg->mg_class);
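This sync-path cluster (lines 1850-1883, from metaslab_sync()) is a remove/mutate/add bracket: the metaslab's old histogram contribution is subtracted from the group and class aggregates, the space map is rewritten, and the new contribution is added back, with verify calls on both sides of the mutation. The shape of it, condensed:

    metaslab_group_histogram_verify(mg);
    metaslab_class_histogram_verify(mg->mg_class);
    metaslab_group_histogram_remove(mg, msp);      /* old contribution out */

    /* ... space map condensed or dirty ranges written here ... */

    metaslab_group_histogram_add(mg, msp);         /* new contribution in */
    metaslab_group_histogram_verify(mg);
    metaslab_class_histogram_verify(mg->mg_class);

This keeps the group/class aggregates consistent with the per-metaslab histograms that the verifiers at lines 290-302 and 585-613 cross-check.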
1918 metaslab_group_t *mg = msp->ms_group;
1919 vdev_t *vd = mg->mg_vd;
2005 metaslab_group_sort(mg, msp, metaslab_weight(msp));
2010 metaslab_sync_reassess(metaslab_group_t *mg)
2012 metaslab_group_alloc_update(mg);
2013 mg->mg_fragmentation = metaslab_group_fragmentation(mg);
2018 metaslab_group_preload(mg);
2039 metaslab_group_alloc(metaslab_group_t *mg, uint64_t psize, uint64_t asize,
2042 spa_t *spa = mg->mg_vd->vdev_spa;
2045 avl_tree_t *t = &mg->mg_metaslab_tree;
2052 if (DVA_GET_VDEV(&dva[i]) == mg->mg_vd->vdev_id) {
2061 mutex_enter(&mg->mg_lock);
2065 "requirement: vdev %llu, txg %llu, mg %p, "
2068 mg->mg_vd->vdev_id, txg,
2069 mg, msp, psize, asize, msp->ms_weight);
2070 mutex_exit(&mg->mg_lock);
2095 mutex_exit(&mg->mg_lock);
2145 vdev_dirty(mg->mg_vd, VDD_METASLAB, msp, txg);
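metaslab_group_alloc() searches the weight-sorted tree under mg_lock; because heavier metaslabs sort first, a front-of-tree miss means no metaslab in the group can satisfy the request, which is what the debug message at lines 2065-2069 records. A heavily simplified skeleton (the real loop also filters on activation weight, DVA distance per line 2052, and condensing state; metaslab_usable() is a hypothetical stand-in for those checks):

    mutex_enter(&mg->mg_lock);
    for (msp = avl_first(t); msp != NULL; msp = AVL_NEXT(t, msp)) {
        if (msp->ms_weight < asize) {
            /* weight-sorted: nothing behind this one can fit either */
            mutex_exit(&mg->mg_lock);
            return (-1ULL);
        }
        if (metaslab_usable(msp))   /* hypothetical filter */
            break;
    }
    if (msp == NULL) {
        mutex_exit(&mg->mg_lock);
        return (-1ULL);
    }
    mutex_exit(&mg->mg_lock);

On success, the vdev_dirty() at line 2145 queues the metaslab on the vdev's per-txg dirty list so metaslab_sync() will persist the allocation.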
2162 metaslab_group_t *mg, *rotor;
2211 mg = vd->vdev_mg;
2214 mg->mg_next != NULL)
2215 mg = mg->mg_next;
2217 mg = mc->mc_rotor;
2221 mg = vd->vdev_mg->mg_next;
2223 mg = mc->mc_rotor;
2230 if (mg->mg_class != mc || mg->mg_activation_count <= 0)
2231 mg = mc->mc_rotor;
2233 rotor = mg;
2237 ASSERT(mg->mg_activation_count == 1);
2239 vd = mg->mg_vd;
2264 allocatable = metaslab_group_allocatable(mg);
2280 ASSERT(mg->mg_class == mc);
2291 offset = metaslab_group_alloc(mg, psize, asize, txg, distance,
2320 mg->mg_bias = ((cu - vu) *
2321 (int64_t)mg->mg_aliquot) / 100;
2323 mg->mg_bias = 0;
2327 mg->mg_aliquot + mg->mg_bias) {
2328 mc->mc_rotor = mg->mg_next;
2340 mc->mc_rotor = mg->mg_next;
2342 } while ((mg = mg->mg_next) != rotor);
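This last cluster is metaslab_alloc_dva(): lines 2211-2231 choose a starting group from the allocation hints (falling back to mc_rotor), line 2233 pins that group as the loop's sentinel, and lines 2291-2342 try each group in turn, advancing the rotor when a group has soaked up its aliquot plus bias (lines 2327-2328) or failed outright (line 2340). The bias itself (lines 2320-2321) is what levels space usage: vu and cu are the vdev's and the class's used-space percentages, so a vdev 20 points fuller than the class average gets a 20% smaller share of the rotor. The arithmetic as a user-space helper (name mine; metaslab_aliquot defaults to 512 KiB upstream):

#include <stdint.h>

static int64_t
rotor_bias(int64_t vu, int64_t cu, uint64_t mg_aliquot)
{
    /* Fuller than average (vu > cu) => negative bias, smaller share. */
    return (((cu - vu) * (int64_t)mg_aliquot) / 100);
}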