Lines matching refs:vmp. Cross-reference hits for the arena pointer vmp in the vmem allocator source; each entry gives the source line number, the matching line, and the enclosing function.
401 vmem_getseg(vmem_t *vmp) in vmem_getseg() argument
405 ASSERT(vmp->vm_nsegfree > 0); in vmem_getseg()
407 vsp = vmp->vm_segfree; in vmem_getseg()
408 vmp->vm_segfree = vsp->vs_knext; in vmem_getseg()
409 vmp->vm_nsegfree--; in vmem_getseg()
418 vmem_putseg(vmem_t *vmp, vmem_seg_t *vsp) in vmem_putseg() argument
420 vsp->vs_knext = vmp->vm_segfree; in vmem_putseg()
421 vmp->vm_segfree = vsp; in vmem_putseg()
422 vmp->vm_nsegfree++; in vmem_putseg()
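
The two fragments above are the arena's private stock of segment structures: a LIFO stack threaded through vs_knext, protected only by the caller holding vm_lock. A minimal userland sketch of the same bookkeeping (seg_t and arena_t here are stand-ins for the real vmem_seg_t and vmem_t):

    #include <assert.h>
    #include <stddef.h>

    typedef struct seg {
        struct seg *vs_knext;        /* stack link, as in the real vmem_seg_t */
    } seg_t;

    typedef struct arena {
        seg_t *vm_segfree;           /* head of the LIFO stack of spare segs */
        size_t vm_nsegfree;          /* depth of that stack */
    } arena_t;

    static seg_t *
    arena_getseg(arena_t *vmp)
    {
        seg_t *vsp;

        assert(vmp->vm_nsegfree > 0);        /* caller must populate first */
        vsp = vmp->vm_segfree;
        vmp->vm_segfree = vsp->vs_knext;     /* pop */
        vmp->vm_nsegfree--;
        return (vsp);
    }

    static void
    arena_putseg(arena_t *vmp, seg_t *vsp)
    {
        vsp->vs_knext = vmp->vm_segfree;     /* push */
        vmp->vm_segfree = vsp;
        vmp->vm_nsegfree++;
    }

    int
    main(void)
    {
        static seg_t segs[4];
        arena_t a = { NULL, 0 };

        for (int i = 0; i < 4; i++)
            arena_putseg(&a, &segs[i]);
        assert(arena_getseg(&a) == &segs[3]);   /* LIFO: last put, first got */
        return (0);
    }
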
429 vmem_freelist_insert(vmem_t *vmp, vmem_seg_t *vsp) in vmem_freelist_insert() argument
433 ASSERT(*VMEM_HASH(vmp, vsp->vs_start) != vsp); in vmem_freelist_insert()
435 vprev = (vmem_seg_t *)&vmp->vm_freelist[highbit(VS_SIZE(vsp)) - 1]; in vmem_freelist_insert()
437 vmp->vm_freemap |= VS_SIZE(vprev); in vmem_freelist_insert()
440 cv_broadcast(&vmp->vm_cv); in vmem_freelist_insert()
447 vmem_freelist_delete(vmem_t *vmp, vmem_seg_t *vsp) in vmem_freelist_delete() argument
449 ASSERT(*VMEM_HASH(vmp, vsp->vs_start) != vsp); in vmem_freelist_delete()
457 ASSERT(vmp->vm_freemap & VS_SIZE(vsp->vs_kprev)); in vmem_freelist_delete()
458 vmp->vm_freemap ^= VS_SIZE(vsp->vs_kprev); in vmem_freelist_delete()
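
Why "vm_freemap |= VS_SIZE(vprev)" sets the right bit: assuming the standard illumos initialization in vmem_create_common() (whose vm_freelist fragments appear below), each freelist head i gets vs_end = 1UL << i, and the whole arena was bzero'd, so VS_SIZE() of a head is exactly bit i of the map. A compile-and-run sketch of that invariant (head_t is a stand-in type):

    #include <assert.h>
    #include <stdint.h>

    typedef struct {
        uint64_t vs_start, vs_end;
    } head_t;

    #define VS_SIZE(h)  ((h)->vs_end - (h)->vs_start)

    int
    main(void)
    {
        head_t freelist[64];
        uint64_t freemap = 0;

        for (int i = 0; i < 64; i++) {
            freelist[i].vs_start = 0;           /* the arena-wide bzero() */
            freelist[i].vs_end = (uint64_t)1 << i;
        }
        freemap |= VS_SIZE(&freelist[13]);      /* list 13 becomes nonempty */
        assert(freemap == ((uint64_t)1 << 13));
        freemap ^= VS_SIZE(&freelist[13]);      /* last segment left list 13 */
        assert(freemap == 0);
        return (0);
    }
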
467 vmem_hash_insert(vmem_t *vmp, vmem_seg_t *vsp) in vmem_hash_insert() argument
472 bucket = VMEM_HASH(vmp, vsp->vs_start); in vmem_hash_insert()
485 vmp->vm_kstat.vk_alloc.value.ui64++; in vmem_hash_insert()
486 vmp->vm_kstat.vk_mem_inuse.value.ui64 += VS_SIZE(vsp); in vmem_hash_insert()
493 vmem_hash_delete(vmem_t *vmp, uintptr_t addr, size_t size) in vmem_hash_delete() argument
497 prev_vspp = VMEM_HASH(vmp, addr); in vmem_hash_delete()
503 vmp->vm_kstat.vk_lookup.value.ui64++; in vmem_hash_delete()
509 (void *)vmp, addr, size); in vmem_hash_delete()
512 (void *)vmp, addr, size, VS_SIZE(vsp)); in vmem_hash_delete()
514 vmp->vm_kstat.vk_free.value.ui64++; in vmem_hash_delete()
515 vmp->vm_kstat.vk_mem_inuse.value.ui64 -= size; in vmem_hash_delete()
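
vmem_hash_delete() above walks a singly linked bucket chain while keeping a pointer to the previous link field, so the match can be unlinked in place; vk_lookup counts the extra hops. A standalone model, where the bucket selection is a crude stand-in for the real VMEM_HASH() macro:

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    #define NBUCKETS 64              /* must be a power of two */

    typedef struct seg {
        uintptr_t vs_start;
        struct seg *vs_knext;        /* hash chain link */
    } seg_t;

    static seg_t *
    hash_delete(seg_t **table, uintptr_t addr)
    {
        seg_t **prev_vspp = &table[(addr >> 12) & (NBUCKETS - 1)];
        seg_t *vsp;

        while ((vsp = *prev_vspp) != NULL) {
            if (vsp->vs_start == addr) {
                *prev_vspp = vsp->vs_knext;     /* unlink in place */
                return (vsp);
            }
            prev_vspp = &vsp->vs_knext;         /* vk_lookup counts these hops */
        }
        return (NULL);       /* the real code panics: freeing a bogus address */
    }

    int
    main(void)
    {
        seg_t *table[NBUCKETS] = { NULL };
        seg_t s = { 0x5000, NULL };
        seg_t **bucket = &table[(s.vs_start >> 12) & (NBUCKETS - 1)];

        s.vs_knext = *bucket;        /* the insert is a plain push */
        *bucket = &s;
        assert(hash_delete(table, 0x5000) == &s);
        assert(hash_delete(table, 0x5000) == NULL);
        return (0);
    }
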
524 vmem_seg_create(vmem_t *vmp, vmem_seg_t *vprev, uintptr_t start, uintptr_t end) in vmem_seg_create() argument
526 vmem_seg_t *newseg = vmem_getseg(vmp); in vmem_seg_create()
542 vmem_seg_destroy(vmem_t *vmp, vmem_seg_t *vsp) in vmem_seg_destroy() argument
547 vmem_putseg(vmp, vsp); in vmem_seg_destroy()
554 vmem_span_create(vmem_t *vmp, void *vaddr, size_t size, uint8_t import) in vmem_span_create() argument
560 ASSERT(MUTEX_HELD(&vmp->vm_lock)); in vmem_span_create()
562 if ((start | end) & (vmp->vm_quantum - 1)) in vmem_span_create()
564 (void *)vmp, vaddr, size); in vmem_span_create()
566 span = vmem_seg_create(vmp, vmp->vm_seg0.vs_aprev, start, end); in vmem_span_create()
569 VMEM_INSERT(vmp->vm_seg0.vs_kprev, span, k); in vmem_span_create()
571 newseg = vmem_seg_create(vmp, span, start, end); in vmem_span_create()
572 vmem_freelist_insert(vmp, newseg); in vmem_span_create()
575 vmp->vm_kstat.vk_mem_import.value.ui64 += size; in vmem_span_create()
576 vmp->vm_kstat.vk_mem_total.value.ui64 += size; in vmem_span_create()
585 vmem_span_destroy(vmem_t *vmp, vmem_seg_t *vsp) in vmem_span_destroy() argument
590 ASSERT(MUTEX_HELD(&vmp->vm_lock)); in vmem_span_destroy()
594 vmp->vm_kstat.vk_mem_import.value.ui64 -= size; in vmem_span_destroy()
595 vmp->vm_kstat.vk_mem_total.value.ui64 -= size; in vmem_span_destroy()
599 vmem_seg_destroy(vmp, vsp); in vmem_span_destroy()
600 vmem_seg_destroy(vmp, span); in vmem_span_destroy()
609 vmem_seg_alloc(vmem_t *vmp, vmem_seg_t *vsp, uintptr_t addr, size_t size) in vmem_seg_alloc() argument
614 size_t realsize = P2ROUNDUP(size, vmp->vm_quantum); in vmem_seg_alloc()
617 ASSERT(P2PHASE(vs_start, vmp->vm_quantum) == 0); in vmem_seg_alloc()
618 ASSERT(P2PHASE(addr, vmp->vm_quantum) == 0); in vmem_seg_alloc()
631 vsp = vmem_seg_create(vmp, vsp->vs_aprev, addr, addr + size); in vmem_seg_alloc()
632 vmem_hash_insert(vmp, vsp); in vmem_seg_alloc()
636 vmem_freelist_delete(vmp, vsp); in vmem_seg_alloc()
639 vmem_freelist_insert(vmp, in vmem_seg_alloc()
640 vmem_seg_create(vmp, vsp, addr_end, vs_end)); in vmem_seg_alloc()
643 vmem_freelist_insert(vmp, in vmem_seg_alloc()
644 vmem_seg_create(vmp, vsp->vs_aprev, vs_start, addr)); in vmem_seg_alloc()
649 vmem_hash_insert(vmp, vsp); in vmem_seg_alloc()
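
vmem_seg_alloc() cuts the chosen range out of a free segment: the segment leaves its freelist, up to two leftover pieces are created and reinserted (the fragments at 639-644 above), and the allocated middle goes into the hash. The arithmetic, as a small standalone demo with made-up addresses:

    #include <stdint.h>
    #include <stdio.h>

    #define P2ROUNDUP(x, a)  (-(-(x) & -(a)))   /* as in the kernel headers */

    int
    main(void)
    {
        uintptr_t vs_start = 0x10000, vs_end = 0x20000;   /* 64K free segment */
        uintptr_t addr = 0x14000;                         /* chosen address */
        uintptr_t quantum = 0x1000, size = 0x3000;
        uintptr_t addr_end = addr + P2ROUNDUP(size, quantum);

        if (vs_start < addr)         /* leftover front piece stays free */
            printf("free  [%#lx, %#lx)\n",
                (unsigned long)vs_start, (unsigned long)addr);
        printf("alloc [%#lx, %#lx)\n",
            (unsigned long)addr, (unsigned long)addr_end);
        if (addr_end < vs_end)       /* leftover back piece stays free */
            printf("free  [%#lx, %#lx)\n",
                (unsigned long)addr_end, (unsigned long)vs_end);
        return (0);
    }
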
670 vmem_populate(vmem_t *vmp, int vmflag) in vmem_populate() argument
679 while (vmp->vm_nsegfree < VMEM_MINFREE && in vmem_populate()
681 vmem_putseg(vmp, vsp); in vmem_populate()
683 if (vmp->vm_nsegfree >= VMEM_MINFREE) in vmem_populate()
690 ASSERT(vmp->vm_cflags & VMC_POPULATOR); in vmem_populate()
694 mutex_exit(&vmp->vm_lock); in vmem_populate()
720 mutex_enter(&vmp->vm_lock); in vmem_populate()
721 vmp->vm_kstat.vk_populate_fail.value.ui64++; in vmem_populate()
737 mutex_enter(&vmp->vm_lock); in vmem_populate()
743 while (vmp->vm_nsegfree < VMEM_MINFREE) in vmem_populate()
744 vmem_putseg(vmp, (vmem_seg_t *)(p + --nseg * vmem_seg_size)); in vmem_populate()
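
vmem_populate() refills the spare-segment stack before any operation that may consume segments: vm_lock is dropped, a page of segment structures is obtained, and the page is carved from its top down until at least VMEM_MINFREE spares are on hand; per the vmem_destroy() fragments below, surplus structures go to a global pool (vmem_putseg_global()). A toy carve loop with illustrative sizes:

    #include <stdio.h>
    #include <stdlib.h>

    #define VMEM_MINFREE  4          /* illustrative; the real value differs */
    #define SEG_SIZE      64         /* stand-in for vmem_seg_size */
    #define PAGESIZE      4096

    int
    main(void)
    {
        char *p = malloc(PAGESIZE);  /* stands in for the borrowed page */
        size_t nseg = PAGESIZE / SEG_SIZE;
        size_t nsegfree = 1;         /* the arena is nearly dry */

        if (p == NULL)
            return (1);
        while (nsegfree < VMEM_MINFREE) {
            void *seg = p + --nseg * SEG_SIZE;   /* peel a seg off the top */
            printf("putseg %p\n", seg);
            nsegfree++;
        }
        /* the real code pushes the remaining structures onto a global pool */
        free(p);
        return (0);
    }
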
760 vmem_advance(vmem_t *vmp, vmem_seg_t *walker, vmem_seg_t *afterme) in vmem_advance() argument
778 vmem_freelist_delete(vmp, vnext); in vmem_advance()
779 vmem_freelist_delete(vmp, vprev); in vmem_advance()
781 vmem_freelist_insert(vmp, vprev); in vmem_advance()
782 vmem_seg_destroy(vmp, vnext); in vmem_advance()
794 vmp->vm_source_free != NULL && in vmem_advance()
800 vmem_freelist_delete(vmp, vsp); in vmem_advance()
801 vmem_span_destroy(vmp, vsp); in vmem_advance()
802 mutex_exit(&vmp->vm_lock); in vmem_advance()
803 vmp->vm_source_free(vmp->vm_source, vaddr, size); in vmem_advance()
804 mutex_enter(&vmp->vm_lock); in vmem_advance()
816 vmem_nextfit_alloc(vmem_t *vmp, size_t size, int vmflag) in vmem_nextfit_alloc() argument
820 size_t realsize = P2ROUNDUP(size, vmp->vm_quantum); in vmem_nextfit_alloc()
823 mutex_enter(&vmp->vm_lock); in vmem_nextfit_alloc()
825 if (vmp->vm_nsegfree < VMEM_MINFREE && !vmem_populate(vmp, vmflag)) { in vmem_nextfit_alloc()
826 mutex_exit(&vmp->vm_lock); in vmem_nextfit_alloc()
840 rotor = &vmp->vm_rotor; in vmem_nextfit_alloc()
847 vmem_hash_insert(vmp, in vmem_nextfit_alloc()
848 vmem_seg_create(vmp, rotor->vs_aprev, addr, addr + size)); in vmem_nextfit_alloc()
849 mutex_exit(&vmp->vm_lock); in vmem_nextfit_alloc()
858 vmp->vm_kstat.vk_search.value.ui64++; in vmem_nextfit_alloc()
871 vmem_advance(vmp, rotor, rotor->vs_anext); in vmem_nextfit_alloc()
880 if (vmp->vm_source_alloc != NULL || in vmem_nextfit_alloc()
882 mutex_exit(&vmp->vm_lock); in vmem_nextfit_alloc()
883 return (vmem_xalloc(vmp, size, vmp->vm_quantum, in vmem_nextfit_alloc()
886 vmp->vm_kstat.vk_wait.value.ui64++; in vmem_nextfit_alloc()
887 cv_wait(&vmp->vm_cv, &vmp->vm_lock); in vmem_nextfit_alloc()
896 vsp = vmem_seg_alloc(vmp, vsp, addr, size); in vmem_nextfit_alloc()
904 vmem_advance(vmp, rotor, vsp); in vmem_nextfit_alloc()
905 mutex_exit(&vmp->vm_lock); in vmem_nextfit_alloc()
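
VM_NEXTFIT allocation pivots on the rotor seen at line 840: a marker segment threaded into the arena's circular, address-ordered segment list. The search takes the first fitting free segment after the rotor and then advances the rotor past it, so successive allocations cycle through the address space instead of immediately reusing freed addresses. A toy model of that scan (seg_t here is hypothetical):

    #include <stdio.h>

    typedef struct seg {
        struct seg *vs_anext;        /* circular address-order link */
        int vs_free;                 /* 1 = free; 0 = allocated or marker */
        int id;
    } seg_t;

    static seg_t *
    nextfit(seg_t *rotor)
    {
        seg_t *vsp;

        for (vsp = rotor->vs_anext; vsp != rotor; vsp = vsp->vs_anext)
            if (vsp->vs_free)
                return (vsp);        /* the real scan also checks the size */
        return (NULL);               /* wrapped around: import more or cv_wait */
    }

    int
    main(void)
    {
        seg_t rotor = { NULL, 0, 0 }, a = { NULL, 0, 1 }, b = { NULL, 1, 2 };

        rotor.vs_anext = &a;         /* rotor -> a -> b -> rotor */
        a.vs_anext = &b;
        b.vs_anext = &rotor;
        printf("nextfit found seg %d\n", nextfit(&rotor)->id);   /* seg 2 */
        return (0);
    }
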
917 vmem_canalloc(vmem_t *vmp, size_t size) in vmem_canalloc() argument
921 ASSERT(MUTEX_HELD(&vmp->vm_lock)); in vmem_canalloc()
924 flist = lowbit(P2ALIGN(vmp->vm_freemap, size)); in vmem_canalloc()
926 flist = lowbit(P2ALIGN(vmp->vm_freemap, 1UL << hb)); in vmem_canalloc()
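
vmem_canalloc() is the lock-held instant-fit test: a request is satisfiable without waiting iff some freelist guaranteed to hold large-enough segments is nonempty. P2ALIGN(map, 2^k) is map & -(2^k), which clears every bit below k; lowbit() then returns the 1-based index of the smallest such nonempty list, 0 meaning none (the real code indexes vm_freelist[flist - 1], as the vmem_xalloc() fragment at 1008 shows). A standalone model:

    #include <stdint.h>
    #include <stdio.h>

    #define NLISTS 64                /* stand-in for VMEM_FREELISTS */

    static int
    highbit(uint64_t v)              /* 1-based index of the highest set bit */
    {
        int h = 0;

        while (v != 0) {
            h++;
            v >>= 1;
        }
        return (h);
    }

    static int
    lowbit(uint64_t v)               /* 1-based index of the lowest set bit */
    {
        return (v == 0 ? 0 : highbit(v & (~v + 1)));
    }

    static int
    canalloc(uint64_t freemap, uint64_t size)
    {
        int hb, flist = 0;

        if ((size & (size - 1)) == 0)            /* power of two: exact list */
            flist = lowbit(freemap & -size);
        else if ((hb = highbit(size)) < NLISTS)  /* else round up one list */
            flist = lowbit(freemap & -((uint64_t)1 << hb));
        return (flist);              /* 0: no list can satisfy the request */
    }

    int
    main(void)
    {
        /* lists 12 (4K..8K segs) and 16 (64K..128K segs) are nonempty */
        uint64_t freemap = ((uint64_t)1 << 12) | ((uint64_t)1 << 16);

        printf("4K  -> flist %d\n", canalloc(freemap, 4096));    /* 13 */
        printf("12K -> flist %d\n", canalloc(freemap, 12288));   /* 17 */
        return (0);
    }
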
937 vmem_xalloc(vmem_t *vmp, size_t size, size_t align_arg, size_t phase, in vmem_xalloc() argument
943 uintptr_t align = (align_arg != 0) ? align_arg : vmp->vm_quantum; in vmem_xalloc()
949 if ((align | phase | nocross) & (vmp->vm_quantum - 1)) in vmem_xalloc()
952 (void *)vmp, size, align_arg, phase, nocross, in vmem_xalloc()
959 (void *)vmp, size, align_arg, phase, nocross, in vmem_xalloc()
965 (void *)vmp, size, align_arg, phase, nocross, in vmem_xalloc()
968 if ((mtbf = vmem_mtbf | vmp->vm_mtbf) != 0 && gethrtime() % mtbf == 0 && in vmem_xalloc()
972 mutex_enter(&vmp->vm_lock); in vmem_xalloc()
974 if (vmp->vm_nsegfree < VMEM_MINFREE && in vmem_xalloc()
975 !vmem_populate(vmp, vmflag)) in vmem_xalloc()
997 flist = lowbit(P2ALIGN(vmp->vm_freemap, size)); in vmem_xalloc()
1000 if ((vmp->vm_freemap >> hb) == 0 || in vmem_xalloc()
1004 flist = lowbit(P2ALIGN(vmp->vm_freemap, 1UL << hb)); in vmem_xalloc()
1008 vmp->vm_freelist[flist - 1].vs_knext; in vmem_xalloc()
1010 vmp->vm_kstat.vk_search.value.ui64++; in vmem_xalloc()
1022 flist = lowbit(P2ALIGN(vmp->vm_freemap, in vmem_xalloc()
1026 vsp = (vmem_seg_t *)&vmp->vm_freelist[flist]; in vmem_xalloc()
1053 if (vmp->vm_source_alloc != NULL && nocross == 0 && in vmem_xalloc()
1056 size_t aquantum = MAX(vmp->vm_quantum, in vmem_xalloc()
1057 vmp->vm_source->vm_quantum); in vmem_xalloc()
1060 !(vmp->vm_cflags & VMC_XALIGN)) { in vmem_xalloc()
1062 align - vmp->vm_quantum : align - aquantum; in vmem_xalloc()
1065 aneeded = MAX(size + aphase, vmp->vm_min_import); in vmem_xalloc()
1076 mutex_exit(&vmp->vm_lock); in vmem_xalloc()
1089 if (size == asize && !(vmp->vm_cflags & VMC_XALLOC)) in vmem_xalloc()
1093 align <= vmp->vm_source->vm_quantum) in vmem_xalloc()
1099 ASSERT(vmp->vm_nsegfree >= resv); in vmem_xalloc()
1100 vmp->vm_nsegfree -= resv; /* reserve our segs */ in vmem_xalloc()
1101 mutex_exit(&vmp->vm_lock); in vmem_xalloc()
1102 if (vmp->vm_cflags & VMC_XALLOC) { in vmem_xalloc()
1105 vmp->vm_source_alloc)(vmp->vm_source, in vmem_xalloc()
1109 vmp->vm_source->vm_quantum) == 0); in vmem_xalloc()
1110 ASSERT(!(vmp->vm_cflags & VMC_XALIGN) || in vmem_xalloc()
1113 vaddr = vmp->vm_source_alloc(vmp->vm_source, in vmem_xalloc()
1116 mutex_enter(&vmp->vm_lock); in vmem_xalloc()
1117 vmp->vm_nsegfree += resv; /* claim reservation */ in vmem_xalloc()
1118 aneeded = size + align - vmp->vm_quantum; in vmem_xalloc()
1119 aneeded = P2ROUNDUP(aneeded, vmp->vm_quantum); in vmem_xalloc()
1130 vmp->vm_source_free != NULL && in vmem_xalloc()
1131 vmem_canalloc(vmp, aneeded)) { in vmem_xalloc()
1138 vbest = vmem_span_create(vmp, vaddr, asize, 1); in vmem_xalloc()
1141 } else if (vmem_canalloc(vmp, aneeded)) { in vmem_xalloc()
1159 mutex_exit(&vmp->vm_lock); in vmem_xalloc()
1160 if (vmp->vm_cflags & VMC_IDENTIFIER) in vmem_xalloc()
1164 mutex_enter(&vmp->vm_lock); in vmem_xalloc()
1167 vmp->vm_kstat.vk_wait.value.ui64++; in vmem_xalloc()
1168 cv_wait(&vmp->vm_cv, &vmp->vm_lock); in vmem_xalloc()
1178 (void) vmem_seg_alloc(vmp, vbest, addr, size); in vmem_xalloc()
1179 mutex_exit(&vmp->vm_lock); in vmem_xalloc()
1181 vmp->vm_source_free(vmp->vm_source, xvaddr, xsize); in vmem_xalloc()
1188 vmp->vm_kstat.vk_fail.value.ui64++; in vmem_xalloc()
1189 mutex_exit(&vmp->vm_lock); in vmem_xalloc()
1193 (void *)vmp, size, align_arg, phase, nocross, in vmem_xalloc()
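
Taken together, the vmem_xalloc() fragments implement constrained allocation: alignment, phase (offset within the alignment), a nocross boundary, and an address range, importing spans through vm_source_alloc when the freelists cannot satisfy the request. A kernel-context usage sketch, assuming the illumos vmem interfaces (my_arena is hypothetical); a vmem_xalloc() allocation should be released with vmem_xfree(), since the quantum-cache path in vmem_free() below does not apply to constrained allocations:

    #include <sys/vmem.h>

    static void
    xalloc_example(vmem_t *my_arena)         /* my_arena: hypothetical */
    {
        void *va = vmem_xalloc(my_arena, 8192,
            64 * 1024,           /* align */
            0,                   /* phase: offset from that alignment */
            1024 * 1024,         /* nocross: must not span this boundary */
            NULL, NULL,          /* minaddr, maxaddr: unconstrained */
            VM_SLEEP);

        /* ... use va ... */
        vmem_xfree(my_arena, va, 8192);
    }
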
1205 vmem_xfree(vmem_t *vmp, void *vaddr, size_t size) in vmem_xfree() argument
1209 mutex_enter(&vmp->vm_lock); in vmem_xfree()
1211 vsp = vmem_hash_delete(vmp, (uintptr_t)vaddr, size); in vmem_xfree()
1212 vsp->vs_end = P2ROUNDUP(vsp->vs_end, vmp->vm_quantum); in vmem_xfree()
1220 vmem_freelist_delete(vmp, vnext); in vmem_xfree()
1222 vmem_seg_destroy(vmp, vnext); in vmem_xfree()
1231 vmem_freelist_delete(vmp, vprev); in vmem_xfree()
1233 vmem_seg_destroy(vmp, vsp); in vmem_xfree()
1240 if (vsp->vs_aprev->vs_import && vmp->vm_source_free != NULL && in vmem_xfree()
1246 vmem_span_destroy(vmp, vsp); in vmem_xfree()
1247 mutex_exit(&vmp->vm_lock); in vmem_xfree()
1248 vmp->vm_source_free(vmp->vm_source, vaddr, size); in vmem_xfree()
1250 vmem_freelist_insert(vmp, vsp); in vmem_xfree()
1251 mutex_exit(&vmp->vm_lock); in vmem_xfree()
1263 vmem_alloc(vmem_t *vmp, size_t size, int vmflag) in vmem_alloc() argument
1271 if (size - 1 < vmp->vm_qcache_max) in vmem_alloc()
1272 return (kmem_cache_alloc(vmp->vm_qcache[(size - 1) >> in vmem_alloc()
1273 vmp->vm_qshift], vmflag & VM_KMFLAGS)); in vmem_alloc()
1275 if ((mtbf = vmem_mtbf | vmp->vm_mtbf) != 0 && gethrtime() % mtbf == 0 && in vmem_alloc()
1280 return (vmem_nextfit_alloc(vmp, size, vmflag)); in vmem_alloc()
1283 return (vmem_xalloc(vmp, size, vmp->vm_quantum, 0, 0, in vmem_alloc()
1289 mutex_enter(&vmp->vm_lock); in vmem_alloc()
1291 if (vmp->vm_nsegfree >= VMEM_MINFREE || vmem_populate(vmp, vmflag)) { in vmem_alloc()
1293 flist = lowbit(P2ALIGN(vmp->vm_freemap, size)); in vmem_alloc()
1295 flist = lowbit(P2ALIGN(vmp->vm_freemap, 1UL << hb)); in vmem_alloc()
1299 mutex_exit(&vmp->vm_lock); in vmem_alloc()
1300 return (vmem_xalloc(vmp, size, vmp->vm_quantum, in vmem_alloc()
1305 vsp = vmp->vm_freelist[flist].vs_knext; in vmem_alloc()
1310 (void) vmem_seg_alloc(vmp, vsp, addr, size); in vmem_alloc()
1311 mutex_exit(&vmp->vm_lock); in vmem_alloc()
1319 vmem_free(vmem_t *vmp, void *vaddr, size_t size) in vmem_free() argument
1321 if (size - 1 < vmp->vm_qcache_max) in vmem_free()
1322 kmem_cache_free(vmp->vm_qcache[(size - 1) >> vmp->vm_qshift], in vmem_free()
1325 vmem_xfree(vmp, vaddr, size); in vmem_free()
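
vmem_alloc()/vmem_free() are the unconstrained fast path: the "size - 1 < vm_qcache_max" tests above route small requests to per-size kmem quantum caches, indexed by (size - 1) >> vm_qshift, without ever taking vm_lock. A kernel-context usage sketch (my_arena is hypothetical):

    #include <sys/vmem.h>

    static void
    alloc_example(vmem_t *my_arena)          /* my_arena: hypothetical */
    {
        void *va = vmem_alloc(my_arena, 4096, VM_SLEEP);

        /* ... use va ... */
        vmem_free(my_arena, va, 4096);       /* size must match the alloc */
    }
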
1332 vmem_contains(vmem_t *vmp, void *vaddr, size_t size) in vmem_contains() argument
1337 vmem_seg_t *seg0 = &vmp->vm_seg0; in vmem_contains()
1339 mutex_enter(&vmp->vm_lock); in vmem_contains()
1340 vmp->vm_kstat.vk_contains.value.ui64++; in vmem_contains()
1342 vmp->vm_kstat.vk_contains_search.value.ui64++; in vmem_contains()
1347 mutex_exit(&vmp->vm_lock); in vmem_contains()
1355 vmem_add(vmem_t *vmp, void *vaddr, size_t size, int vmflag) in vmem_add() argument
1359 (void *)vmp, vaddr, size); in vmem_add()
1361 ASSERT(!vmem_contains(vmp, vaddr, size)); in vmem_add()
1363 mutex_enter(&vmp->vm_lock); in vmem_add()
1364 if (vmem_populate(vmp, vmflag)) in vmem_add()
1365 (void) vmem_span_create(vmp, vaddr, size, 0); in vmem_add()
1368 mutex_exit(&vmp->vm_lock); in vmem_add()
1381 vmem_walk(vmem_t *vmp, int typemask, in vmem_walk() argument
1385 vmem_seg_t *seg0 = &vmp->vm_seg0; in vmem_walk()
1394 mutex_enter(&vmp->vm_lock); in vmem_walk()
1401 vmem_advance(vmp, &walker, vsp); in vmem_walk()
1402 mutex_exit(&vmp->vm_lock); in vmem_walk()
1404 mutex_enter(&vmp->vm_lock); in vmem_walk()
1411 vmem_advance(vmp, &walker, NULL); in vmem_walk()
1412 mutex_exit(&vmp->vm_lock); in vmem_walk()
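
vmem_walk() iterates over the arena's segments by planting a walker segment (see vmem_advance() above), so vm_lock can be dropped around each callback without losing its place. A usage sketch assuming the illumos callback signature func(arg, vaddr, size); my_arena and the callback are hypothetical:

    #include <sys/cmn_err.h>
    #include <sys/vmem.h>

    static void
    count_seg(void *arg, void *vaddr, size_t size)
    {
        size_t *total = arg;

        *total += size;
        cmn_err(CE_CONT, "alloc %p + %lu\n", vaddr, (ulong_t)size);
    }

    static void
    walk_example(vmem_t *my_arena)           /* my_arena: hypothetical */
    {
        size_t total = 0;

        vmem_walk(my_arena, VMEM_ALLOC, count_seg, &total);
        cmn_err(CE_CONT, "in use: %lu bytes\n", (ulong_t)total);
    }
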
1423 vmem_size(vmem_t *vmp, int typemask) in vmem_size() argument
1428 size += vmp->vm_kstat.vk_mem_inuse.value.ui64; in vmem_size()
1430 size += vmp->vm_kstat.vk_mem_total.value.ui64 - in vmem_size()
1431 vmp->vm_kstat.vk_mem_inuse.value.ui64; in vmem_size()
1452 vmem_t *vmp, *cur, **vmpp; in vmem_create_common() local
1458 vmp = vmem_alloc(vmem_vmem_arena, sizeof (vmem_t), in vmem_create_common()
1462 vmp = &vmem0[id - 1]; in vmem_create_common()
1469 if (vmp == NULL) in vmem_create_common()
1471 bzero(vmp, sizeof (vmem_t)); in vmem_create_common()
1473 (void) snprintf(vmp->vm_name, VMEM_NAMELEN, "%s", name); in vmem_create_common()
1474 mutex_init(&vmp->vm_lock, NULL, MUTEX_DEFAULT, NULL); in vmem_create_common()
1475 cv_init(&vmp->vm_cv, NULL, CV_DEFAULT, NULL); in vmem_create_common()
1476 vmp->vm_cflags = vmflag; in vmem_create_common()
1479 vmp->vm_quantum = quantum; in vmem_create_common()
1480 vmp->vm_qshift = highbit(quantum) - 1; in vmem_create_common()
1481 nqcache = MIN(qcache_max >> vmp->vm_qshift, VMEM_NQCACHE_MAX); in vmem_create_common()
1484 vfp = &vmp->vm_freelist[i]; in vmem_create_common()
1490 vmp->vm_freelist[0].vs_kprev = NULL; in vmem_create_common()
1491 vmp->vm_freelist[VMEM_FREELISTS].vs_knext = NULL; in vmem_create_common()
1492 vmp->vm_freelist[VMEM_FREELISTS].vs_end = 0; in vmem_create_common()
1493 vmp->vm_hash_table = vmp->vm_hash0; in vmem_create_common()
1494 vmp->vm_hash_mask = VMEM_HASH_INITIAL - 1; in vmem_create_common()
1495 vmp->vm_hash_shift = highbit(vmp->vm_hash_mask); in vmem_create_common()
1497 vsp = &vmp->vm_seg0; in vmem_create_common()
1504 vsp = &vmp->vm_rotor; in vmem_create_common()
1506 VMEM_INSERT(&vmp->vm_seg0, vsp, a); in vmem_create_common()
1508 bcopy(&vmem_kstat_template, &vmp->vm_kstat, sizeof (vmem_kstat_t)); in vmem_create_common()
1510 vmp->vm_id = id; in vmem_create_common()
1512 vmp->vm_kstat.vk_source_id.value.ui32 = source->vm_id; in vmem_create_common()
1513 vmp->vm_source = source; in vmem_create_common()
1514 vmp->vm_source_alloc = afunc; in vmem_create_common()
1515 vmp->vm_source_free = ffunc; in vmem_create_common()
1522 if (vmp->vm_cflags & VMC_NO_QCACHE) { in vmem_create_common()
1523 vmp->vm_min_import = in vmem_create_common()
1524 VMEM_QCACHE_SLABSIZE(nqcache << vmp->vm_qshift); in vmem_create_common()
1530 vmp->vm_qcache_max = nqcache << vmp->vm_qshift; in vmem_create_common()
1533 (void) sprintf(buf, "%s_%lu", vmp->vm_name, in vmem_create_common()
1535 vmp->vm_qcache[i] = kmem_cache_create(buf, in vmem_create_common()
1537 NULL, vmp, KMC_QCACHE | KMC_NOTOUCH); in vmem_create_common()
1541 if ((vmp->vm_ksp = kstat_create("vmem", vmp->vm_id, vmp->vm_name, in vmem_create_common()
1544 vmp->vm_ksp->ks_data = &vmp->vm_kstat; in vmem_create_common()
1545 kstat_install(vmp->vm_ksp); in vmem_create_common()
1552 *vmpp = vmp; in vmem_create_common()
1555 if (vmp->vm_cflags & VMC_POPULATOR) { in vmem_create_common()
1557 vmem_populator[atomic_inc_32_nv(&vmem_populators) - 1] = vmp; in vmem_create_common()
1558 mutex_enter(&vmp->vm_lock); in vmem_create_common()
1559 (void) vmem_populate(vmp, vmflag | VM_PANIC); in vmem_create_common()
1560 mutex_exit(&vmp->vm_lock); in vmem_create_common()
1563 if ((base || size) && vmem_add(vmp, base, size, vmflag) == NULL) { in vmem_create_common()
1564 vmem_destroy(vmp); in vmem_create_common()
1568 return (vmp); in vmem_create_common()
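
vmem_create_common() builds the arena: freelist heads whose sizes encode the vm_freemap bits, the initial hash table vm_hash0, the seg0 and rotor sentinel segments, kstats, optional quantum caches, and an initial span if base/size were given. A usage sketch for the public vmem_create() wrapper, assuming the illumos signature (parent_arena is hypothetical): a sub-arena that imports page-sized spans from its source on demand.

    #include <sys/param.h>           /* PAGESIZE */
    #include <sys/vmem.h>

    static vmem_t *
    create_subarena(vmem_t *parent_arena)    /* parent_arena: hypothetical */
    {
        return (vmem_create("my_subarena",
            NULL, 0,                 /* no initial span: grow by import only */
            PAGESIZE,                /* quantum */
            vmem_alloc,              /* afunc: import spans from the source */
            vmem_free,               /* ffunc: return spans to the source */
            parent_arena,            /* source arena */
            8 * PAGESIZE,            /* qcache_max: cache small sizes */
            VM_SLEEP));
    }
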
1600 vmem_destroy(vmem_t *vmp) in vmem_destroy() argument
1603 vmem_seg_t *seg0 = &vmp->vm_seg0; in vmem_destroy()
1610 while ((cur = *vmpp) != vmp) in vmem_destroy()
1612 *vmpp = vmp->vm_next; in vmem_destroy()
1616 if (vmp->vm_qcache[i]) in vmem_destroy()
1617 kmem_cache_destroy(vmp->vm_qcache[i]); in vmem_destroy()
1619 leaked = vmem_size(vmp, VMEM_ALLOC); in vmem_destroy()
1622 vmp->vm_name, leaked, (vmp->vm_cflags & VMC_IDENTIFIER) ? in vmem_destroy()
1625 if (vmp->vm_hash_table != vmp->vm_hash0) in vmem_destroy()
1626 vmem_free(vmem_hash_arena, vmp->vm_hash_table, in vmem_destroy()
1627 (vmp->vm_hash_mask + 1) * sizeof (void *)); in vmem_destroy()
1633 VMEM_DELETE(&vmp->vm_rotor, a); in vmem_destroy()
1639 while (vmp->vm_nsegfree > 0) in vmem_destroy()
1640 vmem_putseg_global(vmem_getseg(vmp)); in vmem_destroy()
1642 kstat_delete(vmp->vm_ksp); in vmem_destroy()
1644 mutex_destroy(&vmp->vm_lock); in vmem_destroy()
1645 cv_destroy(&vmp->vm_cv); in vmem_destroy()
1646 vmem_free(vmem_vmem_arena, vmp, sizeof (vmem_t)); in vmem_destroy()
1653 vmem_hash_rescale(vmem_t *vmp) in vmem_hash_rescale() argument
1658 nseg = (size_t)(vmp->vm_kstat.vk_alloc.value.ui64 - in vmem_hash_rescale()
1659 vmp->vm_kstat.vk_free.value.ui64); in vmem_hash_rescale()
1662 old_size = vmp->vm_hash_mask + 1; in vmem_hash_rescale()
1673 mutex_enter(&vmp->vm_lock); in vmem_hash_rescale()
1675 old_size = vmp->vm_hash_mask + 1; in vmem_hash_rescale()
1676 old_table = vmp->vm_hash_table; in vmem_hash_rescale()
1678 vmp->vm_hash_mask = new_size - 1; in vmem_hash_rescale()
1679 vmp->vm_hash_table = new_table; in vmem_hash_rescale()
1680 vmp->vm_hash_shift = highbit(vmp->vm_hash_mask); in vmem_hash_rescale()
1687 vmem_seg_t **hash_bucket = VMEM_HASH(vmp, addr); in vmem_hash_rescale()
1694 mutex_exit(&vmp->vm_lock); in vmem_hash_rescale()
1696 if (old_table != vmp->vm_hash0) in vmem_hash_rescale()
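
vmem_hash_rescale() sizes the table to the live-segment count (allocs minus frees), allocates the new table without holding vm_lock, then swaps it in and rehashes every chained segment into its new bucket; the move is pointer surgery only. A standalone model of that rehash loop, with bucket_of() standing in for VMEM_HASH():

    #include <stddef.h>
    #include <stdint.h>

    typedef struct seg {
        uintptr_t vs_start;
        struct seg *vs_knext;
    } seg_t;

    static seg_t **
    bucket_of(seg_t **table, size_t mask, uintptr_t addr)
    {
        return (&table[(addr >> 12) & mask]);    /* crude VMEM_HASH() stand-in */
    }

    static void
    rehash(seg_t **old_table, size_t old_size,
        seg_t **new_table, size_t new_mask)
    {
        for (size_t i = 0; i < old_size; i++) {
            seg_t *vsp = old_table[i];

            while (vsp != NULL) {                /* move the whole chain */
                seg_t *next = vsp->vs_knext;
                seg_t **bucket = bucket_of(new_table, new_mask, vsp->vs_start);

                vsp->vs_knext = *bucket;         /* push onto the new chain */
                *bucket = vsp;
                vsp = next;
            }
        }
    }

    int
    main(void)
    {
        seg_t s = { 0x7000, NULL };
        seg_t *old_table[2] = { NULL }, *new_table[8] = { NULL };

        *bucket_of(old_table, 1, s.vs_start) = &s;
        rehash(old_table, 2, new_table, 7);
        return (new_table[7] == &s ? 0 : 1);     /* 0x7000 >> 12 == 7 */
    }
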
1707 vmem_t *vmp; in vmem_update() local
1710 for (vmp = vmem_list; vmp != NULL; vmp = vmp->vm_next) { in vmem_update()
1716 cv_broadcast(&vmp->vm_cv); in vmem_update()
1721 vmem_hash_rescale(vmp); in vmem_update()
1729 vmem_qcache_reap(vmem_t *vmp) in vmem_qcache_reap() argument
1737 if (vmp->vm_qcache[i]) in vmem_qcache_reap()
1738 kmem_cache_reap_now(vmp->vm_qcache[i]); in vmem_qcache_reap()