Lines matching refs:vmp

Cross-reference listing: every line of the vmem allocator source that references vmp, the arena pointer (a vmem_t *). Each entry gives the source line number, the matching code, and the enclosing function; entries tagged "argument" or "local" are the lines where vmp is declared.

404 vmem_getseg(vmem_t *vmp)  in vmem_getseg()  argument
408 ASSERT(vmp->vm_nsegfree > 0); in vmem_getseg()
410 vsp = vmp->vm_segfree; in vmem_getseg()
411 vmp->vm_segfree = vsp->vs_knext; in vmem_getseg()
412 vmp->vm_nsegfree--; in vmem_getseg()
421 vmem_putseg(vmem_t *vmp, vmem_seg_t *vsp) in vmem_putseg() argument
423 vsp->vs_knext = vmp->vm_segfree; in vmem_putseg()
424 vmp->vm_segfree = vsp; in vmem_putseg()
425 vmp->vm_nsegfree++; in vmem_putseg()
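vmem_getseg() and vmem_putseg() above are the whole mechanism: each arena keeps a private LIFO stack of spare segment structures, threaded through vs_knext and counted by vm_nsegfree, so the allocator never has to allocate metadata in the middle of an operation that holds the arena lock. A minimal userland sketch of the same stack (seg_t and arena_t are illustrative stand-ins for the kernel's vmem_seg_t and vmem_t):

#include <assert.h>
#include <stddef.h>

typedef struct seg {
        struct seg *vs_knext;           /* freelist linkage */
} seg_t;

typedef struct arena {
        seg_t *vm_segfree;              /* head of the seg freelist */
        int vm_nsegfree;                /* segs currently on it */
} arena_t;

static seg_t *
getseg(arena_t *vmp)
{
        seg_t *vsp;

        assert(vmp->vm_nsegfree > 0);   /* caller populated first */
        vsp = vmp->vm_segfree;
        vmp->vm_segfree = vsp->vs_knext;
        vmp->vm_nsegfree--;
        return (vsp);
}

static void
putseg(arena_t *vmp, seg_t *vsp)
{
        vsp->vs_knext = vmp->vm_segfree;
        vmp->vm_segfree = vsp;
        vmp->vm_nsegfree++;
}

int
main(void)
{
        arena_t a = { NULL, 0 };
        seg_t s1, s2;

        putseg(&a, &s1);
        putseg(&a, &s2);
        assert(getseg(&a) == &s2);      /* LIFO order */
        assert(getseg(&a) == &s1);
        return (0);
}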
432 vmem_freelist_insert(vmem_t *vmp, vmem_seg_t *vsp) in vmem_freelist_insert() argument
436 ASSERT(*VMEM_HASH(vmp, vsp->vs_start) != vsp); in vmem_freelist_insert()
438 vprev = (vmem_seg_t *)&vmp->vm_freelist[highbit(VS_SIZE(vsp)) - 1]; in vmem_freelist_insert()
440 vmp->vm_freemap |= VS_SIZE(vprev); in vmem_freelist_insert()
443 cv_broadcast(&vmp->vm_cv); in vmem_freelist_insert()
450 vmem_freelist_delete(vmem_t *vmp, vmem_seg_t *vsp) in vmem_freelist_delete() argument
452 ASSERT(*VMEM_HASH(vmp, vsp->vs_start) != vsp); in vmem_freelist_delete()
460 ASSERT(vmp->vm_freemap & VS_SIZE(vsp->vs_kprev)); in vmem_freelist_delete()
461 vmp->vm_freemap ^= VS_SIZE(vsp->vs_kprev); in vmem_freelist_delete()
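vmem_freelist_insert() files a free segment on one of the power-of-two freelists, picking bucket highbit(VS_SIZE(vsp)) - 1, and sets that bucket's bit in the vm_freemap summary map (VS_SIZE(vprev) of a freelist sentinel is exactly the bucket's bit value; see the vmem_create_common() sketch further down). vmem_freelist_delete() XORs the bit back off when it unlinks the last segment on a list. A standalone sketch of that bookkeeping, with a hand-rolled highbit() standing in for the kernel's:

#include <assert.h>
#include <stdint.h>

/* 1-based index of the most significant set bit; 0 for x == 0 */
static int
highbit(uintptr_t x)
{
        int h = 0;

        while (x != 0) {
                h++;
                x >>= 1;
        }
        return (h);
}

int
main(void)
{
        uintptr_t freemap = 0;
        int b;

        /* file a 96-byte free segment: 2^6 <= 96 < 2^7, so bucket 6 */
        b = highbit(96) - 1;
        assert(b == 6);
        freemap |= (uintptr_t)1 << b;   /* freelist_insert: set the bit */

        /* unlink the last segment on that list: clear the bit again */
        freemap ^= (uintptr_t)1 << b;   /* freelist_delete */
        assert(freemap == 0);
        return (0);
}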
470 vmem_hash_insert(vmem_t *vmp, vmem_seg_t *vsp) in vmem_hash_insert() argument
475 bucket = VMEM_HASH(vmp, vsp->vs_start); in vmem_hash_insert()
488 vmp->vm_kstat.vk_alloc.value.ui64++; in vmem_hash_insert()
489 vmp->vm_kstat.vk_mem_inuse.value.ui64 += VS_SIZE(vsp); in vmem_hash_insert()
496 vmem_hash_delete(vmem_t *vmp, uintptr_t addr, size_t size) in vmem_hash_delete() argument
500 prev_vspp = VMEM_HASH(vmp, addr); in vmem_hash_delete()
506 vmp->vm_kstat.vk_lookup.value.ui64++; in vmem_hash_delete()
512 (void *)vmp, addr, size); in vmem_hash_delete()
515 (void *)vmp, addr, size, VS_SIZE(vsp)); in vmem_hash_delete()
517 vmp->vm_kstat.vk_free.value.ui64++; in vmem_hash_delete()
518 vmp->vm_kstat.vk_mem_inuse.value.ui64 -= size; in vmem_hash_delete()
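vmem_hash_insert() and vmem_hash_delete() track allocated segments in a hash table keyed by start address and chained through vs_knext; on free, the chain walk both finds the segment and validates the caller, panicking on a free of unallocated memory or a free of the wrong size (the two error paths above). A compact userland version, where the table size and hash function are illustrative:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define HASH_SIZE       16      /* power of two; illustrative */

typedef struct seg {
        uintptr_t vs_start, vs_end;
        struct seg *vs_knext;   /* hash chain linkage */
} seg_t;

static seg_t *hash_table[HASH_SIZE];

static seg_t **
hash_bucket(uintptr_t addr)
{
        return (&hash_table[(addr >> 3) & (HASH_SIZE - 1)]);
}

static void
hash_insert(seg_t *vsp)
{
        seg_t **bucket = hash_bucket(vsp->vs_start);

        vsp->vs_knext = *bucket;
        *bucket = vsp;
}

static seg_t *
hash_delete(uintptr_t addr, size_t size)
{
        seg_t *vsp, **prev_vspp = hash_bucket(addr);

        while ((vsp = *prev_vspp) != NULL) {
                if (vsp->vs_start == addr) {
                        *prev_vspp = vsp->vs_knext;     /* unlink */
                        break;
                }
                prev_vspp = &vsp->vs_knext;
        }
        if (vsp == NULL) {
                fprintf(stderr, "free of unallocated memory\n");
                abort();
        }
        if (vsp->vs_end - vsp->vs_start != size) {
                fprintf(stderr, "free of wrong size\n");
                abort();
        }
        return (vsp);
}

int
main(void)
{
        seg_t s = { 0x1000, 0x1040, NULL };

        hash_insert(&s);
        assert(hash_delete(0x1000, 0x40) == &s);
        return (0);
}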
527 vmem_seg_create(vmem_t *vmp, vmem_seg_t *vprev, uintptr_t start, uintptr_t end) in vmem_seg_create() argument
529 vmem_seg_t *newseg = vmem_getseg(vmp); in vmem_seg_create()
545 vmem_seg_destroy(vmem_t *vmp, vmem_seg_t *vsp) in vmem_seg_destroy() argument
550 vmem_putseg(vmp, vsp); in vmem_seg_destroy()
557 vmem_span_create(vmem_t *vmp, void *vaddr, size_t size, uint8_t import) in vmem_span_create() argument
563 ASSERT(MUTEX_HELD(&vmp->vm_lock)); in vmem_span_create()
565 if ((start | end) & (vmp->vm_quantum - 1)) in vmem_span_create()
567 (void *)vmp, vaddr, size); in vmem_span_create()
569 span = vmem_seg_create(vmp, vmp->vm_seg0.vs_aprev, start, end); in vmem_span_create()
572 VMEM_INSERT(vmp->vm_seg0.vs_kprev, span, k); in vmem_span_create()
574 newseg = vmem_seg_create(vmp, span, start, end); in vmem_span_create()
575 vmem_freelist_insert(vmp, newseg); in vmem_span_create()
578 vmp->vm_kstat.vk_mem_import.value.ui64 += size; in vmem_span_create()
579 vmp->vm_kstat.vk_mem_total.value.ui64 += size; in vmem_span_create()
588 vmem_span_destroy(vmem_t *vmp, vmem_seg_t *vsp) in vmem_span_destroy() argument
593 ASSERT(MUTEX_HELD(&vmp->vm_lock)); in vmem_span_destroy()
597 vmp->vm_kstat.vk_mem_import.value.ui64 -= size; in vmem_span_destroy()
598 vmp->vm_kstat.vk_mem_total.value.ui64 -= size; in vmem_span_destroy()
602 vmem_seg_destroy(vmp, vsp); in vmem_span_destroy()
603 vmem_seg_destroy(vmp, span); in vmem_span_destroy()
612 vmem_seg_alloc(vmem_t *vmp, vmem_seg_t *vsp, uintptr_t addr, size_t size) in vmem_seg_alloc() argument
617 size_t realsize = P2ROUNDUP(size, vmp->vm_quantum); in vmem_seg_alloc()
620 ASSERT(P2PHASE(vs_start, vmp->vm_quantum) == 0); in vmem_seg_alloc()
621 ASSERT(P2PHASE(addr, vmp->vm_quantum) == 0); in vmem_seg_alloc()
634 vsp = vmem_seg_create(vmp, vsp->vs_aprev, addr, addr + size); in vmem_seg_alloc()
635 vmem_hash_insert(vmp, vsp); in vmem_seg_alloc()
639 vmem_freelist_delete(vmp, vsp); in vmem_seg_alloc()
642 vmem_freelist_insert(vmp, in vmem_seg_alloc()
643 vmem_seg_create(vmp, vsp, addr_end, vs_end)); in vmem_seg_alloc()
646 vmem_freelist_insert(vmp, in vmem_seg_alloc()
647 vmem_seg_create(vmp, vsp->vs_aprev, vs_start, addr)); in vmem_seg_alloc()
652 vmem_hash_insert(vmp, vsp); in vmem_seg_alloc()
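vmem_seg_alloc() rounds the request up to the arena quantum and carves [addr, addr + realsize) out of a free segment, creating up to two remainder free segments, one on each side (the seg_create/freelist_insert pairs above). The case analysis in isolation, using the kernel's P2ROUNDUP definition and an illustrative 64-byte quantum:

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

#define P2ROUNDUP(x, a) (-(-(x) & -(a)))        /* a is a power of two */

typedef struct {
        uintptr_t start, end;
} range_t;

/* carve [addr, addr + realsize) from [start, end); return remainders */
static int
seg_split(uintptr_t start, uintptr_t end, uintptr_t addr, size_t realsize,
    range_t rem[2])
{
        uintptr_t addr_end = addr + realsize;
        int n = 0;

        assert(start <= addr && addr_end <= end);
        if (start < addr)                       /* free space in front */
                rem[n++] = (range_t){ start, addr };
        if (addr_end < end)                     /* free space behind */
                rem[n++] = (range_t){ addr_end, end };
        return (n);
}

int
main(void)
{
        range_t rem[2];
        size_t realsize = P2ROUNDUP((size_t)100, (size_t)64);

        assert(realsize == 128);                /* rounded to the quantum */

        /* an allocation in the middle leaves two free remainders */
        assert(seg_split(0x1000, 0x2000, 0x1400, realsize, rem) == 2);
        assert(rem[0].start == 0x1000 && rem[0].end == 0x1400);
        assert(rem[1].start == 0x1400 + 128 && rem[1].end == 0x2000);
        return (0);
}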
673 vmem_populate(vmem_t *vmp, int vmflag) in vmem_populate() argument
682 while (vmp->vm_nsegfree < VMEM_MINFREE && in vmem_populate()
684 vmem_putseg(vmp, vsp); in vmem_populate()
686 if (vmp->vm_nsegfree >= VMEM_MINFREE) in vmem_populate()
693 ASSERT(vmp->vm_cflags & VMC_POPULATOR); in vmem_populate()
697 mutex_exit(&vmp->vm_lock); in vmem_populate()
723 mutex_enter(&vmp->vm_lock); in vmem_populate()
724 vmp->vm_kstat.vk_populate_fail.value.ui64++; in vmem_populate()
740 mutex_enter(&vmp->vm_lock); in vmem_populate()
746 while (vmp->vm_nsegfree < VMEM_MINFREE) in vmem_populate()
747 vmem_putseg(vmp, (vmem_seg_t *)(p + --nseg * vmem_seg_size)); in vmem_populate()
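vmem_populate() guarantees the VMEM_MINFREE reserve of segment structures before any operation that might consume them: it first drains the global seg pool, and if that is not enough, imports a block of heap, carves it into vmem_seg_t-sized pieces, and pushes them (the loop at the end above). A heavily simplified sketch of just the carve-and-push step, with malloc() standing in for the kernel heap import and illustrative sizes; the real function also restocks the other populator arenas and drops and retakes the arena lock around the import:

#include <assert.h>
#include <stdlib.h>

#define VMEM_MINFREE    4       /* reserve; illustrative value */
#define SEG_SIZE        64      /* stand-in for sizeof (vmem_seg_t) */
#define PAGESIZE        4096

typedef struct seg {
        struct seg *vs_knext;
} seg_t;

static seg_t *segfree;          /* the arena's vm_segfree */
static int nsegfree;            /* the arena's vm_nsegfree */

static void
putseg(seg_t *vsp)
{
        vsp->vs_knext = segfree;
        segfree = vsp;
        nsegfree++;
}

static int
populate(void)
{
        char *p;
        size_t nseg;

        if (nsegfree >= VMEM_MINFREE)
                return (1);
        if ((p = malloc(PAGESIZE)) == NULL)
                return (0);     /* vk_populate_fail++ in the kernel */
        nseg = PAGESIZE / SEG_SIZE;
        /* carve the block from the top down into segment structures */
        while (nsegfree < VMEM_MINFREE)
                putseg((seg_t *)(p + --nseg * SEG_SIZE));
        return (1);
}

int
main(void)
{
        assert(populate());
        assert(nsegfree >= VMEM_MINFREE);
        return (0);
}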
763 vmem_advance(vmem_t *vmp, vmem_seg_t *walker, vmem_seg_t *afterme) in vmem_advance() argument
781 vmem_freelist_delete(vmp, vnext); in vmem_advance()
782 vmem_freelist_delete(vmp, vprev); in vmem_advance()
784 vmem_freelist_insert(vmp, vprev); in vmem_advance()
785 vmem_seg_destroy(vmp, vnext); in vmem_advance()
797 vmp->vm_source_free != NULL && in vmem_advance()
803 vmem_freelist_delete(vmp, vsp); in vmem_advance()
804 vmem_span_destroy(vmp, vsp); in vmem_advance()
805 mutex_exit(&vmp->vm_lock); in vmem_advance()
806 vmp->vm_source_free(vmp->vm_source, vaddr, size); in vmem_advance()
807 mutex_enter(&vmp->vm_lock); in vmem_advance()
819 vmem_nextfit_alloc(vmem_t *vmp, size_t size, int vmflag) in vmem_nextfit_alloc() argument
823 size_t realsize = P2ROUNDUP(size, vmp->vm_quantum); in vmem_nextfit_alloc()
826 mutex_enter(&vmp->vm_lock); in vmem_nextfit_alloc()
828 if (vmp->vm_nsegfree < VMEM_MINFREE && !vmem_populate(vmp, vmflag)) { in vmem_nextfit_alloc()
829 mutex_exit(&vmp->vm_lock); in vmem_nextfit_alloc()
843 rotor = &vmp->vm_rotor; in vmem_nextfit_alloc()
850 vmem_hash_insert(vmp, in vmem_nextfit_alloc()
851 vmem_seg_create(vmp, rotor->vs_aprev, addr, addr + size)); in vmem_nextfit_alloc()
852 mutex_exit(&vmp->vm_lock); in vmem_nextfit_alloc()
861 vmp->vm_kstat.vk_search.value.ui64++; in vmem_nextfit_alloc()
874 vmem_advance(vmp, rotor, rotor->vs_anext); in vmem_nextfit_alloc()
883 if (vmp->vm_source_alloc != NULL || in vmem_nextfit_alloc()
885 mutex_exit(&vmp->vm_lock); in vmem_nextfit_alloc()
886 return (vmem_xalloc(vmp, size, vmp->vm_quantum, in vmem_nextfit_alloc()
889 vmp->vm_kstat.vk_wait.value.ui64++; in vmem_nextfit_alloc()
890 cv_wait(&vmp->vm_cv, &vmp->vm_lock); in vmem_nextfit_alloc()
899 vsp = vmem_seg_alloc(vmp, vsp, addr, size); in vmem_nextfit_alloc()
907 vmem_advance(vmp, rotor, vsp); in vmem_nextfit_alloc()
908 mutex_exit(&vmp->vm_lock); in vmem_nextfit_alloc()
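vmem_nextfit_alloc() implements VM_NEXTFIT: a "rotor" segment lives in the arena's circular segment list, each search starts where the last one stopped, and vmem_advance() moves the rotor past the chosen segment, which is what keeps identifier arenas from immediately reusing a just-freed value. The rotor's role in miniature, over a circular list of fixed extents (the kernel version also splits segments, imports, and waits):

#include <assert.h>
#include <stddef.h>

typedef struct ext {
        struct ext *next;       /* circular linkage */
        size_t size;            /* 0 once allocated */
} ext_t;

static ext_t *rotor;            /* where the next search resumes */

static ext_t *
nextfit(size_t size)
{
        ext_t *e = rotor;

        do {
                if (e->size >= size) {
                        e->size = 0;            /* take the extent */
                        rotor = e->next;        /* resume past it */
                        return (e);
                }
                e = e->next;
        } while (e != rotor);
        return (NULL);                          /* nothing fits */
}

int
main(void)
{
        ext_t a = { NULL, 4 }, b = { NULL, 4 }, c = { NULL, 4 };

        a.next = &b;
        b.next = &c;
        c.next = &a;
        rotor = &a;

        assert(nextfit(4) == &a);       /* first fit at the rotor */
        assert(nextfit(4) == &b);       /* resumed after a, not at it */
        assert(nextfit(4) == &c);
        assert(nextfit(4) == NULL);     /* all three consumed */
        return (0);
}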
920 vmem_canalloc(vmem_t *vmp, size_t size) in vmem_canalloc() argument
924 ASSERT(MUTEX_HELD(&vmp->vm_lock)); in vmem_canalloc()
927 flist = lowbit(P2ALIGN(vmp->vm_freemap, size)); in vmem_canalloc()
929 flist = lowbit(P2ALIGN(vmp->vm_freemap, 1UL << hb)); in vmem_canalloc()
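vmem_canalloc() is the "instant fit" test: P2ALIGN(vm_freemap, size) masks off the freelists whose segments might be too small, and lowbit() then picks the first nonempty list that is guaranteed to satisfy the request, without touching a single segment. For a non-power-of-two size the guarantee needs the next bucket up, hence the 1UL << highbit(size) variant. A standalone demonstration, with hand-rolled bit helpers in place of the kernel's:

#include <assert.h>
#include <stdint.h>

#define P2ALIGN(x, a)   ((x) & -(a))    /* clear bits below bit log2(a) */

static int
lowbit(uintptr_t x)     /* 1-based index of lowest set bit; 0 if none */
{
        int b = 1;

        if (x == 0)
                return (0);
        while ((x & 1) == 0) {
                x >>= 1;
                b++;
        }
        return (b);
}

static int
highbit(uintptr_t x)    /* 1-based index of highest set bit; 0 if none */
{
        int b = 0;

        while (x != 0) {
                b++;
                x >>= 1;
        }
        return (b);
}

int
main(void)
{
        /* buckets 4, 6, 9 nonempty: segs of [16,32), [64,128), [512,1024) */
        uintptr_t freemap = ((uintptr_t)1 << 4) | ((uintptr_t)1 << 6) |
            ((uintptr_t)1 << 9);

        /* 64 is a power of two: bucket 6 already guarantees a fit */
        assert(lowbit(P2ALIGN(freemap, (uintptr_t)64)) == 7);

        /* 200 is not: a [128,256) seg might be too small, need bucket 8+ */
        int hb = highbit(200);
        assert(lowbit(P2ALIGN(freemap, (uintptr_t)1 << hb)) == 10);
        return (0);
}

lowbit()'s 1-based result is also why the vmem_xalloc() fragments above index vm_freelist[flist - 1].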
940 vmem_xalloc(vmem_t *vmp, size_t size, size_t align_arg, size_t phase, in vmem_xalloc() argument
946 uintptr_t align = (align_arg != 0) ? align_arg : vmp->vm_quantum; in vmem_xalloc()
952 if ((align | phase | nocross) & (vmp->vm_quantum - 1)) in vmem_xalloc()
955 (void *)vmp, size, align_arg, phase, nocross, in vmem_xalloc()
962 (void *)vmp, size, align_arg, phase, nocross, in vmem_xalloc()
968 (void *)vmp, size, align_arg, phase, nocross, in vmem_xalloc()
971 if ((mtbf = vmem_mtbf | vmp->vm_mtbf) != 0 && gethrtime() % mtbf == 0 && in vmem_xalloc()
977 mutex_enter(&vmp->vm_lock); in vmem_xalloc()
979 if (vmp->vm_nsegfree < VMEM_MINFREE && in vmem_xalloc()
980 !vmem_populate(vmp, vmflag)) in vmem_xalloc()
1002 flist = lowbit(P2ALIGN(vmp->vm_freemap, size)); in vmem_xalloc()
1005 if ((vmp->vm_freemap >> hb) == 0 || in vmem_xalloc()
1009 flist = lowbit(P2ALIGN(vmp->vm_freemap, 1UL << hb)); in vmem_xalloc()
1013 vmp->vm_freelist[flist - 1].vs_knext; in vmem_xalloc()
1015 vmp->vm_kstat.vk_search.value.ui64++; in vmem_xalloc()
1027 flist = lowbit(P2ALIGN(vmp->vm_freemap, in vmem_xalloc()
1031 vsp = (vmem_seg_t *)&vmp->vm_freelist[flist]; in vmem_xalloc()
1058 if (vmp->vm_source_alloc != NULL && nocross == 0 && in vmem_xalloc()
1061 size_t aquantum = MAX(vmp->vm_quantum, in vmem_xalloc()
1062 vmp->vm_source->vm_quantum); in vmem_xalloc()
1065 !(vmp->vm_cflags & VMC_XALIGN)) { in vmem_xalloc()
1067 align - vmp->vm_quantum : align - aquantum; in vmem_xalloc()
1070 aneeded = MAX(size + aphase, vmp->vm_min_import); in vmem_xalloc()
1081 mutex_exit(&vmp->vm_lock); in vmem_xalloc()
1094 if (size == asize && !(vmp->vm_cflags & VMC_XALLOC)) in vmem_xalloc()
1098 align <= vmp->vm_source->vm_quantum) in vmem_xalloc()
1104 ASSERT(vmp->vm_nsegfree >= resv); in vmem_xalloc()
1105 vmp->vm_nsegfree -= resv; /* reserve our segs */ in vmem_xalloc()
1106 mutex_exit(&vmp->vm_lock); in vmem_xalloc()
1107 if (vmp->vm_cflags & VMC_XALLOC) { in vmem_xalloc()
1112 (uintptr_t)vmp->vm_source_alloc; in vmem_xalloc()
1114 vaddr = vmem_ximport(vmp->vm_source, in vmem_xalloc()
1118 vmp->vm_source->vm_quantum) == 0); in vmem_xalloc()
1119 ASSERT(!(vmp->vm_cflags & VMC_XALIGN) || in vmem_xalloc()
1122 vaddr = vmp->vm_source_alloc(vmp->vm_source, in vmem_xalloc()
1125 mutex_enter(&vmp->vm_lock); in vmem_xalloc()
1126 vmp->vm_nsegfree += resv; /* claim reservation */ in vmem_xalloc()
1127 aneeded = size + align - vmp->vm_quantum; in vmem_xalloc()
1128 aneeded = P2ROUNDUP(aneeded, vmp->vm_quantum); in vmem_xalloc()
1139 vmp->vm_source_free != NULL && in vmem_xalloc()
1140 vmem_canalloc(vmp, aneeded)) { in vmem_xalloc()
1147 vbest = vmem_span_create(vmp, vaddr, asize, 1); in vmem_xalloc()
1150 } else if (vmem_canalloc(vmp, aneeded)) { in vmem_xalloc()
1168 mutex_exit(&vmp->vm_lock); in vmem_xalloc()
1169 if (vmp->vm_cflags & VMC_IDENTIFIER) in vmem_xalloc()
1173 mutex_enter(&vmp->vm_lock); in vmem_xalloc()
1176 vmp->vm_kstat.vk_wait.value.ui64++; in vmem_xalloc()
1177 cv_wait(&vmp->vm_cv, &vmp->vm_lock); in vmem_xalloc()
1187 (void) vmem_seg_alloc(vmp, vbest, addr, size); in vmem_xalloc()
1188 mutex_exit(&vmp->vm_lock); in vmem_xalloc()
1190 vmp->vm_source_free(vmp->vm_source, xvaddr, xsize); in vmem_xalloc()
1197 vmp->vm_kstat.vk_fail.value.ui64++; in vmem_xalloc()
1198 mutex_exit(&vmp->vm_lock); in vmem_xalloc()
1202 (void *)vmp, size, align_arg, phase, nocross, in vmem_xalloc()
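Several places in vmem_xalloc() above compute aneeded = size + align - quantum: to guarantee room for an aligned block of size bytes inside an arbitrary quantum-aligned extent, that is exactly the head-room required, no more. A worked check of the worst case, with the P2* macros as the kernel defines them and illustrative sizes:

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

#define QUANTUM         64
#define P2ROUNDUP(x, a) (-(-(x) & -(a)))
#define P2PHASE(x, a)   ((x) & ((a) - 1))

int
main(void)
{
        size_t size = 256, align = 1024;
        size_t aneeded = P2ROUNDUP(size + align - QUANTUM, (size_t)QUANTUM);

        assert(aneeded == 1216);        /* 256 + 1024 - 64 */

        /* worst case: extent starts one quantum past an alignment boundary */
        uintptr_t start = 1024 + QUANTUM;
        uintptr_t addr = P2ROUNDUP(start, (uintptr_t)align);

        assert(P2PHASE(addr, (uintptr_t)align) == 0);   /* aligned */
        assert(addr + size == start + aneeded);         /* fits exactly */
        return (0);
}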
1214 vmem_xfree(vmem_t *vmp, void *vaddr, size_t size) in vmem_xfree() argument
1218 mutex_enter(&vmp->vm_lock); in vmem_xfree()
1220 vsp = vmem_hash_delete(vmp, (uintptr_t)vaddr, size); in vmem_xfree()
1221 vsp->vs_end = P2ROUNDUP(vsp->vs_end, vmp->vm_quantum); in vmem_xfree()
1229 vmem_freelist_delete(vmp, vnext); in vmem_xfree()
1231 vmem_seg_destroy(vmp, vnext); in vmem_xfree()
1240 vmem_freelist_delete(vmp, vprev); in vmem_xfree()
1242 vmem_seg_destroy(vmp, vsp); in vmem_xfree()
1249 if (vsp->vs_aprev->vs_import && vmp->vm_source_free != NULL && in vmem_xfree()
1255 vmem_span_destroy(vmp, vsp); in vmem_xfree()
1256 mutex_exit(&vmp->vm_lock); in vmem_xfree()
1257 vmp->vm_source_free(vmp->vm_source, vaddr, size); in vmem_xfree()
1259 vmem_freelist_insert(vmp, vsp); in vmem_xfree()
1260 mutex_exit(&vmp->vm_lock); in vmem_xfree()
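vmem_xfree() coalesces eagerly: the freed segment absorbs a free neighbor on either side (destroying the neighbor's segment structure) before it is refiled, so free extents stay maximal; and if the result spans an entire imported span and the arena has a vm_source_free function, the whole span is handed back to the source arena instead. The merge step, sketched on bare ranges:

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

typedef struct {
        uintptr_t start, end;
        int free;               /* 1 free, 0 allocated, -1 destroyed */
} seg_t;

/* free s, absorbing its address-ordered neighbors when they are free */
static void
free_and_coalesce(seg_t *prev, seg_t *s, seg_t *next)
{
        s->free = 1;
        if (next != NULL && next->free == 1 && s->end == next->start) {
                s->end = next->end;     /* absorb the following segment */
                next->free = -1;
        }
        if (prev != NULL && prev->free == 1 && prev->end == s->start) {
                s->start = prev->start; /* absorb the preceding one */
                prev->free = -1;
        }
}

int
main(void)
{
        seg_t a = { 0x1000, 0x1400, 1 };        /* free */
        seg_t b = { 0x1400, 0x1800, 0 };        /* being freed */
        seg_t c = { 0x1800, 0x2000, 1 };        /* free */

        free_and_coalesce(&a, &b, &c);
        assert(b.free == 1);
        assert(b.start == 0x1000 && b.end == 0x2000);   /* one extent */
        return (0);
}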
1272 vmem_alloc(vmem_t *vmp, size_t size, int vmflag) in vmem_alloc() argument
1280 if (size - 1 < vmp->vm_qcache_max) in vmem_alloc()
1281 return (kmem_cache_alloc(vmp->vm_qcache[(size - 1) >> in vmem_alloc()
1282 vmp->vm_qshift], vmflag & VM_KMFLAGS)); in vmem_alloc()
1284 if ((mtbf = vmem_mtbf | vmp->vm_mtbf) != 0 && gethrtime() % mtbf == 0 && in vmem_alloc()
1289 return (vmem_nextfit_alloc(vmp, size, vmflag)); in vmem_alloc()
1292 return (vmem_xalloc(vmp, size, vmp->vm_quantum, 0, 0, in vmem_alloc()
1298 mutex_enter(&vmp->vm_lock); in vmem_alloc()
1300 if (vmp->vm_nsegfree >= VMEM_MINFREE || vmem_populate(vmp, vmflag)) { in vmem_alloc()
1302 flist = lowbit(P2ALIGN(vmp->vm_freemap, size)); in vmem_alloc()
1304 flist = lowbit(P2ALIGN(vmp->vm_freemap, 1UL << hb)); in vmem_alloc()
1308 mutex_exit(&vmp->vm_lock); in vmem_alloc()
1309 return (vmem_xalloc(vmp, size, vmp->vm_quantum, in vmem_alloc()
1314 vsp = vmp->vm_freelist[flist].vs_knext; in vmem_alloc()
1319 (void) vmem_seg_alloc(vmp, vsp, addr, size); in vmem_alloc()
1320 mutex_exit(&vmp->vm_lock); in vmem_alloc()
1328 vmem_free(vmem_t *vmp, void *vaddr, size_t size) in vmem_free() argument
1330 if (size - 1 < vmp->vm_qcache_max) in vmem_free()
1331 kmem_cache_free(vmp->vm_qcache[(size - 1) >> vmp->vm_qshift], in vmem_free()
1334 vmem_xfree(vmp, vaddr, size); in vmem_free()
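vmem_alloc() and vmem_free() short-circuit small requests to the quantum caches: any size in (0, vm_qcache_max] maps to one of the per-multiple-of-quantum kmem caches via (size - 1) >> vm_qshift. Note that the guard itself, size - 1 < vm_qcache_max, is an unsigned trick that also rejects size == 0, since 0 - 1 wraps to the maximum value. A check of the index arithmetic with illustrative cache parameters:

#include <assert.h>
#include <stddef.h>

#define QUANTUM         512     /* arena quantum (power of two) */
#define QSHIFT          9       /* log2(QUANTUM), the vm_qshift */
#define NQCACHE         4       /* caches serve 1..4 quanta */
#define QCACHE_MAX      (NQCACHE << QSHIFT)

int
main(void)
{
        size_t size;

        for (size = 1; size <= QCACHE_MAX; size++) {
                size_t idx = (size - 1) >> QSHIFT;

                assert(idx < NQCACHE);          /* always a valid cache */
                /* cache idx holds objects of (idx + 1) * QUANTUM bytes */
                assert(size <= (idx + 1) * QUANTUM);
                assert(size > idx * QUANTUM);
        }
        /* the unsigned guard rejects size == 0 as a side effect */
        assert(!((size_t)0 - 1 < QCACHE_MAX));
        return (0);
}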
1341 vmem_contains(vmem_t *vmp, void *vaddr, size_t size) in vmem_contains() argument
1346 vmem_seg_t *seg0 = &vmp->vm_seg0; in vmem_contains()
1348 mutex_enter(&vmp->vm_lock); in vmem_contains()
1349 vmp->vm_kstat.vk_contains.value.ui64++; in vmem_contains()
1351 vmp->vm_kstat.vk_contains_search.value.ui64++; in vmem_contains()
1356 mutex_exit(&vmp->vm_lock); in vmem_contains()
1364 vmem_add(vmem_t *vmp, void *vaddr, size_t size, int vmflag) in vmem_add() argument
1368 (void *)vmp, vaddr, size); in vmem_add()
1370 ASSERT(!vmem_contains(vmp, vaddr, size)); in vmem_add()
1372 mutex_enter(&vmp->vm_lock); in vmem_add()
1373 if (vmem_populate(vmp, vmflag)) in vmem_add()
1374 (void) vmem_span_create(vmp, vaddr, size, 0); in vmem_add()
1377 mutex_exit(&vmp->vm_lock); in vmem_add()
1390 vmem_walk(vmem_t *vmp, int typemask, in vmem_walk() argument
1394 vmem_seg_t *seg0 = &vmp->vm_seg0; in vmem_walk()
1403 mutex_enter(&vmp->vm_lock); in vmem_walk()
1410 vmem_advance(vmp, &walker, vsp); in vmem_walk()
1411 mutex_exit(&vmp->vm_lock); in vmem_walk()
1413 mutex_enter(&vmp->vm_lock); in vmem_walk()
1420 vmem_advance(vmp, &walker, NULL); in vmem_walk()
1421 mutex_exit(&vmp->vm_lock); in vmem_walk()
1432 vmem_size(vmem_t *vmp, int typemask) in vmem_size() argument
1437 size += vmp->vm_kstat.vk_mem_inuse.value.ui64; in vmem_size()
1439 size += vmp->vm_kstat.vk_mem_total.value.ui64 - in vmem_size()
1440 vmp->vm_kstat.vk_mem_inuse.value.ui64; in vmem_size()
1461 vmem_t *vmp, *cur, **vmpp; in vmem_create_common() local
1467 vmp = vmem_alloc(vmem_vmem_arena, sizeof (vmem_t), in vmem_create_common()
1471 vmp = &vmem0[id - 1]; in vmem_create_common()
1478 if (vmp == NULL) in vmem_create_common()
1480 bzero(vmp, sizeof (vmem_t)); in vmem_create_common()
1482 (void) snprintf(vmp->vm_name, VMEM_NAMELEN, "%s", name); in vmem_create_common()
1483 mutex_init(&vmp->vm_lock, NULL, MUTEX_DEFAULT, NULL); in vmem_create_common()
1484 cv_init(&vmp->vm_cv, NULL, CV_DEFAULT, NULL); in vmem_create_common()
1485 vmp->vm_cflags = vmflag; in vmem_create_common()
1488 vmp->vm_quantum = quantum; in vmem_create_common()
1489 vmp->vm_qshift = highbit(quantum) - 1; in vmem_create_common()
1490 nqcache = MIN(qcache_max >> vmp->vm_qshift, VMEM_NQCACHE_MAX); in vmem_create_common()
1493 vfp = &vmp->vm_freelist[i]; in vmem_create_common()
1499 vmp->vm_freelist[0].vs_kprev = NULL; in vmem_create_common()
1500 vmp->vm_freelist[VMEM_FREELISTS].vs_knext = NULL; in vmem_create_common()
1501 vmp->vm_freelist[VMEM_FREELISTS].vs_end = 0; in vmem_create_common()
1502 vmp->vm_hash_table = vmp->vm_hash0; in vmem_create_common()
1503 vmp->vm_hash_mask = VMEM_HASH_INITIAL - 1; in vmem_create_common()
1504 vmp->vm_hash_shift = highbit(vmp->vm_hash_mask); in vmem_create_common()
1506 vsp = &vmp->vm_seg0; in vmem_create_common()
1513 vsp = &vmp->vm_rotor; in vmem_create_common()
1515 VMEM_INSERT(&vmp->vm_seg0, vsp, a); in vmem_create_common()
1517 bcopy(&vmem_kstat_template, &vmp->vm_kstat, sizeof (vmem_kstat_t)); in vmem_create_common()
1519 vmp->vm_id = id; in vmem_create_common()
1521 vmp->vm_kstat.vk_source_id.value.ui32 = source->vm_id; in vmem_create_common()
1522 vmp->vm_source = source; in vmem_create_common()
1523 vmp->vm_source_alloc = afunc; in vmem_create_common()
1524 vmp->vm_source_free = ffunc; in vmem_create_common()
1531 if (vmp->vm_cflags & VMC_NO_QCACHE) { in vmem_create_common()
1532 vmp->vm_min_import = in vmem_create_common()
1533 VMEM_QCACHE_SLABSIZE(nqcache << vmp->vm_qshift); in vmem_create_common()
1539 vmp->vm_qcache_max = nqcache << vmp->vm_qshift; in vmem_create_common()
1542 (void) sprintf(buf, "%s_%lu", vmp->vm_name, in vmem_create_common()
1544 vmp->vm_qcache[i] = kmem_cache_create(buf, in vmem_create_common()
1546 NULL, vmp, KMC_QCACHE | KMC_NOTOUCH); in vmem_create_common()
1550 if ((vmp->vm_ksp = kstat_create("vmem", vmp->vm_id, vmp->vm_name, in vmem_create_common()
1553 vmp->vm_ksp->ks_data = &vmp->vm_kstat; in vmem_create_common()
1554 kstat_install(vmp->vm_ksp); in vmem_create_common()
1561 *vmpp = vmp; in vmem_create_common()
1564 if (vmp->vm_cflags & VMC_POPULATOR) { in vmem_create_common()
1566 vmem_populator[atomic_inc_32_nv(&vmem_populators) - 1] = vmp; in vmem_create_common()
1567 mutex_enter(&vmp->vm_lock); in vmem_create_common()
1568 (void) vmem_populate(vmp, vmflag | VM_PANIC); in vmem_create_common()
1569 mutex_exit(&vmp->vm_lock); in vmem_create_common()
1572 if ((base || size) && vmem_add(vmp, base, size, vmflag) == NULL) { in vmem_create_common()
1573 vmem_destroy(vmp); in vmem_create_common()
1577 return (vmp); in vmem_create_common()
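The freelist initialization above explains the VS_SIZE(vprev) trick seen in vmem_freelist_insert(): each vm_freelist[i] is a sentinel segment with vs_start == 0 and vs_end == 1UL << i, so taking VS_SIZE() of a freelist head yields exactly that list's vm_freemap bit. A sketch of the setup and the payoff, with the structure pared down to the fields involved:

#include <assert.h>
#include <stdint.h>

#define VMEM_FREELISTS  (sizeof (void *) * 8)
#define VS_SIZE(vsp)    ((vsp)->vs_end - (vsp)->vs_start)

typedef struct seg {
        uintptr_t vs_start, vs_end;
        struct seg *vs_kprev, *vs_knext;
} seg_t;

int
main(void)
{
        static seg_t freelist[VMEM_FREELISTS + 1];
        uintptr_t freemap = 0;
        size_t i;

        /* chain the sentinels, encoding 1 << i in each one's vs_end */
        for (i = 0; i < VMEM_FREELISTS; i++) {
                freelist[i].vs_end = (uintptr_t)1 << i;
                freelist[i].vs_knext = &freelist[i + 1];
                freelist[i + 1].vs_kprev = &freelist[i];
        }

        /* inserting onto list 7 sets exactly bit 7 of the summary map */
        freemap |= VS_SIZE(&freelist[7]);
        assert(freemap == (uintptr_t)1 << 7);
        return (0);
}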
1611 vmem_destroy(vmem_t *vmp) in vmem_destroy() argument
1614 vmem_seg_t *seg0 = &vmp->vm_seg0; in vmem_destroy()
1621 while ((cur = *vmpp) != vmp) in vmem_destroy()
1623 *vmpp = vmp->vm_next; in vmem_destroy()
1627 if (vmp->vm_qcache[i]) in vmem_destroy()
1628 kmem_cache_destroy(vmp->vm_qcache[i]); in vmem_destroy()
1630 leaked = vmem_size(vmp, VMEM_ALLOC); in vmem_destroy()
1633 vmp->vm_name, leaked, (vmp->vm_cflags & VMC_IDENTIFIER) ? in vmem_destroy()
1636 if (vmp->vm_hash_table != vmp->vm_hash0) in vmem_destroy()
1637 vmem_free(vmem_hash_arena, vmp->vm_hash_table, in vmem_destroy()
1638 (vmp->vm_hash_mask + 1) * sizeof (void *)); in vmem_destroy()
1644 VMEM_DELETE(&vmp->vm_rotor, a); in vmem_destroy()
1650 while (vmp->vm_nsegfree > 0) in vmem_destroy()
1651 vmem_putseg_global(vmem_getseg(vmp)); in vmem_destroy()
1653 kstat_delete(vmp->vm_ksp); in vmem_destroy()
1655 mutex_destroy(&vmp->vm_lock); in vmem_destroy()
1656 cv_destroy(&vmp->vm_cv); in vmem_destroy()
1657 vmem_free(vmem_vmem_arena, vmp, sizeof (vmem_t)); in vmem_destroy()
1670 vmem_hash_rescale(vmem_t *vmp) in vmem_hash_rescale() argument
1675 nseg = (size_t)(vmp->vm_kstat.vk_alloc.value.ui64 - in vmem_hash_rescale()
1676 vmp->vm_kstat.vk_free.value.ui64); in vmem_hash_rescale()
1679 old_size = vmp->vm_hash_mask + 1; in vmem_hash_rescale()
1691 mutex_enter(&vmp->vm_lock); in vmem_hash_rescale()
1693 old_size = vmp->vm_hash_mask + 1; in vmem_hash_rescale()
1694 old_table = vmp->vm_hash_table; in vmem_hash_rescale()
1696 vmp->vm_hash_mask = new_size - 1; in vmem_hash_rescale()
1697 vmp->vm_hash_table = new_table; in vmem_hash_rescale()
1698 vmp->vm_hash_shift = highbit(vmp->vm_hash_mask); in vmem_hash_rescale()
1705 vmem_seg_t **hash_bucket = VMEM_HASH(vmp, addr); in vmem_hash_rescale()
1712 mutex_exit(&vmp->vm_lock); in vmem_hash_rescale()
1714 if (old_table != vmp->vm_hash0) in vmem_hash_rescale()
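vmem_hash_rescale() resizes the allocated-segment table when the live-allocation count (vk_alloc - vk_free) has drifted well away from the current table size, then rehashes every chained segment into the new buckets under the arena lock. The rehash loop on its own, with an illustrative hash function and table sizes:

#include <assert.h>
#include <stdint.h>
#include <stdlib.h>

typedef struct seg {
        uintptr_t vs_start;
        struct seg *vs_knext;
} seg_t;

static size_t
bucket(uintptr_t addr, size_t mask)
{
        return ((addr >> 3) & mask);    /* stand-in for VMEM_HASH() */
}

/* move every segment from old_table into new_table (sizes: powers of two) */
static void
rehash(seg_t **old_table, size_t old_size, seg_t **new_table, size_t new_size)
{
        size_t h;

        for (h = 0; h < old_size; h++) {
                seg_t *vsp = old_table[h];

                while (vsp != NULL) {
                        seg_t *next = vsp->vs_knext;
                        seg_t **nb =
                            &new_table[bucket(vsp->vs_start, new_size - 1)];

                        vsp->vs_knext = *nb;    /* push onto new bucket */
                        *nb = vsp;
                        vsp = next;
                }
        }
}

int
main(void)
{
        seg_t s1 = { 0x1000, NULL }, s2 = { 0x2008, NULL };
        seg_t *old_table[2] = { NULL, NULL };
        seg_t **new_table = calloc(8, sizeof (seg_t *));

        assert(new_table != NULL);
        old_table[bucket(s1.vs_start, 1)] = &s1;
        old_table[bucket(s2.vs_start, 1)] = &s2;

        rehash(old_table, 2, new_table, 8);
        assert(new_table[bucket(s1.vs_start, 7)] == &s1);
        assert(new_table[bucket(s2.vs_start, 7)] == &s2);
        free(new_table);
        return (0);
}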
1725 vmem_t *vmp; in vmem_update() local
1728 for (vmp = vmem_list; vmp != NULL; vmp = vmp->vm_next) { in vmem_update()
1734 cv_broadcast(&vmp->vm_cv); in vmem_update()
1739 vmem_hash_rescale(vmp); in vmem_update()
1747 vmem_qcache_reap(vmem_t *vmp) in vmem_qcache_reap() argument
1755 if (vmp->vm_qcache[i]) in vmem_qcache_reap()
1756 kmem_cache_reap_soon(vmp->vm_qcache[i]); in vmem_qcache_reap()