Lines Matching +full:early +full:- +full:to +full:- +full:mid
1 /*-
2 * SPDX-License-Identifier: BSD-3-Clause
14 * may be used to endorse or promote products derived from this software
18 * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
22 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
30 * BLIST.C - Bitmap allocator/deallocator, using a radix tree with hinting
34 * try to interpret the meaning of a 'block' other than to return
37 * A radix tree controls access to pieces of the bitmap, and includes
56 * radix tree should be able to operate well no matter how much
61 * The non-blocking nature of allocations and frees is required by swap
66 * memory) by BLIST_RADIX lower-level nodes. This is a recursive
68 * 'skip' calculation. The memory allocation is only large enough to
72 * BLIST_RADIX lower-level nodes of some nodes may not be allocated.
82 * This code can be compiled stand-alone for debugging.
151 #define BLIST_MASK (BLIST_RADIX - 1)
154 * For a subtree that can represent the state of up to 'radix' blocks, the
158 * or, equivalently, (m**(h+1)-1)/(m-1). This quantity is called 'skip'
161 * skip = (m * m**h) / (m - 1)
162 * skip = (m * (radix / m)) / (m - 1)
163 * skip = radix / (m - 1)
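A quick check of the final approximation, as a minimal sketch rather than the file's exact helper (it assumes BLIST_RADIX works out to 64 bits per bitmap word, and that the radix_to_skip() helper referenced later in this listing reduces to the same constant division): with m = 64, a subtree covering radix = 4096 blocks has 64 leaves under one meta node, 65 nodes in total, and 4096 / (64 - 1) is also 65 by integer division, so dividing by BLIST_MASK yields the skip directly.

    #define BLIST_RADIX 64                  /* assumed value, see note above */
    #define BLIST_MASK  (BLIST_RADIX - 1)

    /* skip = radix / (m - 1), per the derivation above; hypothetical name. */
    static inline long long
    radix_to_skip_sketch(long long radix)
    {
        return (radix / BLIST_MASK);
    }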
181 return (((u_daddr_t)-1 << n) & in bitrange()
182 ((u_daddr_t)-1 >> (BLIST_RADIX - (n + count)))); in bitrange()
191 return (ffsll(mask) - 1); in bitpos()
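The two helpers above do all of the single-word mask work in this file: bitrange(n, count) intersects "every bit at or above n" with "every bit below n + count", and bitpos() reports the index of the lowest set bit. A self-contained sketch of the same idea, using hypothetical names and a hard-coded 64-bit word (the real versions operate on u_daddr_t and BLIST_RADIX):

    #include <stdint.h>
    #include <stdio.h>

    /* Bits n .. n+count-1 set; requires count >= 1 and n + count <= 64. */
    static uint64_t
    bitrange64(int n, int count)
    {
        return ((~(uint64_t)0 << n) & (~(uint64_t)0 >> (64 - (n + count))));
    }

    /* Index of the lowest set bit; mask must be nonzero. */
    static int
    bitpos64(uint64_t mask)
    {
        int i = 0;

        while ((mask & 1) == 0) {
            mask >>= 1;
            i++;
        }
        return (i);
    }

    int
    main(void)
    {
        uint64_t m = bitrange64(2, 3);      /* bits 2, 3 and 4 */

        printf("mask %#jx, lowest set bit %d\n", (uintmax_t)m, bitpos64(m));
        return (0);
    }

Running it prints mask 0x1c, lowest set bit 2.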
195 * blist_create() - create a blist capable of handling up to the specified
198 * blocks - must be greater than 0
199 * flags - malloc flags
216 for (radix = 1; (blocks - 1) / BLIST_RADIX / radix > 0; in blist_create()
218 nodes += 1 + (blocks - 1) / BLIST_RADIX / radix; in blist_create()
221 * Include a sentinel node to ensure that cross-leaf scans stay within in blist_create()
232 bl->bl_blocks = blocks; in blist_create()
233 bl->bl_radix = radix; in blist_create()
239 (long long)bl->bl_blocks, in blist_create()
240 (long long)bl->bl_blocks * 4 / 1024, in blist_create()
258 * blist_alloc() - reserve space in the block bitmap. Return the base
274 * first iteration leads to a second iteration only if the cursor was in blist_alloc()
275 * non-zero. When the cursor is zero, an allocation failure will in blist_alloc()
278 for (cursor = bl->bl_cursor;; cursor = 0) { in blist_alloc()
279 blk = blst_meta_alloc(bl->bl_root, cursor, count, maxcount, in blist_alloc()
280 bl->bl_radix); in blist_alloc()
282 bl->bl_avail -= *count; in blist_alloc()
283 bl->bl_cursor = blk + *count; in blist_alloc()
284 if (bl->bl_cursor == bl->bl_blocks) in blist_alloc()
285 bl->bl_cursor = 0; in blist_alloc()
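From a caller's point of view the cursor bookkeeping above is invisible: the entry point returns a base block number and reports, through the count pointer, how many blocks were actually reserved. A hypothetical caller might look like the sketch below; the signatures follow this listing (minimum count by pointer plus a maxcount), while the <sys/blist.h> include and SWAPBLK_NONE as the failure value are assumptions about the surrounding kernel interface rather than something shown in these fragments.

    #include <sys/types.h>
    #include <sys/blist.h>

    /*
     * Hypothetical caller: ask for at least 4 and at most 16 contiguous
     * blocks; on success *count holds the size actually reserved.
     */
    static daddr_t
    grab_blocks(blist_t bl, int *count)
    {
        daddr_t blk;

        *count = 4;
        blk = blist_alloc(bl, count, 16);
        if (blk == SWAPBLK_NONE)            /* assumed failure value */
            return (SWAPBLK_NONE);
        /* [blk, blk + *count) is now reserved. */
        return (blk);
    }

Releasing the same range later would be blist_free(bl, blk, *count), which, per the fragments further down, simply re-sets the leaf bits and adds the count back to bl_avail.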
294 * blist_avail() - return the number of free blocks.
300 return (bl->bl_avail); in blist_avail()
304 * blist_free() - free up space in the block bitmap. Return the base
311 KASSERT(blkno >= 0 && blkno + count <= bl->bl_blocks, in blist_free()
313 (uintmax_t)blkno, (int)count, (uintmax_t)bl->bl_blocks)); in blist_free()
314 blst_meta_free(bl->bl_root, blkno, count, bl->bl_radix); in blist_free()
315 bl->bl_avail += count; in blist_free()
319 * blist_fill() - mark a region in the block bitmap as off-limits
320 * to the allocator (i.e. allocate it), ignoring any
329 KASSERT(blkno >= 0 && blkno + count <= bl->bl_blocks, in blist_fill()
331 (uintmax_t)blkno, (int)count, (uintmax_t)bl->bl_blocks)); in blist_fill()
332 filled = blst_meta_fill(bl->bl_root, blkno, count, bl->bl_radix); in blist_fill()
333 bl->bl_avail -= filled; in blist_fill()
338 * blist_resize() - resize an existing radix tree to handle the
340 * the tree and transfer the previous bitmap to the new
342 * the new blocks are to be left allocated or freed. in blist_resize()
351 if (count > save->bl_blocks) in blist_resize()
352 count = save->bl_blocks; in blist_resize()
353 blst_copy(save->bl_root, 0, save->bl_radix, newbl, count); in blist_resize()
358 if (freenew && count < newbl->bl_blocks) { in blist_resize()
359 blist_free(newbl, count, newbl->bl_blocks - count); in blist_resize()
367 * blist_print() - dump radix tree
373 (uintmax_t)bl->bl_avail, (uintmax_t)bl->bl_cursor); in blist_print()
375 if (bl->bl_root->bm_bitmap != 0) in blist_print()
376 blst_radix_print(bl->bl_root, 0, bl->bl_radix, 4); in blist_print()
389 * Use 'gap' to describe a maximal range of unallocated blocks/bits.
396 daddr_t err; /* sum - num * avg */
402 * gap_stats_counting() - is the state 'counting 1 bits'?
409 return (stats->start != SWAPBLK_NONE); in gap_stats_counting()
413 * init_gap_stats() - initialize stats on gap sizes
420 stats->start = SWAPBLK_NONE; in init_gap_stats()
424 * update_gap_stats() - update stats on gap sizes
430 int hi, lo, mid; in update_gap_stats() local
433 stats->start = posn; in update_gap_stats()
436 size = posn - stats->start; in update_gap_stats()
437 stats->start = SWAPBLK_NONE; in update_gap_stats()
438 if (size > stats->max) in update_gap_stats()
439 stats->max = size; in update_gap_stats()
443 * expecting to find it in an early range. in update_gap_stats()
454 mid = (lo + hi) >> 1; in update_gap_stats()
455 if (fib[mid] <= size) in update_gap_stats()
456 lo = mid; in update_gap_stats()
458 hi = mid; in update_gap_stats()
460 stats->histo[lo]++; in update_gap_stats()
461 if (lo > stats->max_bucket) in update_gap_stats()
462 stats->max_bucket = lo; in update_gap_stats()
463 stats->err += size - stats->avg; in update_gap_stats()
464 stats->num++; in update_gap_stats()
465 stats->avg += stats->err / stats->num; in update_gap_stats()
466 stats->err %= stats->num; in update_gap_stats()
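The last four statements above maintain an exact integer running average without ever storing the raw sum: after every update, sum == num * avg + err with |err| < num, which is exactly why the structure documents err as "sum - num * avg". A standalone sketch of the same bookkeeping, with hypothetical names:

    #include <stdint.h>
    #include <stdio.h>

    struct running_avg {
        int64_t num;    /* samples seen */
        int64_t avg;    /* integer running average */
        int64_t err;    /* sum - num * avg; |err| < num after each update */
    };

    static void
    running_avg_add(struct running_avg *ra, int64_t size)
    {
        ra->err += size - ra->avg;      /* now sum == (num + 1) * avg + err */
        ra->num++;
        ra->avg += ra->err / ra->num;   /* move whole multiples of num into avg */
        ra->err %= ra->num;
    }

    int
    main(void)
    {
        struct running_avg ra = { 0, 0, 0 };
        int64_t sizes[] = { 3, 10, 4, 7 };

        for (int i = 0; i < 4; i++)
            running_avg_add(&ra, sizes[i]);
        printf("avg %jd err %jd\n", (intmax_t)ra.avg, (intmax_t)ra.err);
        return (0);
    }

The four samples sum to 24, so the sketch prints avg 6 err 0; a sample smaller than the current average can leave err negative for a step, but the invariant still holds.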
470 * dump_gap_stats() - print stats on gap sizes
478 (intmax_t)stats->num); in dump_gap_stats()
479 sbuf_printf(s, "largest free range: %jd\n", (intmax_t)stats->max); in dump_gap_stats()
481 (intmax_t)stats->avg); in dump_gap_stats()
484 sbuf_cat(s, " ----- | ----------\n"); in dump_gap_stats()
485 for (i = 0; i < stats->max_bucket; i++) { in dump_gap_stats()
486 if (stats->histo[i] != 0) { in dump_gap_stats()
488 (intmax_t)stats->histo[i]); in dump_gap_stats()
489 if (fib[i] != fib[i + 1] - 1) in dump_gap_stats()
490 sbuf_printf(s, "%jd to %jd\n", (intmax_t)fib[i], in dump_gap_stats()
491 (intmax_t)fib[i + 1] - 1); in dump_gap_stats()
496 sbuf_printf(s, "%20jd | ", (intmax_t)stats->histo[i]); in dump_gap_stats()
497 if (stats->histo[i] > 1) in dump_gap_stats()
498 sbuf_printf(s, "%jd to %jd\n", (intmax_t)fib[i], in dump_gap_stats()
499 (intmax_t)stats->max); in dump_gap_stats()
501 sbuf_printf(s, "%jd\n", (intmax_t)stats->max); in dump_gap_stats()
505 * blist_stats() - dump radix tree stats
518 radix = bl->bl_radix; in blist_stats()
519 for (i = 0; i < bl->bl_blocks; ) { in blist_stats()
524 if (bl->bl_root[nodes].bm_bitmap == 0) { in blist_stats()
540 mask = bl->bl_root[nodes].bm_bitmap; in blist_stats()
576 * BLST_NEXT_LEAF_ALLOC() - allocate the blocks starting with the next leaf.
581 * addresses to determine how many meta-nodes lie between the leaves. If
583 * set, clear them and clear the bits in the meta nodes on the path up to
584 * the least common ancestor to mark any subtrees made completely empty.
594 for (blk = start; blk - start < maxcount; blk += BLIST_RADIX) { in blst_next_leaf_alloc()
595 /* Skip meta-nodes, as long as they promise more free blocks. */ in blst_next_leaf_alloc()
597 while (((++scan)->bm_bitmap & 1) == 1 && in blst_next_leaf_alloc()
600 if (~scan->bm_bitmap != 0) { in blst_next_leaf_alloc()
607 avail = blk - start + bitpos(~scan->bm_bitmap); in blst_next_leaf_alloc()
611 * blocks at its beginning to bother in blst_next_leaf_alloc()
619 * There was no next leaf. Back scan up to in blst_next_leaf_alloc()
624 --scan; in blst_next_leaf_alloc()
626 blk -= BLIST_RADIX; in blst_next_leaf_alloc()
632 * 'scan' is the last leaf that provides blocks. Clear from 1 to in blst_next_leaf_alloc()
633 * BLIST_RADIX bits to represent the allocation of those last blocks. in blst_next_leaf_alloc()
636 scan->bm_bitmap &= ~bitrange(0, maxcount % BLIST_RADIX); in blst_next_leaf_alloc()
638 scan->bm_bitmap = 0; in blst_next_leaf_alloc()
641 /* Back up over meta-nodes, clearing bits if necessary. */ in blst_next_leaf_alloc()
642 blk -= BLIST_RADIX; in blst_next_leaf_alloc()
646 if ((scan--)->bm_bitmap == 0) in blst_next_leaf_alloc()
647 scan->bm_bitmap ^= 1; in blst_next_leaf_alloc()
649 if ((scan--)->bm_bitmap == 0) in blst_next_leaf_alloc()
650 scan[-digit * radix_to_skip(radix)].bm_bitmap ^= in blst_next_leaf_alloc()
656 scan->bm_bitmap = 0; in blst_next_leaf_alloc()
662 * BLST_LEAF_ALLOC() - allocate at a leaf in the radix tree (a bitmap).
665 * proportional to log(count), plus height of the tree if the allocation
674 count1 = *count - 1; in blst_leaf_alloc()
676 mask = ~scan->bm_bitmap; in blst_leaf_alloc()
680 * num_shifts)] are 1 in scan->bm_bitmap. Reduce num_shifts to in blst_leaf_alloc()
681 * 0, while preserving this invariant. The updates to mask in blst_leaf_alloc()
683 * longer string of consecutive 1-bits in scan->bm_bitmap. If in blst_leaf_alloc()
684 * more updates to mask cannot set more bits, because mask is in blst_leaf_alloc()
688 num_shifts--; in blst_leaf_alloc()
697 scan->bm_bighint = bighint; in blst_leaf_alloc()
708 scan->bm_bighint = bighint; in blst_leaf_alloc()
712 blk -= blk & BLIST_MASK; in blst_leaf_alloc()
727 if (maxcount < hi - lo) in blst_leaf_alloc()
729 *count = hi - lo; in blst_leaf_alloc()
731 } else if (maxcount <= BLIST_RADIX - lo) { in blst_leaf_alloc()
737 scan->bm_bighint = bighint; in blst_leaf_alloc()
740 count1 = *count - (BLIST_RADIX - lo); in blst_leaf_alloc()
741 maxcount -= BLIST_RADIX - lo; in blst_leaf_alloc()
745 * The next leaf cannot supply enough blocks to reach in blst_leaf_alloc()
749 * the next leaf changes, and without any changes to in blst_leaf_alloc()
753 *count = BLIST_RADIX - lo + hi; in blst_leaf_alloc()
754 scan->bm_bighint = bighint; in blst_leaf_alloc()
758 scan->bm_bitmap &= mask; in blst_leaf_alloc()
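The shift loop sketched in the comments above is what lets the leaf allocator locate a run of *count free bits in O(log count) word operations instead of scanning bit by bit. Below is a self-contained illustration of the underlying technique on one 64-bit word; it is deliberately simpler than blst_leaf_alloc() itself, which also maintains bm_bighint, honors the cursor, and falls through to blst_next_leaf_alloc() when a run may continue into the next leaf.

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /*
     * Return the lowest starting position of a run of 'count' consecutive
     * 1 (free) bits in 'bitmap', or -1 if no such run fits in the word.
     */
    static int
    find_free_run(uint64_t bitmap, int count)
    {
        uint64_t mask, starts;
        int have, step, pos;

        assert(count >= 1 && count <= 64);
        mask = ~bitmap;     /* a 0 bit marks the start of >= 'have' free bits */
        have = 1;
        while (have < count) {
            step = count - have;
            if (step > have)
                step = have;    /* at most double the certified run per pass */
            mask |= mask >> step;
            have += step;
        }
        /* A run starting at 'pos' must also fit entirely below bit 64. */
        starts = ~mask & (~(uint64_t)0 >> (count - 1));
        if (starts == 0)
            return (-1);
        for (pos = 0; (starts & 1) == 0; pos++)
            starts >>= 1;
        return (pos);
    }

    int
    main(void)
    {
        /* Bits 8..15 and 40..51 are free (set) in this word. */
        uint64_t bitmap = (0xffULL << 8) | (0xfffULL << 40);

        printf("%d\n", find_free_run(bitmap, 4));     /* 8 */
        printf("%d\n", find_free_run(bitmap, 10));    /* 40 */
        printf("%d\n", find_free_run(bitmap, 16));    /* -1 */
        return (0);
    }

Each pass ORs the inverted bitmap with a shifted copy of itself, so any bit that is still clear afterwards certifies a correspondingly longer run of set bits in the original word.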
763 * BLST_META_ALLOC() - allocate at a meta node in the radix tree.
765 * Attempt to allocate at a meta node. If we can't, we update
767 * calls that hit this node. We have to check for our collapse cases
781 blk = cursor & -(radix * BLIST_RADIX); in blst_meta_alloc()
784 mask = scan->bm_bitmap; in blst_meta_alloc()
788 mask &= (u_daddr_t)-1 << digit; in blst_meta_alloc()
793 * If the first try is for a block that includes the cursor, pre-undo in blst_meta_alloc()
798 cursor -= digit * radix; in blst_meta_alloc()
816 scan->bm_bitmap ^= bitrange(digit, 1); in blst_meta_alloc()
827 if (scan_from_start && !(digit == BLIST_RADIX - 1 && in blst_meta_alloc()
829 scan->bm_bighint = *count - 1; in blst_meta_alloc()
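At the meta level each bm_bitmap word summarizes BLIST_RADIX children: cursor / radix selects the starting child ("digit"), the (u_daddr_t)-1 << digit mask above discards children that lie before the cursor, and bitrange(digit, 1) clears a child's summary bit once its subtree has nothing left to offer. A stripped-down sketch of walking the remaining set summary bits in ascending order, with hypothetical names and a 64-bit word assumed:

    #include <stdint.h>
    #include <stdio.h>

    /* Visit every set bit of 'summary' at position >= start_digit, in order. */
    static void
    walk_children(uint64_t summary, int start_digit)
    {
        uint64_t mask = summary & (~(uint64_t)0 << start_digit);

        while (mask != 0) {
            uint64_t low = mask & (~mask + 1);  /* isolate lowest set bit */
            int digit = 0;

            while ((low >> digit) != 1)
                digit++;
            printf("descend into child %d\n", digit);
            mask ^= low;                        /* done with this child */
        }
    }

    int
    main(void)
    {
        /* Children 3, 9 and 40 report free space; start scanning at 5. */
        walk_children((1ULL << 3) | (1ULL << 9) | (1ULL << 40), 5);
        return (0);
    }

The sketch visits children 9 and 40 and skips child 3, mirroring how the allocator resumes at the cursor's digit rather than always starting from the leftmost child.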
835 * BLST_LEAF_FREE() - free allocated block from leaf bitmap
850 KASSERT((scan->bm_bitmap & mask) == 0, in blst_leaf_free()
852 (uintmax_t)blk, count, (uintmax_t)scan->bm_bitmap & mask)); in blst_leaf_free()
853 scan->bm_bitmap |= mask; in blst_leaf_free()
857 * BLST_META_FREE() - free allocated blocks from radix tree meta info
861 * meta node, we break the range down recursively to free blocks
873 * We could probably do a better job here. We are required to make in blst_meta_free()
878 scan->bm_bighint = BLIST_MAX_ALLOC; in blst_meta_free()
884 blk = (freeBlk + radix * BLIST_RADIX) & -(radix * BLIST_RADIX); in blst_meta_free()
892 blk = freeBlk & -radix; in blst_meta_free()
894 endDigit = 1 + (((endBlk - 1) / radix) & BLIST_MASK); in blst_meta_free()
895 scan->bm_bitmap |= bitrange(digit, endDigit - digit); in blst_meta_free()
898 count = ummin(blk, endBlk) - freeBlk; in blst_meta_free()
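The blk computations above use a standard power-of-two trick: for a power-of-two alignment, -align in two's complement keeps every bit at or above the alignment's bit and clears the rest, so freeBlk & -radix rounds down to a child boundary, while (freeBlk + radix * BLIST_RADIX) & -(radix * BLIST_RADIX) advances to the next strictly higher top-level boundary even if freeBlk is already aligned. A tiny sketch of both roundings, with hypothetical names:

    #include <stdint.h>
    #include <stdio.h>

    /* x & -align: round down to a power-of-two boundary. */
    static int64_t
    round_down_pow2(int64_t x, int64_t align)
    {
        return (x & -align);
    }

    /* (x + align) & -align: smallest multiple of 'align' strictly above x. */
    static int64_t
    next_boundary_pow2(int64_t x, int64_t align)
    {
        return ((x + align) & -align);
    }

    int
    main(void)
    {
        /* With a 64-block alignment: 1000 -> 960 and 1024; 1024 -> 1088. */
        printf("%jd %jd %jd\n",
            (intmax_t)round_down_pow2(1000, 64),
            (intmax_t)next_boundary_pow2(1000, 64),
            (intmax_t)next_boundary_pow2(1024, 64));
        return (0);
    }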
905 * BLST_COPY() - copy one radix tree to another
921 u_daddr_t v = scan->bm_bitmap; in blst_copy()
923 if (v == (u_daddr_t)-1) { in blst_copy()
940 if (scan->bm_bitmap == 0) { in blst_copy()
953 count -= blk - endBlk; in blst_copy()
954 blst_copy(&scan[i], blk - radix, in blst_copy()
960 * BLST_LEAF_FILL() - allocate specific blocks in leaf bitmap
975 nblks = bitcount64(scan->bm_bitmap & mask); in blst_leaf_fill()
977 scan->bm_bitmap &= ~mask; in blst_leaf_fill()
982 * BLST_META_FILL() - allocate specific blocks at a meta node
999 blk = (allocBlk + radix * BLIST_RADIX) & -(radix * BLIST_RADIX); in blst_meta_fill()
1007 blk = allocBlk & -radix; in blst_meta_fill()
1013 count = ummin(blk, endBlk) - allocBlk; in blst_meta_fill()
1017 scan->bm_bitmap &= ~((u_daddr_t)1 << digit); in blst_meta_fill()
1037 (int)(1 + (BLIST_RADIX - 1) / 4), in blst_radix_print()
1038 (long long)scan->bm_bitmap, in blst_radix_print()
1039 (long long)scan->bm_bighint in blst_radix_print()
1049 (int)(1 + (BLIST_RADIX - 1) / 4), in blst_radix_print()
1050 (long long)scan->bm_bitmap, in blst_radix_print()
1051 (long long)scan->bm_bighint in blst_radix_print()
1057 mask = scan->bm_bitmap; in blst_radix_print()
1064 tab -= 4; in blst_radix_print()
1086 if (*ptr != '-') { in main()
1091 fprintf(stderr, "Bad option: %s\n", ptr - 2); in main()
1107 (long long)size, (long long)bl->bl_radix * BLIST_RADIX); in main()
1155 "p -print\n" in main()
1156 "s -stats\n" in main()
1157 "a %d %d -allocate\n" in main()
1158 "f %x %d -free\n" in main()
1159 "l %x %d -fill\n" in main()
1160 "r %d -resize\n" in main()
1161 "h/? -help\n" in main()
1162 "q -quit" in main()