xref: /freebsd/sys/kern/subr_blist.c (revision 31c82722c1f899df2292323462d5b0ca90a73cd3)
106b4bf3eSWarner Losh /*-
251369649SPedro F. Giffuni  * SPDX-License-Identifier: BSD-3-Clause
351369649SPedro F. Giffuni  *
406b4bf3eSWarner Losh  * Copyright (c) 1998 Matthew Dillon.  All Rights Reserved.
506b4bf3eSWarner Losh  * Redistribution and use in source and binary forms, with or without
606b4bf3eSWarner Losh  * modification, are permitted provided that the following conditions
706b4bf3eSWarner Losh  * are met:
806b4bf3eSWarner Losh  * 1. Redistributions of source code must retain the above copyright
906b4bf3eSWarner Losh  *    notice, this list of conditions and the following disclaimer.
1006b4bf3eSWarner Losh  * 2. Redistributions in binary form must reproduce the above copyright
1106b4bf3eSWarner Losh  *    notice, this list of conditions and the following disclaimer in the
1206b4bf3eSWarner Losh  *    documentation and/or other materials provided with the distribution.
1369a28758SEd Maste  * 3. Neither the name of the University nor the names of its contributors
1406b4bf3eSWarner Losh  *    may be used to endorse or promote products derived from this software
1506b4bf3eSWarner Losh  *    without specific prior written permission.
1606b4bf3eSWarner Losh  *
1706b4bf3eSWarner Losh  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
1806b4bf3eSWarner Losh  * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
1906b4bf3eSWarner Losh  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
2006b4bf3eSWarner Losh  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
2106b4bf3eSWarner Losh  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
2206b4bf3eSWarner Losh  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
2306b4bf3eSWarner Losh  * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
2406b4bf3eSWarner Losh  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
2506b4bf3eSWarner Losh  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
2606b4bf3eSWarner Losh  * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
2706b4bf3eSWarner Losh  * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
2806b4bf3eSWarner Losh  */
297090df5aSMatthew Dillon /*
307090df5aSMatthew Dillon  * BLIST.C -	Bitmap allocator/deallocator, using a radix tree with hinting
317090df5aSMatthew Dillon  *
327090df5aSMatthew Dillon  *	This module implements a general bitmap allocator/deallocator.  The
337090df5aSMatthew Dillon  *	allocator eats around 2 bits per 'block'.  The module does not
34f565f395SEitan Adler  *	try to interpret the meaning of a 'block' other than to return
357090df5aSMatthew Dillon  *	SWAPBLK_NONE on an allocation failure.
367090df5aSMatthew Dillon  *
37ec371b57SAlan Cox  *	A radix tree controls access to pieces of the bitmap, and includes
38ec371b57SAlan Cox  *	auxiliary information at each interior node about the availability of
39ec371b57SAlan Cox  *	contiguous free blocks in the subtree rooted at that node.  Two radix
40ec371b57SAlan Cox  *	constants are involved: one for the size of the bitmaps contained in the
41ec371b57SAlan Cox  *	leaf nodes (BLIST_BMAP_RADIX), and one for the number of descendants of
42ec371b57SAlan Cox  *	each of the meta (interior) nodes (BLIST_META_RADIX).  Each subtree is
43ec371b57SAlan Cox  *	associated with a range of blocks.  The root of any subtree stores a
44ec371b57SAlan Cox  *	hint field that defines an upper bound on the size of the largest
45ec371b57SAlan Cox  *	allocation that can begin in the associated block range.  A hint is an
46ec371b57SAlan Cox  *	upper bound on a potential allocation, but not necessarily a tight upper
47ec371b57SAlan Cox  *	bound.
487090df5aSMatthew Dillon  *
49bb4a27f9SMark Johnston  *	The bitmap field in each node directs the search for available blocks.
50bb4a27f9SMark Johnston  *	For a leaf node, a bit is set if the corresponding block is free.  For a
51bb4a27f9SMark Johnston  *	meta node, a bit is set if the corresponding subtree contains a free
52bb4a27f9SMark Johnston  *	block somewhere within it.  The search at a meta node considers only
53bb4a27f9SMark Johnston  *	children of that node that represent a range that includes a free block.
547090df5aSMatthew Dillon  *
557090df5aSMatthew Dillon  * 	The hinting greatly increases code efficiency for allocations while
567090df5aSMatthew Dillon  *	the general radix structure optimizes both allocations and frees.  The
577090df5aSMatthew Dillon  *	radix tree should be able to operate well no matter how much
587090df5aSMatthew Dillon  *	fragmentation there is and no matter how large a bitmap is used.
597090df5aSMatthew Dillon  *
6051dc4feaSSergey Kandaurov  *	The blist code wires all necessary memory at creation time.  Neither
6151dc4feaSSergey Kandaurov  *	allocations nor frees require interaction with the memory subsystem.
62bb4a27f9SMark Johnston  *	The non-blocking nature of allocations and frees is required by swap
63bb4a27f9SMark Johnston  *	code (vm/swap_pager.c).
647090df5aSMatthew Dillon  *
65bb4a27f9SMark Johnston  *	LAYOUT: The radix tree is laid out recursively using a linear array.
66bb4a27f9SMark Johnston  *	Each meta node is immediately followed (laid out sequentially in
67bb4a27f9SMark Johnston  *	memory) by BLIST_META_RADIX lower level nodes.  This is a recursive
68bb4a27f9SMark Johnston  *	structure but one that can be easily scanned through a very simple
69bb4a27f9SMark Johnston  *	'skip' calculation.  The memory allocation is only large enough to
70bb4a27f9SMark Johnston  *	cover the number of blocks requested at creation time.  Nodes that
71bb4a27f9SMark Johnston  *	represent blocks beyond that limit, nodes that would never be read
72bb4a27f9SMark Johnston  *	or written, are not allocated, so that the last of the
73bb4a27f9SMark Johnston  *	BLIST_META_RADIX lower level nodes of a some nodes may not be
74bb4a27f9SMark Johnston  *	BLIST_META_RADIX lower level nodes of some meta nodes may not be
757090df5aSMatthew Dillon  *
76f565f395SEitan Adler  *	NOTE: the allocator cannot currently honor a minimum allocation
777090df5aSMatthew Dillon  *	request (*count) of more than BLIST_BMAP_RADIX blocks per call.  It
787090df5aSMatthew Dillon  *	will panic with 'minimum allocation too large' if you try.  This is
797090df5aSMatthew Dillon  *	an area that could use improvement.  The radix is large enough that
80b411efa4SAlan Cox  *	this restriction does not affect the swap system, though.  Currently
817090df5aSMatthew Dillon  *	only the allocation code is affected by this algorithmic unfeature.
827090df5aSMatthew Dillon  *	The freeing code can handle arbitrary ranges.
837090df5aSMatthew Dillon  *
847090df5aSMatthew Dillon  *	This code can be compiled stand-alone for debugging.
857090df5aSMatthew Dillon  */
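
/*
 * Usage sketch (illustrative only; the block count and malloc flag are
 * example values).  A new blist starts with every block allocated, so it is
 * freed before use; blist_alloc() reserves between *count and maxcount
 * blocks and stores the exact size back in *count:
 *
 *	blist_t bl = blist_create(2048, M_WAITOK);
 *	blist_free(bl, 0, 2048);
 *	int count = 16;
 *	daddr_t blk = blist_alloc(bl, &count, 32);
 *	if (blk != SWAPBLK_NONE)
 *		blist_free(bl, blk, count);
 *	blist_destroy(bl);
 */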
867090df5aSMatthew Dillon 
87677b542eSDavid E. O'Brien #include <sys/cdefs.h>
88677b542eSDavid E. O'Brien __FBSDID("$FreeBSD$");
89677b542eSDavid E. O'Brien 
90c4473420SPeter Wemm #ifdef _KERNEL
917090df5aSMatthew Dillon 
927090df5aSMatthew Dillon #include <sys/param.h>
937090df5aSMatthew Dillon #include <sys/systm.h>
947090df5aSMatthew Dillon #include <sys/lock.h>
957090df5aSMatthew Dillon #include <sys/kernel.h>
967090df5aSMatthew Dillon #include <sys/blist.h>
977090df5aSMatthew Dillon #include <sys/malloc.h>
98d027ed2eSAlan Cox #include <sys/sbuf.h>
990cddd8f0SMatthew Dillon #include <sys/proc.h>
10023955314SAlfred Perlstein #include <sys/mutex.h>
1017090df5aSMatthew Dillon 
1027090df5aSMatthew Dillon #else
1037090df5aSMatthew Dillon 
1047090df5aSMatthew Dillon #ifndef BLIST_NO_DEBUG
1057090df5aSMatthew Dillon #define BLIST_DEBUG
1067090df5aSMatthew Dillon #endif
1077090df5aSMatthew Dillon 
108bb4a27f9SMark Johnston #include <sys/errno.h>
1097090df5aSMatthew Dillon #include <sys/types.h>
110d90bf7d8SAlan Cox #include <sys/malloc.h>
111d027ed2eSAlan Cox #include <sys/sbuf.h>
112b1f59c92SDoug Moore #include <assert.h>
1137090df5aSMatthew Dillon #include <stdio.h>
1147090df5aSMatthew Dillon #include <string.h>
1158eefcd40SAlan Cox #include <stddef.h>
1167090df5aSMatthew Dillon #include <stdlib.h>
1177090df5aSMatthew Dillon #include <stdarg.h>
118d3783724SAlan Cox #include <stdbool.h>
1197090df5aSMatthew Dillon 
1204be4fd5dSAlan Cox #define	bitcount64(x)	__bitcount64((uint64_t)(x))
12192da00bbSMatthew Dillon #define malloc(a,b,c)	calloc(a, 1)
1227090df5aSMatthew Dillon #define free(a,b)	free(a)
123bb4a27f9SMark Johnston #define ummin(a,b)	((a) < (b) ? (a) : (b))
124*31c82722SDoug Moore #define imin(a,b)	((a) < (b) ? (a) : (b))
125b1f59c92SDoug Moore #define KASSERT(a,b)	assert(a)
1267090df5aSMatthew Dillon 
1277090df5aSMatthew Dillon #include <sys/blist.h>
1287090df5aSMatthew Dillon 
1297090df5aSMatthew Dillon #endif
1307090df5aSMatthew Dillon 
1317090df5aSMatthew Dillon /*
1327090df5aSMatthew Dillon  * static support functions
1337090df5aSMatthew Dillon  */
13487ae0686SDoug Moore static daddr_t	blst_leaf_alloc(blmeta_t *scan, daddr_t blk,
13587ae0686SDoug Moore     int *count, int maxcount);
13687ae0686SDoug Moore static daddr_t	blst_meta_alloc(blmeta_t *scan, daddr_t cursor, int *count,
13787ae0686SDoug Moore     int maxcount, u_daddr_t radix);
1387090df5aSMatthew Dillon static void blst_leaf_free(blmeta_t *scan, daddr_t relblk, int count);
1397090df5aSMatthew Dillon static void blst_meta_free(blmeta_t *scan, daddr_t freeBlk, daddr_t count,
140bee93d3cSAlan Cox 		    u_daddr_t radix);
1417090df5aSMatthew Dillon static void blst_copy(blmeta_t *scan, daddr_t blk, daddr_t radix,
1422ac0c7c3SAlan Cox 		    blist_t dest, daddr_t count);
143a7249a6cSAlan Cox static daddr_t blst_leaf_fill(blmeta_t *scan, daddr_t blk, int count);
144a7249a6cSAlan Cox static daddr_t blst_meta_fill(blmeta_t *scan, daddr_t allocBlk, daddr_t count,
145bee93d3cSAlan Cox 		    u_daddr_t radix);
146c4473420SPeter Wemm #ifndef _KERNEL
147d3783724SAlan Cox static void	blst_radix_print(blmeta_t *scan, daddr_t blk, daddr_t radix,
1482ac0c7c3SAlan Cox 		    int tab);
1497090df5aSMatthew Dillon #endif
1507090df5aSMatthew Dillon 
151c4473420SPeter Wemm #ifdef _KERNEL
1527090df5aSMatthew Dillon static MALLOC_DEFINE(M_SWAP, "SWAP", "Swap space");
1537090df5aSMatthew Dillon #endif
1547090df5aSMatthew Dillon 
155ec371b57SAlan Cox _Static_assert(BLIST_BMAP_RADIX % BLIST_META_RADIX == 0,
156ec371b57SAlan Cox     "radix divisibility error");
157ec371b57SAlan Cox #define	BLIST_BMAP_MASK	(BLIST_BMAP_RADIX - 1)
158d027ed2eSAlan Cox #define	BLIST_META_MASK	(BLIST_META_RADIX - 1)
159ba98e6a2SAlan Cox 
1607090df5aSMatthew Dillon /*
1612ac0c7c3SAlan Cox  * For a subtree that can represent the state of up to 'radix' blocks, the
1622ac0c7c3SAlan Cox  * number of leaf nodes of the subtree is L=radix/BLIST_BMAP_RADIX.  If 'm'
1632ac0c7c3SAlan Cox  * is short for BLIST_META_RADIX, then for a tree of height h with L=m**h
1642ac0c7c3SAlan Cox  * leaf nodes, the total number of tree nodes is 1 + m + m**2 + ... + m**h,
1652ac0c7c3SAlan Cox  * or, equivalently, (m**(h+1)-1)/(m-1).  This quantity is called 'skip'
1662ac0c7c3SAlan Cox  * in the 'meta' functions that process subtrees.  Since integer division
1672ac0c7c3SAlan Cox  * discards remainders, we can express this computation as
1682ac0c7c3SAlan Cox  * skip = (m * m**h) / (m - 1)
169ba98e6a2SAlan Cox  * skip = (m * (radix / BLIST_BMAP_RADIX)) / (m - 1)
170ba98e6a2SAlan Cox  * and since m divides BLIST_BMAP_RADIX, we can simplify further to
171ba98e6a2SAlan Cox  * skip = (radix / (BLIST_BMAP_RADIX / m)) / (m - 1)
172ba98e6a2SAlan Cox  * skip = radix / ((BLIST_BMAP_RADIX / m) * (m - 1))
173ba98e6a2SAlan Cox  * so that simple integer division by a constant can safely be used for the
174ba98e6a2SAlan Cox  * calculation.
1752ac0c7c3SAlan Cox  */
1762ac0c7c3SAlan Cox static inline daddr_t
1772ac0c7c3SAlan Cox radix_to_skip(daddr_t radix)
1782ac0c7c3SAlan Cox {
1792ac0c7c3SAlan Cox 
1802ac0c7c3SAlan Cox 	return (radix /
181d027ed2eSAlan Cox 	    ((BLIST_BMAP_RADIX / BLIST_META_RADIX) * BLIST_META_MASK));
182d027ed2eSAlan Cox }
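
/*
 * Worked example (illustrative, assuming BLIST_BMAP_RADIX = 64 and
 * BLIST_META_RADIX = 16): a subtree of radix 64 * 16 = 1024 is one meta node
 * plus 16 leaves, and radix_to_skip(1024) = 1024 / (4 * 15) = 17.  One level
 * up, radix_to_skip(16384) = 16384 / 60 = 273 = 1 + 16 + 256.
 */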
183d027ed2eSAlan Cox 
184d027ed2eSAlan Cox /*
185bb4a27f9SMark Johnston  * Provide a mask with count bits set, starting at position n.
186bb4a27f9SMark Johnston  */
187bb4a27f9SMark Johnston static inline u_daddr_t
188bb4a27f9SMark Johnston bitrange(int n, int count)
189bb4a27f9SMark Johnston {
190bb4a27f9SMark Johnston 
191bb4a27f9SMark Johnston 	return (((u_daddr_t)-1 << n) &
192bb4a27f9SMark Johnston 	    ((u_daddr_t)-1 >> (BLIST_BMAP_RADIX - (n + count))));
193bb4a27f9SMark Johnston }
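
/*
 * For example, with a 64-bit u_daddr_t, bitrange(8, 4) is
 * (~0 << 8) & (~0 >> 52) = 0xf00: four bits set starting at bit 8.
 */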
194bb4a27f9SMark Johnston 
195bb4a27f9SMark Johnston 
196bb4a27f9SMark Johnston /*
19753519253SDoug Moore  * Find the first bit set in a u_daddr_t.
198d027ed2eSAlan Cox  */
199d027ed2eSAlan Cox static inline int
20053519253SDoug Moore generic_bitpos(u_daddr_t mask)
20153519253SDoug Moore {
20253519253SDoug Moore 	int hi, lo, mid;
20353519253SDoug Moore 
20453519253SDoug Moore 	lo = 0;
20553519253SDoug Moore 	hi = BLIST_BMAP_RADIX;
20653519253SDoug Moore 	while (lo + 1 < hi) {
20753519253SDoug Moore 		mid = (lo + hi) >> 1;
20853519253SDoug Moore 		if (mask & bitrange(0, mid))
20953519253SDoug Moore 			hi = mid;
21053519253SDoug Moore 		else
21153519253SDoug Moore 			lo = mid;
21253519253SDoug Moore 	}
21353519253SDoug Moore 	return (lo);
21453519253SDoug Moore }
21553519253SDoug Moore 
21653519253SDoug Moore static inline int
2174ab18ea2SDoug Moore bitpos(u_daddr_t mask)
2184ab18ea2SDoug Moore {
2194ab18ea2SDoug Moore 
22012cd7dedSDoug Moore 	switch (sizeof(mask)) {
2214ab18ea2SDoug Moore #ifdef HAVE_INLINE_FFSLL
22212cd7dedSDoug Moore 	case sizeof(long long):
22312cd7dedSDoug Moore 		return (ffsll(mask) - 1);
2244ab18ea2SDoug Moore #endif
22553519253SDoug Moore #ifdef HAVE_INLINE_FFS
22653519253SDoug Moore 	case sizeof(int):
22753519253SDoug Moore 		return (ffs(mask) - 1);
22853519253SDoug Moore #endif
22912cd7dedSDoug Moore 	default:
23053519253SDoug Moore 		return (generic_bitpos(mask));
23112cd7dedSDoug Moore 	}
2322ac0c7c3SAlan Cox }
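
/*
 * For example, bitpos(0xf0) is 4, the position of the least significant set
 * bit, whether computed via ffsll() (when available) or by the binary search
 * in generic_bitpos().
 */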
2332ac0c7c3SAlan Cox 
2342ac0c7c3SAlan Cox /*
2357090df5aSMatthew Dillon  * blist_create() - create a blist capable of handling up to the specified
2367090df5aSMatthew Dillon  *		    number of blocks
2377090df5aSMatthew Dillon  *
238f565f395SEitan Adler  *	blocks - must be greater than 0
239c8c7ad92SKip Macy  * 	flags  - malloc flags
2407090df5aSMatthew Dillon  *
2417090df5aSMatthew Dillon  *	The smallest blist consists of a single leaf node capable of
2427090df5aSMatthew Dillon  *	managing BLIST_BMAP_RADIX blocks.
2437090df5aSMatthew Dillon  */
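/*
 * A worked example of the sizing loop below (illustrative, assuming
 * BLIST_BMAP_RADIX = 64 and BLIST_META_RADIX = 16): for blocks = 2048,
 * nodes starts at 1 (the root) and the loop adds 1 + 2047/64 = 32 leaves and
 * then 1 + 2047/1024 = 2 level-1 meta nodes, so 35 nodes are allocated in
 * all and bl_radix ends up as 16384.
 */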
2447090df5aSMatthew Dillon blist_t
245c8c7ad92SKip Macy blist_create(daddr_t blocks, int flags)
2467090df5aSMatthew Dillon {
2477090df5aSMatthew Dillon 	blist_t bl;
248bb4a27f9SMark Johnston 	u_daddr_t nodes, radix;
2497090df5aSMatthew Dillon 
250b1f59c92SDoug Moore 	KASSERT(blocks > 0, ("invalid block count"));
251ce9eea64SMark Johnston 
2527090df5aSMatthew Dillon 	/*
253ce9eea64SMark Johnston 	 * Calculate the radix and node count used for scanning.
2547090df5aSMatthew Dillon 	 */
255bb4a27f9SMark Johnston 	nodes = 1;
2567090df5aSMatthew Dillon 	radix = BLIST_BMAP_RADIX;
257bb4a27f9SMark Johnston 	while (radix <= blocks) {
258bb4a27f9SMark Johnston 		nodes += 1 + (blocks - 1) / radix;
25957e6d29bSMatthew Dillon 		radix *= BLIST_META_RADIX;
2607090df5aSMatthew Dillon 	}
2617090df5aSMatthew Dillon 
26203ca2137SAlan Cox 	bl = malloc(offsetof(struct blist, bl_root[nodes]), M_SWAP, flags |
26303ca2137SAlan Cox 	    M_ZERO);
264015d7db6SAlan Cox 	if (bl == NULL)
265015d7db6SAlan Cox 		return (NULL);
2667090df5aSMatthew Dillon 
2677090df5aSMatthew Dillon 	bl->bl_blocks = blocks;
2687090df5aSMatthew Dillon 	bl->bl_radix = radix;
2697090df5aSMatthew Dillon 
2707090df5aSMatthew Dillon #if defined(BLIST_DEBUG)
2717090df5aSMatthew Dillon 	printf(
27292da00bbSMatthew Dillon 		"BLIST representing %lld blocks (%lld MB of swap)"
27392da00bbSMatthew Dillon 		", requiring %lldK of ram\n",
27492da00bbSMatthew Dillon 		(long long)bl->bl_blocks,
27592da00bbSMatthew Dillon 		(long long)bl->bl_blocks * 4 / 1024,
276015d7db6SAlan Cox 		(long long)(nodes * sizeof(blmeta_t) + 1023) / 1024
2777090df5aSMatthew Dillon 	);
27892da00bbSMatthew Dillon 	printf("BLIST raw radix tree contains %lld records\n",
279015d7db6SAlan Cox 	    (long long)nodes);
2807090df5aSMatthew Dillon #endif
2817090df5aSMatthew Dillon 
2827090df5aSMatthew Dillon 	return (bl);
2837090df5aSMatthew Dillon }
2847090df5aSMatthew Dillon 
2857090df5aSMatthew Dillon void
2867090df5aSMatthew Dillon blist_destroy(blist_t bl)
2877090df5aSMatthew Dillon {
2888eefcd40SAlan Cox 
2897090df5aSMatthew Dillon 	free(bl, M_SWAP);
2907090df5aSMatthew Dillon }
2917090df5aSMatthew Dillon 
2927090df5aSMatthew Dillon /*
2937090df5aSMatthew Dillon  * blist_alloc() -   reserve between *count and maxcount contiguous blocks
2947090df5aSMatthew Dillon  *		     in the block bitmap.  Return the base of the region and
2957090df5aSMatthew Dillon  *		     set *count to its size, or SWAPBLK_NONE on failure.
2967090df5aSMatthew Dillon  */
2977090df5aSMatthew Dillon daddr_t
29887ae0686SDoug Moore blist_alloc(blist_t bl, int *count, int maxcount)
2997090df5aSMatthew Dillon {
30027d172bbSDoug Moore 	daddr_t blk, cursor;
3017090df5aSMatthew Dillon 
30287ae0686SDoug Moore 	KASSERT(*count <= maxcount,
30387ae0686SDoug Moore 	    ("invalid parameters %d > %d", *count, maxcount));
304*31c82722SDoug Moore 	KASSERT(*count <= BLIST_MAX_ALLOC,
305*31c82722SDoug Moore 	    ("minimum allocation too large: %d", *count));
306bb4a27f9SMark Johnston 
307d4e3484bSAlan Cox 	/*
308d4e3484bSAlan Cox 	 * This loop iterates at most twice.  An allocation failure in the
309d4e3484bSAlan Cox 	 * first iteration leads to a second iteration only if the cursor was
310d4e3484bSAlan Cox 	 * non-zero.  When the cursor is zero, an allocation failure will
311749cdf6fSAlan Cox 	 * stop further iterations.
312d4e3484bSAlan Cox 	 */
31387ae0686SDoug Moore 	for (cursor = bl->bl_cursor;; cursor = 0) {
31487ae0686SDoug Moore 		blk = blst_meta_alloc(bl->bl_root, cursor, count, maxcount,
315bee93d3cSAlan Cox 		    bl->bl_radix);
316d4e3484bSAlan Cox 		if (blk != SWAPBLK_NONE) {
31787ae0686SDoug Moore 			bl->bl_avail -= *count;
31887ae0686SDoug Moore 			bl->bl_cursor = blk + *count;
319a0ae476bSAlan Cox 			if (bl->bl_cursor == bl->bl_blocks)
320a0ae476bSAlan Cox 				bl->bl_cursor = 0;
3217090df5aSMatthew Dillon 			return (blk);
32287ae0686SDoug Moore 		}
32387ae0686SDoug Moore 		if (cursor == 0)
324749cdf6fSAlan Cox 			return (SWAPBLK_NONE);
3257090df5aSMatthew Dillon 	}
3264be4fd5dSAlan Cox }
3274be4fd5dSAlan Cox 
3284be4fd5dSAlan Cox /*
3294be4fd5dSAlan Cox  * blist_avail() -	return the number of free blocks.
3304be4fd5dSAlan Cox  */
3314be4fd5dSAlan Cox daddr_t
3324be4fd5dSAlan Cox blist_avail(blist_t bl)
3334be4fd5dSAlan Cox {
3344be4fd5dSAlan Cox 
335bb4a27f9SMark Johnston 	return (bl->bl_avail);
3364be4fd5dSAlan Cox }
3377090df5aSMatthew Dillon 
3387090df5aSMatthew Dillon /*
3397090df5aSMatthew Dillon  * blist_free() -	free up space in the block bitmap.  The range must lie
340b1f59c92SDoug Moore  *		     	within the bitmap and must currently be allocated.
3417090df5aSMatthew Dillon  */
3427090df5aSMatthew Dillon void
3437090df5aSMatthew Dillon blist_free(blist_t bl, daddr_t blkno, daddr_t count)
3447090df5aSMatthew Dillon {
345a396c83aSAlan Cox 
346b1f59c92SDoug Moore 	KASSERT(blkno >= 0 && blkno + count <= bl->bl_blocks,
347b1f59c92SDoug Moore 	    ("freeing invalid range: blkno %jx, count %d, blocks %jd",
348b1f59c92SDoug Moore 	    (uintmax_t)blkno, (int)count, (uintmax_t)bl->bl_blocks));
349bee93d3cSAlan Cox 	blst_meta_free(bl->bl_root, blkno, count, bl->bl_radix);
350bb4a27f9SMark Johnston 	bl->bl_avail += count;
3517090df5aSMatthew Dillon }
3527090df5aSMatthew Dillon 
3537090df5aSMatthew Dillon /*
35492da00bbSMatthew Dillon  * blist_fill() -	mark a region in the block bitmap as off-limits
35592da00bbSMatthew Dillon  *			to the allocator (i.e. allocate it), ignoring any
35692da00bbSMatthew Dillon  *			existing allocations.  Return the number of blocks
35792da00bbSMatthew Dillon  *			actually filled that were free before the call.
35892da00bbSMatthew Dillon  */
359a7249a6cSAlan Cox daddr_t
36092da00bbSMatthew Dillon blist_fill(blist_t bl, daddr_t blkno, daddr_t count)
36192da00bbSMatthew Dillon {
362bb4a27f9SMark Johnston 	daddr_t filled;
36392da00bbSMatthew Dillon 
364b1f59c92SDoug Moore 	KASSERT(blkno >= 0 && blkno + count <= bl->bl_blocks,
365b1f59c92SDoug Moore 	    ("filling invalid range: blkno %jx, count %d, blocks %jd",
366b1f59c92SDoug Moore 	    (uintmax_t)blkno, (int)count, (uintmax_t)bl->bl_blocks));
367bb4a27f9SMark Johnston 	filled = blst_meta_fill(bl->bl_root, blkno, count, bl->bl_radix);
368bb4a27f9SMark Johnston 	bl->bl_avail -= filled;
369bb4a27f9SMark Johnston 	return (filled);
37092da00bbSMatthew Dillon }
37192da00bbSMatthew Dillon 
37292da00bbSMatthew Dillon /*
3737090df5aSMatthew Dillon  * blist_resize() -	resize an existing radix tree to handle the
3747090df5aSMatthew Dillon  *			specified number of blocks.  This will reallocate
3757090df5aSMatthew Dillon  *			the tree and transfer the previous bitmap to the new
3767090df5aSMatthew Dillon  *			one.  When extending the tree you can specify whether
3777090df5aSMatthew Dillon  *			the new blocks are to be left allocated or freed.
3787090df5aSMatthew Dillon  */
3797090df5aSMatthew Dillon void
380c8c7ad92SKip Macy blist_resize(blist_t *pbl, daddr_t count, int freenew, int flags)
3817090df5aSMatthew Dillon {
382c8c7ad92SKip Macy     blist_t newbl = blist_create(count, flags);
3837090df5aSMatthew Dillon     blist_t save = *pbl;
3847090df5aSMatthew Dillon 
3857090df5aSMatthew Dillon     *pbl = newbl;
3867090df5aSMatthew Dillon     if (count > save->bl_blocks)
3877090df5aSMatthew Dillon 	    count = save->bl_blocks;
3882ac0c7c3SAlan Cox     blst_copy(save->bl_root, 0, save->bl_radix, newbl, count);
3897090df5aSMatthew Dillon 
3907090df5aSMatthew Dillon     /*
3917090df5aSMatthew Dillon      * If resizing upwards, should we free the new space or not?
3927090df5aSMatthew Dillon      */
3937090df5aSMatthew Dillon     if (freenew && count < newbl->bl_blocks) {
3947090df5aSMatthew Dillon 	    blist_free(newbl, count, newbl->bl_blocks - count);
3957090df5aSMatthew Dillon     }
3967090df5aSMatthew Dillon     blist_destroy(save);
3977090df5aSMatthew Dillon }
3987090df5aSMatthew Dillon 
3997090df5aSMatthew Dillon #ifdef BLIST_DEBUG
4007090df5aSMatthew Dillon 
4017090df5aSMatthew Dillon /*
4027090df5aSMatthew Dillon  * blist_print()    - dump radix tree
4037090df5aSMatthew Dillon  */
4047090df5aSMatthew Dillon void
4057090df5aSMatthew Dillon blist_print(blist_t bl)
4067090df5aSMatthew Dillon {
407bb4a27f9SMark Johnston 	printf("BLIST avail = %jd, cursor = %08jx {\n",
408bb4a27f9SMark Johnston 	    (uintmax_t)bl->bl_avail, (uintmax_t)bl->bl_cursor);
409bb4a27f9SMark Johnston 
410bb4a27f9SMark Johnston 	if (bl->bl_root->bm_bitmap != 0)
4112ac0c7c3SAlan Cox 		blst_radix_print(bl->bl_root, 0, bl->bl_radix, 4);
4127090df5aSMatthew Dillon 	printf("}\n");
4137090df5aSMatthew Dillon }
4147090df5aSMatthew Dillon 
4157090df5aSMatthew Dillon #endif
4167090df5aSMatthew Dillon 
417d027ed2eSAlan Cox static const u_daddr_t fib[] = {
418d027ed2eSAlan Cox 	1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584,
419d027ed2eSAlan Cox 	4181, 6765, 10946, 17711, 28657, 46368, 75025, 121393, 196418, 317811,
420d027ed2eSAlan Cox 	514229, 832040, 1346269, 2178309, 3524578,
421d027ed2eSAlan Cox };
422d027ed2eSAlan Cox 
423d027ed2eSAlan Cox /*
424d027ed2eSAlan Cox  * Use 'gap' to describe a maximal range of unallocated blocks/bits.
425d027ed2eSAlan Cox  */
426d027ed2eSAlan Cox struct gap_stats {
427d027ed2eSAlan Cox 	daddr_t	start;		/* current gap start, or SWAPBLK_NONE */
428d027ed2eSAlan Cox 	daddr_t	num;		/* number of gaps observed */
429d027ed2eSAlan Cox 	daddr_t	max;		/* largest gap size */
430d027ed2eSAlan Cox 	daddr_t	avg;		/* average gap size */
431d027ed2eSAlan Cox 	daddr_t	err;		/* sum - num * avg */
432d027ed2eSAlan Cox 	daddr_t	histo[nitems(fib)]; /* # gaps in each size range */
433d027ed2eSAlan Cox 	int	max_bucket;	/* last histo elt with nonzero val */
434d027ed2eSAlan Cox };
435d027ed2eSAlan Cox 
436d027ed2eSAlan Cox /*
437d027ed2eSAlan Cox  * gap_stats_counting()    - is the state 'counting 1 bits'?
438d027ed2eSAlan Cox  *                           or 'skipping 0 bits'?
439d027ed2eSAlan Cox  */
440d027ed2eSAlan Cox static inline bool
441d027ed2eSAlan Cox gap_stats_counting(const struct gap_stats *stats)
442d027ed2eSAlan Cox {
443d027ed2eSAlan Cox 
444d027ed2eSAlan Cox 	return (stats->start != SWAPBLK_NONE);
445d027ed2eSAlan Cox }
446d027ed2eSAlan Cox 
447d027ed2eSAlan Cox /*
448d027ed2eSAlan Cox  * init_gap_stats()    - initialize stats on gap sizes
449d027ed2eSAlan Cox  */
450d027ed2eSAlan Cox static inline void
451d027ed2eSAlan Cox init_gap_stats(struct gap_stats *stats)
452d027ed2eSAlan Cox {
453d027ed2eSAlan Cox 
454d027ed2eSAlan Cox 	bzero(stats, sizeof(*stats));
455d027ed2eSAlan Cox 	stats->start = SWAPBLK_NONE;
456d027ed2eSAlan Cox }
457d027ed2eSAlan Cox 
458d027ed2eSAlan Cox /*
459d027ed2eSAlan Cox  * update_gap_stats()    - update stats on gap sizes
460d027ed2eSAlan Cox  */
461d027ed2eSAlan Cox static void
462d027ed2eSAlan Cox update_gap_stats(struct gap_stats *stats, daddr_t posn)
463d027ed2eSAlan Cox {
464d027ed2eSAlan Cox 	daddr_t size;
465d027ed2eSAlan Cox 	int hi, lo, mid;
466d027ed2eSAlan Cox 
467d027ed2eSAlan Cox 	if (!gap_stats_counting(stats)) {
468d027ed2eSAlan Cox 		stats->start = posn;
469d027ed2eSAlan Cox 		return;
470d027ed2eSAlan Cox 	}
471d027ed2eSAlan Cox 	size = posn - stats->start;
472d027ed2eSAlan Cox 	stats->start = SWAPBLK_NONE;
473d027ed2eSAlan Cox 	if (size > stats->max)
474d027ed2eSAlan Cox 		stats->max = size;
475d027ed2eSAlan Cox 
476d027ed2eSAlan Cox 	/*
477d027ed2eSAlan Cox 	 * Find the Fibonacci range that contains size,
478d027ed2eSAlan Cox 	 * expecting to find it in an early range.
479d027ed2eSAlan Cox 	 */
480d027ed2eSAlan Cox 	lo = 0;
481d027ed2eSAlan Cox 	hi = 1;
482d027ed2eSAlan Cox 	while (hi < nitems(fib) && fib[hi] <= size) {
483d027ed2eSAlan Cox 		lo = hi;
484d027ed2eSAlan Cox 		hi *= 2;
485d027ed2eSAlan Cox 	}
486d027ed2eSAlan Cox 	if (hi >= nitems(fib))
487d027ed2eSAlan Cox 		hi = nitems(fib);
488d027ed2eSAlan Cox 	while (lo + 1 != hi) {
489d027ed2eSAlan Cox 		mid = (lo + hi) >> 1;
490d027ed2eSAlan Cox 		if (fib[mid] <= size)
491d027ed2eSAlan Cox 			lo = mid;
492d027ed2eSAlan Cox 		else
493d027ed2eSAlan Cox 			hi = mid;
494d027ed2eSAlan Cox 	}
495d027ed2eSAlan Cox 	stats->histo[lo]++;
496d027ed2eSAlan Cox 	if (lo > stats->max_bucket)
497d027ed2eSAlan Cox 		stats->max_bucket = lo;
498d027ed2eSAlan Cox 	stats->err += size - stats->avg;
499d027ed2eSAlan Cox 	stats->num++;
500d027ed2eSAlan Cox 	stats->avg += stats->err / stats->num;
501d027ed2eSAlan Cox 	stats->err %= stats->num;
502d027ed2eSAlan Cox }
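
/*
 * Worked example (illustrative): update_gap_stats() keeps the running mean
 * as avg with remainder err, so that sum == num * avg + err.  Feeding gap
 * sizes 3, 4 and 8 gives (num, avg, err) = (1, 3, 0), then (2, 3, 1), then
 * (3, 5, 0), matching the true mean of 5.
 */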
503d027ed2eSAlan Cox 
504d027ed2eSAlan Cox /*
505d027ed2eSAlan Cox  * dump_gap_stats()    - print stats on gap sizes
506d027ed2eSAlan Cox  */
507d027ed2eSAlan Cox static inline void
508d027ed2eSAlan Cox dump_gap_stats(const struct gap_stats *stats, struct sbuf *s)
509d027ed2eSAlan Cox {
510d027ed2eSAlan Cox 	int i;
511d027ed2eSAlan Cox 
512d027ed2eSAlan Cox 	sbuf_printf(s, "number of maximal free ranges: %jd\n",
513d027ed2eSAlan Cox 	    (intmax_t)stats->num);
514d027ed2eSAlan Cox 	sbuf_printf(s, "largest free range: %jd\n", (intmax_t)stats->max);
515d027ed2eSAlan Cox 	sbuf_printf(s, "average maximal free range size: %jd\n",
516d027ed2eSAlan Cox 	    (intmax_t)stats->avg);
517d027ed2eSAlan Cox 	sbuf_printf(s, "number of maximal free ranges of different sizes:\n");
518d027ed2eSAlan Cox 	sbuf_printf(s, "               count  |  size range\n");
519d027ed2eSAlan Cox 	sbuf_printf(s, "               -----  |  ----------\n");
520d027ed2eSAlan Cox 	for (i = 0; i < stats->max_bucket; i++) {
521d027ed2eSAlan Cox 		if (stats->histo[i] != 0) {
522d027ed2eSAlan Cox 			sbuf_printf(s, "%20jd  |  ",
523d027ed2eSAlan Cox 			    (intmax_t)stats->histo[i]);
524d027ed2eSAlan Cox 			if (fib[i] != fib[i + 1] - 1)
525d027ed2eSAlan Cox 				sbuf_printf(s, "%jd to %jd\n", (intmax_t)fib[i],
526d027ed2eSAlan Cox 				    (intmax_t)fib[i + 1] - 1);
527d027ed2eSAlan Cox 			else
528d027ed2eSAlan Cox 				sbuf_printf(s, "%jd\n", (intmax_t)fib[i]);
529d027ed2eSAlan Cox 		}
530d027ed2eSAlan Cox 	}
531d027ed2eSAlan Cox 	sbuf_printf(s, "%20jd  |  ", (intmax_t)stats->histo[i]);
532d027ed2eSAlan Cox 	if (stats->histo[i] > 1)
533d027ed2eSAlan Cox 		sbuf_printf(s, "%jd to %jd\n", (intmax_t)fib[i],
534d027ed2eSAlan Cox 		    (intmax_t)stats->max);
535d027ed2eSAlan Cox 	else
536d027ed2eSAlan Cox 		sbuf_printf(s, "%jd\n", (intmax_t)stats->max);
537d027ed2eSAlan Cox }
538d027ed2eSAlan Cox 
539d027ed2eSAlan Cox /*
540d027ed2eSAlan Cox  * blist_stats()    - dump radix tree stats
541d027ed2eSAlan Cox  */
542d027ed2eSAlan Cox void
543d027ed2eSAlan Cox blist_stats(blist_t bl, struct sbuf *s)
544d027ed2eSAlan Cox {
545d027ed2eSAlan Cox 	struct gap_stats gstats;
546d027ed2eSAlan Cox 	struct gap_stats *stats = &gstats;
547d027ed2eSAlan Cox 	daddr_t i, nodes, radix;
54853519253SDoug Moore 	u_daddr_t diff, mask;
54953519253SDoug Moore 	int digit;
550d027ed2eSAlan Cox 
551d027ed2eSAlan Cox 	init_gap_stats(stats);
552d027ed2eSAlan Cox 	nodes = 0;
553d027ed2eSAlan Cox 	i = bl->bl_radix;
554d027ed2eSAlan Cox 	while (i < bl->bl_radix + bl->bl_blocks) {
555d027ed2eSAlan Cox 		/*
556d027ed2eSAlan Cox 		 * Find max size subtree starting at i.
557d027ed2eSAlan Cox 		 */
558d027ed2eSAlan Cox 		radix = BLIST_BMAP_RADIX;
559d027ed2eSAlan Cox 		while (((i / radix) & BLIST_META_MASK) == 0)
560d027ed2eSAlan Cox 			radix *= BLIST_META_RADIX;
561d027ed2eSAlan Cox 
562d027ed2eSAlan Cox 		/*
563d027ed2eSAlan Cox 		 * Check for skippable subtrees starting at i.
564d027ed2eSAlan Cox 		 */
565d027ed2eSAlan Cox 		while (radix > BLIST_BMAP_RADIX) {
566bb4a27f9SMark Johnston 			if (bl->bl_root[nodes].bm_bitmap == 0) {
567d027ed2eSAlan Cox 				if (gap_stats_counting(stats))
568d027ed2eSAlan Cox 					update_gap_stats(stats, i);
569d027ed2eSAlan Cox 				break;
570d027ed2eSAlan Cox 			}
571d027ed2eSAlan Cox 
572d027ed2eSAlan Cox 			/*
573d027ed2eSAlan Cox 			 * Skip subtree root.
574d027ed2eSAlan Cox 			 */
575d027ed2eSAlan Cox 			nodes++;
576d027ed2eSAlan Cox 			radix /= BLIST_META_RADIX;
577d027ed2eSAlan Cox 		}
578d027ed2eSAlan Cox 		if (radix == BLIST_BMAP_RADIX) {
579d027ed2eSAlan Cox 			/*
580d027ed2eSAlan Cox 			 * Scan leaf.
581d027ed2eSAlan Cox 			 */
582bb4a27f9SMark Johnston 			mask = bl->bl_root[nodes].bm_bitmap;
583d027ed2eSAlan Cox 			diff = mask ^ (mask << 1);
584d027ed2eSAlan Cox 			if (gap_stats_counting(stats))
585d027ed2eSAlan Cox 				diff ^= 1;
586d027ed2eSAlan Cox 			while (diff != 0) {
58753519253SDoug Moore 				digit = bitpos(diff);
58853519253SDoug Moore 				update_gap_stats(stats, i + digit);
58953519253SDoug Moore 				diff ^= bitrange(digit, 1);
590d027ed2eSAlan Cox 			}
591d027ed2eSAlan Cox 		}
592d027ed2eSAlan Cox 		nodes += radix_to_skip(radix);
593d027ed2eSAlan Cox 		i += radix;
594d027ed2eSAlan Cox 	}
595d027ed2eSAlan Cox 	update_gap_stats(stats, i);
596d027ed2eSAlan Cox 	dump_gap_stats(stats, s);
597d027ed2eSAlan Cox }
598d027ed2eSAlan Cox 
5997090df5aSMatthew Dillon /************************************************************************
6007090df5aSMatthew Dillon  *			  ALLOCATION SUPPORT FUNCTIONS			*
6017090df5aSMatthew Dillon  ************************************************************************
6027090df5aSMatthew Dillon  *
6037090df5aSMatthew Dillon  *	These support functions do all the actual work.  They may seem
6047090df5aSMatthew Dillon  *	rather longish, but that's because I've commented them up.  The
6057090df5aSMatthew Dillon  *	actual code is straightforward.
6067090df5aSMatthew Dillon  *
6077090df5aSMatthew Dillon  */
6087090df5aSMatthew Dillon 
6097090df5aSMatthew Dillon /*
610*31c82722SDoug Moore  * BLST_NEXT_LEAF_ALLOC() - allocate the blocks starting with the next leaf.
611bb4a27f9SMark Johnston  *
612*31c82722SDoug Moore  *	'scan' is a leaf node, and its first block is at address 'start'.  The
613*31c82722SDoug Moore  *	next leaf node could be adjacent, or several nodes away if the least
614*31c82722SDoug Moore  *	common ancestor of 'scan' and its neighbor is several levels up.  Use
615*31c82722SDoug Moore  *	addresses to determine how many meta-nodes lie between the leaves.  If
616*31c82722SDoug Moore  *	the sequence of leaves starting with the next one has enough initial bits
617*31c82722SDoug Moore  *	set, clear them and clear the bits in the meta nodes on the path up to
618*31c82722SDoug Moore  *	the least common ancestor to mark any subtrees made completely empty.
619bb4a27f9SMark Johnston  */
620bb4a27f9SMark Johnston static int
621*31c82722SDoug Moore blst_next_leaf_alloc(blmeta_t *scan, daddr_t start, int count, int maxcount)
622bb4a27f9SMark Johnston {
623bb4a27f9SMark Johnston 	u_daddr_t radix;
624*31c82722SDoug Moore 	daddr_t blk;
62587ae0686SDoug Moore 	int avail, digit;
626bb4a27f9SMark Johnston 
627*31c82722SDoug Moore 	start += BLIST_BMAP_RADIX;
628*31c82722SDoug Moore 	for (blk = start; blk - start < maxcount; blk += BLIST_BMAP_RADIX) {
629*31c82722SDoug Moore 		/* Skip meta-nodes, as long as they promise more free blocks. */
630bb4a27f9SMark Johnston 		radix = BLIST_BMAP_RADIX;
631*31c82722SDoug Moore 		while (((++scan)->bm_bitmap & 1) == 1 &&
632*31c82722SDoug Moore 		    ((blk / radix) & BLIST_META_MASK) == 0)
633bb4a27f9SMark Johnston 			radix *= BLIST_META_RADIX;
634*31c82722SDoug Moore 		if (~scan->bm_bitmap != 0) {
635*31c82722SDoug Moore 			/*
636*31c82722SDoug Moore 			 * Either there is no next leaf with any free blocks,
637*31c82722SDoug Moore 			 * or we've reached the next leaf and found that some
638*31c82722SDoug Moore 			 * of its blocks are not free.  In the first case,
639*31c82722SDoug Moore 			 * bitpos() returns zero here.
640*31c82722SDoug Moore 			 */
641*31c82722SDoug Moore 			avail = blk - start + bitpos(~scan->bm_bitmap);
64287ae0686SDoug Moore 			if (avail < count) {
643bb4a27f9SMark Johnston 				/*
644*31c82722SDoug Moore 				 * There isn't a next leaf with enough free
645*31c82722SDoug Moore 				 * blocks at its beginning to complete the
646*31c82722SDoug Moore 				 * spanning allocation.
647bb4a27f9SMark Johnston 				 */
648*31c82722SDoug Moore 				return (avail);
649bb4a27f9SMark Johnston 			}
650*31c82722SDoug Moore 			maxcount = imin(avail, maxcount);
651*31c82722SDoug Moore 		}
652*31c82722SDoug Moore 	}
653bb4a27f9SMark Johnston 
654bb4a27f9SMark Johnston 	/*
655*31c82722SDoug Moore 	 * 'scan' is the last leaf that provides blocks.  Clear from 1 to
656*31c82722SDoug Moore 	 * BLIST_BMAP_RADIX bits to represent the allocation of those last
657*31c82722SDoug Moore 	 * blocks.
658bb4a27f9SMark Johnston 	 */
659*31c82722SDoug Moore 	if (maxcount % BLIST_BMAP_RADIX != 0)
660*31c82722SDoug Moore 		scan->bm_bitmap &= ~bitrange(0, maxcount % BLIST_BMAP_RADIX);
661*31c82722SDoug Moore 	else
662*31c82722SDoug Moore 		scan->bm_bitmap = 0;
663*31c82722SDoug Moore 
664*31c82722SDoug Moore 	for (;;) {
665*31c82722SDoug Moore 		/* Back up over meta-nodes, clearing bits if necessary. */
666*31c82722SDoug Moore 		blk -= BLIST_BMAP_RADIX;
667*31c82722SDoug Moore 		radix = BLIST_BMAP_RADIX;
668*31c82722SDoug Moore 		while ((digit = ((blk / radix) & BLIST_META_MASK)) == 0) {
669*31c82722SDoug Moore 			if ((scan--)->bm_bitmap == 0)
670*31c82722SDoug Moore 				scan->bm_bitmap ^= 1;
671*31c82722SDoug Moore 			radix *= BLIST_META_RADIX;
672*31c82722SDoug Moore 		}
673*31c82722SDoug Moore 		if ((scan--)->bm_bitmap == 0)
674d1c34a6bSDoug Moore 			scan[-digit * radix_to_skip(radix)].bm_bitmap ^=
675d1c34a6bSDoug Moore 			    (u_daddr_t)1 << digit;
676*31c82722SDoug Moore 
677*31c82722SDoug Moore 		if (blk == start)
678d1c34a6bSDoug Moore 			break;
679*31c82722SDoug Moore 		/* Clear all the bits of this leaf. */
680*31c82722SDoug Moore 		scan->bm_bitmap = 0;
681bb4a27f9SMark Johnston 	}
682*31c82722SDoug Moore 	return (maxcount);
683bb4a27f9SMark Johnston }
684bb4a27f9SMark Johnston 
685bb4a27f9SMark Johnston /*
68609b380a1SDoug Moore  * Given a bitmask, flip all the bits from the least-significant 1-bit to the
68709b380a1SDoug Moore  * most significant bit.  If the result is non-zero, then the least-significant
68809b380a1SDoug Moore  * 1-bit of the result is in the same position as the least-significant 0-bit
68909b380a1SDoug Moore  * in mask that is followed by a 1-bit.
69009b380a1SDoug Moore  */
69109b380a1SDoug Moore static inline u_daddr_t
69209b380a1SDoug Moore flip_hibits(u_daddr_t mask)
69309b380a1SDoug Moore {
69409b380a1SDoug Moore 
69509b380a1SDoug Moore 	return (-mask & ~mask);
69609b380a1SDoug Moore }
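
/*
 * For example, with a 64-bit u_daddr_t, flip_hibits(0xf0f0) is
 * 0xffffffffffff0f00: every bit from the lowest 1-bit (bit 4) upward is
 * complemented, the bits below it are clear, and the least-significant set
 * bit of the result (bit 8) marks the first 0-bit above the lowest run of
 * 1-bits in the mask.
 */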
69709b380a1SDoug Moore 
69809b380a1SDoug Moore /*
699bb4a27f9SMark Johnston  * BLST_LEAF_ALLOC() -	allocate at a leaf in the radix tree (a bitmap).
7007090df5aSMatthew Dillon  *
7012905d1ceSAlan Cox  *	This function is the core of the allocator.  Its execution time is
7022905d1ceSAlan Cox  *	proportional to log(count), plus height of the tree if the allocation
7032905d1ceSAlan Cox  *	crosses a leaf boundary.
7047090df5aSMatthew Dillon  */
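/*
 * A worked pass through the shift loop below (illustrative): for *count = 5,
 * count1 = 4 and num_shifts = fls(4) = 3.  The three iterations mask with
 * shifts of 1, 1 and 2 bits, growing range1 to 1, 2 and then 4, so that
 * afterwards every bit still set in mask marks the start of at least 5
 * consecutive set bits in scan->bm_bitmap.
 */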
7057090df5aSMatthew Dillon static daddr_t
70687ae0686SDoug Moore blst_leaf_alloc(blmeta_t *scan, daddr_t blk, int *count, int maxcount)
70754541421SAlan Cox {
7082905d1ceSAlan Cox 	u_daddr_t cursor_mask, mask;
709ec371b57SAlan Cox 	int count1, hi, lo, num_shifts, range1, range_ext;
7107090df5aSMatthew Dillon 
71154541421SAlan Cox 	range1 = 0;
71287ae0686SDoug Moore 	count1 = *count - 1;
71354541421SAlan Cox 	num_shifts = fls(count1);
714bb4a27f9SMark Johnston 	mask = scan->bm_bitmap;
71509b380a1SDoug Moore 	while (flip_hibits(mask) != 0 && num_shifts > 0) {
71654541421SAlan Cox 		/*
71754541421SAlan Cox 		 * If bit i is set in mask, then bits in [i, i+range1] are set
7182905d1ceSAlan Cox 		 * in scan->bm_bitmap.  The value of range1 is equal to count1
7192905d1ceSAlan Cox 		 * >> num_shifts.  Grow range1 and reduce num_shifts to 0,
7202905d1ceSAlan Cox 		 * while preserving these invariants.  The updates to mask
7212905d1ceSAlan Cox 		 * leave fewer bits set, but each bit that remains set
7222905d1ceSAlan Cox 		 * represents a longer string of consecutive bits set in
7232905d1ceSAlan Cox 		 * scan->bm_bitmap.  If more updates to mask cannot clear more
7242905d1ceSAlan Cox 		 * bits, because mask is partitioned with all 0 bits preceding
7252905d1ceSAlan Cox 		 * all 1 bits, the loop terminates immediately.
72654541421SAlan Cox 		 */
72754541421SAlan Cox 		num_shifts--;
72854541421SAlan Cox 		range_ext = range1 + ((count1 >> num_shifts) & 1);
729ec371b57SAlan Cox 		/*
730ec371b57SAlan Cox 		 * mask is a signed quantity for the shift because when it is
731ec371b57SAlan Cox 		 * shifted right, the sign bit should be copied; when the last
732ec371b57SAlan Cox 		 * block of the leaf is free, pretend, for a while, that all the
733ec371b57SAlan Cox 		 * blocks that follow it are also free.
734ec371b57SAlan Cox 		 */
735ec371b57SAlan Cox 		mask &= (daddr_t)mask >> range_ext;
73654541421SAlan Cox 		range1 += range_ext;
73754541421SAlan Cox 	}
73854541421SAlan Cox 	if (mask == 0) {
73954541421SAlan Cox 		/*
74054541421SAlan Cox 		 * Update bighint.  There is no allocation bigger than range1
741ec371b57SAlan Cox 		 * starting in this leaf.
74254541421SAlan Cox 		 */
74354541421SAlan Cox 		scan->bm_bighint = range1;
7447090df5aSMatthew Dillon 		return (SWAPBLK_NONE);
7457090df5aSMatthew Dillon 	}
74654541421SAlan Cox 
747ec371b57SAlan Cox 	/* Discard any candidates that appear before blk. */
7482905d1ceSAlan Cox 	if ((blk & BLIST_BMAP_MASK) != 0) {
7492905d1ceSAlan Cox 		cursor_mask = mask & bitrange(0, blk & BLIST_BMAP_MASK);
7502905d1ceSAlan Cox 		if (cursor_mask != 0) {
7512905d1ceSAlan Cox 			mask ^= cursor_mask;
75254541421SAlan Cox 			if (mask == 0)
75354541421SAlan Cox 				return (SWAPBLK_NONE);
7547090df5aSMatthew Dillon 
7557090df5aSMatthew Dillon 			/*
7562905d1ceSAlan Cox 			 * Bighint change for last block allocation cannot
7572905d1ceSAlan Cox 			 * assume that any other blocks are allocated, so the
7582905d1ceSAlan Cox 			 * bighint cannot be reduced much.
7592905d1ceSAlan Cox 			 */
7602905d1ceSAlan Cox 			range1 = BLIST_MAX_ALLOC - 1;
7612905d1ceSAlan Cox 		}
7622905d1ceSAlan Cox 		blk &= ~BLIST_BMAP_MASK;
7632905d1ceSAlan Cox 	}
7642905d1ceSAlan Cox 
7652905d1ceSAlan Cox 	/*
76654541421SAlan Cox 	 * The least significant set bit in mask marks the start of the first
76787ae0686SDoug Moore 	 * available range of sufficient size.  Find its position.
7687090df5aSMatthew Dillon 	 */
769d027ed2eSAlan Cox 	lo = bitpos(mask);
7707090df5aSMatthew Dillon 
77154541421SAlan Cox 	/*
77287ae0686SDoug Moore 	 * Find how much space is available starting at that position.
77354541421SAlan Cox 	 */
77487ae0686SDoug Moore 	if (flip_hibits(mask) != 0) {
77587ae0686SDoug Moore 		/* Count the 1 bits starting at position lo. */
77687ae0686SDoug Moore 		hi = bitpos(flip_hibits(mask)) + count1;
77787ae0686SDoug Moore 		if (maxcount < hi - lo)
77887ae0686SDoug Moore 			hi = lo + maxcount;
77987ae0686SDoug Moore 		*count = hi - lo;
78087ae0686SDoug Moore 		mask = bitrange(lo, *count);
78187ae0686SDoug Moore 	} else if (maxcount <= BLIST_BMAP_RADIX - lo) {
78287ae0686SDoug Moore 		/* All the blocks we can use are available here. */
78387ae0686SDoug Moore 		hi = lo + maxcount;
78487ae0686SDoug Moore 		*count = maxcount;
78587ae0686SDoug Moore 		mask = bitrange(lo, *count);
78687ae0686SDoug Moore 	} else {
78787ae0686SDoug Moore 		/* Check next leaf for some of the blocks we want or need. */
78887ae0686SDoug Moore 		count1 = *count - (BLIST_BMAP_RADIX - lo);
78987ae0686SDoug Moore 		maxcount -= BLIST_BMAP_RADIX - lo;
79087ae0686SDoug Moore 		hi = blst_next_leaf_alloc(scan, blk, count1, maxcount);
79187ae0686SDoug Moore 		if (hi < count1)
792ec371b57SAlan Cox 			/*
79387ae0686SDoug Moore 			 * The next leaf cannot supply enough blocks to reach
79487ae0686SDoug Moore 			 * the minimum required allocation.  The hint cannot be
79587ae0686SDoug Moore 			 * updated, because the same allocation request could
79687ae0686SDoug Moore 			 * be satisfied later, by this leaf, if the state of
79787ae0686SDoug Moore 			 * the next leaf changes, and without any changes to
79887ae0686SDoug Moore 			 * this leaf.
799ec371b57SAlan Cox 			 */
800ec371b57SAlan Cox 			return (SWAPBLK_NONE);
80187ae0686SDoug Moore 		*count = BLIST_BMAP_RADIX - lo + hi;
802ec371b57SAlan Cox 		hi = BLIST_BMAP_RADIX;
803ec371b57SAlan Cox 	}
804ec371b57SAlan Cox 
805ec371b57SAlan Cox 	if (hi == BLIST_BMAP_RADIX) {
806ec371b57SAlan Cox 		/*
807ec371b57SAlan Cox 		 * Update bighint.  There is no allocation bigger than range1
808ec371b57SAlan Cox 		 * available in this leaf after this allocation completes.
809ec371b57SAlan Cox 		 */
810ec371b57SAlan Cox 		scan->bm_bighint = range1;
811ec371b57SAlan Cox 	}
812ec371b57SAlan Cox 	/* Clear the allocated bits from this leaf. */
813bb4a27f9SMark Johnston 	scan->bm_bitmap &= ~mask;
8142905d1ceSAlan Cox 	return (blk + lo);
8157090df5aSMatthew Dillon }
8167090df5aSMatthew Dillon 
8177090df5aSMatthew Dillon /*
8187090df5aSMatthew Dillon  * blst_meta_alloc() -	allocate at a meta node in the radix tree.
8197090df5aSMatthew Dillon  *
8207090df5aSMatthew Dillon  *	Attempt to allocate at a meta node.  If we can't, we update
8217090df5aSMatthew Dillon  *	bighint and return a failure.  Updating bighint optimizes future
8227090df5aSMatthew Dillon  *	calls that hit this node.  We have to check for our collapse cases
8237090df5aSMatthew Dillon  *	and we have a few optimizations strewn in as well.
8247090df5aSMatthew Dillon  */
8257090df5aSMatthew Dillon static daddr_t
82687ae0686SDoug Moore blst_meta_alloc(blmeta_t *scan, daddr_t cursor, int *count,
82787ae0686SDoug Moore     int maxcount, u_daddr_t radix)
828d4e3484bSAlan Cox {
829bb4a27f9SMark Johnston 	daddr_t blk, i, r, skip;
83053519253SDoug Moore 	u_daddr_t mask;
831d4e3484bSAlan Cox 	bool scan_from_start;
832bb4a27f9SMark Johnston 	int digit;
8337090df5aSMatthew Dillon 
834a396c83aSAlan Cox 	if (radix == BLIST_BMAP_RADIX)
83587ae0686SDoug Moore 		return (blst_leaf_alloc(scan, cursor, count, maxcount));
836ec371b57SAlan Cox 	blk = cursor & -radix;
837bb4a27f9SMark Johnston 	scan_from_start = (cursor == blk);
838bb4a27f9SMark Johnston 	radix /= BLIST_META_RADIX;
8392ac0c7c3SAlan Cox 	skip = radix_to_skip(radix);
840bb4a27f9SMark Johnston 	mask = scan->bm_bitmap;
841bb4a27f9SMark Johnston 
842bb4a27f9SMark Johnston 	/* Discard any candidates that appear before cursor. */
843bb4a27f9SMark Johnston 	digit = (cursor / radix) & BLIST_META_MASK;
844bb4a27f9SMark Johnston 	mask &= (u_daddr_t)-1 << digit;
845ee73fef9SAlan Cox 	if (mask == 0)
846ee73fef9SAlan Cox 		return (SWAPBLK_NONE);
8477090df5aSMatthew Dillon 
8484be4fd5dSAlan Cox 	/*
849bb4a27f9SMark Johnston 	 * If the first try is for a block that includes the cursor, pre-undo
850bb4a27f9SMark Johnston 	 * the digit * radix offset in the first call; otherwise, ignore the
851bb4a27f9SMark Johnston 	 * cursor entirely.
8524be4fd5dSAlan Cox 	 */
853bb4a27f9SMark Johnston 	if (((mask >> digit) & 1) == 1)
854bb4a27f9SMark Johnston 		cursor -= digit * radix;
855d4e3484bSAlan Cox 	else
856bb4a27f9SMark Johnston 		cursor = blk;
8577090df5aSMatthew Dillon 
8584be4fd5dSAlan Cox 	/*
859bb4a27f9SMark Johnston 	 * Examine the nonempty subtree associated with each bit set in mask.
8604be4fd5dSAlan Cox 	 */
861bb4a27f9SMark Johnston 	do {
86253519253SDoug Moore 		digit = bitpos(mask);
863bb4a27f9SMark Johnston 		i = 1 + digit * skip;
86487ae0686SDoug Moore 		if (*count <= scan[i].bm_bighint) {
8657090df5aSMatthew Dillon 			/*
866ec371b57SAlan Cox 			 * The allocation might fit beginning in the i'th subtree.
8677090df5aSMatthew Dillon 			 */
868bb4a27f9SMark Johnston 			r = blst_meta_alloc(&scan[i], cursor + digit * radix,
86987ae0686SDoug Moore 			    count, maxcount, radix);
8707090df5aSMatthew Dillon 			if (r != SWAPBLK_NONE) {
871bb4a27f9SMark Johnston 				if (scan[i].bm_bitmap == 0)
87253519253SDoug Moore 					scan->bm_bitmap ^= bitrange(digit, 1);
8737090df5aSMatthew Dillon 				return (r);
8747090df5aSMatthew Dillon 			}
8757090df5aSMatthew Dillon 		}
876bb4a27f9SMark Johnston 		cursor = blk;
87753519253SDoug Moore 	} while ((mask ^= bitrange(digit, 1)) != 0);
8787090df5aSMatthew Dillon 
8797090df5aSMatthew Dillon 	/*
880bb4a27f9SMark Johnston 	 * We couldn't allocate count in this subtree.  If the whole tree was
881bb4a27f9SMark Johnston 	 * scanned, and the last tree node is allocated, update bighint.
8827090df5aSMatthew Dillon 	 */
883bb4a27f9SMark Johnston 	if (scan_from_start && !(digit == BLIST_META_RADIX - 1 &&
884bb4a27f9SMark Johnston 	    scan[i].bm_bighint == BLIST_MAX_ALLOC))
88587ae0686SDoug Moore 		scan->bm_bighint = *count - 1;
886d4e3484bSAlan Cox 
8877090df5aSMatthew Dillon 	return (SWAPBLK_NONE);
8887090df5aSMatthew Dillon }
8897090df5aSMatthew Dillon 
8907090df5aSMatthew Dillon /*
8917090df5aSMatthew Dillon  * BLST_LEAF_FREE() -	free allocated block from leaf bitmap
8927090df5aSMatthew Dillon  *
8937090df5aSMatthew Dillon  */
8947090df5aSMatthew Dillon static void
895b411efa4SAlan Cox blst_leaf_free(blmeta_t *scan, daddr_t blk, int count)
896b411efa4SAlan Cox {
897ec371b57SAlan Cox 	u_daddr_t mask;
898ec371b57SAlan Cox 
8997090df5aSMatthew Dillon 	/*
9007090df5aSMatthew Dillon 	 * free some data in this bitmap
901ec371b57SAlan Cox 	 * mask=0000111111111110000
9027090df5aSMatthew Dillon 	 *          \_________/\__/
903ec371b57SAlan Cox 	 *		count   n
9047090df5aSMatthew Dillon 	 */
905bb4a27f9SMark Johnston 	mask = bitrange(blk & BLIST_BMAP_MASK, count);
906b1f59c92SDoug Moore 	KASSERT((scan->bm_bitmap & mask) == 0,
907b1f59c92SDoug Moore 	    ("freeing free block: %jx, size %d, mask %jx",
908b1f59c92SDoug Moore 	    (uintmax_t)blk, count, (uintmax_t)scan->bm_bitmap & mask));
909bb4a27f9SMark Johnston 	scan->bm_bitmap |= mask;
9107090df5aSMatthew Dillon }
9117090df5aSMatthew Dillon 
9127090df5aSMatthew Dillon /*
9137090df5aSMatthew Dillon  * BLST_META_FREE() - free allocated blocks from radix tree meta info
9147090df5aSMatthew Dillon  *
9157090df5aSMatthew Dillon  *	This support routine frees a range of blocks from the bitmap.
9167090df5aSMatthew Dillon  *	The range must be entirely enclosed by this radix node.  If a
9177090df5aSMatthew Dillon  *	meta node, we break the range down recursively to free blocks
9187090df5aSMatthew Dillon  *	in subnodes (which means that this code can free an arbitrary
9197090df5aSMatthew Dillon  *	range whereas the allocation code cannot allocate an arbitrary
9207090df5aSMatthew Dillon  *	range).
9217090df5aSMatthew Dillon  */
9227090df5aSMatthew Dillon static void
923bee93d3cSAlan Cox blst_meta_free(blmeta_t *scan, daddr_t freeBlk, daddr_t count, u_daddr_t radix)
924d3783724SAlan Cox {
925bb4a27f9SMark Johnston 	daddr_t blk, endBlk, i, skip;
926bb4a27f9SMark Johnston 	int digit, endDigit;
9277090df5aSMatthew Dillon 
928bb4a27f9SMark Johnston 	/*
929bb4a27f9SMark Johnston 	 * We could probably do a better job here.  We are required to make
930bb4a27f9SMark Johnston 	 * bighint at least as large as the biggest allocable block of data.
931bb4a27f9SMark Johnston 	 * If we just shoehorn it, a little extra overhead will be incurred
932bb4a27f9SMark Johnston 	 * on the next allocation (but only that one typically).
933bb4a27f9SMark Johnston 	 */
934bb4a27f9SMark Johnston 	scan->bm_bighint = BLIST_MAX_ALLOC;
935bb4a27f9SMark Johnston 
936a396c83aSAlan Cox 	if (radix == BLIST_BMAP_RADIX)
937a396c83aSAlan Cox 		return (blst_leaf_free(scan, freeBlk, count));
9387090df5aSMatthew Dillon 
939bb4a27f9SMark Johnston 	endBlk = ummin(freeBlk + count, (freeBlk + radix) & -radix);
94057e6d29bSMatthew Dillon 	radix /= BLIST_META_RADIX;
941bb4a27f9SMark Johnston 	skip = radix_to_skip(radix);
942bb4a27f9SMark Johnston 	blk = freeBlk & -radix;
943bb4a27f9SMark Johnston 	digit = (blk / radix) & BLIST_META_MASK;
944bb4a27f9SMark Johnston 	endDigit = 1 + (((endBlk - 1) / radix) & BLIST_META_MASK);
945bb4a27f9SMark Johnston 	scan->bm_bitmap |= bitrange(digit, endDigit - digit);
946bb4a27f9SMark Johnston 	for (i = 1 + digit * skip; blk < endBlk; i += skip) {
9477090df5aSMatthew Dillon 		blk += radix;
948bb4a27f9SMark Johnston 		count = ummin(blk, endBlk) - freeBlk;
949bb4a27f9SMark Johnston 		blst_meta_free(&scan[i], freeBlk, count, radix);
950bb4a27f9SMark Johnston 		freeBlk = blk;
9517090df5aSMatthew Dillon 	}
9527090df5aSMatthew Dillon }
9537090df5aSMatthew Dillon 
9547090df5aSMatthew Dillon /*
955bb4a27f9SMark Johnston  * BLST_COPY() - copy one radix tree to another
9567090df5aSMatthew Dillon  *
9577090df5aSMatthew Dillon  *	Locates free space in the source tree and frees it in the destination
9587090df5aSMatthew Dillon  *	tree.  The space may not already be free in the destination.
9597090df5aSMatthew Dillon  */
960b411efa4SAlan Cox static void
9612ac0c7c3SAlan Cox blst_copy(blmeta_t *scan, daddr_t blk, daddr_t radix, blist_t dest,
9622ac0c7c3SAlan Cox     daddr_t count)
963b411efa4SAlan Cox {
964bb4a27f9SMark Johnston 	daddr_t endBlk, i, skip;
9657090df5aSMatthew Dillon 
9667090df5aSMatthew Dillon 	/*
9677090df5aSMatthew Dillon 	 * Leaf node
9687090df5aSMatthew Dillon 	 */
9697090df5aSMatthew Dillon 
9707090df5aSMatthew Dillon 	if (radix == BLIST_BMAP_RADIX) {
971bb4a27f9SMark Johnston 		u_daddr_t v = scan->bm_bitmap;
9727090df5aSMatthew Dillon 
9737090df5aSMatthew Dillon 		if (v == (u_daddr_t)-1) {
9747090df5aSMatthew Dillon 			blist_free(dest, blk, count);
9757090df5aSMatthew Dillon 		} else if (v != 0) {
9767090df5aSMatthew Dillon 			int i;
9777090df5aSMatthew Dillon 
978bb4a27f9SMark Johnston 			for (i = 0; i < count; ++i) {
979064650c1SAlan Cox 				if (v & ((u_daddr_t)1 << i))
9807090df5aSMatthew Dillon 					blist_free(dest, blk + i, 1);
9817090df5aSMatthew Dillon 			}
9827090df5aSMatthew Dillon 		}
9837090df5aSMatthew Dillon 		return;
9847090df5aSMatthew Dillon 	}
9857090df5aSMatthew Dillon 
9867090df5aSMatthew Dillon 	/*
9877090df5aSMatthew Dillon 	 * Meta node
9887090df5aSMatthew Dillon 	 */
9897090df5aSMatthew Dillon 
990bb4a27f9SMark Johnston 	if (scan->bm_bitmap == 0) {
9917090df5aSMatthew Dillon 		/*
9927090df5aSMatthew Dillon 		 * Source all allocated, leave dest allocated
9937090df5aSMatthew Dillon 		 */
9947090df5aSMatthew Dillon 		return;
9957090df5aSMatthew Dillon 	}
9967090df5aSMatthew Dillon 
997bb4a27f9SMark Johnston 	endBlk = blk + count;
9982ac0c7c3SAlan Cox 	radix /= BLIST_META_RADIX;
999bb4a27f9SMark Johnston 	skip = radix_to_skip(radix);
1000bb4a27f9SMark Johnston 	for (i = 1; blk < endBlk; i += skip) {
10017090df5aSMatthew Dillon 		blk += radix;
1002bb4a27f9SMark Johnston 		count = radix;
1003bb4a27f9SMark Johnston 		if (blk >= endBlk)
1004bb4a27f9SMark Johnston 			count -= blk - endBlk;
1005bb4a27f9SMark Johnston 		blst_copy(&scan[i], blk - radix, radix, dest, count);
10067090df5aSMatthew Dillon 	}
10077090df5aSMatthew Dillon }
10087090df5aSMatthew Dillon 
10097090df5aSMatthew Dillon /*
101092da00bbSMatthew Dillon  * BLST_LEAF_FILL() -	allocate specific blocks in leaf bitmap
101192da00bbSMatthew Dillon  *
101292da00bbSMatthew Dillon  *	This routine allocates all blocks in the specified range
101392da00bbSMatthew Dillon  *	regardless of any existing allocations in that range.  Returns
101492da00bbSMatthew Dillon  *	the number of blocks allocated by the call.
101592da00bbSMatthew Dillon  */
1016a7249a6cSAlan Cox static daddr_t
101792da00bbSMatthew Dillon blst_leaf_fill(blmeta_t *scan, daddr_t blk, int count)
101892da00bbSMatthew Dillon {
1019a7249a6cSAlan Cox 	daddr_t nblks;
10204be4fd5dSAlan Cox 	u_daddr_t mask;
102192da00bbSMatthew Dillon 
1022bb4a27f9SMark Johnston 	mask = bitrange(blk & BLIST_BMAP_MASK, count);
102392da00bbSMatthew Dillon 
10244be4fd5dSAlan Cox 	/* Count the number of blocks that we are allocating. */
1025bb4a27f9SMark Johnston 	nblks = bitcount64(scan->bm_bitmap & mask);
102692da00bbSMatthew Dillon 
1027bb4a27f9SMark Johnston 	scan->bm_bitmap &= ~mask;
10284be4fd5dSAlan Cox 	return (nblks);
102992da00bbSMatthew Dillon }
103092da00bbSMatthew Dillon 
103192da00bbSMatthew Dillon /*
103292da00bbSMatthew Dillon  * BLIST_META_FILL() -	allocate specific blocks at a meta node
103392da00bbSMatthew Dillon  *
103492da00bbSMatthew Dillon  *	This routine allocates the specified range of blocks,
103592da00bbSMatthew Dillon  *	regardless of any existing allocations in the range.  The
103692da00bbSMatthew Dillon  *	range must be within the extent of this node.  Returns the
103792da00bbSMatthew Dillon  *	number of blocks allocated by the call.
103892da00bbSMatthew Dillon  */
1039a7249a6cSAlan Cox static daddr_t
1040bee93d3cSAlan Cox blst_meta_fill(blmeta_t *scan, daddr_t allocBlk, daddr_t count, u_daddr_t radix)
1041d3783724SAlan Cox {
1042bb4a27f9SMark Johnston 	daddr_t blk, endBlk, i, nblks, skip;
1043bb4a27f9SMark Johnston 	int digit;
104492da00bbSMatthew Dillon 
1045a396c83aSAlan Cox 	if (radix == BLIST_BMAP_RADIX)
1046a396c83aSAlan Cox 		return (blst_leaf_fill(scan, allocBlk, count));
1047bb4a27f9SMark Johnston 
1048bb4a27f9SMark Johnston 	endBlk = ummin(allocBlk + count, (allocBlk + radix) & -radix);
1049bb4a27f9SMark Johnston 	radix /= BLIST_META_RADIX;
10502ac0c7c3SAlan Cox 	skip = radix_to_skip(radix);
1051bee93d3cSAlan Cox 	blk = allocBlk & -radix;
1052d3783724SAlan Cox 	nblks = 0;
1053bb4a27f9SMark Johnston 	while (blk < endBlk) {
1054bb4a27f9SMark Johnston 		digit = (blk / radix) & BLIST_META_MASK;
1055bb4a27f9SMark Johnston 		i = 1 + digit * skip;
105692da00bbSMatthew Dillon 		blk += radix;
1057bb4a27f9SMark Johnston 		count = ummin(blk, endBlk) - allocBlk;
1058bb4a27f9SMark Johnston 		nblks += blst_meta_fill(&scan[i], allocBlk, count, radix);
1059bb4a27f9SMark Johnston 		if (scan[i].bm_bitmap == 0)
1060bb4a27f9SMark Johnston 			scan->bm_bitmap &= ~((u_daddr_t)1 << digit);
1061bb4a27f9SMark Johnston 		allocBlk = blk;
106292da00bbSMatthew Dillon 	}
1063a396c83aSAlan Cox 	return (nblks);
106492da00bbSMatthew Dillon }
106592da00bbSMatthew Dillon 
10667090df5aSMatthew Dillon #ifdef BLIST_DEBUG
10677090df5aSMatthew Dillon 
10687090df5aSMatthew Dillon static void
10692ac0c7c3SAlan Cox blst_radix_print(blmeta_t *scan, daddr_t blk, daddr_t radix, int tab)
10707090df5aSMatthew Dillon {
1071bb4a27f9SMark Johnston 	daddr_t skip;
107253519253SDoug Moore 	u_daddr_t mask;
1073bb4a27f9SMark Johnston 	int digit;
10747090df5aSMatthew Dillon 
10757090df5aSMatthew Dillon 	if (radix == BLIST_BMAP_RADIX) {
10767090df5aSMatthew Dillon 		printf(
1077bb4a27f9SMark Johnston 		    "%*.*s(%08llx,%lld): bitmap %0*llx big=%lld\n",
10787090df5aSMatthew Dillon 		    tab, tab, "",
107992da00bbSMatthew Dillon 		    (long long)blk, (long long)radix,
1080bb4a27f9SMark Johnston 		    1 + (BLIST_BMAP_RADIX - 1) / 4,
1081bb4a27f9SMark Johnston 		    (long long)scan->bm_bitmap,
108292da00bbSMatthew Dillon 		    (long long)scan->bm_bighint
10837090df5aSMatthew Dillon 		);
10847090df5aSMatthew Dillon 		return;
10857090df5aSMatthew Dillon 	}
10867090df5aSMatthew Dillon 
10877090df5aSMatthew Dillon 	printf(
1088bb4a27f9SMark Johnston 	    "%*.*s(%08llx): subtree (%lld/%lld) bitmap %0*llx big=%lld {\n",
10897090df5aSMatthew Dillon 	    tab, tab, "",
109092da00bbSMatthew Dillon 	    (long long)blk, (long long)radix,
109192da00bbSMatthew Dillon 	    (long long)radix,
1092bb4a27f9SMark Johnston 	    1 + (BLIST_META_RADIX - 1) / 4,
1093bb4a27f9SMark Johnston 	    (long long)scan->bm_bitmap,
109492da00bbSMatthew Dillon 	    (long long)scan->bm_bighint
10957090df5aSMatthew Dillon 	);
10967090df5aSMatthew Dillon 
10972ac0c7c3SAlan Cox 	radix /= BLIST_META_RADIX;
1098bb4a27f9SMark Johnston 	skip = radix_to_skip(radix);
10997090df5aSMatthew Dillon 	tab += 4;
11007090df5aSMatthew Dillon 
1101bb4a27f9SMark Johnston 	mask = scan->bm_bitmap;
1102bb4a27f9SMark Johnston 	/* Examine the nonempty subtree associated with each bit set in mask */
1103bb4a27f9SMark Johnston 	do {
110453519253SDoug Moore 		digit = bitpos(mask);
1105bb4a27f9SMark Johnston 		blst_radix_print(&scan[1 + digit * skip], blk + digit * radix,
1106bb4a27f9SMark Johnston 		    radix, tab);
110753519253SDoug Moore 	} while ((mask ^= bitrange(digit, 1)) != 0);
11087090df5aSMatthew Dillon 	tab -= 4;
11097090df5aSMatthew Dillon 
11107090df5aSMatthew Dillon 	printf(
11117090df5aSMatthew Dillon 	    "%*.*s}\n",
11127090df5aSMatthew Dillon 	    tab, tab, ""
11137090df5aSMatthew Dillon 	);
11147090df5aSMatthew Dillon }
11157090df5aSMatthew Dillon 
11167090df5aSMatthew Dillon #endif
11177090df5aSMatthew Dillon 
11187090df5aSMatthew Dillon #ifdef BLIST_DEBUG
11197090df5aSMatthew Dillon 
11207090df5aSMatthew Dillon int
11217090df5aSMatthew Dillon main(int ac, char **av)
11227090df5aSMatthew Dillon {
1123bb4a27f9SMark Johnston 	int size = BLIST_META_RADIX * BLIST_BMAP_RADIX;
11247090df5aSMatthew Dillon 	int i;
11257090df5aSMatthew Dillon 	blist_t bl;
1126d027ed2eSAlan Cox 	struct sbuf *s;
11277090df5aSMatthew Dillon 
11287090df5aSMatthew Dillon 	for (i = 1; i < ac; ++i) {
11297090df5aSMatthew Dillon 		const char *ptr = av[i];
11307090df5aSMatthew Dillon 		if (*ptr != '-') {
11317090df5aSMatthew Dillon 			size = strtol(ptr, NULL, 0);
11327090df5aSMatthew Dillon 			continue;
11337090df5aSMatthew Dillon 		}
11347090df5aSMatthew Dillon 		ptr += 2;
11357090df5aSMatthew Dillon 		fprintf(stderr, "Bad option: %s\n", ptr - 2);
11367090df5aSMatthew Dillon 		exit(1);
11377090df5aSMatthew Dillon 	}
1138c8c7ad92SKip Macy 	bl = blist_create(size, M_WAITOK);
11397090df5aSMatthew Dillon 	blist_free(bl, 0, size);
11407090df5aSMatthew Dillon 
11417090df5aSMatthew Dillon 	for (;;) {
11427090df5aSMatthew Dillon 		char buf[1024];
1143d90bf7d8SAlan Cox 		long long da = 0;
114487ae0686SDoug Moore 		int count = 0, maxcount = 0;
11457090df5aSMatthew Dillon 
11464be4fd5dSAlan Cox 		printf("%lld/%lld/%lld> ", (long long)blist_avail(bl),
114792da00bbSMatthew Dillon 		    (long long)size, (long long)bl->bl_radix);
11487090df5aSMatthew Dillon 		fflush(stdout);
11497090df5aSMatthew Dillon 		if (fgets(buf, sizeof(buf), stdin) == NULL)
11507090df5aSMatthew Dillon 			break;
11517090df5aSMatthew Dillon 		switch(buf[0]) {
11527090df5aSMatthew Dillon 		case 'r':
115387ae0686SDoug Moore 			if (sscanf(buf + 1, "%d", &count) == 1) {
1154d90bf7d8SAlan Cox 				blist_resize(&bl, count, 1, M_WAITOK);
11557090df5aSMatthew Dillon 			} else {
11567090df5aSMatthew Dillon 				printf("?\n");
11577090df5aSMatthew Dillon 			}
11587090df5aSMatthew Dillon 		case 'p':
11597090df5aSMatthew Dillon 			blist_print(bl);
11607090df5aSMatthew Dillon 			break;
1161d027ed2eSAlan Cox 		case 's':
1162d027ed2eSAlan Cox 			s = sbuf_new_auto();
1163d027ed2eSAlan Cox 			blist_stats(bl, s);
1164d027ed2eSAlan Cox 			sbuf_finish(s);
1165d027ed2eSAlan Cox 			printf("%s", sbuf_data(s));
1166d027ed2eSAlan Cox 			sbuf_delete(s);
1167d027ed2eSAlan Cox 			break;
11687090df5aSMatthew Dillon 		case 'a':
116987ae0686SDoug Moore 			if (sscanf(buf + 1, "%d%d", &count, &maxcount) == 2) {
117087ae0686SDoug Moore 				daddr_t blk = blist_alloc(bl, &count, maxcount);
117187ae0686SDoug Moore 				printf("    R=%08llx, c=%08d\n",
117287ae0686SDoug Moore 				    (long long)blk, count);
11737090df5aSMatthew Dillon 			} else {
11747090df5aSMatthew Dillon 				printf("?\n");
11757090df5aSMatthew Dillon 			}
11767090df5aSMatthew Dillon 			break;
11777090df5aSMatthew Dillon 		case 'f':
117887ae0686SDoug Moore 			if (sscanf(buf + 1, "%llx %d", &da, &count) == 2) {
11797090df5aSMatthew Dillon 				blist_free(bl, da, count);
11807090df5aSMatthew Dillon 			} else {
11817090df5aSMatthew Dillon 				printf("?\n");
11827090df5aSMatthew Dillon 			}
11837090df5aSMatthew Dillon 			break;
118492da00bbSMatthew Dillon 		case 'l':
118587ae0686SDoug Moore 			if (sscanf(buf + 1, "%llx %d", &da, &count) == 2) {
1186a7249a6cSAlan Cox 				printf("    n=%jd\n",
1187a7249a6cSAlan Cox 				    (intmax_t)blist_fill(bl, da, count));
118892da00bbSMatthew Dillon 			} else {
118992da00bbSMatthew Dillon 				printf("?\n");
119092da00bbSMatthew Dillon 			}
119192da00bbSMatthew Dillon 			break;
11927090df5aSMatthew Dillon 		case '?':
11937090df5aSMatthew Dillon 		case 'h':
11947090df5aSMatthew Dillon 			puts(
11957090df5aSMatthew Dillon 			    "p          -print\n"
1196d027ed2eSAlan Cox 			    "s          -stats\n"
119787ae0686SDoug Moore 			    "a %d %d    -allocate\n"
11987090df5aSMatthew Dillon 			    "f %x %d    -free\n"
119992da00bbSMatthew Dillon 			    "l %x %d    -fill\n"
12007090df5aSMatthew Dillon 			    "r %d       -resize\n"
1201d4808c44SDoug Moore 			    "h/?        -help\n"
1202d4808c44SDoug Moore 			    "q          -quit"
12037090df5aSMatthew Dillon 			);
12047090df5aSMatthew Dillon 			break;
1205d4808c44SDoug Moore 		case 'q':
1206d4808c44SDoug Moore 			break;
12077090df5aSMatthew Dillon 		default:
12087090df5aSMatthew Dillon 			printf("?\n");
12097090df5aSMatthew Dillon 			break;
12107090df5aSMatthew Dillon 		}
1211d4808c44SDoug Moore 		if (buf[0] == 'q')
1212d4808c44SDoug Moore 			break;
12137090df5aSMatthew Dillon 	}
12147090df5aSMatthew Dillon 	return (0);
12157090df5aSMatthew Dillon }
12167090df5aSMatthew Dillon 
12177090df5aSMatthew Dillon #endif
1218