xref: /freebsd/sys/kern/subr_blist.c (revision bb4a27f927a13f70e61d75df0de325f2c93a282f)
106b4bf3eSWarner Losh /*-
251369649SPedro F. Giffuni  * SPDX-License-Identifier: BSD-3-Clause
351369649SPedro F. Giffuni  *
406b4bf3eSWarner Losh  * Copyright (c) 1998 Matthew Dillon.  All Rights Reserved.
506b4bf3eSWarner Losh  * Redistribution and use in source and binary forms, with or without
606b4bf3eSWarner Losh  * modification, are permitted provided that the following conditions
706b4bf3eSWarner Losh  * are met:
806b4bf3eSWarner Losh  * 1. Redistributions of source code must retain the above copyright
906b4bf3eSWarner Losh  *    notice, this list of conditions and the following disclaimer.
1006b4bf3eSWarner Losh  * 2. Redistributions in binary form must reproduce the above copyright
1106b4bf3eSWarner Losh  *    notice, this list of conditions and the following disclaimer in the
1206b4bf3eSWarner Losh  *    documentation and/or other materials provided with the distribution.
1369a28758SEd Maste  * 3. Neither the name of the University nor the names of its contributors
1406b4bf3eSWarner Losh  *    may be used to endorse or promote products derived from this software
1506b4bf3eSWarner Losh  *    without specific prior written permission.
1606b4bf3eSWarner Losh  *
1706b4bf3eSWarner Losh  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
1806b4bf3eSWarner Losh  * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
1906b4bf3eSWarner Losh  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
2006b4bf3eSWarner Losh  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
2106b4bf3eSWarner Losh  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
2206b4bf3eSWarner Losh  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
2306b4bf3eSWarner Losh  * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
2406b4bf3eSWarner Losh  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
2506b4bf3eSWarner Losh  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
2606b4bf3eSWarner Losh  * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
2706b4bf3eSWarner Losh  * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
2806b4bf3eSWarner Losh  */
297090df5aSMatthew Dillon /*
307090df5aSMatthew Dillon  * BLIST.C -	Bitmap allocator/deallocator, using a radix tree with hinting
317090df5aSMatthew Dillon  *
327090df5aSMatthew Dillon  *	This module implements a general bitmap allocator/deallocator.  The
337090df5aSMatthew Dillon  *	allocator eats around 2 bits per 'block'.  The module does not
34f565f395SEitan Adler  *	try to interpret the meaning of a 'block' other than to return
357090df5aSMatthew Dillon  *	SWAPBLK_NONE on an allocation failure.
367090df5aSMatthew Dillon  *
37ec371b57SAlan Cox  *	A radix tree controls access to pieces of the bitmap, and includes
38ec371b57SAlan Cox  *	auxiliary information at each interior node about the availability of
39ec371b57SAlan Cox  *	contiguous free blocks in the subtree rooted at that node.  Two radix
40ec371b57SAlan Cox  *	constants are involved: one for the size of the bitmaps contained in the
41ec371b57SAlan Cox  *	leaf nodes (BLIST_BMAP_RADIX), and one for the number of descendants of
42ec371b57SAlan Cox  *	each of the meta (interior) nodes (BLIST_META_RADIX).  Each subtree is
43ec371b57SAlan Cox  *	associated with a range of blocks.  The root of any subtree stores a
44ec371b57SAlan Cox  *	hint field that defines an upper bound on the size of the largest
45ec371b57SAlan Cox  *	allocation that can begin in the associated block range.  A hint is an
46ec371b57SAlan Cox  *	upper bound on a potential allocation, but not necessarily a tight upper
47ec371b57SAlan Cox  *	bound.
487090df5aSMatthew Dillon  *
49*bb4a27f9SMark Johnston  *	The bitmap field in each node directs the search for available blocks.
50*bb4a27f9SMark Johnston  *	For a leaf node, a bit is set if the corresponding block is free.  For a
51*bb4a27f9SMark Johnston  *	meta node, a bit is set if the corresponding subtree contains a free
52*bb4a27f9SMark Johnston  *	block somewhere within it.  The search at a meta node considers only
53*bb4a27f9SMark Johnston  *	children of that node that represent a range that includes a free block.
547090df5aSMatthew Dillon  *
557090df5aSMatthew Dillon  * 	The hinting greatly increases code efficiency for allocations while
567090df5aSMatthew Dillon  *	the general radix structure optimizes both allocations and frees.  The
577090df5aSMatthew Dillon  *	radix tree should be able to operate well no matter how much
587090df5aSMatthew Dillon  *	fragmentation there is and no matter how large a bitmap is used.
597090df5aSMatthew Dillon  *
6051dc4feaSSergey Kandaurov  *	The blist code wires all necessary memory at creation time.  Neither
6151dc4feaSSergey Kandaurov  *	allocations nor frees require interaction with the memory subsystem.
62*bb4a27f9SMark Johnston  *	The non-blocking nature of allocations and frees is required by swap
63*bb4a27f9SMark Johnston  *	code (vm/swap_pager.c).
647090df5aSMatthew Dillon  *
65*bb4a27f9SMark Johnston  *	LAYOUT: The radix tree is laid out recursively using a linear array.
66*bb4a27f9SMark Johnston  *	Each meta node is immediately followed (laid out sequentially in
67*bb4a27f9SMark Johnston  *	memory) by BLIST_META_RADIX lower level nodes.  This is a recursive
68*bb4a27f9SMark Johnston  *	structure but one that can be easily scanned through a very simple
69*bb4a27f9SMark Johnston  *	'skip' calculation.  The memory allocation is only large enough to
70*bb4a27f9SMark Johnston  *	cover the number of blocks requested at creation time.  Nodes that
71*bb4a27f9SMark Johnston  *	represent blocks beyond that limit, nodes that would never be read
72*bb4a27f9SMark Johnston  *	or written, are not allocated, so that the last of the
73*bb4a27f9SMark Johnston  *	BLIST_META_RADIX lower level nodes of some nodes may not be
74*bb4a27f9SMark Johnston  *	allocated.
757090df5aSMatthew Dillon  *
76f565f395SEitan Adler  *	NOTE: the allocator cannot currently allocate more than
777090df5aSMatthew Dillon  *	BLIST_BMAP_RADIX blocks per call.  It will panic with 'allocation too
787090df5aSMatthew Dillon  *	large' if you try.  This is an area that could use improvement.  The
797090df5aSMatthew Dillon  *	radix is large enough that this restriction does not effect the swap
80b411efa4SAlan Cox  *	radix is large enough that this restriction does not affect the swap
817090df5aSMatthew Dillon  *	this algorithmic unfeature.  The freeing code can handle arbitrary
827090df5aSMatthew Dillon  *	ranges.
837090df5aSMatthew Dillon  *
847090df5aSMatthew Dillon  *	This code can be compiled stand-alone for debugging.
857090df5aSMatthew Dillon  */
867090df5aSMatthew Dillon 
87677b542eSDavid E. O'Brien #include <sys/cdefs.h>
88677b542eSDavid E. O'Brien __FBSDID("$FreeBSD$");
89677b542eSDavid E. O'Brien 
90c4473420SPeter Wemm #ifdef _KERNEL
917090df5aSMatthew Dillon 
927090df5aSMatthew Dillon #include <sys/param.h>
937090df5aSMatthew Dillon #include <sys/systm.h>
947090df5aSMatthew Dillon #include <sys/lock.h>
957090df5aSMatthew Dillon #include <sys/kernel.h>
967090df5aSMatthew Dillon #include <sys/blist.h>
977090df5aSMatthew Dillon #include <sys/malloc.h>
98d027ed2eSAlan Cox #include <sys/sbuf.h>
990cddd8f0SMatthew Dillon #include <sys/proc.h>
10023955314SAlfred Perlstein #include <sys/mutex.h>
1017090df5aSMatthew Dillon 
1027090df5aSMatthew Dillon #else
1037090df5aSMatthew Dillon 
1047090df5aSMatthew Dillon #ifndef BLIST_NO_DEBUG
1057090df5aSMatthew Dillon #define BLIST_DEBUG
1067090df5aSMatthew Dillon #endif
1077090df5aSMatthew Dillon 
108*bb4a27f9SMark Johnston #include <sys/errno.h>
1097090df5aSMatthew Dillon #include <sys/types.h>
110d90bf7d8SAlan Cox #include <sys/malloc.h>
111d027ed2eSAlan Cox #include <sys/sbuf.h>
1127090df5aSMatthew Dillon #include <stdio.h>
1137090df5aSMatthew Dillon #include <string.h>
1148eefcd40SAlan Cox #include <stddef.h>
1157090df5aSMatthew Dillon #include <stdlib.h>
1167090df5aSMatthew Dillon #include <stdarg.h>
117d3783724SAlan Cox #include <stdbool.h>
1187090df5aSMatthew Dillon 
1194be4fd5dSAlan Cox #define	bitcount64(x)	__bitcount64((uint64_t)(x))
12092da00bbSMatthew Dillon #define malloc(a,b,c)	calloc(a, 1)
1217090df5aSMatthew Dillon #define free(a,b)	free(a)
122*bb4a27f9SMark Johnston #define ummin(a,b)	((a) < (b) ? (a) : (b))
1237090df5aSMatthew Dillon 
1247090df5aSMatthew Dillon #include <sys/blist.h>
1257090df5aSMatthew Dillon 
1267090df5aSMatthew Dillon void panic(const char *ctl, ...);
1277090df5aSMatthew Dillon 
1287090df5aSMatthew Dillon #endif
1297090df5aSMatthew Dillon 
1307090df5aSMatthew Dillon /*
1317090df5aSMatthew Dillon  * static support functions
1327090df5aSMatthew Dillon  */
133ec371b57SAlan Cox static daddr_t	blst_leaf_alloc(blmeta_t *scan, daddr_t blk, int count);
134bee93d3cSAlan Cox static daddr_t	blst_meta_alloc(blmeta_t *scan, daddr_t cursor, daddr_t count,
135bee93d3cSAlan Cox 		    u_daddr_t radix);
1367090df5aSMatthew Dillon static void blst_leaf_free(blmeta_t *scan, daddr_t relblk, int count);
1377090df5aSMatthew Dillon static void blst_meta_free(blmeta_t *scan, daddr_t freeBlk, daddr_t count,
138bee93d3cSAlan Cox 		    u_daddr_t radix);
1397090df5aSMatthew Dillon static void blst_copy(blmeta_t *scan, daddr_t blk, daddr_t radix,
1402ac0c7c3SAlan Cox 		    blist_t dest, daddr_t count);
141a7249a6cSAlan Cox static daddr_t blst_leaf_fill(blmeta_t *scan, daddr_t blk, int count);
142a7249a6cSAlan Cox static daddr_t blst_meta_fill(blmeta_t *scan, daddr_t allocBlk, daddr_t count,
143bee93d3cSAlan Cox 		    u_daddr_t radix);
144c4473420SPeter Wemm #ifndef _KERNEL
145d3783724SAlan Cox static void	blst_radix_print(blmeta_t *scan, daddr_t blk, daddr_t radix,
1462ac0c7c3SAlan Cox 		    int tab);
1477090df5aSMatthew Dillon #endif
1487090df5aSMatthew Dillon 
149c4473420SPeter Wemm #ifdef _KERNEL
1507090df5aSMatthew Dillon static MALLOC_DEFINE(M_SWAP, "SWAP", "Swap space");
1517090df5aSMatthew Dillon #endif
1527090df5aSMatthew Dillon 
153ec371b57SAlan Cox _Static_assert(BLIST_BMAP_RADIX % BLIST_META_RADIX == 0,
154ec371b57SAlan Cox     "radix divisibility error");
155ec371b57SAlan Cox #define	BLIST_BMAP_MASK	(BLIST_BMAP_RADIX - 1)
156d027ed2eSAlan Cox #define	BLIST_META_MASK	(BLIST_META_RADIX - 1)
157ba98e6a2SAlan Cox 
1587090df5aSMatthew Dillon /*
1592ac0c7c3SAlan Cox  * For a subtree that can represent the state of up to 'radix' blocks, the
1602ac0c7c3SAlan Cox  * number of leaf nodes of the subtree is L=radix/BLIST_BMAP_RADIX.  If 'm'
1612ac0c7c3SAlan Cox  * is short for BLIST_META_RADIX, then for a tree of height h with L=m**h
1622ac0c7c3SAlan Cox  * leaf nodes, the total number of tree nodes is 1 + m + m**2 + ... + m**h,
1632ac0c7c3SAlan Cox  * or, equivalently, (m**(h+1)-1)/(m-1).  This quantity is called 'skip'
1642ac0c7c3SAlan Cox  * in the 'meta' functions that process subtrees.  Since integer division
1652ac0c7c3SAlan Cox  * discards remainders, we can express this computation as
1662ac0c7c3SAlan Cox  * skip = (m * m**h) / (m - 1)
167ba98e6a2SAlan Cox  * skip = (m * (radix / BLIST_BMAP_RADIX)) / (m - 1)
168ba98e6a2SAlan Cox  * and since m divides BLIST_BMAP_RADIX, we can simplify further to
169ba98e6a2SAlan Cox  * skip = (radix / (BLIST_BMAP_RADIX / m)) / (m - 1)
170ba98e6a2SAlan Cox  * skip = radix / ((BLIST_BMAP_RADIX / m) * (m - 1))
171ba98e6a2SAlan Cox  * so that simple integer division by a constant can safely be used for the
172ba98e6a2SAlan Cox  * calculation.
1732ac0c7c3SAlan Cox  */
static inline daddr_t
radix_to_skip(daddr_t radix)
{

	/*
	 * Per the derivation in the comment above, the node count of a
	 * subtree covering 'radix' blocks, (m**(h+1)-1)/(m-1), reduces
	 * to this single constant division because BLIST_META_RADIX
	 * divides BLIST_BMAP_RADIX (enforced by the _Static_assert).
	 */
	return (radix /
	    ((BLIST_BMAP_RADIX / BLIST_META_RADIX) * BLIST_META_MASK));
}
181d027ed2eSAlan Cox 
182d027ed2eSAlan Cox /*
183*bb4a27f9SMark Johnston  * Provide a mask with count bits set, starting at position n.
184*bb4a27f9SMark Johnston  */
static inline u_daddr_t
bitrange(int n, int count)
{

	/*
	 * Intersect "bits n and above" with "bits below n + count".
	 *
	 * NOTE(review): assumes 0 < n + count <= BLIST_BMAP_RADIX.  If
	 * n + count were 0, the right shift below would be by the full
	 * word width, which is undefined behavior -- confirm that no
	 * caller passes n == 0 with count == 0.
	 */
	return (((u_daddr_t)-1 << n) &
	    ((u_daddr_t)-1 >> (BLIST_BMAP_RADIX - (n + count))));
}
192*bb4a27f9SMark Johnston 
193*bb4a27f9SMark Johnston 
194*bb4a27f9SMark Johnston /*
195d027ed2eSAlan Cox  * Use binary search, or a faster method, to find the 1 bit in a u_daddr_t.
196d027ed2eSAlan Cox  * Assumes that the argument has only one bit set.
197d027ed2eSAlan Cox  */
static inline int
bitpos(u_daddr_t mask)
{
	int hi, lo, mid;

	switch (sizeof(mask)) {
#ifdef HAVE_INLINE_FFSLL
	case sizeof(long long):
		/* ffsll() returns a 1-based bit index, hence the -1. */
		return (ffsll(mask) - 1);
#endif
	default:
		/*
		 * Binary search for the single set bit.  Invariant: the
		 * set bit's position lies in the half-open range
		 * [lo, hi).
		 */
		lo = 0;
		hi = BLIST_BMAP_RADIX;
		while (lo + 1 < hi) {
			mid = (lo + hi) >> 1;
			if ((mask >> mid) != 0)
				lo = mid;
			else
				hi = mid;
		}
		return (lo);
	}
}
2212ac0c7c3SAlan Cox 
2222ac0c7c3SAlan Cox /*
2237090df5aSMatthew Dillon  * blist_create() - create a blist capable of handling up to the specified
2247090df5aSMatthew Dillon  *		    number of blocks
2257090df5aSMatthew Dillon  *
226f565f395SEitan Adler  *	blocks - must be greater than 0
227c8c7ad92SKip Macy  * 	flags  - malloc flags
2287090df5aSMatthew Dillon  *
2297090df5aSMatthew Dillon  *	The smallest blist consists of a single leaf node capable of
2307090df5aSMatthew Dillon  *	managing BLIST_BMAP_RADIX blocks.
2317090df5aSMatthew Dillon  */
blist_t
blist_create(daddr_t blocks, int flags)
{
	blist_t bl;
	u_daddr_t nodes, radix;

	if (blocks == 0)
		panic("invalid block count");

	/*
	 * Calculate the radix and node count used for scanning.  Each
	 * loop iteration adds the number of nodes needed at one level
	 * of the tree; only nodes that represent blocks within 'blocks'
	 * are counted, so partially-populated subtrees are trimmed (see
	 * the LAYOUT comment at the top of the file).
	 */
	nodes = 1;
	radix = BLIST_BMAP_RADIX;
	while (radix <= blocks) {
		nodes += 1 + (blocks - 1) / radix;
		radix *= BLIST_META_RADIX;
	}

	/*
	 * The node array is allocated inline with the header via the
	 * bl_root flexible array member.  M_ZERO leaves every bitmap
	 * clear, i.e. all blocks start out allocated until the caller
	 * frees some range.
	 */
	bl = malloc(offsetof(struct blist, bl_root[nodes]), M_SWAP, flags |
	    M_ZERO);
	if (bl == NULL)
		return (NULL);

	bl->bl_blocks = blocks;
	bl->bl_radix = radix;

#if defined(BLIST_DEBUG)
	printf(
		"BLIST representing %lld blocks (%lld MB of swap)"
		", requiring %lldK of ram\n",
		(long long)bl->bl_blocks,
		(long long)bl->bl_blocks * 4 / 1024,
		(long long)(nodes * sizeof(blmeta_t) + 1023) / 1024
	);
	printf("BLIST raw radix tree contains %lld records\n",
	    (long long)nodes);
#endif

	return (bl);
}
2737090df5aSMatthew Dillon 
void
blist_destroy(blist_t bl)
{

	/*
	 * The node array is allocated inline with the header (see
	 * blist_create()), so a single free releases everything.
	 */
	free(bl, M_SWAP);
}
2807090df5aSMatthew Dillon 
2817090df5aSMatthew Dillon /*
2827090df5aSMatthew Dillon  * blist_alloc() -   reserve space in the block bitmap.  Return the base
2837090df5aSMatthew Dillon  *		     of a contiguous region or SWAPBLK_NONE if space could
2847090df5aSMatthew Dillon  *		     not be allocated.
2857090df5aSMatthew Dillon  */
daddr_t
blist_alloc(blist_t bl, daddr_t count)
{
	daddr_t blk;

	if (count > BLIST_MAX_ALLOC)
		panic("allocation too large");

	/*
	 * This loop iterates at most twice.  An allocation failure in the
	 * first iteration leads to a second iteration only if the cursor was
	 * non-zero.  When the cursor is zero, an allocation failure will
	 * reduce the hint, stopping further iterations.
	 */
	while (count <= bl->bl_root->bm_bighint) {
		blk = blst_meta_alloc(bl->bl_root, bl->bl_cursor, count,
		    bl->bl_radix);
		if (blk != SWAPBLK_NONE) {
			/* Track the free count and advance the cursor,
			 * wrapping to 0 at the end of the range. */
			bl->bl_avail -= count;
			bl->bl_cursor = blk + count;
			if (bl->bl_cursor == bl->bl_blocks)
				bl->bl_cursor = 0;
			return (blk);
		}
		/* Retry from the beginning of the range. */
		bl->bl_cursor = 0;
	}
	return (SWAPBLK_NONE);
}
3144be4fd5dSAlan Cox 
3154be4fd5dSAlan Cox /*
3164be4fd5dSAlan Cox  * blist_avail() -	return the number of free blocks.
3174be4fd5dSAlan Cox  */
daddr_t
blist_avail(blist_t bl)
{

	/* Maintained incrementally by blist_alloc/free/fill. */
	return (bl->bl_avail);
}
3247090df5aSMatthew Dillon 
3257090df5aSMatthew Dillon /*
3267090df5aSMatthew Dillon  * blist_free() -	free up space in the block bitmap.  Return the base
3277090df5aSMatthew Dillon  *		     	of a contiguous region.  Panic if an inconsistency is
3287090df5aSMatthew Dillon  *			found.
3297090df5aSMatthew Dillon  */
void
blist_free(blist_t bl, daddr_t blkno, daddr_t count)
{

	/* Reject ranges that fall outside [0, bl_blocks). */
	if (blkno < 0 || blkno + count > bl->bl_blocks)
		panic("freeing invalid range");
	blst_meta_free(bl->bl_root, blkno, count, bl->bl_radix);
	/* All 'count' blocks must have been allocated, so credit them all. */
	bl->bl_avail += count;
}
3397090df5aSMatthew Dillon 
3407090df5aSMatthew Dillon /*
34192da00bbSMatthew Dillon  * blist_fill() -	mark a region in the block bitmap as off-limits
34292da00bbSMatthew Dillon  *			to the allocator (i.e. allocate it), ignoring any
34392da00bbSMatthew Dillon  *			existing allocations.  Return the number of blocks
34492da00bbSMatthew Dillon  *			actually filled that were free before the call.
34592da00bbSMatthew Dillon  */
daddr_t
blist_fill(blist_t bl, daddr_t blkno, daddr_t count)
{
	daddr_t filled;

	/* Reject ranges that fall outside [0, bl_blocks). */
	if (blkno < 0 || blkno + count > bl->bl_blocks)
		panic("filling invalid range");
	filled = blst_meta_fill(bl->bl_root, blkno, count, bl->bl_radix);
	/* Only blocks that were previously free count against bl_avail. */
	bl->bl_avail -= filled;
	return (filled);
}
35792da00bbSMatthew Dillon 
35892da00bbSMatthew Dillon /*
3597090df5aSMatthew Dillon  * blist_resize() -	resize an existing radix tree to handle the
3607090df5aSMatthew Dillon  *			specified number of blocks.  This will reallocate
3617090df5aSMatthew Dillon  *			the tree and transfer the previous bitmap to the new
3627090df5aSMatthew Dillon  *			one.  When extending the tree you can specify whether
3637090df5aSMatthew Dillon  *			the new blocks are to left allocated or freed.
3647090df5aSMatthew Dillon  */
3657090df5aSMatthew Dillon void
366c8c7ad92SKip Macy blist_resize(blist_t *pbl, daddr_t count, int freenew, int flags)
3677090df5aSMatthew Dillon {
368c8c7ad92SKip Macy     blist_t newbl = blist_create(count, flags);
3697090df5aSMatthew Dillon     blist_t save = *pbl;
3707090df5aSMatthew Dillon 
3717090df5aSMatthew Dillon     *pbl = newbl;
3727090df5aSMatthew Dillon     if (count > save->bl_blocks)
3737090df5aSMatthew Dillon 	    count = save->bl_blocks;
3742ac0c7c3SAlan Cox     blst_copy(save->bl_root, 0, save->bl_radix, newbl, count);
3757090df5aSMatthew Dillon 
3767090df5aSMatthew Dillon     /*
3777090df5aSMatthew Dillon      * If resizing upwards, should we free the new space or not?
3787090df5aSMatthew Dillon      */
3797090df5aSMatthew Dillon     if (freenew && count < newbl->bl_blocks) {
3807090df5aSMatthew Dillon 	    blist_free(newbl, count, newbl->bl_blocks - count);
3817090df5aSMatthew Dillon     }
3827090df5aSMatthew Dillon     blist_destroy(save);
3837090df5aSMatthew Dillon }
3847090df5aSMatthew Dillon 
3857090df5aSMatthew Dillon #ifdef BLIST_DEBUG
3867090df5aSMatthew Dillon 
3877090df5aSMatthew Dillon /*
3887090df5aSMatthew Dillon  * blist_print()    - dump radix tree
3897090df5aSMatthew Dillon  */
void
blist_print(blist_t bl)
{
	printf("BLIST avail = %jd, cursor = %08jx {\n",
	    (uintmax_t)bl->bl_avail, (uintmax_t)bl->bl_cursor);

	/* An all-zero root bitmap means nothing is free: skip the dump. */
	if (bl->bl_root->bm_bitmap != 0)
		blst_radix_print(bl->bl_root, 0, bl->bl_radix, 4);
	printf("}\n");
}
4007090df5aSMatthew Dillon 
4017090df5aSMatthew Dillon #endif
4027090df5aSMatthew Dillon 
/*
 * Bucket boundaries for the gap-size histogram below: bucket i counts
 * gaps whose size lies in [fib[i], fib[i+1]).
 */
static const u_daddr_t fib[] = {
	1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584,
	4181, 6765, 10946, 17711, 28657, 46368, 75025, 121393, 196418, 317811,
	514229, 832040, 1346269, 2178309, 3524578,
};

/*
 * Use 'gap' to describe a maximal range of unallocated blocks/bits.
 */
struct gap_stats {
	daddr_t	start;		/* current gap start, or SWAPBLK_NONE */
	daddr_t	num;		/* number of gaps observed */
	daddr_t	max;		/* largest gap size */
	daddr_t	avg;		/* average gap size */
	daddr_t	err;		/* sum - num * avg */
	daddr_t	histo[nitems(fib)]; /* # gaps in each size range */
	int	max_bucket;	/* last histo elt with nonzero val */
};
421d027ed2eSAlan Cox 
422d027ed2eSAlan Cox /*
423d027ed2eSAlan Cox  * gap_stats_counting()    - is the state 'counting 1 bits'?
424d027ed2eSAlan Cox  *                           or 'skipping 0 bits'?
425d027ed2eSAlan Cox  */
static inline bool
gap_stats_counting(const struct gap_stats *stats)
{

	/* A gap is open iff its start position has been recorded. */
	return (stats->start != SWAPBLK_NONE);
}
432d027ed2eSAlan Cox 
433d027ed2eSAlan Cox /*
434d027ed2eSAlan Cox  * init_gap_stats()    - initialize stats on gap sizes
435d027ed2eSAlan Cox  */
static inline void
init_gap_stats(struct gap_stats *stats)
{

	bzero(stats, sizeof(*stats));
	/* SWAPBLK_NONE marks that no gap is currently open. */
	stats->start = SWAPBLK_NONE;
}
443d027ed2eSAlan Cox 
444d027ed2eSAlan Cox /*
445d027ed2eSAlan Cox  * update_gap_stats()    - update stats on gap sizes
446d027ed2eSAlan Cox  */
static void
update_gap_stats(struct gap_stats *stats, daddr_t posn)
{
	daddr_t size;
	int hi, lo, mid;

	if (!gap_stats_counting(stats)) {
		/* First edge of a gap: just record where it starts. */
		stats->start = posn;
		return;
	}
	/* Second edge: the completed gap is [start, posn). */
	size = posn - stats->start;
	stats->start = SWAPBLK_NONE;
	if (size > stats->max)
		stats->max = size;

	/*
	 * Find the fibonacci range that contains size,
	 * expecting to find it in an early range.  A galloping search
	 * (doubling 'hi') brackets the bucket, then a binary search
	 * narrows 'lo' to the last fib[] entry that is <= size.
	 */
	lo = 0;
	hi = 1;
	while (hi < nitems(fib) && fib[hi] <= size) {
		lo = hi;
		hi *= 2;
	}
	if (hi >= nitems(fib))
		hi = nitems(fib);
	while (lo + 1 != hi) {
		mid = (lo + hi) >> 1;
		if (fib[mid] <= size)
			lo = mid;
		else
			hi = mid;
	}
	stats->histo[lo]++;
	if (lo > stats->max_bucket)
		stats->max_bucket = lo;
	/*
	 * Maintain an exact running average without keeping the full
	 * sum: 'avg' and 'err' together satisfy sum == num * avg + err
	 * (see the field comments in struct gap_stats).
	 */
	stats->err += size - stats->avg;
	stats->num++;
	stats->avg += stats->err / stats->num;
	stats->err %= stats->num;
}
489d027ed2eSAlan Cox 
490d027ed2eSAlan Cox /*
491d027ed2eSAlan Cox  * dump_gap_stats()    - print stats on gap sizes
492d027ed2eSAlan Cox  */
static inline void
dump_gap_stats(const struct gap_stats *stats, struct sbuf *s)
{
	int i;

	sbuf_printf(s, "number of maximal free ranges: %jd\n",
	    (intmax_t)stats->num);
	sbuf_printf(s, "largest free range: %jd\n", (intmax_t)stats->max);
	sbuf_printf(s, "average maximal free range size: %jd\n",
	    (intmax_t)stats->avg);
	sbuf_printf(s, "number of maximal free ranges of different sizes:\n");
	sbuf_printf(s, "               count  |  size range\n");
	sbuf_printf(s, "               -----  |  ----------\n");
	/* Print every nonempty bucket before the last nonzero one. */
	for (i = 0; i < stats->max_bucket; i++) {
		if (stats->histo[i] != 0) {
			sbuf_printf(s, "%20jd  |  ",
			    (intmax_t)stats->histo[i]);
			if (fib[i] != fib[i + 1] - 1)
				sbuf_printf(s, "%jd to %jd\n", (intmax_t)fib[i],
				    (intmax_t)fib[i + 1] - 1);
			else
				sbuf_printf(s, "%jd\n", (intmax_t)fib[i]);
		}
	}
	/*
	 * The last bucket (i == max_bucket) is printed separately: its
	 * upper bound is the observed maximum gap size rather than the
	 * next fib[] entry, which may not exist.
	 */
	sbuf_printf(s, "%20jd  |  ", (intmax_t)stats->histo[i]);
	if (stats->histo[i] > 1)
		sbuf_printf(s, "%jd to %jd\n", (intmax_t)fib[i],
		    (intmax_t)stats->max);
	else
		sbuf_printf(s, "%jd\n", (intmax_t)stats->max);
}
524d027ed2eSAlan Cox 
525d027ed2eSAlan Cox /*
526d027ed2eSAlan Cox  * blist_stats()    - dump radix tree stats
527d027ed2eSAlan Cox  */
void
blist_stats(blist_t bl, struct sbuf *s)
{
	struct gap_stats gstats;
	struct gap_stats *stats = &gstats;
	daddr_t i, nodes, radix;
	u_daddr_t bit, diff, mask;

	init_gap_stats(stats);
	nodes = 0;
	/*
	 * NOTE: 'i' is biased by bl_radix.  Only differences between
	 * positions (gap sizes) reach the statistics, so the bias
	 * cancels out.
	 */
	i = bl->bl_radix;
	while (i < bl->bl_radix + bl->bl_blocks) {
		/*
		 * Find max size subtree starting at i.
		 */
		radix = BLIST_BMAP_RADIX;
		while (((i / radix) & BLIST_META_MASK) == 0)
			radix *= BLIST_META_RADIX;

		/*
		 * Check for skippable subtrees starting at i.
		 */
		while (radix > BLIST_BMAP_RADIX) {
			if (bl->bl_root[nodes].bm_bitmap == 0) {
				/* Fully allocated subtree: close any
				 * open gap and skip the whole range. */
				if (gap_stats_counting(stats))
					update_gap_stats(stats, i);
				break;
			}

			/*
			 * Skip subtree root.
			 */
			nodes++;
			radix /= BLIST_META_RADIX;
		}
		if (radix == BLIST_BMAP_RADIX) {
			/*
			 * Scan leaf.  'diff' has a bit set at each
			 * position where the free/allocated state
			 * changes relative to the previous bit; each
			 * such transition opens or closes a gap.
			 */
			mask = bl->bl_root[nodes].bm_bitmap;
			diff = mask ^ (mask << 1);
			if (gap_stats_counting(stats))
				diff ^= 1;
			while (diff != 0) {
				bit = diff & -diff;
				update_gap_stats(stats, i + bitpos(bit));
				diff ^= bit;
			}
		}
		nodes += radix_to_skip(radix);
		i += radix;
	}
	/* Close any gap that runs to the end of the block range. */
	update_gap_stats(stats, i);
	dump_gap_stats(stats, s);
}
583d027ed2eSAlan Cox 
5847090df5aSMatthew Dillon /************************************************************************
5857090df5aSMatthew Dillon  *			  ALLOCATION SUPPORT FUNCTIONS			*
5867090df5aSMatthew Dillon  ************************************************************************
5877090df5aSMatthew Dillon  *
5887090df5aSMatthew Dillon  *	These support functions do all the actual work.  They may seem
5897090df5aSMatthew Dillon  *	rather longish, but that's because I've commented them up.  The
5907090df5aSMatthew Dillon  *	actual code is straight forward.
5917090df5aSMatthew Dillon  *
5927090df5aSMatthew Dillon  */
5937090df5aSMatthew Dillon 
5947090df5aSMatthew Dillon /*
595*bb4a27f9SMark Johnston  * BLST_NEXT_LEAF_ALLOC() - allocate the first few blocks in the next leaf.
596*bb4a27f9SMark Johnston  *
597*bb4a27f9SMark Johnston  *	'scan' is a leaf node, associated with a block containing 'blk'.
598*bb4a27f9SMark Johnston  *	The next leaf node could be adjacent, or several nodes away if the
599*bb4a27f9SMark Johnston  *	least common ancestor of 'scan' and its neighbor is several levels
600*bb4a27f9SMark Johnston  *	up.  Use 'blk' to determine how many meta-nodes lie between the
601*bb4a27f9SMark Johnston  *	leaves.  If the next leaf has enough initial bits set, clear them
602*bb4a27f9SMark Johnston  *	and clear the bits in the meta nodes on the path up to the least
603*bb4a27f9SMark Johnston  *	common ancestor to mark any subtrees made completely empty.
604*bb4a27f9SMark Johnston  */
static int
blst_next_leaf_alloc(blmeta_t *scan, daddr_t blk, int count)
{
	blmeta_t *next;
	daddr_t skip;
	u_daddr_t radix;
	int digit;

	next = scan + 1;
	blk += BLIST_BMAP_RADIX;
	radix = BLIST_BMAP_RADIX;
	/*
	 * Step over the meta nodes that lie between 'scan' and the next
	 * leaf in memory (see the comment above this function); 'digit'
	 * ends up as the child index under the least common ancestor.
	 */
	while ((digit = ((blk / radix) & BLIST_META_MASK)) == 0 &&
	    (next->bm_bitmap & 1) == 1) {
		next++;
		radix *= BLIST_META_RADIX;
	}
	if (((next->bm_bitmap + 1) & ~((u_daddr_t)-1 << count)) != 0) {
		/*
		 * The next leaf doesn't have enough free blocks at the
		 * beginning to complete the spanning allocation.
		 */
		return (ENOMEM);
	}
	/* Clear the first 'count' bits in the next leaf to allocate. */
	next->bm_bitmap &= (u_daddr_t)-1 << count;

	/*
	 * Update bitmaps of next-ancestors, up to least common ancestor.
	 * Each node emptied by the allocation clears its bit (bit 0) in
	 * the parent one step back in memory.
	 */
	skip = radix_to_skip(radix);
	while (radix != BLIST_BMAP_RADIX && next->bm_bitmap == 0) {
		(--next)->bm_bitmap ^= 1;
		radix /= BLIST_META_RADIX;
	}
	/* If the whole subtree emptied, clear its bit in the LCA. */
	if (next->bm_bitmap == 0)
		scan[-digit * skip].bm_bitmap ^= (u_daddr_t)1 << digit;
	return (0);
}
643*bb4a27f9SMark Johnston 
644*bb4a27f9SMark Johnston /*
645*bb4a27f9SMark Johnston  * BLST_LEAF_ALLOC() -	allocate at a leaf in the radix tree (a bitmap).
6467090df5aSMatthew Dillon  *
64754541421SAlan Cox  *	This is the core of the allocator and is optimized for the
64854541421SAlan Cox  *	BLIST_BMAP_RADIX block allocation case.  Otherwise, execution
649d027ed2eSAlan Cox  *	time is proportional to log2(count) + bitpos time.
6507090df5aSMatthew Dillon  */
static daddr_t
blst_leaf_alloc(blmeta_t *scan, daddr_t blk, int count)
{
	u_daddr_t mask;
	int count1, hi, lo, num_shifts, range1, range_ext;

	range1 = 0;
	count1 = count - 1;
	/* Number of doubling steps needed to grow a run to length 'count'. */
	num_shifts = fls(count1);
	mask = scan->bm_bitmap;
	while ((-mask & ~mask) != 0 && num_shifts > 0) {
		/*
		 * If bit i is set in mask, then bits in [i, i+range1] are set
		 * in scan->bm_bitmap.  The value of range1 is equal to
		 * count1 >> num_shifts.  Grow range and reduce num_shifts to 0,
		 * while preserving these invariants.  The updates to mask leave
		 * fewer bits set, but each bit that remains set represents a
		 * longer string of consecutive bits set in scan->bm_bitmap.
		 * If more updates to mask cannot clear more bits, because mask
		 * is partitioned with all 0 bits preceding all 1 bits, the loop
		 * terminates immediately.
		 */
		num_shifts--;
		range_ext = range1 + ((count1 >> num_shifts) & 1);
		/*
		 * mask is a signed quantity for the shift because when it is
		 * shifted right, the sign bit should be copied; when the last
		 * block of the leaf is free, pretend, for a while, that all the
		 * blocks that follow it are also free.
		 */
		mask &= (daddr_t)mask >> range_ext;
		range1 += range_ext;
	}
	if (mask == 0) {
		/*
		 * Update bighint.  There is no allocation bigger than range1
		 * starting in this leaf.
		 */
		scan->bm_bighint = range1;
		return (SWAPBLK_NONE);
	}

	/* Discard any candidates that appear before blk. */
	mask &= (u_daddr_t)-1 << (blk & BLIST_BMAP_MASK);
	if (mask == 0)
		return (SWAPBLK_NONE);

	/*
	 * The least significant set bit in mask marks the start of the first
	 * available range of sufficient size.  Clear all the bits but that one,
	 * and then find its position.
	 */
	mask &= -mask;
	lo = bitpos(mask);

	hi = lo + count;
	if (hi > BLIST_BMAP_RADIX) {
		/*
		 * An allocation within this leaf is impossible, so a
		 * successful allocation depends on the next leaf providing
		 * some of the blocks.
		 */
		if (blst_next_leaf_alloc(scan, blk, hi - BLIST_BMAP_RADIX) != 0)
			/*
			 * The hint cannot be updated, because the same
			 * allocation request could be satisfied later, by this
			 * leaf, if the state of the next leaf changes, and
			 * without any changes to this leaf.
			 */
			return (SWAPBLK_NONE);
		hi = BLIST_BMAP_RADIX;
	}

	/* Set the bits of mask at position 'lo' and higher. */
	mask = -mask;
	if (hi == BLIST_BMAP_RADIX) {
		/*
		 * Update bighint.  There is no allocation bigger than range1
		 * available in this leaf after this allocation completes.
		 */
		scan->bm_bighint = range1;
	} else {
		/* Clear the bits of mask at position 'hi' and higher. */
		mask &= (u_daddr_t)-1 >> (BLIST_BMAP_RADIX - hi);
	}
	/* Clear the allocated bits from this leaf. */
	scan->bm_bitmap &= ~mask;
	return ((blk & ~BLIST_BMAP_MASK) + lo);
}
7397090df5aSMatthew Dillon 
/*
 * BLST_META_ALLOC() - allocate at a meta node in the radix tree.
 *
 *	Attempt to allocate at a meta node.  If we can't, we update
 *	bighint and return a failure.  Updating bighint optimizes future
 *	calls that hit this node.  We have to check for our collapse cases
 *	and we have a few optimizations strewn in as well.
 */
static daddr_t
blst_meta_alloc(blmeta_t *scan, daddr_t cursor, daddr_t count, u_daddr_t radix)
{
	daddr_t blk, i, r, skip;
	u_daddr_t bit, mask;
	bool scan_from_start;
	int digit;

	if (radix == BLIST_BMAP_RADIX)
		return (blst_leaf_alloc(scan, cursor, count));
	/* First block covered by this meta node. */
	blk = cursor & -radix;
	scan_from_start = (cursor == blk);
	/* Radix of each child subtree. */
	radix /= BLIST_META_RADIX;
	skip = radix_to_skip(radix);
	mask = scan->bm_bitmap;

	/* Discard any candidates that appear before cursor. */
	digit = (cursor / radix) & BLIST_META_MASK;
	mask &= (u_daddr_t)-1 << digit;

	/*
	 * If the first try is for a block that includes the cursor, pre-undo
	 * the digit * radix offset in the first call; otherwise, ignore the
	 * cursor entirely.
	 */
	if (((mask >> digit) & 1) == 1)
		cursor -= digit * radix;
	else
		cursor = blk;

	/*
	 * Examine the nonempty subtree associated with each bit set in mask.
	 * NOTE(review): assumes mask != 0 here (some nonempty subtree at or
	 * after the cursor's digit); the callers' bitmap checks appear to
	 * guarantee this -- confirm.
	 */
	do {
		bit = mask & -mask;
		digit = bitpos(bit);
		i = 1 + digit * skip;
		if (count <= scan[i].bm_bighint) {
			/*
			 * The allocation might fit beginning in the i'th subtree.
			 */
			r = blst_meta_alloc(&scan[i], cursor + digit * radix,
			    count, radix);
			if (r != SWAPBLK_NONE) {
				/* Subtree i may have become empty. */
				if (scan[i].bm_bitmap == 0)
					scan->bm_bitmap ^= bit;
				return (r);
			}
		}
		/* Later subtrees are scanned from their first block. */
		cursor = blk;
	} while ((mask ^= bit) != 0);

	/*
	 * We couldn't allocate count in this subtree.  If the whole tree was
	 * scanned, and the last tree node is allocated, update bighint.
	 */
	if (scan_from_start && !(digit == BLIST_META_RADIX - 1 &&
	    scan[i].bm_bighint == BLIST_MAX_ALLOC))
		scan->bm_bighint = count - 1;

	return (SWAPBLK_NONE);
}
8107090df5aSMatthew Dillon 
8117090df5aSMatthew Dillon /*
8127090df5aSMatthew Dillon  * BLST_LEAF_FREE() -	free allocated block from leaf bitmap
8137090df5aSMatthew Dillon  *
8147090df5aSMatthew Dillon  */
8157090df5aSMatthew Dillon static void
816b411efa4SAlan Cox blst_leaf_free(blmeta_t *scan, daddr_t blk, int count)
817b411efa4SAlan Cox {
818ec371b57SAlan Cox 	u_daddr_t mask;
819ec371b57SAlan Cox 
8207090df5aSMatthew Dillon 	/*
8217090df5aSMatthew Dillon 	 * free some data in this bitmap
822ec371b57SAlan Cox 	 * mask=0000111111111110000
8237090df5aSMatthew Dillon 	 *          \_________/\__/
824ec371b57SAlan Cox 	 *		count   n
8257090df5aSMatthew Dillon 	 */
826*bb4a27f9SMark Johnston 	mask = bitrange(blk & BLIST_BMAP_MASK, count);
827*bb4a27f9SMark Johnston 	if (scan->bm_bitmap & mask)
828ec371b57SAlan Cox 		panic("freeing free block");
829*bb4a27f9SMark Johnston 	scan->bm_bitmap |= mask;
8307090df5aSMatthew Dillon }
8317090df5aSMatthew Dillon 
8327090df5aSMatthew Dillon /*
8337090df5aSMatthew Dillon  * BLST_META_FREE() - free allocated blocks from radix tree meta info
8347090df5aSMatthew Dillon  *
8357090df5aSMatthew Dillon  *	This support routine frees a range of blocks from the bitmap.
8367090df5aSMatthew Dillon  *	The range must be entirely enclosed by this radix node.  If a
8377090df5aSMatthew Dillon  *	meta node, we break the range down recursively to free blocks
8387090df5aSMatthew Dillon  *	in subnodes (which means that this code can free an arbitrary
8397090df5aSMatthew Dillon  *	range whereas the allocation code cannot allocate an arbitrary
8407090df5aSMatthew Dillon  *	range).
8417090df5aSMatthew Dillon  */
static void
blst_meta_free(blmeta_t *scan, daddr_t freeBlk, daddr_t count, u_daddr_t radix)
{
	daddr_t blk, endBlk, i, skip;
	int digit, endDigit;

	/*
	 * We could probably do a better job here.  We are required to make
	 * bighint at least as large as the biggest allocable block of data.
	 * If we just shoehorn it, a little extra overhead will be incurred
	 * on the next allocation (but only that one typically).
	 */
	scan->bm_bighint = BLIST_MAX_ALLOC;

	if (radix == BLIST_BMAP_RADIX)
		return (blst_leaf_free(scan, freeBlk, count));

	/* Clamp the end of the range to the portion covered by this node. */
	endBlk = ummin(freeBlk + count, (freeBlk + radix) & -radix);
	radix /= BLIST_META_RADIX;
	skip = radix_to_skip(radix);
	blk = freeBlk & -radix;
	/* First and one-past-last child subtrees touched by the range. */
	digit = (blk / radix) & BLIST_META_MASK;
	endDigit = 1 + (((endBlk - 1) / radix) & BLIST_META_MASK);
	/* Every touched subtree now has free blocks. */
	scan->bm_bitmap |= bitrange(digit, endDigit - digit);
	for (i = 1 + digit * skip; blk < endBlk; i += skip) {
		blk += radix;
		/* Portion of the range that falls within this child. */
		count = ummin(blk, endBlk) - freeBlk;
		blst_meta_free(&scan[i], freeBlk, count, radix);
		freeBlk = blk;
	}
}
8737090df5aSMatthew Dillon 
8747090df5aSMatthew Dillon /*
875*bb4a27f9SMark Johnston  * BLST_COPY() - copy one radix tree to another
8767090df5aSMatthew Dillon  *
8777090df5aSMatthew Dillon  *	Locates free space in the source tree and frees it in the destination
8787090df5aSMatthew Dillon  *	tree.  The space may not already be free in the destination.
8797090df5aSMatthew Dillon  */
static void
blst_copy(blmeta_t *scan, daddr_t blk, daddr_t radix, blist_t dest,
    daddr_t count)
{
	daddr_t endBlk, i, skip;

	/*
	 * Leaf node
	 */

	if (radix == BLIST_BMAP_RADIX) {
		u_daddr_t v = scan->bm_bitmap;

		if (v == (u_daddr_t)-1) {
			/* Fully free leaf: free the whole range at once. */
			blist_free(dest, blk, count);
		} else if (v != 0) {
			int i;

			/* Free each block whose bit is set (free). */
			for (i = 0; i < count; ++i) {
				if (v & ((u_daddr_t)1 << i))
					blist_free(dest, blk + i, 1);
			}
		}
		return;
	}

	/*
	 * Meta node
	 */

	if (scan->bm_bitmap == 0) {
		/*
		 * Source all allocated, leave dest allocated
		 */
		return;
	}

	endBlk = blk + count;
	radix /= BLIST_META_RADIX;
	skip = radix_to_skip(radix);
	/* Recurse into each child, trimming 'count' for the last one. */
	for (i = 1; blk < endBlk; i += skip) {
		blk += radix;
		count = radix;
		if (blk >= endBlk)
			count -= blk - endBlk;
		blst_copy(&scan[i], blk - radix, radix, dest, count);
	}
}
9287090df5aSMatthew Dillon 
9297090df5aSMatthew Dillon /*
93092da00bbSMatthew Dillon  * BLST_LEAF_FILL() -	allocate specific blocks in leaf bitmap
93192da00bbSMatthew Dillon  *
93292da00bbSMatthew Dillon  *	This routine allocates all blocks in the specified range
93392da00bbSMatthew Dillon  *	regardless of any existing allocations in that range.  Returns
93492da00bbSMatthew Dillon  *	the number of blocks allocated by the call.
93592da00bbSMatthew Dillon  */
936a7249a6cSAlan Cox static daddr_t
93792da00bbSMatthew Dillon blst_leaf_fill(blmeta_t *scan, daddr_t blk, int count)
93892da00bbSMatthew Dillon {
939a7249a6cSAlan Cox 	daddr_t nblks;
9404be4fd5dSAlan Cox 	u_daddr_t mask;
94192da00bbSMatthew Dillon 
942*bb4a27f9SMark Johnston 	mask = bitrange(blk & BLIST_BMAP_MASK, count);
94392da00bbSMatthew Dillon 
9444be4fd5dSAlan Cox 	/* Count the number of blocks that we are allocating. */
945*bb4a27f9SMark Johnston 	nblks = bitcount64(scan->bm_bitmap & mask);
94692da00bbSMatthew Dillon 
947*bb4a27f9SMark Johnston 	scan->bm_bitmap &= ~mask;
9484be4fd5dSAlan Cox 	return (nblks);
94992da00bbSMatthew Dillon }
95092da00bbSMatthew Dillon 
95192da00bbSMatthew Dillon /*
95292da00bbSMatthew Dillon  * BLIST_META_FILL() -	allocate specific blocks at a meta node
95392da00bbSMatthew Dillon  *
95492da00bbSMatthew Dillon  *	This routine allocates the specified range of blocks,
95592da00bbSMatthew Dillon  *	regardless of any existing allocations in the range.  The
95692da00bbSMatthew Dillon  *	range must be within the extent of this node.  Returns the
95792da00bbSMatthew Dillon  *	number of blocks allocated by the call.
95892da00bbSMatthew Dillon  */
static daddr_t
blst_meta_fill(blmeta_t *scan, daddr_t allocBlk, daddr_t count, u_daddr_t radix)
{
	daddr_t blk, endBlk, i, nblks, skip;
	int digit;

	if (radix == BLIST_BMAP_RADIX)
		return (blst_leaf_fill(scan, allocBlk, count));

	/* Clamp the end of the range to the portion covered by this node. */
	endBlk = ummin(allocBlk + count, (allocBlk + radix) & -radix);
	radix /= BLIST_META_RADIX;
	skip = radix_to_skip(radix);
	blk = allocBlk & -radix;
	nblks = 0;
	while (blk < endBlk) {
		digit = (blk / radix) & BLIST_META_MASK;
		i = 1 + digit * skip;
		blk += radix;
		/* Portion of the range that falls within this child. */
		count = ummin(blk, endBlk) - allocBlk;
		nblks += blst_meta_fill(&scan[i], allocBlk, count, radix);
		/* Subtree i may have become fully allocated. */
		if (scan[i].bm_bitmap == 0)
			scan->bm_bitmap &= ~((u_daddr_t)1 << digit);
		allocBlk = blk;
	}
	return (nblks);
}
98592da00bbSMatthew Dillon 
9867090df5aSMatthew Dillon #ifdef BLIST_DEBUG
9877090df5aSMatthew Dillon 
/* Recursively dump one subtree of the radix tree (debug builds only). */
static void
blst_radix_print(blmeta_t *scan, daddr_t blk, daddr_t radix, int tab)
{
	daddr_t skip;
	u_daddr_t bit, mask;
	int digit;

	/* Leaf: print its bitmap and hint on a single line. */
	if (radix == BLIST_BMAP_RADIX) {
		printf(
		    "%*.*s(%08llx,%lld): bitmap %0*llx big=%lld\n",
		    tab, tab, "",
		    (long long)blk, (long long)radix,
		    1 + (BLIST_BMAP_RADIX - 1) / 4,
		    (long long)scan->bm_bitmap,
		    (long long)scan->bm_bighint
		);
		return;
	}

	printf(
	    "%*.*s(%08llx): subtree (%lld/%lld) bitmap %0*llx big=%lld {\n",
	    tab, tab, "",
	    (long long)blk, (long long)radix,
	    (long long)radix,
	    1 + (BLIST_META_RADIX - 1) / 4,
	    (long long)scan->bm_bitmap,
	    (long long)scan->bm_bighint
	);

	radix /= BLIST_META_RADIX;
	skip = radix_to_skip(radix);
	tab += 4;

	mask = scan->bm_bitmap;
	/* Examine the nonempty subtree associated with each bit set in mask */
	do {
		bit = mask & -mask;
		digit = bitpos(bit);
		blst_radix_print(&scan[1 + digit * skip], blk + digit * radix,
		    radix, tab);
	} while ((mask ^= bit) != 0);
	tab -= 4;

	printf(
	    "%*.*s}\n",
	    tab, tab, ""
	);
}
10367090df5aSMatthew Dillon 
10377090df5aSMatthew Dillon #endif
10387090df5aSMatthew Dillon 
10397090df5aSMatthew Dillon #ifdef BLIST_DEBUG
10407090df5aSMatthew Dillon 
/*
 * Interactive test harness: reads single-letter commands from stdin to
 * exercise the blist implementation (see the 'h' case for the menu).
 */
int
main(int ac, char **av)
{
	int size = BLIST_META_RADIX * BLIST_BMAP_RADIX;
	int i;
	blist_t bl;
	struct sbuf *s;

	/* An optional numeric argument overrides the default size. */
	for (i = 1; i < ac; ++i) {
		const char *ptr = av[i];
		if (*ptr != '-') {
			size = strtol(ptr, NULL, 0);
			continue;
		}
		ptr += 2;
		fprintf(stderr, "Bad option: %s\n", ptr - 2);
		exit(1);
	}
	bl = blist_create(size, M_WAITOK);
	blist_free(bl, 0, size);

	for (;;) {
		char buf[1024];
		long long da = 0;
		long long count = 0;

		printf("%lld/%lld/%lld> ", (long long)blist_avail(bl),
		    (long long)size, (long long)bl->bl_radix);
		fflush(stdout);
		if (fgets(buf, sizeof(buf), stdin) == NULL)
			break;
		switch(buf[0]) {
		case 'r':
			if (sscanf(buf + 1, "%lld", &count) == 1) {
				blist_resize(&bl, count, 1, M_WAITOK);
			} else {
				printf("?\n");
			}
			/*
			 * FALLTHROUGH -- presumably intentional, so a
			 * resize immediately prints the new tree; confirm
			 * before adding a break here.
			 */
		case 'p':
			blist_print(bl);
			break;
		case 's':
			s = sbuf_new_auto();
			blist_stats(bl, s);
			sbuf_finish(s);
			printf("%s", sbuf_data(s));
			sbuf_delete(s);
			break;
		case 'a':
			if (sscanf(buf + 1, "%lld", &count) == 1) {
				daddr_t blk = blist_alloc(bl, count);
				printf("    R=%08llx\n", (long long)blk);
			} else {
				printf("?\n");
			}
			break;
		case 'f':
			if (sscanf(buf + 1, "%llx %lld", &da, &count) == 2) {
				blist_free(bl, da, count);
			} else {
				printf("?\n");
			}
			break;
		case 'l':
			if (sscanf(buf + 1, "%llx %lld", &da, &count) == 2) {
				printf("    n=%jd\n",
				    (intmax_t)blist_fill(bl, da, count));
			} else {
				printf("?\n");
			}
			break;
		case '?':
		case 'h':
			puts(
			    "p          -print\n"
			    "s          -stats\n"
			    "a %d       -allocate\n"
			    "f %x %d    -free\n"
			    "l %x %d    -fill\n"
			    "r %d       -resize\n"
			    "h/?        -help"
			);
			break;
		default:
			printf("?\n");
			break;
		}
	}
	return(0);
}
11317090df5aSMatthew Dillon 
/*
 * Userland stand-in for the kernel panic(): print the formatted message
 * (plus a newline) on stderr and terminate with a failure status.
 */
void
panic(const char *ctl, ...)
{
	va_list ap;

	va_start(ap, ctl);
	vfprintf(stderr, ctl, ap);
	va_end(ap);
	fputc('\n', stderr);
	exit(1);
}
11437090df5aSMatthew Dillon 
11447090df5aSMatthew Dillon #endif
1145