106b4bf3eSWarner Losh /*- 251369649SPedro F. Giffuni * SPDX-License-Identifier: BSD-3-Clause 351369649SPedro F. Giffuni * 406b4bf3eSWarner Losh * Copyright (c) 1998 Matthew Dillon. All Rights Reserved. 506b4bf3eSWarner Losh * Redistribution and use in source and binary forms, with or without 606b4bf3eSWarner Losh * modification, are permitted provided that the following conditions 706b4bf3eSWarner Losh * are met: 806b4bf3eSWarner Losh * 1. Redistributions of source code must retain the above copyright 906b4bf3eSWarner Losh * notice, this list of conditions and the following disclaimer. 1006b4bf3eSWarner Losh * 2. Redistributions in binary form must reproduce the above copyright 1106b4bf3eSWarner Losh * notice, this list of conditions and the following disclaimer in the 1206b4bf3eSWarner Losh * documentation and/or other materials provided with the distribution. 1369a28758SEd Maste * 3. Neither the name of the University nor the names of its contributors 1406b4bf3eSWarner Losh * may be used to endorse or promote products derived from this software 1506b4bf3eSWarner Losh * without specific prior written permission. 1606b4bf3eSWarner Losh * 1706b4bf3eSWarner Losh * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS 1806b4bf3eSWarner Losh * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 1906b4bf3eSWarner Losh * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 2006b4bf3eSWarner Losh * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY 2106b4bf3eSWarner Losh * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 2206b4bf3eSWarner Losh * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE 2306b4bf3eSWarner Losh * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 2406b4bf3eSWarner Losh * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 2506b4bf3eSWarner Losh * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 2606b4bf3eSWarner Losh * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 2706b4bf3eSWarner Losh * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 2806b4bf3eSWarner Losh */ 297090df5aSMatthew Dillon /* 307090df5aSMatthew Dillon * BLIST.C - Bitmap allocator/deallocator, using a radix tree with hinting 317090df5aSMatthew Dillon * 327090df5aSMatthew Dillon * This module implements a general bitmap allocator/deallocator. The 337090df5aSMatthew Dillon * allocator eats around 2 bits per 'block'. The module does not 34f565f395SEitan Adler * try to interpret the meaning of a 'block' other than to return 357090df5aSMatthew Dillon * SWAPBLK_NONE on an allocation failure. 367090df5aSMatthew Dillon * 37ec371b57SAlan Cox * A radix tree controls access to pieces of the bitmap, and includes 38ec371b57SAlan Cox * auxiliary information at each interior node about the availability of 39ec371b57SAlan Cox * contiguous free blocks in the subtree rooted at that node. Two radix 40ec371b57SAlan Cox * constants are involved: one for the size of the bitmaps contained in the 41ec371b57SAlan Cox * leaf nodes (BLIST_BMAP_RADIX), and one for the number of descendants of 42ec371b57SAlan Cox * each of the meta (interior) nodes (BLIST_META_RADIX). Each subtree is 43ec371b57SAlan Cox * associated with a range of blocks. The root of any subtree stores a 44ec371b57SAlan Cox * hint field that defines an upper bound on the size of the largest 45ec371b57SAlan Cox * allocation that can begin in the associated block range.
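 * (For example, purely as an illustration: if the largest allocation that
 * can begin anywhere in a subtree's range is 5 blocks, that subtree's hint
 * must be at least 5, though it may legally be larger.)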
A hint is an 46ec371b57SAlan Cox * upper bound on a potential allocation, but not necessarily a tight upper 47ec371b57SAlan Cox * bound. 487090df5aSMatthew Dillon * 49bb4a27f9SMark Johnston * The bitmap field in each node directs the search for available blocks. 50bb4a27f9SMark Johnston * For a leaf node, a bit is set if the corresponding block is free. For a 51bb4a27f9SMark Johnston * meta node, a bit is set if the corresponding subtree contains a free 52bb4a27f9SMark Johnston * block somewhere within it. The search at a meta node considers only 53bb4a27f9SMark Johnston * children of that node that represent a range that includes a free block. 547090df5aSMatthew Dillon * 557090df5aSMatthew Dillon * The hinting greatly increases code efficiency for allocations while 567090df5aSMatthew Dillon * the general radix structure optimizes both allocations and frees. The 577090df5aSMatthew Dillon * radix tree should be able to operate well no matter how much 587090df5aSMatthew Dillon * fragmentation there is and no matter how large a bitmap is used. 597090df5aSMatthew Dillon * 6051dc4feaSSergey Kandaurov * The blist code wires all necessary memory at creation time. Neither 6151dc4feaSSergey Kandaurov * allocations nor frees require interaction with the memory subsystem. 62bb4a27f9SMark Johnston * The non-blocking nature of allocations and frees is required by swap 63bb4a27f9SMark Johnston * code (vm/swap_pager.c). 647090df5aSMatthew Dillon * 65bb4a27f9SMark Johnston * LAYOUT: The radix tree is laid out recursively using a linear array. 66bb4a27f9SMark Johnston * Each meta node is immediately followed (laid out sequentially in 67bb4a27f9SMark Johnston * memory) by BLIST_META_RADIX lower level nodes. This is a recursive 68bb4a27f9SMark Johnston * structure but one that can be easily scanned through a very simple 69bb4a27f9SMark Johnston * 'skip' calculation. The memory allocation is only large enough to 70bb4a27f9SMark Johnston * cover the number of blocks requested at creation time. Nodes that 71bb4a27f9SMark Johnston * represent blocks beyond that limit, nodes that would never be read 72bb4a27f9SMark Johnston * or written, are not allocated, so that the last of the 73bb4a27f9SMark Johnston * BLIST_META_RADIX lower level nodes of some nodes may not be 74bb4a27f9SMark Johnston * allocated. 757090df5aSMatthew Dillon * 76f565f395SEitan Adler * NOTE: the allocator cannot currently allocate more than 777090df5aSMatthew Dillon * BLIST_BMAP_RADIX blocks per call. It will panic with 'allocation too 787090df5aSMatthew Dillon * large' if you try. This is an area that could use improvement. The 797090df5aSMatthew Dillon * radix is large enough that this restriction does not affect the swap 80b411efa4SAlan Cox * system, though. Currently only the allocation code is affected by 817090df5aSMatthew Dillon * this algorithmic unfeature. The freeing code can handle arbitrary 827090df5aSMatthew Dillon * ranges. 837090df5aSMatthew Dillon * 847090df5aSMatthew Dillon * This code can be compiled stand-alone for debugging. 857090df5aSMatthew Dillon */ 867090df5aSMatthew Dillon 87677b542eSDavid E. O'Brien #include <sys/cdefs.h> 88677b542eSDavid E. O'Brien __FBSDID("$FreeBSD$"); 89677b542eSDavid E.
O'Brien 90c4473420SPeter Wemm #ifdef _KERNEL 917090df5aSMatthew Dillon 927090df5aSMatthew Dillon #include <sys/param.h> 937090df5aSMatthew Dillon #include <sys/systm.h> 947090df5aSMatthew Dillon #include <sys/lock.h> 957090df5aSMatthew Dillon #include <sys/kernel.h> 967090df5aSMatthew Dillon #include <sys/blist.h> 977090df5aSMatthew Dillon #include <sys/malloc.h> 98d027ed2eSAlan Cox #include <sys/sbuf.h> 990cddd8f0SMatthew Dillon #include <sys/proc.h> 10023955314SAlfred Perlstein #include <sys/mutex.h> 1017090df5aSMatthew Dillon 1027090df5aSMatthew Dillon #else 1037090df5aSMatthew Dillon 1047090df5aSMatthew Dillon #ifndef BLIST_NO_DEBUG 1057090df5aSMatthew Dillon #define BLIST_DEBUG 1067090df5aSMatthew Dillon #endif 1077090df5aSMatthew Dillon 108bb4a27f9SMark Johnston #include <sys/errno.h> 1097090df5aSMatthew Dillon #include <sys/types.h> 110d90bf7d8SAlan Cox #include <sys/malloc.h> 111d027ed2eSAlan Cox #include <sys/sbuf.h> 112b1f59c92SDoug Moore #include <assert.h> 1137090df5aSMatthew Dillon #include <stdio.h> 1147090df5aSMatthew Dillon #include <string.h> 1158eefcd40SAlan Cox #include <stddef.h> 1167090df5aSMatthew Dillon #include <stdlib.h> 1177090df5aSMatthew Dillon #include <stdarg.h> 118d3783724SAlan Cox #include <stdbool.h> 1197090df5aSMatthew Dillon 1204be4fd5dSAlan Cox #define bitcount64(x) __bitcount64((uint64_t)(x)) 12192da00bbSMatthew Dillon #define malloc(a,b,c) calloc(a, 1) 1227090df5aSMatthew Dillon #define free(a,b) free(a) 123bb4a27f9SMark Johnston #define ummin(a,b) ((a) < (b) ? (a) : (b)) 124b1f59c92SDoug Moore #define KASSERT(a,b) assert(a) 1257090df5aSMatthew Dillon 1267090df5aSMatthew Dillon #include <sys/blist.h> 1277090df5aSMatthew Dillon 1287090df5aSMatthew Dillon #endif 1297090df5aSMatthew Dillon 1307090df5aSMatthew Dillon /* 1317090df5aSMatthew Dillon * static support functions 1327090df5aSMatthew Dillon */ 133*87ae0686SDoug Moore static daddr_t blst_leaf_alloc(blmeta_t *scan, daddr_t blk, 134*87ae0686SDoug Moore int *count, int maxcount); 135*87ae0686SDoug Moore static daddr_t blst_meta_alloc(blmeta_t *scan, daddr_t cursor, int *count, 136*87ae0686SDoug Moore int maxcount, u_daddr_t radix); 1377090df5aSMatthew Dillon static void blst_leaf_free(blmeta_t *scan, daddr_t relblk, int count); 1387090df5aSMatthew Dillon static void blst_meta_free(blmeta_t *scan, daddr_t freeBlk, daddr_t count, 139bee93d3cSAlan Cox u_daddr_t radix); 1407090df5aSMatthew Dillon static void blst_copy(blmeta_t *scan, daddr_t blk, daddr_t radix, 1412ac0c7c3SAlan Cox blist_t dest, daddr_t count); 142a7249a6cSAlan Cox static daddr_t blst_leaf_fill(blmeta_t *scan, daddr_t blk, int count); 143a7249a6cSAlan Cox static daddr_t blst_meta_fill(blmeta_t *scan, daddr_t allocBlk, daddr_t count, 144bee93d3cSAlan Cox u_daddr_t radix); 145c4473420SPeter Wemm #ifndef _KERNEL 146d3783724SAlan Cox static void blst_radix_print(blmeta_t *scan, daddr_t blk, daddr_t radix, 1472ac0c7c3SAlan Cox int tab); 1487090df5aSMatthew Dillon #endif 1497090df5aSMatthew Dillon 150c4473420SPeter Wemm #ifdef _KERNEL 1517090df5aSMatthew Dillon static MALLOC_DEFINE(M_SWAP, "SWAP", "Swap space"); 1527090df5aSMatthew Dillon #endif 1537090df5aSMatthew Dillon 154ec371b57SAlan Cox _Static_assert(BLIST_BMAP_RADIX % BLIST_META_RADIX == 0, 155ec371b57SAlan Cox "radix divisibility error"); 156ec371b57SAlan Cox #define BLIST_BMAP_MASK (BLIST_BMAP_RADIX - 1) 157d027ed2eSAlan Cox #define BLIST_META_MASK (BLIST_META_RADIX - 1) 158ba98e6a2SAlan Cox 1597090df5aSMatthew Dillon /* 1602ac0c7c3SAlan Cox * For a subtree that can represent the 
state of up to 'radix' blocks, the 1612ac0c7c3SAlan Cox * number of leaf nodes of the subtree is L=radix/BLIST_BMAP_RADIX. If 'm' 1622ac0c7c3SAlan Cox * is short for BLIST_META_RADIX, then for a tree of height h with L=m**h 1632ac0c7c3SAlan Cox * leaf nodes, the total number of tree nodes is 1 + m + m**2 + ... + m**h, 1642ac0c7c3SAlan Cox * or, equivalently, (m**(h+1)-1)/(m-1). This quantity is called 'skip' 1652ac0c7c3SAlan Cox * in the 'meta' functions that process subtrees. Since integer division 1662ac0c7c3SAlan Cox * discards remainders, we can express this computation as 1672ac0c7c3SAlan Cox * skip = (m * m**h) / (m - 1) 168ba98e6a2SAlan Cox * skip = (m * (radix / BLIST_BMAP_RADIX)) / (m - 1) 169ba98e6a2SAlan Cox * and since m divides BLIST_BMAP_RADIX, we can simplify further to 170ba98e6a2SAlan Cox * skip = (radix / (BLIST_BMAP_RADIX / m)) / (m - 1) 171ba98e6a2SAlan Cox * skip = radix / ((BLIST_BMAP_RADIX / m) * (m - 1)) 172ba98e6a2SAlan Cox * so that simple integer division by a constant can safely be used for the 173ba98e6a2SAlan Cox * calculation. 1742ac0c7c3SAlan Cox */ 1752ac0c7c3SAlan Cox static inline daddr_t 1762ac0c7c3SAlan Cox radix_to_skip(daddr_t radix) 1772ac0c7c3SAlan Cox { 1782ac0c7c3SAlan Cox 1792ac0c7c3SAlan Cox return (radix / 180d027ed2eSAlan Cox ((BLIST_BMAP_RADIX / BLIST_META_RADIX) * BLIST_META_MASK)); 181d027ed2eSAlan Cox } 182d027ed2eSAlan Cox 183d027ed2eSAlan Cox /* 184bb4a27f9SMark Johnston * Provide a mask with count bits set, starting as position n. 185bb4a27f9SMark Johnston */ 186bb4a27f9SMark Johnston static inline u_daddr_t 187bb4a27f9SMark Johnston bitrange(int n, int count) 188bb4a27f9SMark Johnston { 189bb4a27f9SMark Johnston 190bb4a27f9SMark Johnston return (((u_daddr_t)-1 << n) & 191bb4a27f9SMark Johnston ((u_daddr_t)-1 >> (BLIST_BMAP_RADIX - (n + count)))); 192bb4a27f9SMark Johnston } 193bb4a27f9SMark Johnston 194bb4a27f9SMark Johnston 195bb4a27f9SMark Johnston /* 19653519253SDoug Moore * Find the first bit set in a u_daddr_t. 
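 * For example (illustrative): with mask == 0x30, no bit below position 4 is
 * set, so the binary search in generic_bitpos() below narrows its [lo, hi)
 * interval down to lo == 4, the position of the least significant set bit.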
197d027ed2eSAlan Cox */ 198d027ed2eSAlan Cox static inline int 19953519253SDoug Moore generic_bitpos(u_daddr_t mask) 20053519253SDoug Moore { 20153519253SDoug Moore int hi, lo, mid; 20253519253SDoug Moore 20353519253SDoug Moore lo = 0; 20453519253SDoug Moore hi = BLIST_BMAP_RADIX; 20553519253SDoug Moore while (lo + 1 < hi) { 20653519253SDoug Moore mid = (lo + hi) >> 1; 20753519253SDoug Moore if (mask & bitrange(0, mid)) 20853519253SDoug Moore hi = mid; 20953519253SDoug Moore else 21053519253SDoug Moore lo = mid; 21153519253SDoug Moore } 21253519253SDoug Moore return (lo); 21353519253SDoug Moore } 21453519253SDoug Moore 21553519253SDoug Moore static inline int 2164ab18ea2SDoug Moore bitpos(u_daddr_t mask) 2174ab18ea2SDoug Moore { 2184ab18ea2SDoug Moore 21912cd7dedSDoug Moore switch (sizeof(mask)) { 2204ab18ea2SDoug Moore #ifdef HAVE_INLINE_FFSLL 22112cd7dedSDoug Moore case sizeof(long long): 22212cd7dedSDoug Moore return (ffsll(mask) - 1); 2234ab18ea2SDoug Moore #endif 22453519253SDoug Moore #ifdef HAVE_INLINE_FFS 22553519253SDoug Moore case sizeof(int): 22653519253SDoug Moore return (ffs(mask) - 1); 22753519253SDoug Moore #endif 22812cd7dedSDoug Moore default: 22953519253SDoug Moore return (generic_bitpos(mask)); 23012cd7dedSDoug Moore } 2312ac0c7c3SAlan Cox } 2322ac0c7c3SAlan Cox 2332ac0c7c3SAlan Cox /* 2347090df5aSMatthew Dillon * blist_create() - create a blist capable of handling up to the specified 2357090df5aSMatthew Dillon * number of blocks 2367090df5aSMatthew Dillon * 237f565f395SEitan Adler * blocks - must be greater than 0 238c8c7ad92SKip Macy * flags - malloc flags 2397090df5aSMatthew Dillon * 2407090df5aSMatthew Dillon * The smallest blist consists of a single leaf node capable of 2417090df5aSMatthew Dillon * managing BLIST_BMAP_RADIX blocks. 2427090df5aSMatthew Dillon */ 2437090df5aSMatthew Dillon blist_t 244c8c7ad92SKip Macy blist_create(daddr_t blocks, int flags) 2457090df5aSMatthew Dillon { 2467090df5aSMatthew Dillon blist_t bl; 247bb4a27f9SMark Johnston u_daddr_t nodes, radix; 2487090df5aSMatthew Dillon 249b1f59c92SDoug Moore KASSERT(blocks > 0, ("invalid block count")); 250ce9eea64SMark Johnston 2517090df5aSMatthew Dillon /* 252ce9eea64SMark Johnston * Calculate the radix and node count used for scanning. 
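	 * For example (an illustrative sketch, assuming a 64-bit u_daddr_t, so
	 * BLIST_BMAP_RADIX == 64, and BLIST_META_RADIX == 16): for
	 * blocks == 10000 the loop below visits radix == 64
	 * (nodes += 1 + 9999 / 64 == 157) and radix == 1024
	 * (nodes += 1 + 9999 / 1024 == 10), then stops at radix == 16384,
	 * leaving nodes == 168 and a root radix of 16384.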
2537090df5aSMatthew Dillon */ 254bb4a27f9SMark Johnston nodes = 1; 2557090df5aSMatthew Dillon radix = BLIST_BMAP_RADIX; 256bb4a27f9SMark Johnston while (radix <= blocks) { 257bb4a27f9SMark Johnston nodes += 1 + (blocks - 1) / radix; 25857e6d29bSMatthew Dillon radix *= BLIST_META_RADIX; 2597090df5aSMatthew Dillon } 2607090df5aSMatthew Dillon 26103ca2137SAlan Cox bl = malloc(offsetof(struct blist, bl_root[nodes]), M_SWAP, flags | 26203ca2137SAlan Cox M_ZERO); 263015d7db6SAlan Cox if (bl == NULL) 264015d7db6SAlan Cox return (NULL); 2657090df5aSMatthew Dillon 2667090df5aSMatthew Dillon bl->bl_blocks = blocks; 2677090df5aSMatthew Dillon bl->bl_radix = radix; 2687090df5aSMatthew Dillon 2697090df5aSMatthew Dillon #if defined(BLIST_DEBUG) 2707090df5aSMatthew Dillon printf( 27192da00bbSMatthew Dillon "BLIST representing %lld blocks (%lld MB of swap)" 27292da00bbSMatthew Dillon ", requiring %lldK of ram\n", 27392da00bbSMatthew Dillon (long long)bl->bl_blocks, 27492da00bbSMatthew Dillon (long long)bl->bl_blocks * 4 / 1024, 275015d7db6SAlan Cox (long long)(nodes * sizeof(blmeta_t) + 1023) / 1024 2767090df5aSMatthew Dillon ); 27792da00bbSMatthew Dillon printf("BLIST raw radix tree contains %lld records\n", 278015d7db6SAlan Cox (long long)nodes); 2797090df5aSMatthew Dillon #endif 2807090df5aSMatthew Dillon 2817090df5aSMatthew Dillon return (bl); 2827090df5aSMatthew Dillon } 2837090df5aSMatthew Dillon 2847090df5aSMatthew Dillon void 2857090df5aSMatthew Dillon blist_destroy(blist_t bl) 2867090df5aSMatthew Dillon { 2878eefcd40SAlan Cox 2887090df5aSMatthew Dillon free(bl, M_SWAP); 2897090df5aSMatthew Dillon } 2907090df5aSMatthew Dillon 2917090df5aSMatthew Dillon /* 2927090df5aSMatthew Dillon * blist_alloc() - reserve space in the block bitmap. Return the base 2937090df5aSMatthew Dillon * of a contiguous region or SWAPBLK_NONE if space could 2947090df5aSMatthew Dillon * not be allocated. 2957090df5aSMatthew Dillon */ 2967090df5aSMatthew Dillon daddr_t 297*87ae0686SDoug Moore blist_alloc(blist_t bl, int *count, int maxcount) 2987090df5aSMatthew Dillon { 29927d172bbSDoug Moore daddr_t blk, cursor; 3007090df5aSMatthew Dillon 301*87ae0686SDoug Moore KASSERT(*count <= maxcount, 302*87ae0686SDoug Moore ("invalid parameters %d > %d", *count, maxcount)); 303*87ae0686SDoug Moore KASSERT(maxcount <= BLIST_MAX_ALLOC, 304*87ae0686SDoug Moore ("allocation too large: %d", maxcount)); 305bb4a27f9SMark Johnston 306d4e3484bSAlan Cox /* 307d4e3484bSAlan Cox * This loop iterates at most twice. An allocation failure in the 308d4e3484bSAlan Cox * first iteration leads to a second iteration only if the cursor was 309d4e3484bSAlan Cox * non-zero. When the cursor is zero, an allocation failure will 310749cdf6fSAlan Cox * stop further iterations. 311d4e3484bSAlan Cox */ 312*87ae0686SDoug Moore for (cursor = bl->bl_cursor;; cursor = 0) { 313*87ae0686SDoug Moore blk = blst_meta_alloc(bl->bl_root, cursor, count, maxcount, 314bee93d3cSAlan Cox bl->bl_radix); 315d4e3484bSAlan Cox if (blk != SWAPBLK_NONE) { 316*87ae0686SDoug Moore bl->bl_avail -= *count; 317*87ae0686SDoug Moore bl->bl_cursor = blk + *count; 318a0ae476bSAlan Cox if (bl->bl_cursor == bl->bl_blocks) 319a0ae476bSAlan Cox bl->bl_cursor = 0; 3207090df5aSMatthew Dillon return (blk); 321*87ae0686SDoug Moore } 322*87ae0686SDoug Moore if (cursor == 0) 323749cdf6fSAlan Cox return (SWAPBLK_NONE); 3247090df5aSMatthew Dillon } 3254be4fd5dSAlan Cox } 3264be4fd5dSAlan Cox 3274be4fd5dSAlan Cox /* 3284be4fd5dSAlan Cox * blist_avail() - return the number of free blocks. 
3294be4fd5dSAlan Cox */ 3304be4fd5dSAlan Cox daddr_t 3314be4fd5dSAlan Cox blist_avail(blist_t bl) 3324be4fd5dSAlan Cox { 3334be4fd5dSAlan Cox 334bb4a27f9SMark Johnston return (bl->bl_avail); 3354be4fd5dSAlan Cox } 3367090df5aSMatthew Dillon 3377090df5aSMatthew Dillon /* 3387090df5aSMatthew Dillon * blist_free() - free up space in the block bitmap. Return the base 339b1f59c92SDoug Moore * of a contiguous region. 3407090df5aSMatthew Dillon */ 3417090df5aSMatthew Dillon void 3427090df5aSMatthew Dillon blist_free(blist_t bl, daddr_t blkno, daddr_t count) 3437090df5aSMatthew Dillon { 344a396c83aSAlan Cox 345b1f59c92SDoug Moore KASSERT(blkno >= 0 && blkno + count <= bl->bl_blocks, 346b1f59c92SDoug Moore ("freeing invalid range: blkno %jx, count %d, blocks %jd", 347b1f59c92SDoug Moore (uintmax_t)blkno, (int)count, (uintmax_t)bl->bl_blocks)); 348bee93d3cSAlan Cox blst_meta_free(bl->bl_root, blkno, count, bl->bl_radix); 349bb4a27f9SMark Johnston bl->bl_avail += count; 3507090df5aSMatthew Dillon } 3517090df5aSMatthew Dillon 3527090df5aSMatthew Dillon /* 35392da00bbSMatthew Dillon * blist_fill() - mark a region in the block bitmap as off-limits 35492da00bbSMatthew Dillon * to the allocator (i.e. allocate it), ignoring any 35592da00bbSMatthew Dillon * existing allocations. Return the number of blocks 35692da00bbSMatthew Dillon * actually filled that were free before the call. 35792da00bbSMatthew Dillon */ 358a7249a6cSAlan Cox daddr_t 35992da00bbSMatthew Dillon blist_fill(blist_t bl, daddr_t blkno, daddr_t count) 36092da00bbSMatthew Dillon { 361bb4a27f9SMark Johnston daddr_t filled; 36292da00bbSMatthew Dillon 363b1f59c92SDoug Moore KASSERT(blkno >= 0 && blkno + count <= bl->bl_blocks, 364b1f59c92SDoug Moore ("filling invalid range: blkno %jx, count %d, blocks %jd", 365b1f59c92SDoug Moore (uintmax_t)blkno, (int)count, (uintmax_t)bl->bl_blocks)); 366bb4a27f9SMark Johnston filled = blst_meta_fill(bl->bl_root, blkno, count, bl->bl_radix); 367bb4a27f9SMark Johnston bl->bl_avail -= filled; 368bb4a27f9SMark Johnston return (filled); 36992da00bbSMatthew Dillon } 37092da00bbSMatthew Dillon 37192da00bbSMatthew Dillon /* 3727090df5aSMatthew Dillon * blist_resize() - resize an existing radix tree to handle the 3737090df5aSMatthew Dillon * specified number of blocks. This will reallocate 3747090df5aSMatthew Dillon * the tree and transfer the previous bitmap to the new 3757090df5aSMatthew Dillon * one. When extending the tree you can specify whether 3767090df5aSMatthew Dillon * the new blocks are to left allocated or freed. 3777090df5aSMatthew Dillon */ 3787090df5aSMatthew Dillon void 379c8c7ad92SKip Macy blist_resize(blist_t *pbl, daddr_t count, int freenew, int flags) 3807090df5aSMatthew Dillon { 381c8c7ad92SKip Macy blist_t newbl = blist_create(count, flags); 3827090df5aSMatthew Dillon blist_t save = *pbl; 3837090df5aSMatthew Dillon 3847090df5aSMatthew Dillon *pbl = newbl; 3857090df5aSMatthew Dillon if (count > save->bl_blocks) 3867090df5aSMatthew Dillon count = save->bl_blocks; 3872ac0c7c3SAlan Cox blst_copy(save->bl_root, 0, save->bl_radix, newbl, count); 3887090df5aSMatthew Dillon 3897090df5aSMatthew Dillon /* 3907090df5aSMatthew Dillon * If resizing upwards, should we free the new space or not? 
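	 * For example (illustrative): growing a 1000-block blist to 2000 blocks
	 * with freenew != 0 frees blocks 1000..1999 here; with freenew == 0 they
	 * remain allocated until explicitly freed.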
3917090df5aSMatthew Dillon */ 3927090df5aSMatthew Dillon if (freenew && count < newbl->bl_blocks) { 3937090df5aSMatthew Dillon blist_free(newbl, count, newbl->bl_blocks - count); 3947090df5aSMatthew Dillon } 3957090df5aSMatthew Dillon blist_destroy(save); 3967090df5aSMatthew Dillon } 3977090df5aSMatthew Dillon 3987090df5aSMatthew Dillon #ifdef BLIST_DEBUG 3997090df5aSMatthew Dillon 4007090df5aSMatthew Dillon /* 4017090df5aSMatthew Dillon * blist_print() - dump radix tree 4027090df5aSMatthew Dillon */ 4037090df5aSMatthew Dillon void 4047090df5aSMatthew Dillon blist_print(blist_t bl) 4057090df5aSMatthew Dillon { 406bb4a27f9SMark Johnston printf("BLIST avail = %jd, cursor = %08jx {\n", 407bb4a27f9SMark Johnston (uintmax_t)bl->bl_avail, (uintmax_t)bl->bl_cursor); 408bb4a27f9SMark Johnston 409bb4a27f9SMark Johnston if (bl->bl_root->bm_bitmap != 0) 4102ac0c7c3SAlan Cox blst_radix_print(bl->bl_root, 0, bl->bl_radix, 4); 4117090df5aSMatthew Dillon printf("}\n"); 4127090df5aSMatthew Dillon } 4137090df5aSMatthew Dillon 4147090df5aSMatthew Dillon #endif 4157090df5aSMatthew Dillon 416d027ed2eSAlan Cox static const u_daddr_t fib[] = { 417d027ed2eSAlan Cox 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584, 418d027ed2eSAlan Cox 4181, 6765, 10946, 17711, 28657, 46368, 75025, 121393, 196418, 317811, 419d027ed2eSAlan Cox 514229, 832040, 1346269, 2178309, 3524578, 420d027ed2eSAlan Cox }; 421d027ed2eSAlan Cox 422d027ed2eSAlan Cox /* 423d027ed2eSAlan Cox * Use 'gap' to describe a maximal range of unallocated blocks/bits. 424d027ed2eSAlan Cox */ 425d027ed2eSAlan Cox struct gap_stats { 426d027ed2eSAlan Cox daddr_t start; /* current gap start, or SWAPBLK_NONE */ 427d027ed2eSAlan Cox daddr_t num; /* number of gaps observed */ 428d027ed2eSAlan Cox daddr_t max; /* largest gap size */ 429d027ed2eSAlan Cox daddr_t avg; /* average gap size */ 430d027ed2eSAlan Cox daddr_t err; /* sum - num * avg */ 431d027ed2eSAlan Cox daddr_t histo[nitems(fib)]; /* # gaps in each size range */ 432d027ed2eSAlan Cox int max_bucket; /* last histo elt with nonzero val */ 433d027ed2eSAlan Cox }; 434d027ed2eSAlan Cox 435d027ed2eSAlan Cox /* 436d027ed2eSAlan Cox * gap_stats_counting() - is the state 'counting 1 bits'? 437d027ed2eSAlan Cox * or 'skipping 0 bits'? 
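 * While counting, 'start' records the block where the current maximal free
 * range began; a start of SWAPBLK_NONE means the scan is currently inside
 * an allocated run.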
438d027ed2eSAlan Cox */ 439d027ed2eSAlan Cox static inline bool 440d027ed2eSAlan Cox gap_stats_counting(const struct gap_stats *stats) 441d027ed2eSAlan Cox { 442d027ed2eSAlan Cox 443d027ed2eSAlan Cox return (stats->start != SWAPBLK_NONE); 444d027ed2eSAlan Cox } 445d027ed2eSAlan Cox 446d027ed2eSAlan Cox /* 447d027ed2eSAlan Cox * init_gap_stats() - initialize stats on gap sizes 448d027ed2eSAlan Cox */ 449d027ed2eSAlan Cox static inline void 450d027ed2eSAlan Cox init_gap_stats(struct gap_stats *stats) 451d027ed2eSAlan Cox { 452d027ed2eSAlan Cox 453d027ed2eSAlan Cox bzero(stats, sizeof(*stats)); 454d027ed2eSAlan Cox stats->start = SWAPBLK_NONE; 455d027ed2eSAlan Cox } 456d027ed2eSAlan Cox 457d027ed2eSAlan Cox /* 458d027ed2eSAlan Cox * update_gap_stats() - update stats on gap sizes 459d027ed2eSAlan Cox */ 460d027ed2eSAlan Cox static void 461d027ed2eSAlan Cox update_gap_stats(struct gap_stats *stats, daddr_t posn) 462d027ed2eSAlan Cox { 463d027ed2eSAlan Cox daddr_t size; 464d027ed2eSAlan Cox int hi, lo, mid; 465d027ed2eSAlan Cox 466d027ed2eSAlan Cox if (!gap_stats_counting(stats)) { 467d027ed2eSAlan Cox stats->start = posn; 468d027ed2eSAlan Cox return; 469d027ed2eSAlan Cox } 470d027ed2eSAlan Cox size = posn - stats->start; 471d027ed2eSAlan Cox stats->start = SWAPBLK_NONE; 472d027ed2eSAlan Cox if (size > stats->max) 473d027ed2eSAlan Cox stats->max = size; 474d027ed2eSAlan Cox 475d027ed2eSAlan Cox /* 476d027ed2eSAlan Cox * Find the fibonacci range that contains size, 477d027ed2eSAlan Cox * expecting to find it in an early range. 478d027ed2eSAlan Cox */ 479d027ed2eSAlan Cox lo = 0; 480d027ed2eSAlan Cox hi = 1; 481d027ed2eSAlan Cox while (hi < nitems(fib) && fib[hi] <= size) { 482d027ed2eSAlan Cox lo = hi; 483d027ed2eSAlan Cox hi *= 2; 484d027ed2eSAlan Cox } 485d027ed2eSAlan Cox if (hi >= nitems(fib)) 486d027ed2eSAlan Cox hi = nitems(fib); 487d027ed2eSAlan Cox while (lo + 1 != hi) { 488d027ed2eSAlan Cox mid = (lo + hi) >> 1; 489d027ed2eSAlan Cox if (fib[mid] <= size) 490d027ed2eSAlan Cox lo = mid; 491d027ed2eSAlan Cox else 492d027ed2eSAlan Cox hi = mid; 493d027ed2eSAlan Cox } 494d027ed2eSAlan Cox stats->histo[lo]++; 495d027ed2eSAlan Cox if (lo > stats->max_bucket) 496d027ed2eSAlan Cox stats->max_bucket = lo; 497d027ed2eSAlan Cox stats->err += size - stats->avg; 498d027ed2eSAlan Cox stats->num++; 499d027ed2eSAlan Cox stats->avg += stats->err / stats->num; 500d027ed2eSAlan Cox stats->err %= stats->num; 501d027ed2eSAlan Cox } 502d027ed2eSAlan Cox 503d027ed2eSAlan Cox /* 504d027ed2eSAlan Cox * dump_gap_stats() - print stats on gap sizes 505d027ed2eSAlan Cox */ 506d027ed2eSAlan Cox static inline void 507d027ed2eSAlan Cox dump_gap_stats(const struct gap_stats *stats, struct sbuf *s) 508d027ed2eSAlan Cox { 509d027ed2eSAlan Cox int i; 510d027ed2eSAlan Cox 511d027ed2eSAlan Cox sbuf_printf(s, "number of maximal free ranges: %jd\n", 512d027ed2eSAlan Cox (intmax_t)stats->num); 513d027ed2eSAlan Cox sbuf_printf(s, "largest free range: %jd\n", (intmax_t)stats->max); 514d027ed2eSAlan Cox sbuf_printf(s, "average maximal free range size: %jd\n", 515d027ed2eSAlan Cox (intmax_t)stats->avg); 516d027ed2eSAlan Cox sbuf_printf(s, "number of maximal free ranges of different sizes:\n"); 517d027ed2eSAlan Cox sbuf_printf(s, " count | size range\n"); 518d027ed2eSAlan Cox sbuf_printf(s, " ----- | ----------\n"); 519d027ed2eSAlan Cox for (i = 0; i < stats->max_bucket; i++) { 520d027ed2eSAlan Cox if (stats->histo[i] != 0) { 521d027ed2eSAlan Cox sbuf_printf(s, "%20jd | ", 522d027ed2eSAlan Cox (intmax_t)stats->histo[i]); 
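			/*
			 * Bucket i counts gaps whose size lies in
			 * [fib[i], fib[i + 1] - 1]; print that range, or a
			 * single value when the bucket covers only one size.
			 */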
523d027ed2eSAlan Cox if (fib[i] != fib[i + 1] - 1) 524d027ed2eSAlan Cox sbuf_printf(s, "%jd to %jd\n", (intmax_t)fib[i], 525d027ed2eSAlan Cox (intmax_t)fib[i + 1] - 1); 526d027ed2eSAlan Cox else 527d027ed2eSAlan Cox sbuf_printf(s, "%jd\n", (intmax_t)fib[i]); 528d027ed2eSAlan Cox } 529d027ed2eSAlan Cox } 530d027ed2eSAlan Cox sbuf_printf(s, "%20jd | ", (intmax_t)stats->histo[i]); 531d027ed2eSAlan Cox if (stats->histo[i] > 1) 532d027ed2eSAlan Cox sbuf_printf(s, "%jd to %jd\n", (intmax_t)fib[i], 533d027ed2eSAlan Cox (intmax_t)stats->max); 534d027ed2eSAlan Cox else 535d027ed2eSAlan Cox sbuf_printf(s, "%jd\n", (intmax_t)stats->max); 536d027ed2eSAlan Cox } 537d027ed2eSAlan Cox 538d027ed2eSAlan Cox /* 539d027ed2eSAlan Cox * blist_stats() - dump radix tree stats 540d027ed2eSAlan Cox */ 541d027ed2eSAlan Cox void 542d027ed2eSAlan Cox blist_stats(blist_t bl, struct sbuf *s) 543d027ed2eSAlan Cox { 544d027ed2eSAlan Cox struct gap_stats gstats; 545d027ed2eSAlan Cox struct gap_stats *stats = &gstats; 546d027ed2eSAlan Cox daddr_t i, nodes, radix; 54753519253SDoug Moore u_daddr_t diff, mask; 54853519253SDoug Moore int digit; 549d027ed2eSAlan Cox 550d027ed2eSAlan Cox init_gap_stats(stats); 551d027ed2eSAlan Cox nodes = 0; 552d027ed2eSAlan Cox i = bl->bl_radix; 553d027ed2eSAlan Cox while (i < bl->bl_radix + bl->bl_blocks) { 554d027ed2eSAlan Cox /* 555d027ed2eSAlan Cox * Find max size subtree starting at i. 556d027ed2eSAlan Cox */ 557d027ed2eSAlan Cox radix = BLIST_BMAP_RADIX; 558d027ed2eSAlan Cox while (((i / radix) & BLIST_META_MASK) == 0) 559d027ed2eSAlan Cox radix *= BLIST_META_RADIX; 560d027ed2eSAlan Cox 561d027ed2eSAlan Cox /* 562d027ed2eSAlan Cox * Check for skippable subtrees starting at i. 563d027ed2eSAlan Cox */ 564d027ed2eSAlan Cox while (radix > BLIST_BMAP_RADIX) { 565bb4a27f9SMark Johnston if (bl->bl_root[nodes].bm_bitmap == 0) { 566d027ed2eSAlan Cox if (gap_stats_counting(stats)) 567d027ed2eSAlan Cox update_gap_stats(stats, i); 568d027ed2eSAlan Cox break; 569d027ed2eSAlan Cox } 570d027ed2eSAlan Cox 571d027ed2eSAlan Cox /* 572d027ed2eSAlan Cox * Skip subtree root. 573d027ed2eSAlan Cox */ 574d027ed2eSAlan Cox nodes++; 575d027ed2eSAlan Cox radix /= BLIST_META_RADIX; 576d027ed2eSAlan Cox } 577d027ed2eSAlan Cox if (radix == BLIST_BMAP_RADIX) { 578d027ed2eSAlan Cox /* 579d027ed2eSAlan Cox * Scan leaf. 580d027ed2eSAlan Cox */ 581bb4a27f9SMark Johnston mask = bl->bl_root[nodes].bm_bitmap; 582d027ed2eSAlan Cox diff = mask ^ (mask << 1); 583d027ed2eSAlan Cox if (gap_stats_counting(stats)) 584d027ed2eSAlan Cox diff ^= 1; 585d027ed2eSAlan Cox while (diff != 0) { 58653519253SDoug Moore digit = bitpos(diff); 58753519253SDoug Moore update_gap_stats(stats, i + digit); 58853519253SDoug Moore diff ^= bitrange(digit, 1); 589d027ed2eSAlan Cox } 590d027ed2eSAlan Cox } 591d027ed2eSAlan Cox nodes += radix_to_skip(radix); 592d027ed2eSAlan Cox i += radix; 593d027ed2eSAlan Cox } 594d027ed2eSAlan Cox update_gap_stats(stats, i); 595d027ed2eSAlan Cox dump_gap_stats(stats, s); 596d027ed2eSAlan Cox } 597d027ed2eSAlan Cox 5987090df5aSMatthew Dillon /************************************************************************ 5997090df5aSMatthew Dillon * ALLOCATION SUPPORT FUNCTIONS * 6007090df5aSMatthew Dillon ************************************************************************ 6017090df5aSMatthew Dillon * 6027090df5aSMatthew Dillon * These support functions do all the actual work. They may seem 6037090df5aSMatthew Dillon * rather longish, but that's because I've commented them up. 
The 6047090df5aSMatthew Dillon * actual code is straight forward. 6057090df5aSMatthew Dillon * 6067090df5aSMatthew Dillon */ 6077090df5aSMatthew Dillon 6087090df5aSMatthew Dillon /* 609bb4a27f9SMark Johnston * BLST_NEXT_LEAF_ALLOC() - allocate the first few blocks in the next leaf. 610bb4a27f9SMark Johnston * 611bb4a27f9SMark Johnston * 'scan' is a leaf node, associated with a block containing 'blk'. 612bb4a27f9SMark Johnston * The next leaf node could be adjacent, or several nodes away if the 613bb4a27f9SMark Johnston * least common ancestor of 'scan' and its neighbor is several levels 614bb4a27f9SMark Johnston * up. Use 'blk' to determine how many meta-nodes lie between the 615bb4a27f9SMark Johnston * leaves. If the next leaf has enough initial bits set, clear them 616bb4a27f9SMark Johnston * and clear the bits in the meta nodes on the path up to the least 617bb4a27f9SMark Johnston * common ancestor to mark any subtrees made completely empty. 618bb4a27f9SMark Johnston */ 619bb4a27f9SMark Johnston static int 620*87ae0686SDoug Moore blst_next_leaf_alloc(blmeta_t *scan, daddr_t blk, int count, int maxcount) 621bb4a27f9SMark Johnston { 622bb4a27f9SMark Johnston blmeta_t *next; 623bb4a27f9SMark Johnston u_daddr_t radix; 624*87ae0686SDoug Moore int avail, digit; 625bb4a27f9SMark Johnston 626bb4a27f9SMark Johnston next = scan + 1; 627bb4a27f9SMark Johnston blk += BLIST_BMAP_RADIX; 628bb4a27f9SMark Johnston radix = BLIST_BMAP_RADIX; 629*87ae0686SDoug Moore while ((next->bm_bitmap & 1) == 1 && 630*87ae0686SDoug Moore (digit = ((blk / radix) & BLIST_META_MASK)) == 0) { 631bb4a27f9SMark Johnston next++; 632bb4a27f9SMark Johnston radix *= BLIST_META_RADIX; 633bb4a27f9SMark Johnston } 634*87ae0686SDoug Moore if ((next->bm_bitmap & 1) != 1) 635*87ae0686SDoug Moore return (0); 636*87ae0686SDoug Moore avail = (~next->bm_bitmap != 0) ? 637*87ae0686SDoug Moore bitpos(~next->bm_bitmap) : BLIST_BMAP_RADIX; 638*87ae0686SDoug Moore if (avail < count) { 639bb4a27f9SMark Johnston /* 640bb4a27f9SMark Johnston * The next leaf doesn't have enough free blocks at the 641bb4a27f9SMark Johnston * beginning to complete the spanning allocation. 642bb4a27f9SMark Johnston */ 643*87ae0686SDoug Moore return (0); 644bb4a27f9SMark Johnston } 645*87ae0686SDoug Moore count = imin(avail, maxcount); 646bb4a27f9SMark Johnston /* Clear the first 'count' bits in the next leaf to allocate. */ 647*87ae0686SDoug Moore next->bm_bitmap &= ~bitrange(0, count); 648bb4a27f9SMark Johnston 649bb4a27f9SMark Johnston /* 650bb4a27f9SMark Johnston * Update bitmaps of next-ancestors, up to least common ancestor. 651bb4a27f9SMark Johnston */ 652d1c34a6bSDoug Moore while (next->bm_bitmap == 0) { 653d1c34a6bSDoug Moore if (--next == scan) { 654d1c34a6bSDoug Moore scan[-digit * radix_to_skip(radix)].bm_bitmap ^= 655d1c34a6bSDoug Moore (u_daddr_t)1 << digit; 656d1c34a6bSDoug Moore break; 657bb4a27f9SMark Johnston } 658d1c34a6bSDoug Moore next->bm_bitmap ^= 1; 659d1c34a6bSDoug Moore } 660*87ae0686SDoug Moore return (count); 661bb4a27f9SMark Johnston } 662bb4a27f9SMark Johnston 663bb4a27f9SMark Johnston /* 66409b380a1SDoug Moore * Given a bitmask, flip all the bits from the least-significant 1-bit to the 66509b380a1SDoug Moore * most significant bit. If the result is non-zero, then the least-significant 66609b380a1SDoug Moore * 1-bit of the result is in the same position as the least-signification 0-bit 66709b380a1SDoug Moore * in mask that is followed by a 1-bit. 
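 * For example (illustrative, assuming a 64-bit u_daddr_t): the lowest set
 * bit of mask == 0x34 is bit 2, so bits 2 and up are flipped and
 * flip_hibits(0x34) == 0xffffffffffffffc8; its least significant 1-bit is
 * bit 3, which is the first 0-bit of 0x34 above its lowest 1-bit.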
66809b380a1SDoug Moore */ 66909b380a1SDoug Moore static inline u_daddr_t 67009b380a1SDoug Moore flip_hibits(u_daddr_t mask) 67109b380a1SDoug Moore { 67209b380a1SDoug Moore 67309b380a1SDoug Moore return (-mask & ~mask); 67409b380a1SDoug Moore } 67509b380a1SDoug Moore 67609b380a1SDoug Moore /* 677bb4a27f9SMark Johnston * BLST_LEAF_ALLOC() - allocate at a leaf in the radix tree (a bitmap). 6787090df5aSMatthew Dillon * 6792905d1ceSAlan Cox * This function is the core of the allocator. Its execution time is 6802905d1ceSAlan Cox * proportional to log(count), plus height of the tree if the allocation 6812905d1ceSAlan Cox * crosses a leaf boundary. 6827090df5aSMatthew Dillon */ 6837090df5aSMatthew Dillon static daddr_t 684*87ae0686SDoug Moore blst_leaf_alloc(blmeta_t *scan, daddr_t blk, int *count, int maxcount) 68554541421SAlan Cox { 6862905d1ceSAlan Cox u_daddr_t cursor_mask, mask; 687ec371b57SAlan Cox int count1, hi, lo, num_shifts, range1, range_ext; 6887090df5aSMatthew Dillon 68954541421SAlan Cox range1 = 0; 690*87ae0686SDoug Moore count1 = *count - 1; 69154541421SAlan Cox num_shifts = fls(count1); 692bb4a27f9SMark Johnston mask = scan->bm_bitmap; 69309b380a1SDoug Moore while (flip_hibits(mask) != 0 && num_shifts > 0) { 69454541421SAlan Cox /* 69554541421SAlan Cox * If bit i is set in mask, then bits in [i, i+range1] are set 6962905d1ceSAlan Cox * in scan->bm_bitmap. The value of range1 is equal to count1 6972905d1ceSAlan Cox * >> num_shifts. Grow range1 and reduce num_shifts to 0, 6982905d1ceSAlan Cox * while preserving these invariants. The updates to mask 6992905d1ceSAlan Cox * leave fewer bits set, but each bit that remains set 7002905d1ceSAlan Cox * represents a longer string of consecutive bits set in 7012905d1ceSAlan Cox * scan->bm_bitmap. If more updates to mask cannot clear more 7022905d1ceSAlan Cox * bits, because mask is partitioned with all 0 bits preceding 7032905d1ceSAlan Cox * all 1 bits, the loop terminates immediately. 70454541421SAlan Cox */ 70554541421SAlan Cox num_shifts--; 70654541421SAlan Cox range_ext = range1 + ((count1 >> num_shifts) & 1); 707ec371b57SAlan Cox /* 708ec371b57SAlan Cox * mask is a signed quantity for the shift because when it is 709ec371b57SAlan Cox * shifted right, the sign bit should copied; when the last 710ec371b57SAlan Cox * block of the leaf is free, pretend, for a while, that all the 711ec371b57SAlan Cox * blocks that follow it are also free. 712ec371b57SAlan Cox */ 713ec371b57SAlan Cox mask &= (daddr_t)mask >> range_ext; 71454541421SAlan Cox range1 += range_ext; 71554541421SAlan Cox } 71654541421SAlan Cox if (mask == 0) { 71754541421SAlan Cox /* 71854541421SAlan Cox * Update bighint. There is no allocation bigger than range1 719ec371b57SAlan Cox * starting in this leaf. 72054541421SAlan Cox */ 72154541421SAlan Cox scan->bm_bighint = range1; 7227090df5aSMatthew Dillon return (SWAPBLK_NONE); 7237090df5aSMatthew Dillon } 72454541421SAlan Cox 725ec371b57SAlan Cox /* Discard any candidates that appear before blk. 
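	 * (Illustrative: if the cursor falls at bit 5 of this leaf, cursor_mask
	 * below gathers any candidate bits 0..4 and strips them from mask, so
	 * that the allocation cannot start before the cursor.)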
*/ 7262905d1ceSAlan Cox if ((blk & BLIST_BMAP_MASK) != 0) { 7272905d1ceSAlan Cox cursor_mask = mask & bitrange(0, blk & BLIST_BMAP_MASK); 7282905d1ceSAlan Cox if (cursor_mask != 0) { 7292905d1ceSAlan Cox mask ^= cursor_mask; 73054541421SAlan Cox if (mask == 0) 73154541421SAlan Cox return (SWAPBLK_NONE); 7327090df5aSMatthew Dillon 7337090df5aSMatthew Dillon /* 7342905d1ceSAlan Cox * Bighint change for last block allocation cannot 7352905d1ceSAlan Cox * assume that any other blocks are allocated, so the 7362905d1ceSAlan Cox * bighint cannot be reduced much. 7372905d1ceSAlan Cox */ 7382905d1ceSAlan Cox range1 = BLIST_MAX_ALLOC - 1; 7392905d1ceSAlan Cox } 7402905d1ceSAlan Cox blk &= ~BLIST_BMAP_MASK; 7412905d1ceSAlan Cox } 7422905d1ceSAlan Cox 7432905d1ceSAlan Cox /* 74454541421SAlan Cox * The least significant set bit in mask marks the start of the first 745*87ae0686SDoug Moore * available range of sufficient size. Find its position. 7467090df5aSMatthew Dillon */ 747d027ed2eSAlan Cox lo = bitpos(mask); 7487090df5aSMatthew Dillon 74954541421SAlan Cox /* 750*87ae0686SDoug Moore * Find how much space is available starting at that position. 75154541421SAlan Cox */ 752*87ae0686SDoug Moore if (flip_hibits(mask) != 0) { 753*87ae0686SDoug Moore /* Count the 1 bits starting at position lo. */ 754*87ae0686SDoug Moore hi = bitpos(flip_hibits(mask)) + count1; 755*87ae0686SDoug Moore if (maxcount < hi - lo) 756*87ae0686SDoug Moore hi = lo + maxcount; 757*87ae0686SDoug Moore *count = hi - lo; 758*87ae0686SDoug Moore mask = bitrange(lo, *count); 759*87ae0686SDoug Moore } else if (maxcount <= BLIST_BMAP_RADIX - lo) { 760*87ae0686SDoug Moore /* All the blocks we can use are available here. */ 761*87ae0686SDoug Moore hi = lo + maxcount; 762*87ae0686SDoug Moore *count = maxcount; 763*87ae0686SDoug Moore mask = bitrange(lo, *count); 764*87ae0686SDoug Moore } else { 765*87ae0686SDoug Moore /* Check next leaf for some of the blocks we want or need. */ 766*87ae0686SDoug Moore count1 = *count - (BLIST_BMAP_RADIX - lo); 767*87ae0686SDoug Moore maxcount -= BLIST_BMAP_RADIX - lo; 768*87ae0686SDoug Moore hi = blst_next_leaf_alloc(scan, blk, count1, maxcount); 769*87ae0686SDoug Moore if (hi < count1) 770ec371b57SAlan Cox /* 771*87ae0686SDoug Moore * The next leaf cannot supply enough blocks to reach 772*87ae0686SDoug Moore * the minimum required allocation. The hint cannot be 773*87ae0686SDoug Moore * updated, because the same allocation request could 774*87ae0686SDoug Moore * be satisfied later, by this leaf, if the state of 775*87ae0686SDoug Moore * the next leaf changes, and without any changes to 776*87ae0686SDoug Moore * this leaf. 777ec371b57SAlan Cox */ 778ec371b57SAlan Cox return (SWAPBLK_NONE); 779*87ae0686SDoug Moore *count = BLIST_BMAP_RADIX - lo + hi; 780ec371b57SAlan Cox hi = BLIST_BMAP_RADIX; 781ec371b57SAlan Cox } 782ec371b57SAlan Cox 783ec371b57SAlan Cox if (hi == BLIST_BMAP_RADIX) { 784ec371b57SAlan Cox /* 785ec371b57SAlan Cox * Update bighint. There is no allocation bigger than range1 786ec371b57SAlan Cox * available in this leaf after this allocation completes. 787ec371b57SAlan Cox */ 788ec371b57SAlan Cox scan->bm_bighint = range1; 789ec371b57SAlan Cox } 790ec371b57SAlan Cox /* Clear the allocated bits from this leaf. */ 791bb4a27f9SMark Johnston scan->bm_bitmap &= ~mask; 7922905d1ceSAlan Cox return (blk + lo); 7937090df5aSMatthew Dillon } 7947090df5aSMatthew Dillon 7957090df5aSMatthew Dillon /* 7967090df5aSMatthew Dillon * blist_meta_alloc() - allocate at a meta in the radix tree. 
7977090df5aSMatthew Dillon * 7987090df5aSMatthew Dillon * Attempt to allocate at a meta node. If we can't, we update 7997090df5aSMatthew Dillon * bighint and return a failure. Updating bighint optimize future 8007090df5aSMatthew Dillon * calls that hit this node. We have to check for our collapse cases 8017090df5aSMatthew Dillon * and we have a few optimizations strewn in as well. 8027090df5aSMatthew Dillon */ 8037090df5aSMatthew Dillon static daddr_t 804*87ae0686SDoug Moore blst_meta_alloc(blmeta_t *scan, daddr_t cursor, int *count, 805*87ae0686SDoug Moore int maxcount, u_daddr_t radix) 806d4e3484bSAlan Cox { 807bb4a27f9SMark Johnston daddr_t blk, i, r, skip; 80853519253SDoug Moore u_daddr_t mask; 809d4e3484bSAlan Cox bool scan_from_start; 810bb4a27f9SMark Johnston int digit; 8117090df5aSMatthew Dillon 812a396c83aSAlan Cox if (radix == BLIST_BMAP_RADIX) 813*87ae0686SDoug Moore return (blst_leaf_alloc(scan, cursor, count, maxcount)); 814ec371b57SAlan Cox blk = cursor & -radix; 815bb4a27f9SMark Johnston scan_from_start = (cursor == blk); 816bb4a27f9SMark Johnston radix /= BLIST_META_RADIX; 8172ac0c7c3SAlan Cox skip = radix_to_skip(radix); 818bb4a27f9SMark Johnston mask = scan->bm_bitmap; 819bb4a27f9SMark Johnston 820bb4a27f9SMark Johnston /* Discard any candidates that appear before cursor. */ 821bb4a27f9SMark Johnston digit = (cursor / radix) & BLIST_META_MASK; 822bb4a27f9SMark Johnston mask &= (u_daddr_t)-1 << digit; 823ee73fef9SAlan Cox if (mask == 0) 824ee73fef9SAlan Cox return (SWAPBLK_NONE); 8257090df5aSMatthew Dillon 8264be4fd5dSAlan Cox /* 827bb4a27f9SMark Johnston * If the first try is for a block that includes the cursor, pre-undo 828bb4a27f9SMark Johnston * the digit * radix offset in the first call; otherwise, ignore the 829bb4a27f9SMark Johnston * cursor entirely. 8304be4fd5dSAlan Cox */ 831bb4a27f9SMark Johnston if (((mask >> digit) & 1) == 1) 832bb4a27f9SMark Johnston cursor -= digit * radix; 833d4e3484bSAlan Cox else 834bb4a27f9SMark Johnston cursor = blk; 8357090df5aSMatthew Dillon 8364be4fd5dSAlan Cox /* 837bb4a27f9SMark Johnston * Examine the nonempty subtree associated with each bit set in mask. 8384be4fd5dSAlan Cox */ 839bb4a27f9SMark Johnston do { 84053519253SDoug Moore digit = bitpos(mask); 841bb4a27f9SMark Johnston i = 1 + digit * skip; 842*87ae0686SDoug Moore if (*count <= scan[i].bm_bighint) { 8437090df5aSMatthew Dillon /* 844ec371b57SAlan Cox * The allocation might fit beginning in the i'th subtree. 8457090df5aSMatthew Dillon */ 846bb4a27f9SMark Johnston r = blst_meta_alloc(&scan[i], cursor + digit * radix, 847*87ae0686SDoug Moore count, maxcount, radix); 8487090df5aSMatthew Dillon if (r != SWAPBLK_NONE) { 849bb4a27f9SMark Johnston if (scan[i].bm_bitmap == 0) 85053519253SDoug Moore scan->bm_bitmap ^= bitrange(digit, 1); 8517090df5aSMatthew Dillon return (r); 8527090df5aSMatthew Dillon } 8537090df5aSMatthew Dillon } 854bb4a27f9SMark Johnston cursor = blk; 85553519253SDoug Moore } while ((mask ^= bitrange(digit, 1)) != 0); 8567090df5aSMatthew Dillon 8577090df5aSMatthew Dillon /* 858bb4a27f9SMark Johnston * We couldn't allocate count in this subtree. If the whole tree was 859bb4a27f9SMark Johnston * scanned, and the last tree node is allocated, update bighint. 
8607090df5aSMatthew Dillon */ 861bb4a27f9SMark Johnston if (scan_from_start && !(digit == BLIST_META_RADIX - 1 && 862bb4a27f9SMark Johnston scan[i].bm_bighint == BLIST_MAX_ALLOC)) 863*87ae0686SDoug Moore scan->bm_bighint = *count - 1; 864d4e3484bSAlan Cox 8657090df5aSMatthew Dillon return (SWAPBLK_NONE); 8667090df5aSMatthew Dillon } 8677090df5aSMatthew Dillon 8687090df5aSMatthew Dillon /* 8697090df5aSMatthew Dillon * BLST_LEAF_FREE() - free allocated block from leaf bitmap 8707090df5aSMatthew Dillon * 8717090df5aSMatthew Dillon */ 8727090df5aSMatthew Dillon static void 873b411efa4SAlan Cox blst_leaf_free(blmeta_t *scan, daddr_t blk, int count) 874b411efa4SAlan Cox { 875ec371b57SAlan Cox u_daddr_t mask; 876ec371b57SAlan Cox 8777090df5aSMatthew Dillon /* 8787090df5aSMatthew Dillon * free some data in this bitmap 879ec371b57SAlan Cox * mask=0000111111111110000 8807090df5aSMatthew Dillon * \_________/\__/ 881ec371b57SAlan Cox * count n 8827090df5aSMatthew Dillon */ 883bb4a27f9SMark Johnston mask = bitrange(blk & BLIST_BMAP_MASK, count); 884b1f59c92SDoug Moore KASSERT((scan->bm_bitmap & mask) == 0, 885b1f59c92SDoug Moore ("freeing free block: %jx, size %d, mask %jx", 886b1f59c92SDoug Moore (uintmax_t)blk, count, (uintmax_t)scan->bm_bitmap & mask)); 887bb4a27f9SMark Johnston scan->bm_bitmap |= mask; 8887090df5aSMatthew Dillon } 8897090df5aSMatthew Dillon 8907090df5aSMatthew Dillon /* 8917090df5aSMatthew Dillon * BLST_META_FREE() - free allocated blocks from radix tree meta info 8927090df5aSMatthew Dillon * 8937090df5aSMatthew Dillon * This support routine frees a range of blocks from the bitmap. 8947090df5aSMatthew Dillon * The range must be entirely enclosed by this radix node. If a 8957090df5aSMatthew Dillon * meta node, we break the range down recursively to free blocks 8967090df5aSMatthew Dillon * in subnodes (which means that this code can free an arbitrary 8977090df5aSMatthew Dillon * range whereas the allocation code cannot allocate an arbitrary 8987090df5aSMatthew Dillon * range). 8997090df5aSMatthew Dillon */ 9007090df5aSMatthew Dillon static void 901bee93d3cSAlan Cox blst_meta_free(blmeta_t *scan, daddr_t freeBlk, daddr_t count, u_daddr_t radix) 902d3783724SAlan Cox { 903bb4a27f9SMark Johnston daddr_t blk, endBlk, i, skip; 904bb4a27f9SMark Johnston int digit, endDigit; 9057090df5aSMatthew Dillon 906bb4a27f9SMark Johnston /* 907bb4a27f9SMark Johnston * We could probably do a better job here. We are required to make 908bb4a27f9SMark Johnston * bighint at least as large as the biggest allocable block of data. 909bb4a27f9SMark Johnston * If we just shoehorn it, a little extra overhead will be incurred 910bb4a27f9SMark Johnston * on the next allocation (but only that one typically). 
911bb4a27f9SMark Johnston */ 912bb4a27f9SMark Johnston scan->bm_bighint = BLIST_MAX_ALLOC; 913bb4a27f9SMark Johnston 914a396c83aSAlan Cox if (radix == BLIST_BMAP_RADIX) 915a396c83aSAlan Cox return (blst_leaf_free(scan, freeBlk, count)); 9167090df5aSMatthew Dillon 917bb4a27f9SMark Johnston endBlk = ummin(freeBlk + count, (freeBlk + radix) & -radix); 91857e6d29bSMatthew Dillon radix /= BLIST_META_RADIX; 919bb4a27f9SMark Johnston skip = radix_to_skip(radix); 920bb4a27f9SMark Johnston blk = freeBlk & -radix; 921bb4a27f9SMark Johnston digit = (blk / radix) & BLIST_META_MASK; 922bb4a27f9SMark Johnston endDigit = 1 + (((endBlk - 1) / radix) & BLIST_META_MASK); 923bb4a27f9SMark Johnston scan->bm_bitmap |= bitrange(digit, endDigit - digit); 924bb4a27f9SMark Johnston for (i = 1 + digit * skip; blk < endBlk; i += skip) { 9257090df5aSMatthew Dillon blk += radix; 926bb4a27f9SMark Johnston count = ummin(blk, endBlk) - freeBlk; 927bb4a27f9SMark Johnston blst_meta_free(&scan[i], freeBlk, count, radix); 928bb4a27f9SMark Johnston freeBlk = blk; 9297090df5aSMatthew Dillon } 9307090df5aSMatthew Dillon } 9317090df5aSMatthew Dillon 9327090df5aSMatthew Dillon /* 933bb4a27f9SMark Johnston * BLST_COPY() - copy one radix tree to another 9347090df5aSMatthew Dillon * 9357090df5aSMatthew Dillon * Locates free space in the source tree and frees it in the destination 9367090df5aSMatthew Dillon * tree. The space may not already be free in the destination. 9377090df5aSMatthew Dillon */ 938b411efa4SAlan Cox static void 9392ac0c7c3SAlan Cox blst_copy(blmeta_t *scan, daddr_t blk, daddr_t radix, blist_t dest, 9402ac0c7c3SAlan Cox daddr_t count) 941b411efa4SAlan Cox { 942bb4a27f9SMark Johnston daddr_t endBlk, i, skip; 9437090df5aSMatthew Dillon 9447090df5aSMatthew Dillon /* 9457090df5aSMatthew Dillon * Leaf node 9467090df5aSMatthew Dillon */ 9477090df5aSMatthew Dillon 9487090df5aSMatthew Dillon if (radix == BLIST_BMAP_RADIX) { 949bb4a27f9SMark Johnston u_daddr_t v = scan->bm_bitmap; 9507090df5aSMatthew Dillon 9517090df5aSMatthew Dillon if (v == (u_daddr_t)-1) { 9527090df5aSMatthew Dillon blist_free(dest, blk, count); 9537090df5aSMatthew Dillon } else if (v != 0) { 9547090df5aSMatthew Dillon int i; 9557090df5aSMatthew Dillon 956bb4a27f9SMark Johnston for (i = 0; i < count; ++i) { 957064650c1SAlan Cox if (v & ((u_daddr_t)1 << i)) 9587090df5aSMatthew Dillon blist_free(dest, blk + i, 1); 9597090df5aSMatthew Dillon } 9607090df5aSMatthew Dillon } 9617090df5aSMatthew Dillon return; 9627090df5aSMatthew Dillon } 9637090df5aSMatthew Dillon 9647090df5aSMatthew Dillon /* 9657090df5aSMatthew Dillon * Meta node 9667090df5aSMatthew Dillon */ 9677090df5aSMatthew Dillon 968bb4a27f9SMark Johnston if (scan->bm_bitmap == 0) { 9697090df5aSMatthew Dillon /* 9707090df5aSMatthew Dillon * Source all allocated, leave dest allocated 9717090df5aSMatthew Dillon */ 9727090df5aSMatthew Dillon return; 9737090df5aSMatthew Dillon } 9747090df5aSMatthew Dillon 975bb4a27f9SMark Johnston endBlk = blk + count; 9762ac0c7c3SAlan Cox radix /= BLIST_META_RADIX; 977bb4a27f9SMark Johnston skip = radix_to_skip(radix); 978bb4a27f9SMark Johnston for (i = 1; blk < endBlk; i += skip) { 9797090df5aSMatthew Dillon blk += radix; 980bb4a27f9SMark Johnston count = radix; 981bb4a27f9SMark Johnston if (blk >= endBlk) 982bb4a27f9SMark Johnston count -= blk - endBlk; 983bb4a27f9SMark Johnston blst_copy(&scan[i], blk - radix, radix, dest, count); 9847090df5aSMatthew Dillon } 9857090df5aSMatthew Dillon } 9867090df5aSMatthew Dillon 9877090df5aSMatthew Dillon /* 98892da00bbSMatthew Dillon 
* BLST_LEAF_FILL() - allocate specific blocks in leaf bitmap 98992da00bbSMatthew Dillon * 99092da00bbSMatthew Dillon * This routine allocates all blocks in the specified range 99192da00bbSMatthew Dillon * regardless of any existing allocations in that range. Returns 99292da00bbSMatthew Dillon * the number of blocks allocated by the call. 99392da00bbSMatthew Dillon */ 994a7249a6cSAlan Cox static daddr_t 99592da00bbSMatthew Dillon blst_leaf_fill(blmeta_t *scan, daddr_t blk, int count) 99692da00bbSMatthew Dillon { 997a7249a6cSAlan Cox daddr_t nblks; 9984be4fd5dSAlan Cox u_daddr_t mask; 99992da00bbSMatthew Dillon 1000bb4a27f9SMark Johnston mask = bitrange(blk & BLIST_BMAP_MASK, count); 100192da00bbSMatthew Dillon 10024be4fd5dSAlan Cox /* Count the number of blocks that we are allocating. */ 1003bb4a27f9SMark Johnston nblks = bitcount64(scan->bm_bitmap & mask); 100492da00bbSMatthew Dillon 1005bb4a27f9SMark Johnston scan->bm_bitmap &= ~mask; 10064be4fd5dSAlan Cox return (nblks); 100792da00bbSMatthew Dillon } 100892da00bbSMatthew Dillon 100992da00bbSMatthew Dillon /* 101092da00bbSMatthew Dillon * BLIST_META_FILL() - allocate specific blocks at a meta node 101192da00bbSMatthew Dillon * 101292da00bbSMatthew Dillon * This routine allocates the specified range of blocks, 101392da00bbSMatthew Dillon * regardless of any existing allocations in the range. The 101492da00bbSMatthew Dillon * range must be within the extent of this node. Returns the 101592da00bbSMatthew Dillon * number of blocks allocated by the call. 101692da00bbSMatthew Dillon */ 1017a7249a6cSAlan Cox static daddr_t 1018bee93d3cSAlan Cox blst_meta_fill(blmeta_t *scan, daddr_t allocBlk, daddr_t count, u_daddr_t radix) 1019d3783724SAlan Cox { 1020bb4a27f9SMark Johnston daddr_t blk, endBlk, i, nblks, skip; 1021bb4a27f9SMark Johnston int digit; 102292da00bbSMatthew Dillon 1023a396c83aSAlan Cox if (radix == BLIST_BMAP_RADIX) 1024a396c83aSAlan Cox return (blst_leaf_fill(scan, allocBlk, count)); 1025bb4a27f9SMark Johnston 1026bb4a27f9SMark Johnston endBlk = ummin(allocBlk + count, (allocBlk + radix) & -radix); 1027bb4a27f9SMark Johnston radix /= BLIST_META_RADIX; 10282ac0c7c3SAlan Cox skip = radix_to_skip(radix); 1029bee93d3cSAlan Cox blk = allocBlk & -radix; 1030d3783724SAlan Cox nblks = 0; 1031bb4a27f9SMark Johnston while (blk < endBlk) { 1032bb4a27f9SMark Johnston digit = (blk / radix) & BLIST_META_MASK; 1033bb4a27f9SMark Johnston i = 1 + digit * skip; 103492da00bbSMatthew Dillon blk += radix; 1035bb4a27f9SMark Johnston count = ummin(blk, endBlk) - allocBlk; 1036bb4a27f9SMark Johnston nblks += blst_meta_fill(&scan[i], allocBlk, count, radix); 1037bb4a27f9SMark Johnston if (scan[i].bm_bitmap == 0) 1038bb4a27f9SMark Johnston scan->bm_bitmap &= ~((u_daddr_t)1 << digit); 1039bb4a27f9SMark Johnston allocBlk = blk; 104092da00bbSMatthew Dillon } 1041a396c83aSAlan Cox return (nblks); 104292da00bbSMatthew Dillon } 104392da00bbSMatthew Dillon 10447090df5aSMatthew Dillon #ifdef BLIST_DEBUG 10457090df5aSMatthew Dillon 10467090df5aSMatthew Dillon static void 10472ac0c7c3SAlan Cox blst_radix_print(blmeta_t *scan, daddr_t blk, daddr_t radix, int tab) 10487090df5aSMatthew Dillon { 1049bb4a27f9SMark Johnston daddr_t skip; 105053519253SDoug Moore u_daddr_t mask; 1051bb4a27f9SMark Johnston int digit; 10527090df5aSMatthew Dillon 10537090df5aSMatthew Dillon if (radix == BLIST_BMAP_RADIX) { 10547090df5aSMatthew Dillon printf( 1055bb4a27f9SMark Johnston "%*.*s(%08llx,%lld): bitmap %0*llx big=%lld\n", 10567090df5aSMatthew Dillon tab, tab, "", 105792da00bbSMatthew 
Dillon (long long)blk, (long long)radix, 1058bb4a27f9SMark Johnston 1 + (BLIST_BMAP_RADIX - 1) / 4, 1059bb4a27f9SMark Johnston (long long)scan->bm_bitmap, 106092da00bbSMatthew Dillon (long long)scan->bm_bighint 10617090df5aSMatthew Dillon ); 10627090df5aSMatthew Dillon return; 10637090df5aSMatthew Dillon } 10647090df5aSMatthew Dillon 10657090df5aSMatthew Dillon printf( 1066bb4a27f9SMark Johnston "%*.*s(%08llx): subtree (%lld/%lld) bitmap %0*llx big=%lld {\n", 10677090df5aSMatthew Dillon tab, tab, "", 106892da00bbSMatthew Dillon (long long)blk, (long long)radix, 106992da00bbSMatthew Dillon (long long)radix, 1070bb4a27f9SMark Johnston 1 + (BLIST_META_RADIX - 1) / 4, 1071bb4a27f9SMark Johnston (long long)scan->bm_bitmap, 107292da00bbSMatthew Dillon (long long)scan->bm_bighint 10737090df5aSMatthew Dillon ); 10747090df5aSMatthew Dillon 10752ac0c7c3SAlan Cox radix /= BLIST_META_RADIX; 1076bb4a27f9SMark Johnston skip = radix_to_skip(radix); 10777090df5aSMatthew Dillon tab += 4; 10787090df5aSMatthew Dillon 1079bb4a27f9SMark Johnston mask = scan->bm_bitmap; 1080bb4a27f9SMark Johnston /* Examine the nonempty subtree associated with each bit set in mask */ 1081bb4a27f9SMark Johnston do { 108253519253SDoug Moore digit = bitpos(mask); 1083bb4a27f9SMark Johnston blst_radix_print(&scan[1 + digit * skip], blk + digit * radix, 1084bb4a27f9SMark Johnston radix, tab); 108553519253SDoug Moore } while ((mask ^= bitrange(digit, 1)) != 0); 10867090df5aSMatthew Dillon tab -= 4; 10877090df5aSMatthew Dillon 10887090df5aSMatthew Dillon printf( 10897090df5aSMatthew Dillon "%*.*s}\n", 10907090df5aSMatthew Dillon tab, tab, "" 10917090df5aSMatthew Dillon ); 10927090df5aSMatthew Dillon } 10937090df5aSMatthew Dillon 10947090df5aSMatthew Dillon #endif 10957090df5aSMatthew Dillon 10967090df5aSMatthew Dillon #ifdef BLIST_DEBUG 10977090df5aSMatthew Dillon 10987090df5aSMatthew Dillon int 10997090df5aSMatthew Dillon main(int ac, char **av) 11007090df5aSMatthew Dillon { 1101bb4a27f9SMark Johnston int size = BLIST_META_RADIX * BLIST_BMAP_RADIX; 11027090df5aSMatthew Dillon int i; 11037090df5aSMatthew Dillon blist_t bl; 1104d027ed2eSAlan Cox struct sbuf *s; 11057090df5aSMatthew Dillon 11067090df5aSMatthew Dillon for (i = 1; i < ac; ++i) { 11077090df5aSMatthew Dillon const char *ptr = av[i]; 11087090df5aSMatthew Dillon if (*ptr != '-') { 11097090df5aSMatthew Dillon size = strtol(ptr, NULL, 0); 11107090df5aSMatthew Dillon continue; 11117090df5aSMatthew Dillon } 11127090df5aSMatthew Dillon ptr += 2; 11137090df5aSMatthew Dillon fprintf(stderr, "Bad option: %s\n", ptr - 2); 11147090df5aSMatthew Dillon exit(1); 11157090df5aSMatthew Dillon } 1116c8c7ad92SKip Macy bl = blist_create(size, M_WAITOK); 11177090df5aSMatthew Dillon blist_free(bl, 0, size); 11187090df5aSMatthew Dillon 11197090df5aSMatthew Dillon for (;;) { 11207090df5aSMatthew Dillon char buf[1024]; 1121d90bf7d8SAlan Cox long long da = 0; 1122*87ae0686SDoug Moore int count = 0, maxcount = 0; 11237090df5aSMatthew Dillon 11244be4fd5dSAlan Cox printf("%lld/%lld/%lld> ", (long long)blist_avail(bl), 112592da00bbSMatthew Dillon (long long)size, (long long)bl->bl_radix); 11267090df5aSMatthew Dillon fflush(stdout); 11277090df5aSMatthew Dillon if (fgets(buf, sizeof(buf), stdin) == NULL) 11287090df5aSMatthew Dillon break; 11297090df5aSMatthew Dillon switch(buf[0]) { 11307090df5aSMatthew Dillon case 'r': 1131*87ae0686SDoug Moore if (sscanf(buf + 1, "%d", &count) == 1) { 1132d90bf7d8SAlan Cox blist_resize(&bl, count, 1, M_WAITOK); 11337090df5aSMatthew Dillon } else { 11347090df5aSMatthew 
Dillon printf("?\n"); 11357090df5aSMatthew Dillon } 11367090df5aSMatthew Dillon case 'p': 11377090df5aSMatthew Dillon blist_print(bl); 11387090df5aSMatthew Dillon break; 1139d027ed2eSAlan Cox case 's': 1140d027ed2eSAlan Cox s = sbuf_new_auto(); 1141d027ed2eSAlan Cox blist_stats(bl, s); 1142d027ed2eSAlan Cox sbuf_finish(s); 1143d027ed2eSAlan Cox printf("%s", sbuf_data(s)); 1144d027ed2eSAlan Cox sbuf_delete(s); 1145d027ed2eSAlan Cox break; 11467090df5aSMatthew Dillon case 'a': 1147*87ae0686SDoug Moore if (sscanf(buf + 1, "%d%d", &count, &maxcount) == 2) { 1148*87ae0686SDoug Moore daddr_t blk = blist_alloc(bl, &count, maxcount); 1149*87ae0686SDoug Moore printf(" R=%08llx, c=%08d\n", 1150*87ae0686SDoug Moore (long long)blk, count); 11517090df5aSMatthew Dillon } else { 11527090df5aSMatthew Dillon printf("?\n"); 11537090df5aSMatthew Dillon } 11547090df5aSMatthew Dillon break; 11557090df5aSMatthew Dillon case 'f': 1156*87ae0686SDoug Moore if (sscanf(buf + 1, "%llx %d", &da, &count) == 2) { 11577090df5aSMatthew Dillon blist_free(bl, da, count); 11587090df5aSMatthew Dillon } else { 11597090df5aSMatthew Dillon printf("?\n"); 11607090df5aSMatthew Dillon } 11617090df5aSMatthew Dillon break; 116292da00bbSMatthew Dillon case 'l': 1163*87ae0686SDoug Moore if (sscanf(buf + 1, "%llx %d", &da, &count) == 2) { 1164a7249a6cSAlan Cox printf(" n=%jd\n", 1165a7249a6cSAlan Cox (intmax_t)blist_fill(bl, da, count)); 116692da00bbSMatthew Dillon } else { 116792da00bbSMatthew Dillon printf("?\n"); 116892da00bbSMatthew Dillon } 116992da00bbSMatthew Dillon break; 11707090df5aSMatthew Dillon case '?': 11717090df5aSMatthew Dillon case 'h': 11727090df5aSMatthew Dillon puts( 11737090df5aSMatthew Dillon "p -print\n" 1174d027ed2eSAlan Cox "s -stats\n" 1175*87ae0686SDoug Moore "a %d %d -allocate\n" 11767090df5aSMatthew Dillon "f %x %d -free\n" 117792da00bbSMatthew Dillon "l %x %d -fill\n" 11787090df5aSMatthew Dillon "r %d -resize\n" 1179d4808c44SDoug Moore "h/? -help\n" 1180d4808c44SDoug Moore "q -quit" 11817090df5aSMatthew Dillon ); 11827090df5aSMatthew Dillon break; 1183d4808c44SDoug Moore case 'q': 1184d4808c44SDoug Moore break; 11857090df5aSMatthew Dillon default: 11867090df5aSMatthew Dillon printf("?\n"); 11877090df5aSMatthew Dillon break; 11887090df5aSMatthew Dillon } 1189d4808c44SDoug Moore if (buf[0] == 'q') 1190d4808c44SDoug Moore break; 11917090df5aSMatthew Dillon } 11927090df5aSMatthew Dillon return (0); 11937090df5aSMatthew Dillon } 11947090df5aSMatthew Dillon 11957090df5aSMatthew Dillon #endif 1196
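
/*
 * Example usage (an illustrative sketch, not part of the original code; it
 * simply mirrors how the debug main() above drives the allocator).  Kept
 * under #if 0 so that it is never compiled.
 */
#if 0
static void
blist_example(void)
{
	blist_t bl;
	daddr_t blk;
	int count, maxcount;

	bl = blist_create(2048, M_WAITOK);	/* all blocks start allocated */
	blist_free(bl, 0, 2048);		/* mark the whole range free */

	count = 16;				/* minimum acceptable run */
	maxcount = 32;				/* preferred run length */
	blk = blist_alloc(bl, &count, maxcount);
	if (blk != SWAPBLK_NONE)
		blist_free(bl, blk, count);	/* count holds the granted size */
	blist_destroy(bl);
}
#endif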