106b4bf3eSWarner Losh /*- 251369649SPedro F. Giffuni * SPDX-License-Identifier: BSD-3-Clause 351369649SPedro F. Giffuni * 406b4bf3eSWarner Losh * Copyright (c) 1998 Matthew Dillon. All Rights Reserved. 506b4bf3eSWarner Losh * Redistribution and use in source and binary forms, with or without 606b4bf3eSWarner Losh * modification, are permitted provided that the following conditions 706b4bf3eSWarner Losh * are met: 806b4bf3eSWarner Losh * 1. Redistributions of source code must retain the above copyright 906b4bf3eSWarner Losh * notice, this list of conditions and the following disclaimer. 1006b4bf3eSWarner Losh * 2. Redistributions in binary form must reproduce the above copyright 1106b4bf3eSWarner Losh * notice, this list of conditions and the following disclaimer in the 1206b4bf3eSWarner Losh * documentation and/or other materials provided with the distribution. 1369a28758SEd Maste * 3. Neither the name of the University nor the names of its contributors 1406b4bf3eSWarner Losh * may be used to endorse or promote products derived from this software 1506b4bf3eSWarner Losh * without specific prior written permission. 1606b4bf3eSWarner Losh * 1706b4bf3eSWarner Losh * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS 1806b4bf3eSWarner Losh * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 1906b4bf3eSWarner Losh * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 2006b4bf3eSWarner Losh * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY 2106b4bf3eSWarner Losh * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 2206b4bf3eSWarner Losh * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE 2306b4bf3eSWarner Losh * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 2406b4bf3eSWarner Losh * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 2506b4bf3eSWarner Losh * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 2606b4bf3eSWarner Losh * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 2706b4bf3eSWarner Losh * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 2806b4bf3eSWarner Losh */ 297090df5aSMatthew Dillon /* 307090df5aSMatthew Dillon * BLIST.C - Bitmap allocator/deallocator, using a radix tree with hinting 317090df5aSMatthew Dillon * 327090df5aSMatthew Dillon * This module implements a general bitmap allocator/deallocator. The 337090df5aSMatthew Dillon * allocator eats around 2 bits per 'block'. The module does not 34f565f395SEitan Adler * try to interpret the meaning of a 'block' other than to return 357090df5aSMatthew Dillon * SWAPBLK_NONE on an allocation failure. 367090df5aSMatthew Dillon * 37ec371b57SAlan Cox * A radix tree controls access to pieces of the bitmap, and includes 38ec371b57SAlan Cox * auxiliary information at each interior node about the availabilty of 39ec371b57SAlan Cox * contiguous free blocks in the subtree rooted at that node. Two radix 40ec371b57SAlan Cox * constants are involved: one for the size of the bitmaps contained in the 41ec371b57SAlan Cox * leaf nodes (BLIST_BMAP_RADIX), and one for the number of descendents of 42ec371b57SAlan Cox * each of the meta (interior) nodes (BLIST_META_RADIX). Each subtree is 43ec371b57SAlan Cox * associated with a range of blocks. The root of any subtree stores a 44ec371b57SAlan Cox * hint field that defines an upper bound on the size of the largest 45ec371b57SAlan Cox * allocation that can begin in the associated block range. 
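 *
 *	To make the two radix constants concrete (an illustration only; the
 *	authoritative values are the BLIST_BMAP_RADIX and BLIST_META_RADIX
 *	definitions in sys/blist.h): with a 64-bit bitmap per leaf and a
 *	meta radix of 16, one leaf covers 64 blocks, a meta node one level
 *	up covers 16 * 64 = 1024 blocks, and a meta node two levels up
 *	covers 16 * 1024 = 16384 blocks, so each additional level multiplies
 *	the covered range by BLIST_META_RADIX.
 *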
A hint is an
46ec371b57SAlan Cox * upper bound on a potential allocation, but not necessarily a tight upper
47ec371b57SAlan Cox * bound.
487090df5aSMatthew Dillon *
49bb4a27f9SMark Johnston * The bitmap field in each node directs the search for available blocks.
50bb4a27f9SMark Johnston * For a leaf node, a bit is set if the corresponding block is free. For a
51bb4a27f9SMark Johnston * meta node, a bit is set if the corresponding subtree contains a free
52bb4a27f9SMark Johnston * block somewhere within it. The search at a meta node considers only
53bb4a27f9SMark Johnston * children of that node that represent a range that includes a free block.
547090df5aSMatthew Dillon *
557090df5aSMatthew Dillon * The hinting greatly increases code efficiency for allocations while
567090df5aSMatthew Dillon * the general radix structure optimizes both allocations and frees. The
577090df5aSMatthew Dillon * radix tree should be able to operate well no matter how much
587090df5aSMatthew Dillon * fragmentation there is and no matter how large a bitmap is used.
597090df5aSMatthew Dillon *
6051dc4feaSSergey Kandaurov * The blist code wires all necessary memory at creation time. Neither
6151dc4feaSSergey Kandaurov * allocations nor frees require interaction with the memory subsystem.
62bb4a27f9SMark Johnston * The non-blocking nature of allocations and frees is required by swap
63bb4a27f9SMark Johnston * code (vm/swap_pager.c).
647090df5aSMatthew Dillon *
65bb4a27f9SMark Johnston * LAYOUT: The radix tree is laid out recursively using a linear array.
66bb4a27f9SMark Johnston * Each meta node is immediately followed (laid out sequentially in
67bb4a27f9SMark Johnston * memory) by BLIST_META_RADIX lower level nodes. This is a recursive
68bb4a27f9SMark Johnston * structure but one that can be easily scanned through a very simple
69bb4a27f9SMark Johnston * 'skip' calculation. The memory allocation is only large enough to
70bb4a27f9SMark Johnston * cover the number of blocks requested at creation time. Nodes that
71bb4a27f9SMark Johnston * represent blocks beyond that limit, nodes that would never be read
72bb4a27f9SMark Johnston * or written, are not allocated, so that the last of the
73bb4a27f9SMark Johnston * BLIST_META_RADIX lower level nodes of some nodes may not be
74bb4a27f9SMark Johnston * allocated.
757090df5aSMatthew Dillon *
76f565f395SEitan Adler * NOTE: the allocator cannot currently allocate more than
777090df5aSMatthew Dillon * BLIST_BMAP_RADIX blocks per call. It will panic with 'allocation too
787090df5aSMatthew Dillon * large' if you try. This is an area that could use improvement. The
797090df5aSMatthew Dillon * radix is large enough that this restriction does not affect the swap
80b411efa4SAlan Cox * system, though. Currently only the allocation code is affected by
817090df5aSMatthew Dillon * this algorithmic unfeature. The freeing code can handle arbitrary
827090df5aSMatthew Dillon * ranges.
837090df5aSMatthew Dillon *
847090df5aSMatthew Dillon * This code can be compiled stand-alone for debugging.
857090df5aSMatthew Dillon */
867090df5aSMatthew Dillon
87677b542eSDavid E. O'Brien #include <sys/cdefs.h>
88677b542eSDavid E. O'Brien __FBSDID("$FreeBSD$");
89677b542eSDavid E.
O'Brien 90c4473420SPeter Wemm #ifdef _KERNEL 917090df5aSMatthew Dillon 927090df5aSMatthew Dillon #include <sys/param.h> 937090df5aSMatthew Dillon #include <sys/systm.h> 947090df5aSMatthew Dillon #include <sys/lock.h> 957090df5aSMatthew Dillon #include <sys/kernel.h> 967090df5aSMatthew Dillon #include <sys/blist.h> 977090df5aSMatthew Dillon #include <sys/malloc.h> 98d027ed2eSAlan Cox #include <sys/sbuf.h> 990cddd8f0SMatthew Dillon #include <sys/proc.h> 10023955314SAlfred Perlstein #include <sys/mutex.h> 1017090df5aSMatthew Dillon 1027090df5aSMatthew Dillon #else 1037090df5aSMatthew Dillon 1047090df5aSMatthew Dillon #ifndef BLIST_NO_DEBUG 1057090df5aSMatthew Dillon #define BLIST_DEBUG 1067090df5aSMatthew Dillon #endif 1077090df5aSMatthew Dillon 108bb4a27f9SMark Johnston #include <sys/errno.h> 1097090df5aSMatthew Dillon #include <sys/types.h> 110d90bf7d8SAlan Cox #include <sys/malloc.h> 111d027ed2eSAlan Cox #include <sys/sbuf.h> 1127090df5aSMatthew Dillon #include <stdio.h> 1137090df5aSMatthew Dillon #include <string.h> 1148eefcd40SAlan Cox #include <stddef.h> 1157090df5aSMatthew Dillon #include <stdlib.h> 1167090df5aSMatthew Dillon #include <stdarg.h> 117d3783724SAlan Cox #include <stdbool.h> 1187090df5aSMatthew Dillon 1194be4fd5dSAlan Cox #define bitcount64(x) __bitcount64((uint64_t)(x)) 12092da00bbSMatthew Dillon #define malloc(a,b,c) calloc(a, 1) 1217090df5aSMatthew Dillon #define free(a,b) free(a) 122bb4a27f9SMark Johnston #define ummin(a,b) ((a) < (b) ? (a) : (b)) 1237090df5aSMatthew Dillon 1247090df5aSMatthew Dillon #include <sys/blist.h> 1257090df5aSMatthew Dillon 1267090df5aSMatthew Dillon void panic(const char *ctl, ...); 1277090df5aSMatthew Dillon 1287090df5aSMatthew Dillon #endif 1297090df5aSMatthew Dillon 1307090df5aSMatthew Dillon /* 1317090df5aSMatthew Dillon * static support functions 1327090df5aSMatthew Dillon */ 133ec371b57SAlan Cox static daddr_t blst_leaf_alloc(blmeta_t *scan, daddr_t blk, int count); 134bee93d3cSAlan Cox static daddr_t blst_meta_alloc(blmeta_t *scan, daddr_t cursor, daddr_t count, 135bee93d3cSAlan Cox u_daddr_t radix); 1367090df5aSMatthew Dillon static void blst_leaf_free(blmeta_t *scan, daddr_t relblk, int count); 1377090df5aSMatthew Dillon static void blst_meta_free(blmeta_t *scan, daddr_t freeBlk, daddr_t count, 138bee93d3cSAlan Cox u_daddr_t radix); 1397090df5aSMatthew Dillon static void blst_copy(blmeta_t *scan, daddr_t blk, daddr_t radix, 1402ac0c7c3SAlan Cox blist_t dest, daddr_t count); 141a7249a6cSAlan Cox static daddr_t blst_leaf_fill(blmeta_t *scan, daddr_t blk, int count); 142a7249a6cSAlan Cox static daddr_t blst_meta_fill(blmeta_t *scan, daddr_t allocBlk, daddr_t count, 143bee93d3cSAlan Cox u_daddr_t radix); 144c4473420SPeter Wemm #ifndef _KERNEL 145d3783724SAlan Cox static void blst_radix_print(blmeta_t *scan, daddr_t blk, daddr_t radix, 1462ac0c7c3SAlan Cox int tab); 1477090df5aSMatthew Dillon #endif 1487090df5aSMatthew Dillon 149c4473420SPeter Wemm #ifdef _KERNEL 1507090df5aSMatthew Dillon static MALLOC_DEFINE(M_SWAP, "SWAP", "Swap space"); 1517090df5aSMatthew Dillon #endif 1527090df5aSMatthew Dillon 153ec371b57SAlan Cox _Static_assert(BLIST_BMAP_RADIX % BLIST_META_RADIX == 0, 154ec371b57SAlan Cox "radix divisibility error"); 155ec371b57SAlan Cox #define BLIST_BMAP_MASK (BLIST_BMAP_RADIX - 1) 156d027ed2eSAlan Cox #define BLIST_META_MASK (BLIST_META_RADIX - 1) 157ba98e6a2SAlan Cox 1587090df5aSMatthew Dillon /* 1592ac0c7c3SAlan Cox * For a subtree that can represent the state of up to 'radix' blocks, the 1602ac0c7c3SAlan Cox * number of leaf 
nodes of the subtree is L=radix/BLIST_BMAP_RADIX. If 'm' 1612ac0c7c3SAlan Cox * is short for BLIST_META_RADIX, then for a tree of height h with L=m**h 1622ac0c7c3SAlan Cox * leaf nodes, the total number of tree nodes is 1 + m + m**2 + ... + m**h, 1632ac0c7c3SAlan Cox * or, equivalently, (m**(h+1)-1)/(m-1). This quantity is called 'skip' 1642ac0c7c3SAlan Cox * in the 'meta' functions that process subtrees. Since integer division 1652ac0c7c3SAlan Cox * discards remainders, we can express this computation as 1662ac0c7c3SAlan Cox * skip = (m * m**h) / (m - 1) 167ba98e6a2SAlan Cox * skip = (m * (radix / BLIST_BMAP_RADIX)) / (m - 1) 168ba98e6a2SAlan Cox * and since m divides BLIST_BMAP_RADIX, we can simplify further to 169ba98e6a2SAlan Cox * skip = (radix / (BLIST_BMAP_RADIX / m)) / (m - 1) 170ba98e6a2SAlan Cox * skip = radix / ((BLIST_BMAP_RADIX / m) * (m - 1)) 171ba98e6a2SAlan Cox * so that simple integer division by a constant can safely be used for the 172ba98e6a2SAlan Cox * calculation. 1732ac0c7c3SAlan Cox */ 1742ac0c7c3SAlan Cox static inline daddr_t 1752ac0c7c3SAlan Cox radix_to_skip(daddr_t radix) 1762ac0c7c3SAlan Cox { 1772ac0c7c3SAlan Cox 1782ac0c7c3SAlan Cox return (radix / 179d027ed2eSAlan Cox ((BLIST_BMAP_RADIX / BLIST_META_RADIX) * BLIST_META_MASK)); 180d027ed2eSAlan Cox } 181d027ed2eSAlan Cox 182d027ed2eSAlan Cox /* 183bb4a27f9SMark Johnston * Provide a mask with count bits set, starting as position n. 184bb4a27f9SMark Johnston */ 185bb4a27f9SMark Johnston static inline u_daddr_t 186bb4a27f9SMark Johnston bitrange(int n, int count) 187bb4a27f9SMark Johnston { 188bb4a27f9SMark Johnston 189bb4a27f9SMark Johnston return (((u_daddr_t)-1 << n) & 190bb4a27f9SMark Johnston ((u_daddr_t)-1 >> (BLIST_BMAP_RADIX - (n + count)))); 191bb4a27f9SMark Johnston } 192bb4a27f9SMark Johnston 193bb4a27f9SMark Johnston 194bb4a27f9SMark Johnston /* 195d027ed2eSAlan Cox * Use binary search, or a faster method, to find the 1 bit in a u_daddr_t. 196d027ed2eSAlan Cox * Assumes that the argument has only one bit set. 197d027ed2eSAlan Cox */ 198d027ed2eSAlan Cox static inline int 199d027ed2eSAlan Cox bitpos(u_daddr_t mask) 200d027ed2eSAlan Cox { 201d027ed2eSAlan Cox int hi, lo, mid; 202d027ed2eSAlan Cox 203d027ed2eSAlan Cox switch (sizeof(mask)) { 204d027ed2eSAlan Cox #ifdef HAVE_INLINE_FFSLL 205d027ed2eSAlan Cox case sizeof(long long): 206d027ed2eSAlan Cox return (ffsll(mask) - 1); 207d027ed2eSAlan Cox #endif 208d027ed2eSAlan Cox default: 209d027ed2eSAlan Cox lo = 0; 210d027ed2eSAlan Cox hi = BLIST_BMAP_RADIX; 211d027ed2eSAlan Cox while (lo + 1 < hi) { 212d027ed2eSAlan Cox mid = (lo + hi) >> 1; 213d027ed2eSAlan Cox if ((mask >> mid) != 0) 214d027ed2eSAlan Cox lo = mid; 215d027ed2eSAlan Cox else 216d027ed2eSAlan Cox hi = mid; 217d027ed2eSAlan Cox } 218d027ed2eSAlan Cox return (lo); 219d027ed2eSAlan Cox } 2202ac0c7c3SAlan Cox } 2212ac0c7c3SAlan Cox 2222ac0c7c3SAlan Cox /* 2237090df5aSMatthew Dillon * blist_create() - create a blist capable of handling up to the specified 2247090df5aSMatthew Dillon * number of blocks 2257090df5aSMatthew Dillon * 226f565f395SEitan Adler * blocks - must be greater than 0 227c8c7ad92SKip Macy * flags - malloc flags 2287090df5aSMatthew Dillon * 2297090df5aSMatthew Dillon * The smallest blist consists of a single leaf node capable of 2307090df5aSMatthew Dillon * managing BLIST_BMAP_RADIX blocks. 
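 *
 *	As a worked example of the sizing loop below (assuming, purely for
 *	illustration, BLIST_BMAP_RADIX == 64 and BLIST_META_RADIX == 16),
 *	a request for 2048 blocks needs 2048 / 64 = 32 leaves,
 *	32 / 16 = 2 first-level meta nodes and 1 root node, so 35 nodes
 *	are allocated and bl_radix ends up as 16384, the range coverable
 *	by the root.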
2317090df5aSMatthew Dillon */ 2327090df5aSMatthew Dillon blist_t 233c8c7ad92SKip Macy blist_create(daddr_t blocks, int flags) 2347090df5aSMatthew Dillon { 2357090df5aSMatthew Dillon blist_t bl; 236bb4a27f9SMark Johnston u_daddr_t nodes, radix; 2377090df5aSMatthew Dillon 238ce9eea64SMark Johnston if (blocks == 0) 239ce9eea64SMark Johnston panic("invalid block count"); 240ce9eea64SMark Johnston 2417090df5aSMatthew Dillon /* 242ce9eea64SMark Johnston * Calculate the radix and node count used for scanning. 2437090df5aSMatthew Dillon */ 244bb4a27f9SMark Johnston nodes = 1; 2457090df5aSMatthew Dillon radix = BLIST_BMAP_RADIX; 246bb4a27f9SMark Johnston while (radix <= blocks) { 247bb4a27f9SMark Johnston nodes += 1 + (blocks - 1) / radix; 24857e6d29bSMatthew Dillon radix *= BLIST_META_RADIX; 2497090df5aSMatthew Dillon } 2507090df5aSMatthew Dillon 25103ca2137SAlan Cox bl = malloc(offsetof(struct blist, bl_root[nodes]), M_SWAP, flags | 25203ca2137SAlan Cox M_ZERO); 253015d7db6SAlan Cox if (bl == NULL) 254015d7db6SAlan Cox return (NULL); 2557090df5aSMatthew Dillon 2567090df5aSMatthew Dillon bl->bl_blocks = blocks; 2577090df5aSMatthew Dillon bl->bl_radix = radix; 2587090df5aSMatthew Dillon 2597090df5aSMatthew Dillon #if defined(BLIST_DEBUG) 2607090df5aSMatthew Dillon printf( 26192da00bbSMatthew Dillon "BLIST representing %lld blocks (%lld MB of swap)" 26292da00bbSMatthew Dillon ", requiring %lldK of ram\n", 26392da00bbSMatthew Dillon (long long)bl->bl_blocks, 26492da00bbSMatthew Dillon (long long)bl->bl_blocks * 4 / 1024, 265015d7db6SAlan Cox (long long)(nodes * sizeof(blmeta_t) + 1023) / 1024 2667090df5aSMatthew Dillon ); 26792da00bbSMatthew Dillon printf("BLIST raw radix tree contains %lld records\n", 268015d7db6SAlan Cox (long long)nodes); 2697090df5aSMatthew Dillon #endif 2707090df5aSMatthew Dillon 2717090df5aSMatthew Dillon return (bl); 2727090df5aSMatthew Dillon } 2737090df5aSMatthew Dillon 2747090df5aSMatthew Dillon void 2757090df5aSMatthew Dillon blist_destroy(blist_t bl) 2767090df5aSMatthew Dillon { 2778eefcd40SAlan Cox 2787090df5aSMatthew Dillon free(bl, M_SWAP); 2797090df5aSMatthew Dillon } 2807090df5aSMatthew Dillon 2817090df5aSMatthew Dillon /* 2827090df5aSMatthew Dillon * blist_alloc() - reserve space in the block bitmap. Return the base 2837090df5aSMatthew Dillon * of a contiguous region or SWAPBLK_NONE if space could 2847090df5aSMatthew Dillon * not be allocated. 2857090df5aSMatthew Dillon */ 2867090df5aSMatthew Dillon daddr_t 2877090df5aSMatthew Dillon blist_alloc(blist_t bl, daddr_t count) 2887090df5aSMatthew Dillon { 2894be4fd5dSAlan Cox daddr_t blk; 2907090df5aSMatthew Dillon 291bb4a27f9SMark Johnston if (count > BLIST_MAX_ALLOC) 292bb4a27f9SMark Johnston panic("allocation too large"); 293bb4a27f9SMark Johnston 294d4e3484bSAlan Cox /* 295d4e3484bSAlan Cox * This loop iterates at most twice. An allocation failure in the 296d4e3484bSAlan Cox * first iteration leads to a second iteration only if the cursor was 297d4e3484bSAlan Cox * non-zero. When the cursor is zero, an allocation failure will 298749cdf6fSAlan Cox * stop further iterations. 
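	 * In effect this gives a next-fit policy: for example, with
	 * bl_cursor at 1000, the first pass scans forward from block 1000
	 * and, only if that fails, a second pass restarts the scan from
	 * block 0 before SWAPBLK_NONE is returned.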
299d4e3484bSAlan Cox */
300749cdf6fSAlan Cox for (;;) {
301bee93d3cSAlan Cox blk = blst_meta_alloc(bl->bl_root, bl->bl_cursor, count,
302bee93d3cSAlan Cox bl->bl_radix);
303d4e3484bSAlan Cox if (blk != SWAPBLK_NONE) {
304bb4a27f9SMark Johnston bl->bl_avail -= count;
305d4e3484bSAlan Cox bl->bl_cursor = blk + count;
306a0ae476bSAlan Cox if (bl->bl_cursor == bl->bl_blocks)
307a0ae476bSAlan Cox bl->bl_cursor = 0;
3087090df5aSMatthew Dillon return (blk);
309749cdf6fSAlan Cox } else if (bl->bl_cursor == 0)
310749cdf6fSAlan Cox return (SWAPBLK_NONE);
311d4e3484bSAlan Cox bl->bl_cursor = 0;
3127090df5aSMatthew Dillon }
3134be4fd5dSAlan Cox }
3144be4fd5dSAlan Cox
3154be4fd5dSAlan Cox /*
3164be4fd5dSAlan Cox * blist_avail() - return the number of free blocks.
3174be4fd5dSAlan Cox */
3184be4fd5dSAlan Cox daddr_t
3194be4fd5dSAlan Cox blist_avail(blist_t bl)
3204be4fd5dSAlan Cox {
3214be4fd5dSAlan Cox
322bb4a27f9SMark Johnston return (bl->bl_avail);
3234be4fd5dSAlan Cox }
3247090df5aSMatthew Dillon
3257090df5aSMatthew Dillon /*
3267090df5aSMatthew Dillon * blist_free() - free up space in the block bitmap. Panic if an
3277090df5aSMatthew Dillon * inconsistency is found.
3287090df5aSMatthew Dillon *
3297090df5aSMatthew Dillon */
3307090df5aSMatthew Dillon void
3317090df5aSMatthew Dillon blist_free(blist_t bl, daddr_t blkno, daddr_t count)
3327090df5aSMatthew Dillon {
333a396c83aSAlan Cox
334bb4a27f9SMark Johnston if (blkno < 0 || blkno + count > bl->bl_blocks)
335bb4a27f9SMark Johnston panic("freeing invalid range");
336bee93d3cSAlan Cox blst_meta_free(bl->bl_root, blkno, count, bl->bl_radix);
337bb4a27f9SMark Johnston bl->bl_avail += count;
3387090df5aSMatthew Dillon }
3397090df5aSMatthew Dillon
3407090df5aSMatthew Dillon /*
34192da00bbSMatthew Dillon * blist_fill() - mark a region in the block bitmap as off-limits
34292da00bbSMatthew Dillon * to the allocator (i.e. allocate it), ignoring any
34392da00bbSMatthew Dillon * existing allocations. Return the number of blocks
34492da00bbSMatthew Dillon * actually filled that were free before the call.
34592da00bbSMatthew Dillon */
346a7249a6cSAlan Cox daddr_t
34792da00bbSMatthew Dillon blist_fill(blist_t bl, daddr_t blkno, daddr_t count)
34892da00bbSMatthew Dillon {
349bb4a27f9SMark Johnston daddr_t filled;
35092da00bbSMatthew Dillon
351bb4a27f9SMark Johnston if (blkno < 0 || blkno + count > bl->bl_blocks)
352bb4a27f9SMark Johnston panic("filling invalid range");
353bb4a27f9SMark Johnston filled = blst_meta_fill(bl->bl_root, blkno, count, bl->bl_radix);
354bb4a27f9SMark Johnston bl->bl_avail -= filled;
355bb4a27f9SMark Johnston return (filled);
35692da00bbSMatthew Dillon }
35792da00bbSMatthew Dillon
35892da00bbSMatthew Dillon /*
3597090df5aSMatthew Dillon * blist_resize() - resize an existing radix tree to handle the
3607090df5aSMatthew Dillon * specified number of blocks. This will reallocate
3617090df5aSMatthew Dillon * the tree and transfer the previous bitmap to the new
3627090df5aSMatthew Dillon * one. When extending the tree you can specify whether
3637090df5aSMatthew Dillon * the new blocks are to be left allocated or freed.
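 *
 *	A minimal usage sketch (illustrative only, not taken from any
 *	in-tree caller; the sizes are arbitrary):
 *
 *		blist_t bl = blist_create(1024, M_WAITOK);
 *		...
 *		blist_resize(&bl, 4096, 1, M_WAITOK);
 *
 *	grows the blist to 4096 blocks and, because 'freenew' is nonzero,
 *	marks the added blocks free.  The old blist is destroyed and *pbl
 *	is repointed at the new one.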
3647090df5aSMatthew Dillon */ 3657090df5aSMatthew Dillon void 366c8c7ad92SKip Macy blist_resize(blist_t *pbl, daddr_t count, int freenew, int flags) 3677090df5aSMatthew Dillon { 368c8c7ad92SKip Macy blist_t newbl = blist_create(count, flags); 3697090df5aSMatthew Dillon blist_t save = *pbl; 3707090df5aSMatthew Dillon 3717090df5aSMatthew Dillon *pbl = newbl; 3727090df5aSMatthew Dillon if (count > save->bl_blocks) 3737090df5aSMatthew Dillon count = save->bl_blocks; 3742ac0c7c3SAlan Cox blst_copy(save->bl_root, 0, save->bl_radix, newbl, count); 3757090df5aSMatthew Dillon 3767090df5aSMatthew Dillon /* 3777090df5aSMatthew Dillon * If resizing upwards, should we free the new space or not? 3787090df5aSMatthew Dillon */ 3797090df5aSMatthew Dillon if (freenew && count < newbl->bl_blocks) { 3807090df5aSMatthew Dillon blist_free(newbl, count, newbl->bl_blocks - count); 3817090df5aSMatthew Dillon } 3827090df5aSMatthew Dillon blist_destroy(save); 3837090df5aSMatthew Dillon } 3847090df5aSMatthew Dillon 3857090df5aSMatthew Dillon #ifdef BLIST_DEBUG 3867090df5aSMatthew Dillon 3877090df5aSMatthew Dillon /* 3887090df5aSMatthew Dillon * blist_print() - dump radix tree 3897090df5aSMatthew Dillon */ 3907090df5aSMatthew Dillon void 3917090df5aSMatthew Dillon blist_print(blist_t bl) 3927090df5aSMatthew Dillon { 393bb4a27f9SMark Johnston printf("BLIST avail = %jd, cursor = %08jx {\n", 394bb4a27f9SMark Johnston (uintmax_t)bl->bl_avail, (uintmax_t)bl->bl_cursor); 395bb4a27f9SMark Johnston 396bb4a27f9SMark Johnston if (bl->bl_root->bm_bitmap != 0) 3972ac0c7c3SAlan Cox blst_radix_print(bl->bl_root, 0, bl->bl_radix, 4); 3987090df5aSMatthew Dillon printf("}\n"); 3997090df5aSMatthew Dillon } 4007090df5aSMatthew Dillon 4017090df5aSMatthew Dillon #endif 4027090df5aSMatthew Dillon 403d027ed2eSAlan Cox static const u_daddr_t fib[] = { 404d027ed2eSAlan Cox 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584, 405d027ed2eSAlan Cox 4181, 6765, 10946, 17711, 28657, 46368, 75025, 121393, 196418, 317811, 406d027ed2eSAlan Cox 514229, 832040, 1346269, 2178309, 3524578, 407d027ed2eSAlan Cox }; 408d027ed2eSAlan Cox 409d027ed2eSAlan Cox /* 410d027ed2eSAlan Cox * Use 'gap' to describe a maximal range of unallocated blocks/bits. 411d027ed2eSAlan Cox */ 412d027ed2eSAlan Cox struct gap_stats { 413d027ed2eSAlan Cox daddr_t start; /* current gap start, or SWAPBLK_NONE */ 414d027ed2eSAlan Cox daddr_t num; /* number of gaps observed */ 415d027ed2eSAlan Cox daddr_t max; /* largest gap size */ 416d027ed2eSAlan Cox daddr_t avg; /* average gap size */ 417d027ed2eSAlan Cox daddr_t err; /* sum - num * avg */ 418d027ed2eSAlan Cox daddr_t histo[nitems(fib)]; /* # gaps in each size range */ 419d027ed2eSAlan Cox int max_bucket; /* last histo elt with nonzero val */ 420d027ed2eSAlan Cox }; 421d027ed2eSAlan Cox 422d027ed2eSAlan Cox /* 423d027ed2eSAlan Cox * gap_stats_counting() - is the state 'counting 1 bits'? 424d027ed2eSAlan Cox * or 'skipping 0 bits'? 
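 *
 *	The state flips on each call to update_gap_stats(): when not
 *	counting, the call records the position of the first free block of
 *	a run as the start of a gap; when counting, the call is made at the
 *	first allocated block after the run and closes a gap of
 *	(posn - start) blocks.  A run of three consecutive free blocks, for
 *	example, is recorded as a single gap of size 3.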
425d027ed2eSAlan Cox */ 426d027ed2eSAlan Cox static inline bool 427d027ed2eSAlan Cox gap_stats_counting(const struct gap_stats *stats) 428d027ed2eSAlan Cox { 429d027ed2eSAlan Cox 430d027ed2eSAlan Cox return (stats->start != SWAPBLK_NONE); 431d027ed2eSAlan Cox } 432d027ed2eSAlan Cox 433d027ed2eSAlan Cox /* 434d027ed2eSAlan Cox * init_gap_stats() - initialize stats on gap sizes 435d027ed2eSAlan Cox */ 436d027ed2eSAlan Cox static inline void 437d027ed2eSAlan Cox init_gap_stats(struct gap_stats *stats) 438d027ed2eSAlan Cox { 439d027ed2eSAlan Cox 440d027ed2eSAlan Cox bzero(stats, sizeof(*stats)); 441d027ed2eSAlan Cox stats->start = SWAPBLK_NONE; 442d027ed2eSAlan Cox } 443d027ed2eSAlan Cox 444d027ed2eSAlan Cox /* 445d027ed2eSAlan Cox * update_gap_stats() - update stats on gap sizes 446d027ed2eSAlan Cox */ 447d027ed2eSAlan Cox static void 448d027ed2eSAlan Cox update_gap_stats(struct gap_stats *stats, daddr_t posn) 449d027ed2eSAlan Cox { 450d027ed2eSAlan Cox daddr_t size; 451d027ed2eSAlan Cox int hi, lo, mid; 452d027ed2eSAlan Cox 453d027ed2eSAlan Cox if (!gap_stats_counting(stats)) { 454d027ed2eSAlan Cox stats->start = posn; 455d027ed2eSAlan Cox return; 456d027ed2eSAlan Cox } 457d027ed2eSAlan Cox size = posn - stats->start; 458d027ed2eSAlan Cox stats->start = SWAPBLK_NONE; 459d027ed2eSAlan Cox if (size > stats->max) 460d027ed2eSAlan Cox stats->max = size; 461d027ed2eSAlan Cox 462d027ed2eSAlan Cox /* 463d027ed2eSAlan Cox * Find the fibonacci range that contains size, 464d027ed2eSAlan Cox * expecting to find it in an early range. 465d027ed2eSAlan Cox */ 466d027ed2eSAlan Cox lo = 0; 467d027ed2eSAlan Cox hi = 1; 468d027ed2eSAlan Cox while (hi < nitems(fib) && fib[hi] <= size) { 469d027ed2eSAlan Cox lo = hi; 470d027ed2eSAlan Cox hi *= 2; 471d027ed2eSAlan Cox } 472d027ed2eSAlan Cox if (hi >= nitems(fib)) 473d027ed2eSAlan Cox hi = nitems(fib); 474d027ed2eSAlan Cox while (lo + 1 != hi) { 475d027ed2eSAlan Cox mid = (lo + hi) >> 1; 476d027ed2eSAlan Cox if (fib[mid] <= size) 477d027ed2eSAlan Cox lo = mid; 478d027ed2eSAlan Cox else 479d027ed2eSAlan Cox hi = mid; 480d027ed2eSAlan Cox } 481d027ed2eSAlan Cox stats->histo[lo]++; 482d027ed2eSAlan Cox if (lo > stats->max_bucket) 483d027ed2eSAlan Cox stats->max_bucket = lo; 484d027ed2eSAlan Cox stats->err += size - stats->avg; 485d027ed2eSAlan Cox stats->num++; 486d027ed2eSAlan Cox stats->avg += stats->err / stats->num; 487d027ed2eSAlan Cox stats->err %= stats->num; 488d027ed2eSAlan Cox } 489d027ed2eSAlan Cox 490d027ed2eSAlan Cox /* 491d027ed2eSAlan Cox * dump_gap_stats() - print stats on gap sizes 492d027ed2eSAlan Cox */ 493d027ed2eSAlan Cox static inline void 494d027ed2eSAlan Cox dump_gap_stats(const struct gap_stats *stats, struct sbuf *s) 495d027ed2eSAlan Cox { 496d027ed2eSAlan Cox int i; 497d027ed2eSAlan Cox 498d027ed2eSAlan Cox sbuf_printf(s, "number of maximal free ranges: %jd\n", 499d027ed2eSAlan Cox (intmax_t)stats->num); 500d027ed2eSAlan Cox sbuf_printf(s, "largest free range: %jd\n", (intmax_t)stats->max); 501d027ed2eSAlan Cox sbuf_printf(s, "average maximal free range size: %jd\n", 502d027ed2eSAlan Cox (intmax_t)stats->avg); 503d027ed2eSAlan Cox sbuf_printf(s, "number of maximal free ranges of different sizes:\n"); 504d027ed2eSAlan Cox sbuf_printf(s, " count | size range\n"); 505d027ed2eSAlan Cox sbuf_printf(s, " ----- | ----------\n"); 506d027ed2eSAlan Cox for (i = 0; i < stats->max_bucket; i++) { 507d027ed2eSAlan Cox if (stats->histo[i] != 0) { 508d027ed2eSAlan Cox sbuf_printf(s, "%20jd | ", 509d027ed2eSAlan Cox (intmax_t)stats->histo[i]); 
510d027ed2eSAlan Cox if (fib[i] != fib[i + 1] - 1) 511d027ed2eSAlan Cox sbuf_printf(s, "%jd to %jd\n", (intmax_t)fib[i], 512d027ed2eSAlan Cox (intmax_t)fib[i + 1] - 1); 513d027ed2eSAlan Cox else 514d027ed2eSAlan Cox sbuf_printf(s, "%jd\n", (intmax_t)fib[i]); 515d027ed2eSAlan Cox } 516d027ed2eSAlan Cox } 517d027ed2eSAlan Cox sbuf_printf(s, "%20jd | ", (intmax_t)stats->histo[i]); 518d027ed2eSAlan Cox if (stats->histo[i] > 1) 519d027ed2eSAlan Cox sbuf_printf(s, "%jd to %jd\n", (intmax_t)fib[i], 520d027ed2eSAlan Cox (intmax_t)stats->max); 521d027ed2eSAlan Cox else 522d027ed2eSAlan Cox sbuf_printf(s, "%jd\n", (intmax_t)stats->max); 523d027ed2eSAlan Cox } 524d027ed2eSAlan Cox 525d027ed2eSAlan Cox /* 526d027ed2eSAlan Cox * blist_stats() - dump radix tree stats 527d027ed2eSAlan Cox */ 528d027ed2eSAlan Cox void 529d027ed2eSAlan Cox blist_stats(blist_t bl, struct sbuf *s) 530d027ed2eSAlan Cox { 531d027ed2eSAlan Cox struct gap_stats gstats; 532d027ed2eSAlan Cox struct gap_stats *stats = &gstats; 533d027ed2eSAlan Cox daddr_t i, nodes, radix; 534d027ed2eSAlan Cox u_daddr_t bit, diff, mask; 535d027ed2eSAlan Cox 536d027ed2eSAlan Cox init_gap_stats(stats); 537d027ed2eSAlan Cox nodes = 0; 538d027ed2eSAlan Cox i = bl->bl_radix; 539d027ed2eSAlan Cox while (i < bl->bl_radix + bl->bl_blocks) { 540d027ed2eSAlan Cox /* 541d027ed2eSAlan Cox * Find max size subtree starting at i. 542d027ed2eSAlan Cox */ 543d027ed2eSAlan Cox radix = BLIST_BMAP_RADIX; 544d027ed2eSAlan Cox while (((i / radix) & BLIST_META_MASK) == 0) 545d027ed2eSAlan Cox radix *= BLIST_META_RADIX; 546d027ed2eSAlan Cox 547d027ed2eSAlan Cox /* 548d027ed2eSAlan Cox * Check for skippable subtrees starting at i. 549d027ed2eSAlan Cox */ 550d027ed2eSAlan Cox while (radix > BLIST_BMAP_RADIX) { 551bb4a27f9SMark Johnston if (bl->bl_root[nodes].bm_bitmap == 0) { 552d027ed2eSAlan Cox if (gap_stats_counting(stats)) 553d027ed2eSAlan Cox update_gap_stats(stats, i); 554d027ed2eSAlan Cox break; 555d027ed2eSAlan Cox } 556d027ed2eSAlan Cox 557d027ed2eSAlan Cox /* 558d027ed2eSAlan Cox * Skip subtree root. 559d027ed2eSAlan Cox */ 560d027ed2eSAlan Cox nodes++; 561d027ed2eSAlan Cox radix /= BLIST_META_RADIX; 562d027ed2eSAlan Cox } 563d027ed2eSAlan Cox if (radix == BLIST_BMAP_RADIX) { 564d027ed2eSAlan Cox /* 565d027ed2eSAlan Cox * Scan leaf. 566d027ed2eSAlan Cox */ 567bb4a27f9SMark Johnston mask = bl->bl_root[nodes].bm_bitmap; 568d027ed2eSAlan Cox diff = mask ^ (mask << 1); 569d027ed2eSAlan Cox if (gap_stats_counting(stats)) 570d027ed2eSAlan Cox diff ^= 1; 571d027ed2eSAlan Cox while (diff != 0) { 572d027ed2eSAlan Cox bit = diff & -diff; 573d027ed2eSAlan Cox update_gap_stats(stats, i + bitpos(bit)); 574d027ed2eSAlan Cox diff ^= bit; 575d027ed2eSAlan Cox } 576d027ed2eSAlan Cox } 577d027ed2eSAlan Cox nodes += radix_to_skip(radix); 578d027ed2eSAlan Cox i += radix; 579d027ed2eSAlan Cox } 580d027ed2eSAlan Cox update_gap_stats(stats, i); 581d027ed2eSAlan Cox dump_gap_stats(stats, s); 582d027ed2eSAlan Cox } 583d027ed2eSAlan Cox 5847090df5aSMatthew Dillon /************************************************************************ 5857090df5aSMatthew Dillon * ALLOCATION SUPPORT FUNCTIONS * 5867090df5aSMatthew Dillon ************************************************************************ 5877090df5aSMatthew Dillon * 5887090df5aSMatthew Dillon * These support functions do all the actual work. They may seem 5897090df5aSMatthew Dillon * rather longish, but that's because I've commented them up. The 5907090df5aSMatthew Dillon * actual code is straight forward. 
5917090df5aSMatthew Dillon * 5927090df5aSMatthew Dillon */ 5937090df5aSMatthew Dillon 5947090df5aSMatthew Dillon /* 595bb4a27f9SMark Johnston * BLST_NEXT_LEAF_ALLOC() - allocate the first few blocks in the next leaf. 596bb4a27f9SMark Johnston * 597bb4a27f9SMark Johnston * 'scan' is a leaf node, associated with a block containing 'blk'. 598bb4a27f9SMark Johnston * The next leaf node could be adjacent, or several nodes away if the 599bb4a27f9SMark Johnston * least common ancestor of 'scan' and its neighbor is several levels 600bb4a27f9SMark Johnston * up. Use 'blk' to determine how many meta-nodes lie between the 601bb4a27f9SMark Johnston * leaves. If the next leaf has enough initial bits set, clear them 602bb4a27f9SMark Johnston * and clear the bits in the meta nodes on the path up to the least 603bb4a27f9SMark Johnston * common ancestor to mark any subtrees made completely empty. 604bb4a27f9SMark Johnston */ 605bb4a27f9SMark Johnston static int 606bb4a27f9SMark Johnston blst_next_leaf_alloc(blmeta_t *scan, daddr_t blk, int count) 607bb4a27f9SMark Johnston { 608bb4a27f9SMark Johnston blmeta_t *next; 609bb4a27f9SMark Johnston daddr_t skip; 610bb4a27f9SMark Johnston u_daddr_t radix; 611bb4a27f9SMark Johnston int digit; 612bb4a27f9SMark Johnston 613bb4a27f9SMark Johnston next = scan + 1; 614bb4a27f9SMark Johnston blk += BLIST_BMAP_RADIX; 615bb4a27f9SMark Johnston radix = BLIST_BMAP_RADIX; 616bb4a27f9SMark Johnston while ((digit = ((blk / radix) & BLIST_META_MASK)) == 0 && 617bb4a27f9SMark Johnston (next->bm_bitmap & 1) == 1) { 618bb4a27f9SMark Johnston next++; 619bb4a27f9SMark Johnston radix *= BLIST_META_RADIX; 620bb4a27f9SMark Johnston } 621bb4a27f9SMark Johnston if (((next->bm_bitmap + 1) & ~((u_daddr_t)-1 << count)) != 0) { 622bb4a27f9SMark Johnston /* 623bb4a27f9SMark Johnston * The next leaf doesn't have enough free blocks at the 624bb4a27f9SMark Johnston * beginning to complete the spanning allocation. 625bb4a27f9SMark Johnston */ 626bb4a27f9SMark Johnston return (ENOMEM); 627bb4a27f9SMark Johnston } 628bb4a27f9SMark Johnston /* Clear the first 'count' bits in the next leaf to allocate. */ 629bb4a27f9SMark Johnston next->bm_bitmap &= (u_daddr_t)-1 << count; 630bb4a27f9SMark Johnston 631bb4a27f9SMark Johnston /* 632bb4a27f9SMark Johnston * Update bitmaps of next-ancestors, up to least common ancestor. 633bb4a27f9SMark Johnston */ 634bb4a27f9SMark Johnston skip = radix_to_skip(radix); 635bb4a27f9SMark Johnston while (radix != BLIST_BMAP_RADIX && next->bm_bitmap == 0) { 636bb4a27f9SMark Johnston (--next)->bm_bitmap ^= 1; 637bb4a27f9SMark Johnston radix /= BLIST_META_RADIX; 638bb4a27f9SMark Johnston } 639bb4a27f9SMark Johnston if (next->bm_bitmap == 0) 640bb4a27f9SMark Johnston scan[-digit * skip].bm_bitmap ^= (u_daddr_t)1 << digit; 641bb4a27f9SMark Johnston return (0); 642bb4a27f9SMark Johnston } 643bb4a27f9SMark Johnston 644bb4a27f9SMark Johnston /* 645bb4a27f9SMark Johnston * BLST_LEAF_ALLOC() - allocate at a leaf in the radix tree (a bitmap). 6467090df5aSMatthew Dillon * 647*2905d1ceSAlan Cox * This function is the core of the allocator. Its execution time is 648*2905d1ceSAlan Cox * proportional to log(count), plus height of the tree if the allocation 649*2905d1ceSAlan Cox * crosses a leaf boundary. 
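 *
 *	The search for a run of 'count' set bits repeatedly ANDs the bitmap
 *	with a shifted copy of itself, so that the runs represented by the
 *	surviving bits roughly double in length on each step and only
 *	fls(count - 1) steps are needed.  A small illustration with an
 *	8-bit leaf (purely for brevity; real leaves are BLIST_BMAP_RADIX
 *	bits wide) and count == 3:
 *
 *		mask                      01110110   (bit 0 is rightmost)
 *		mask &= mask >> 1;   ->   00110010   (runs of 2 or more)
 *		mask &= mask >> 1;   ->   00010000   (runs of 3 or more)
 *
 *	The surviving bit, bit 4, is where a 3-block allocation can begin.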
6507090df5aSMatthew Dillon */ 6517090df5aSMatthew Dillon static daddr_t 652ec371b57SAlan Cox blst_leaf_alloc(blmeta_t *scan, daddr_t blk, int count) 65354541421SAlan Cox { 654*2905d1ceSAlan Cox u_daddr_t cursor_mask, mask; 655ec371b57SAlan Cox int count1, hi, lo, num_shifts, range1, range_ext; 6567090df5aSMatthew Dillon 65754541421SAlan Cox range1 = 0; 65854541421SAlan Cox count1 = count - 1; 65954541421SAlan Cox num_shifts = fls(count1); 660bb4a27f9SMark Johnston mask = scan->bm_bitmap; 661ec371b57SAlan Cox while ((-mask & ~mask) != 0 && num_shifts > 0) { 66254541421SAlan Cox /* 66354541421SAlan Cox * If bit i is set in mask, then bits in [i, i+range1] are set 664*2905d1ceSAlan Cox * in scan->bm_bitmap. The value of range1 is equal to count1 665*2905d1ceSAlan Cox * >> num_shifts. Grow range1 and reduce num_shifts to 0, 666*2905d1ceSAlan Cox * while preserving these invariants. The updates to mask 667*2905d1ceSAlan Cox * leave fewer bits set, but each bit that remains set 668*2905d1ceSAlan Cox * represents a longer string of consecutive bits set in 669*2905d1ceSAlan Cox * scan->bm_bitmap. If more updates to mask cannot clear more 670*2905d1ceSAlan Cox * bits, because mask is partitioned with all 0 bits preceding 671*2905d1ceSAlan Cox * all 1 bits, the loop terminates immediately. 67254541421SAlan Cox */ 67354541421SAlan Cox num_shifts--; 67454541421SAlan Cox range_ext = range1 + ((count1 >> num_shifts) & 1); 675ec371b57SAlan Cox /* 676ec371b57SAlan Cox * mask is a signed quantity for the shift because when it is 677ec371b57SAlan Cox * shifted right, the sign bit should copied; when the last 678ec371b57SAlan Cox * block of the leaf is free, pretend, for a while, that all the 679ec371b57SAlan Cox * blocks that follow it are also free. 680ec371b57SAlan Cox */ 681ec371b57SAlan Cox mask &= (daddr_t)mask >> range_ext; 68254541421SAlan Cox range1 += range_ext; 68354541421SAlan Cox } 68454541421SAlan Cox if (mask == 0) { 68554541421SAlan Cox /* 68654541421SAlan Cox * Update bighint. There is no allocation bigger than range1 687ec371b57SAlan Cox * starting in this leaf. 68854541421SAlan Cox */ 68954541421SAlan Cox scan->bm_bighint = range1; 6907090df5aSMatthew Dillon return (SWAPBLK_NONE); 6917090df5aSMatthew Dillon } 69254541421SAlan Cox 693ec371b57SAlan Cox /* Discard any candidates that appear before blk. */ 694*2905d1ceSAlan Cox if ((blk & BLIST_BMAP_MASK) != 0) { 695*2905d1ceSAlan Cox cursor_mask = mask & bitrange(0, blk & BLIST_BMAP_MASK); 696*2905d1ceSAlan Cox if (cursor_mask != 0) { 697*2905d1ceSAlan Cox mask ^= cursor_mask; 69854541421SAlan Cox if (mask == 0) 69954541421SAlan Cox return (SWAPBLK_NONE); 7007090df5aSMatthew Dillon 7017090df5aSMatthew Dillon /* 702*2905d1ceSAlan Cox * Bighint change for last block allocation cannot 703*2905d1ceSAlan Cox * assume that any other blocks are allocated, so the 704*2905d1ceSAlan Cox * bighint cannot be reduced much. 705*2905d1ceSAlan Cox */ 706*2905d1ceSAlan Cox range1 = BLIST_MAX_ALLOC - 1; 707*2905d1ceSAlan Cox } 708*2905d1ceSAlan Cox blk &= ~BLIST_BMAP_MASK; 709*2905d1ceSAlan Cox } 710*2905d1ceSAlan Cox 711*2905d1ceSAlan Cox /* 71254541421SAlan Cox * The least significant set bit in mask marks the start of the first 71354541421SAlan Cox * available range of sufficient size. Clear all the bits but that one, 714d027ed2eSAlan Cox * and then find its position. 
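	 * In two's complement, mask & -mask isolates the lowest set bit:
	 * for mask 00010100, -mask is ...11101100, and the AND leaves
	 * 00000100.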
7157090df5aSMatthew Dillon */ 71654541421SAlan Cox mask &= -mask; 717d027ed2eSAlan Cox lo = bitpos(mask); 7187090df5aSMatthew Dillon 719ec371b57SAlan Cox hi = lo + count; 720ec371b57SAlan Cox if (hi > BLIST_BMAP_RADIX) { 72154541421SAlan Cox /* 722ec371b57SAlan Cox * An allocation within this leaf is impossible, so a successful 723ec371b57SAlan Cox * allocation depends on the next leaf providing some of the blocks. 72454541421SAlan Cox */ 725bb4a27f9SMark Johnston if (blst_next_leaf_alloc(scan, blk, hi - BLIST_BMAP_RADIX) != 0) 726ec371b57SAlan Cox /* 727bb4a27f9SMark Johnston * The hint cannot be updated, because the same 728bb4a27f9SMark Johnston * allocation request could be satisfied later, by this 729bb4a27f9SMark Johnston * leaf, if the state of the next leaf changes, and 730bb4a27f9SMark Johnston * without any changes to this leaf. 731ec371b57SAlan Cox */ 732ec371b57SAlan Cox return (SWAPBLK_NONE); 733ec371b57SAlan Cox hi = BLIST_BMAP_RADIX; 734ec371b57SAlan Cox } 735ec371b57SAlan Cox 736ec371b57SAlan Cox /* Set the bits of mask at position 'lo' and higher. */ 737ec371b57SAlan Cox mask = -mask; 738ec371b57SAlan Cox if (hi == BLIST_BMAP_RADIX) { 739ec371b57SAlan Cox /* 740ec371b57SAlan Cox * Update bighint. There is no allocation bigger than range1 741ec371b57SAlan Cox * available in this leaf after this allocation completes. 742ec371b57SAlan Cox */ 743ec371b57SAlan Cox scan->bm_bighint = range1; 744ec371b57SAlan Cox } else { 745ec371b57SAlan Cox /* Clear the bits of mask at position 'hi' and higher. */ 746ec371b57SAlan Cox mask &= (u_daddr_t)-1 >> (BLIST_BMAP_RADIX - hi); 747ec371b57SAlan Cox } 748ec371b57SAlan Cox /* Clear the allocated bits from this leaf. */ 749bb4a27f9SMark Johnston scan->bm_bitmap &= ~mask; 750*2905d1ceSAlan Cox return (blk + lo); 7517090df5aSMatthew Dillon } 7527090df5aSMatthew Dillon 7537090df5aSMatthew Dillon /* 7547090df5aSMatthew Dillon * blist_meta_alloc() - allocate at a meta in the radix tree. 7557090df5aSMatthew Dillon * 7567090df5aSMatthew Dillon * Attempt to allocate at a meta node. If we can't, we update 7577090df5aSMatthew Dillon * bighint and return a failure. Updating bighint optimize future 7587090df5aSMatthew Dillon * calls that hit this node. We have to check for our collapse cases 7597090df5aSMatthew Dillon * and we have a few optimizations strewn in as well. 7607090df5aSMatthew Dillon */ 7617090df5aSMatthew Dillon static daddr_t 762bee93d3cSAlan Cox blst_meta_alloc(blmeta_t *scan, daddr_t cursor, daddr_t count, u_daddr_t radix) 763d4e3484bSAlan Cox { 764bb4a27f9SMark Johnston daddr_t blk, i, r, skip; 765bb4a27f9SMark Johnston u_daddr_t bit, mask; 766d4e3484bSAlan Cox bool scan_from_start; 767bb4a27f9SMark Johnston int digit; 7687090df5aSMatthew Dillon 769a396c83aSAlan Cox if (radix == BLIST_BMAP_RADIX) 770ec371b57SAlan Cox return (blst_leaf_alloc(scan, cursor, count)); 771ec371b57SAlan Cox blk = cursor & -radix; 772bb4a27f9SMark Johnston scan_from_start = (cursor == blk); 773bb4a27f9SMark Johnston radix /= BLIST_META_RADIX; 7742ac0c7c3SAlan Cox skip = radix_to_skip(radix); 775bb4a27f9SMark Johnston mask = scan->bm_bitmap; 776bb4a27f9SMark Johnston 777bb4a27f9SMark Johnston /* Discard any candidates that appear before cursor. 
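	 * The digit is the index of the child subtree holding the cursor;
	 * for instance (using a meta radix of 16 purely for illustration),
	 * in a node whose range starts at block 0 and whose children each
	 * cover 64 blocks, a cursor of 200 gives digit (200 / 64) & 15 == 3,
	 * so children 0 through 2 are masked off.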
*/ 778bb4a27f9SMark Johnston digit = (cursor / radix) & BLIST_META_MASK; 779bb4a27f9SMark Johnston mask &= (u_daddr_t)-1 << digit; 780ee73fef9SAlan Cox if (mask == 0) 781ee73fef9SAlan Cox return (SWAPBLK_NONE); 7827090df5aSMatthew Dillon 7834be4fd5dSAlan Cox /* 784bb4a27f9SMark Johnston * If the first try is for a block that includes the cursor, pre-undo 785bb4a27f9SMark Johnston * the digit * radix offset in the first call; otherwise, ignore the 786bb4a27f9SMark Johnston * cursor entirely. 7874be4fd5dSAlan Cox */ 788bb4a27f9SMark Johnston if (((mask >> digit) & 1) == 1) 789bb4a27f9SMark Johnston cursor -= digit * radix; 790d4e3484bSAlan Cox else 791bb4a27f9SMark Johnston cursor = blk; 7927090df5aSMatthew Dillon 7934be4fd5dSAlan Cox /* 794bb4a27f9SMark Johnston * Examine the nonempty subtree associated with each bit set in mask. 7954be4fd5dSAlan Cox */ 796bb4a27f9SMark Johnston do { 797bb4a27f9SMark Johnston bit = mask & -mask; 798bb4a27f9SMark Johnston digit = bitpos(bit); 799bb4a27f9SMark Johnston i = 1 + digit * skip; 8007090df5aSMatthew Dillon if (count <= scan[i].bm_bighint) { 8017090df5aSMatthew Dillon /* 802ec371b57SAlan Cox * The allocation might fit beginning in the i'th subtree. 8037090df5aSMatthew Dillon */ 804bb4a27f9SMark Johnston r = blst_meta_alloc(&scan[i], cursor + digit * radix, 805bb4a27f9SMark Johnston count, radix); 8067090df5aSMatthew Dillon if (r != SWAPBLK_NONE) { 807bb4a27f9SMark Johnston if (scan[i].bm_bitmap == 0) 808bb4a27f9SMark Johnston scan->bm_bitmap ^= bit; 8097090df5aSMatthew Dillon return (r); 8107090df5aSMatthew Dillon } 8117090df5aSMatthew Dillon } 812bb4a27f9SMark Johnston cursor = blk; 813bb4a27f9SMark Johnston } while ((mask ^= bit) != 0); 8147090df5aSMatthew Dillon 8157090df5aSMatthew Dillon /* 816bb4a27f9SMark Johnston * We couldn't allocate count in this subtree. If the whole tree was 817bb4a27f9SMark Johnston * scanned, and the last tree node is allocated, update bighint. 8187090df5aSMatthew Dillon */ 819bb4a27f9SMark Johnston if (scan_from_start && !(digit == BLIST_META_RADIX - 1 && 820bb4a27f9SMark Johnston scan[i].bm_bighint == BLIST_MAX_ALLOC)) 8217090df5aSMatthew Dillon scan->bm_bighint = count - 1; 822d4e3484bSAlan Cox 8237090df5aSMatthew Dillon return (SWAPBLK_NONE); 8247090df5aSMatthew Dillon } 8257090df5aSMatthew Dillon 8267090df5aSMatthew Dillon /* 8277090df5aSMatthew Dillon * BLST_LEAF_FREE() - free allocated block from leaf bitmap 8287090df5aSMatthew Dillon * 8297090df5aSMatthew Dillon */ 8307090df5aSMatthew Dillon static void 831b411efa4SAlan Cox blst_leaf_free(blmeta_t *scan, daddr_t blk, int count) 832b411efa4SAlan Cox { 833ec371b57SAlan Cox u_daddr_t mask; 834ec371b57SAlan Cox 8357090df5aSMatthew Dillon /* 8367090df5aSMatthew Dillon * free some data in this bitmap 837ec371b57SAlan Cox * mask=0000111111111110000 8387090df5aSMatthew Dillon * \_________/\__/ 839ec371b57SAlan Cox * count n 8407090df5aSMatthew Dillon */ 841bb4a27f9SMark Johnston mask = bitrange(blk & BLIST_BMAP_MASK, count); 842bb4a27f9SMark Johnston if (scan->bm_bitmap & mask) 843ec371b57SAlan Cox panic("freeing free block"); 844bb4a27f9SMark Johnston scan->bm_bitmap |= mask; 8457090df5aSMatthew Dillon } 8467090df5aSMatthew Dillon 8477090df5aSMatthew Dillon /* 8487090df5aSMatthew Dillon * BLST_META_FREE() - free allocated blocks from radix tree meta info 8497090df5aSMatthew Dillon * 8507090df5aSMatthew Dillon * This support routine frees a range of blocks from the bitmap. 8517090df5aSMatthew Dillon * The range must be entirely enclosed by this radix node. 
If a 8527090df5aSMatthew Dillon * meta node, we break the range down recursively to free blocks 8537090df5aSMatthew Dillon * in subnodes (which means that this code can free an arbitrary 8547090df5aSMatthew Dillon * range whereas the allocation code cannot allocate an arbitrary 8557090df5aSMatthew Dillon * range). 8567090df5aSMatthew Dillon */ 8577090df5aSMatthew Dillon static void 858bee93d3cSAlan Cox blst_meta_free(blmeta_t *scan, daddr_t freeBlk, daddr_t count, u_daddr_t radix) 859d3783724SAlan Cox { 860bb4a27f9SMark Johnston daddr_t blk, endBlk, i, skip; 861bb4a27f9SMark Johnston int digit, endDigit; 8627090df5aSMatthew Dillon 863bb4a27f9SMark Johnston /* 864bb4a27f9SMark Johnston * We could probably do a better job here. We are required to make 865bb4a27f9SMark Johnston * bighint at least as large as the biggest allocable block of data. 866bb4a27f9SMark Johnston * If we just shoehorn it, a little extra overhead will be incurred 867bb4a27f9SMark Johnston * on the next allocation (but only that one typically). 868bb4a27f9SMark Johnston */ 869bb4a27f9SMark Johnston scan->bm_bighint = BLIST_MAX_ALLOC; 870bb4a27f9SMark Johnston 871a396c83aSAlan Cox if (radix == BLIST_BMAP_RADIX) 872a396c83aSAlan Cox return (blst_leaf_free(scan, freeBlk, count)); 8737090df5aSMatthew Dillon 874bb4a27f9SMark Johnston endBlk = ummin(freeBlk + count, (freeBlk + radix) & -radix); 87557e6d29bSMatthew Dillon radix /= BLIST_META_RADIX; 876bb4a27f9SMark Johnston skip = radix_to_skip(radix); 877bb4a27f9SMark Johnston blk = freeBlk & -radix; 878bb4a27f9SMark Johnston digit = (blk / radix) & BLIST_META_MASK; 879bb4a27f9SMark Johnston endDigit = 1 + (((endBlk - 1) / radix) & BLIST_META_MASK); 880bb4a27f9SMark Johnston scan->bm_bitmap |= bitrange(digit, endDigit - digit); 881bb4a27f9SMark Johnston for (i = 1 + digit * skip; blk < endBlk; i += skip) { 8827090df5aSMatthew Dillon blk += radix; 883bb4a27f9SMark Johnston count = ummin(blk, endBlk) - freeBlk; 884bb4a27f9SMark Johnston blst_meta_free(&scan[i], freeBlk, count, radix); 885bb4a27f9SMark Johnston freeBlk = blk; 8867090df5aSMatthew Dillon } 8877090df5aSMatthew Dillon } 8887090df5aSMatthew Dillon 8897090df5aSMatthew Dillon /* 890bb4a27f9SMark Johnston * BLST_COPY() - copy one radix tree to another 8917090df5aSMatthew Dillon * 8927090df5aSMatthew Dillon * Locates free space in the source tree and frees it in the destination 8937090df5aSMatthew Dillon * tree. The space may not already be free in the destination. 
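 *
 *	This is the transfer step of blist_resize(): the destination comes
 *	back from blist_create() fully allocated (all bitmap bits zero), and
 *	blst_copy() then frees in the destination exactly those blocks that
 *	are free in the source, up to 'count' blocks.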
8947090df5aSMatthew Dillon */ 895b411efa4SAlan Cox static void 8962ac0c7c3SAlan Cox blst_copy(blmeta_t *scan, daddr_t blk, daddr_t radix, blist_t dest, 8972ac0c7c3SAlan Cox daddr_t count) 898b411efa4SAlan Cox { 899bb4a27f9SMark Johnston daddr_t endBlk, i, skip; 9007090df5aSMatthew Dillon 9017090df5aSMatthew Dillon /* 9027090df5aSMatthew Dillon * Leaf node 9037090df5aSMatthew Dillon */ 9047090df5aSMatthew Dillon 9057090df5aSMatthew Dillon if (radix == BLIST_BMAP_RADIX) { 906bb4a27f9SMark Johnston u_daddr_t v = scan->bm_bitmap; 9077090df5aSMatthew Dillon 9087090df5aSMatthew Dillon if (v == (u_daddr_t)-1) { 9097090df5aSMatthew Dillon blist_free(dest, blk, count); 9107090df5aSMatthew Dillon } else if (v != 0) { 9117090df5aSMatthew Dillon int i; 9127090df5aSMatthew Dillon 913bb4a27f9SMark Johnston for (i = 0; i < count; ++i) { 914064650c1SAlan Cox if (v & ((u_daddr_t)1 << i)) 9157090df5aSMatthew Dillon blist_free(dest, blk + i, 1); 9167090df5aSMatthew Dillon } 9177090df5aSMatthew Dillon } 9187090df5aSMatthew Dillon return; 9197090df5aSMatthew Dillon } 9207090df5aSMatthew Dillon 9217090df5aSMatthew Dillon /* 9227090df5aSMatthew Dillon * Meta node 9237090df5aSMatthew Dillon */ 9247090df5aSMatthew Dillon 925bb4a27f9SMark Johnston if (scan->bm_bitmap == 0) { 9267090df5aSMatthew Dillon /* 9277090df5aSMatthew Dillon * Source all allocated, leave dest allocated 9287090df5aSMatthew Dillon */ 9297090df5aSMatthew Dillon return; 9307090df5aSMatthew Dillon } 9317090df5aSMatthew Dillon 932bb4a27f9SMark Johnston endBlk = blk + count; 9332ac0c7c3SAlan Cox radix /= BLIST_META_RADIX; 934bb4a27f9SMark Johnston skip = radix_to_skip(radix); 935bb4a27f9SMark Johnston for (i = 1; blk < endBlk; i += skip) { 9367090df5aSMatthew Dillon blk += radix; 937bb4a27f9SMark Johnston count = radix; 938bb4a27f9SMark Johnston if (blk >= endBlk) 939bb4a27f9SMark Johnston count -= blk - endBlk; 940bb4a27f9SMark Johnston blst_copy(&scan[i], blk - radix, radix, dest, count); 9417090df5aSMatthew Dillon } 9427090df5aSMatthew Dillon } 9437090df5aSMatthew Dillon 9447090df5aSMatthew Dillon /* 94592da00bbSMatthew Dillon * BLST_LEAF_FILL() - allocate specific blocks in leaf bitmap 94692da00bbSMatthew Dillon * 94792da00bbSMatthew Dillon * This routine allocates all blocks in the specified range 94892da00bbSMatthew Dillon * regardless of any existing allocations in that range. Returns 94992da00bbSMatthew Dillon * the number of blocks allocated by the call. 95092da00bbSMatthew Dillon */ 951a7249a6cSAlan Cox static daddr_t 95292da00bbSMatthew Dillon blst_leaf_fill(blmeta_t *scan, daddr_t blk, int count) 95392da00bbSMatthew Dillon { 954a7249a6cSAlan Cox daddr_t nblks; 9554be4fd5dSAlan Cox u_daddr_t mask; 95692da00bbSMatthew Dillon 957bb4a27f9SMark Johnston mask = bitrange(blk & BLIST_BMAP_MASK, count); 95892da00bbSMatthew Dillon 9594be4fd5dSAlan Cox /* Count the number of blocks that we are allocating. */ 960bb4a27f9SMark Johnston nblks = bitcount64(scan->bm_bitmap & mask); 96192da00bbSMatthew Dillon 962bb4a27f9SMark Johnston scan->bm_bitmap &= ~mask; 9634be4fd5dSAlan Cox return (nblks); 96492da00bbSMatthew Dillon } 96592da00bbSMatthew Dillon 96692da00bbSMatthew Dillon /* 96792da00bbSMatthew Dillon * BLIST_META_FILL() - allocate specific blocks at a meta node 96892da00bbSMatthew Dillon * 96992da00bbSMatthew Dillon * This routine allocates the specified range of blocks, 97092da00bbSMatthew Dillon * regardless of any existing allocations in the range. The 97192da00bbSMatthew Dillon * range must be within the extent of this node. 
Returns the 97292da00bbSMatthew Dillon * number of blocks allocated by the call. 97392da00bbSMatthew Dillon */ 974a7249a6cSAlan Cox static daddr_t 975bee93d3cSAlan Cox blst_meta_fill(blmeta_t *scan, daddr_t allocBlk, daddr_t count, u_daddr_t radix) 976d3783724SAlan Cox { 977bb4a27f9SMark Johnston daddr_t blk, endBlk, i, nblks, skip; 978bb4a27f9SMark Johnston int digit; 97992da00bbSMatthew Dillon 980a396c83aSAlan Cox if (radix == BLIST_BMAP_RADIX) 981a396c83aSAlan Cox return (blst_leaf_fill(scan, allocBlk, count)); 982bb4a27f9SMark Johnston 983bb4a27f9SMark Johnston endBlk = ummin(allocBlk + count, (allocBlk + radix) & -radix); 984bb4a27f9SMark Johnston radix /= BLIST_META_RADIX; 9852ac0c7c3SAlan Cox skip = radix_to_skip(radix); 986bee93d3cSAlan Cox blk = allocBlk & -radix; 987d3783724SAlan Cox nblks = 0; 988bb4a27f9SMark Johnston while (blk < endBlk) { 989bb4a27f9SMark Johnston digit = (blk / radix) & BLIST_META_MASK; 990bb4a27f9SMark Johnston i = 1 + digit * skip; 99192da00bbSMatthew Dillon blk += radix; 992bb4a27f9SMark Johnston count = ummin(blk, endBlk) - allocBlk; 993bb4a27f9SMark Johnston nblks += blst_meta_fill(&scan[i], allocBlk, count, radix); 994bb4a27f9SMark Johnston if (scan[i].bm_bitmap == 0) 995bb4a27f9SMark Johnston scan->bm_bitmap &= ~((u_daddr_t)1 << digit); 996bb4a27f9SMark Johnston allocBlk = blk; 99792da00bbSMatthew Dillon } 998a396c83aSAlan Cox return (nblks); 99992da00bbSMatthew Dillon } 100092da00bbSMatthew Dillon 10017090df5aSMatthew Dillon #ifdef BLIST_DEBUG 10027090df5aSMatthew Dillon 10037090df5aSMatthew Dillon static void 10042ac0c7c3SAlan Cox blst_radix_print(blmeta_t *scan, daddr_t blk, daddr_t radix, int tab) 10057090df5aSMatthew Dillon { 1006bb4a27f9SMark Johnston daddr_t skip; 1007bb4a27f9SMark Johnston u_daddr_t bit, mask; 1008bb4a27f9SMark Johnston int digit; 10097090df5aSMatthew Dillon 10107090df5aSMatthew Dillon if (radix == BLIST_BMAP_RADIX) { 10117090df5aSMatthew Dillon printf( 1012bb4a27f9SMark Johnston "%*.*s(%08llx,%lld): bitmap %0*llx big=%lld\n", 10137090df5aSMatthew Dillon tab, tab, "", 101492da00bbSMatthew Dillon (long long)blk, (long long)radix, 1015bb4a27f9SMark Johnston 1 + (BLIST_BMAP_RADIX - 1) / 4, 1016bb4a27f9SMark Johnston (long long)scan->bm_bitmap, 101792da00bbSMatthew Dillon (long long)scan->bm_bighint 10187090df5aSMatthew Dillon ); 10197090df5aSMatthew Dillon return; 10207090df5aSMatthew Dillon } 10217090df5aSMatthew Dillon 10227090df5aSMatthew Dillon printf( 1023bb4a27f9SMark Johnston "%*.*s(%08llx): subtree (%lld/%lld) bitmap %0*llx big=%lld {\n", 10247090df5aSMatthew Dillon tab, tab, "", 102592da00bbSMatthew Dillon (long long)blk, (long long)radix, 102692da00bbSMatthew Dillon (long long)radix, 1027bb4a27f9SMark Johnston 1 + (BLIST_META_RADIX - 1) / 4, 1028bb4a27f9SMark Johnston (long long)scan->bm_bitmap, 102992da00bbSMatthew Dillon (long long)scan->bm_bighint 10307090df5aSMatthew Dillon ); 10317090df5aSMatthew Dillon 10322ac0c7c3SAlan Cox radix /= BLIST_META_RADIX; 1033bb4a27f9SMark Johnston skip = radix_to_skip(radix); 10347090df5aSMatthew Dillon tab += 4; 10357090df5aSMatthew Dillon 1036bb4a27f9SMark Johnston mask = scan->bm_bitmap; 1037bb4a27f9SMark Johnston /* Examine the nonempty subtree associated with each bit set in mask */ 1038bb4a27f9SMark Johnston do { 1039bb4a27f9SMark Johnston bit = mask & -mask; 1040bb4a27f9SMark Johnston digit = bitpos(bit); 1041bb4a27f9SMark Johnston blst_radix_print(&scan[1 + digit * skip], blk + digit * radix, 1042bb4a27f9SMark Johnston radix, tab); 1043bb4a27f9SMark Johnston } while ((mask ^= 
bit) != 0); 10447090df5aSMatthew Dillon tab -= 4; 10457090df5aSMatthew Dillon 10467090df5aSMatthew Dillon printf( 10477090df5aSMatthew Dillon "%*.*s}\n", 10487090df5aSMatthew Dillon tab, tab, "" 10497090df5aSMatthew Dillon ); 10507090df5aSMatthew Dillon } 10517090df5aSMatthew Dillon 10527090df5aSMatthew Dillon #endif 10537090df5aSMatthew Dillon 10547090df5aSMatthew Dillon #ifdef BLIST_DEBUG 10557090df5aSMatthew Dillon 10567090df5aSMatthew Dillon int 10577090df5aSMatthew Dillon main(int ac, char **av) 10587090df5aSMatthew Dillon { 1059bb4a27f9SMark Johnston int size = BLIST_META_RADIX * BLIST_BMAP_RADIX; 10607090df5aSMatthew Dillon int i; 10617090df5aSMatthew Dillon blist_t bl; 1062d027ed2eSAlan Cox struct sbuf *s; 10637090df5aSMatthew Dillon 10647090df5aSMatthew Dillon for (i = 1; i < ac; ++i) { 10657090df5aSMatthew Dillon const char *ptr = av[i]; 10667090df5aSMatthew Dillon if (*ptr != '-') { 10677090df5aSMatthew Dillon size = strtol(ptr, NULL, 0); 10687090df5aSMatthew Dillon continue; 10697090df5aSMatthew Dillon } 10707090df5aSMatthew Dillon ptr += 2; 10717090df5aSMatthew Dillon fprintf(stderr, "Bad option: %s\n", ptr - 2); 10727090df5aSMatthew Dillon exit(1); 10737090df5aSMatthew Dillon } 1074c8c7ad92SKip Macy bl = blist_create(size, M_WAITOK); 10757090df5aSMatthew Dillon blist_free(bl, 0, size); 10767090df5aSMatthew Dillon 10777090df5aSMatthew Dillon for (;;) { 10787090df5aSMatthew Dillon char buf[1024]; 1079d90bf7d8SAlan Cox long long da = 0; 1080d90bf7d8SAlan Cox long long count = 0; 10817090df5aSMatthew Dillon 10824be4fd5dSAlan Cox printf("%lld/%lld/%lld> ", (long long)blist_avail(bl), 108392da00bbSMatthew Dillon (long long)size, (long long)bl->bl_radix); 10847090df5aSMatthew Dillon fflush(stdout); 10857090df5aSMatthew Dillon if (fgets(buf, sizeof(buf), stdin) == NULL) 10867090df5aSMatthew Dillon break; 10877090df5aSMatthew Dillon switch(buf[0]) { 10887090df5aSMatthew Dillon case 'r': 108992da00bbSMatthew Dillon if (sscanf(buf + 1, "%lld", &count) == 1) { 1090d90bf7d8SAlan Cox blist_resize(&bl, count, 1, M_WAITOK); 10917090df5aSMatthew Dillon } else { 10927090df5aSMatthew Dillon printf("?\n"); 10937090df5aSMatthew Dillon } 10947090df5aSMatthew Dillon case 'p': 10957090df5aSMatthew Dillon blist_print(bl); 10967090df5aSMatthew Dillon break; 1097d027ed2eSAlan Cox case 's': 1098d027ed2eSAlan Cox s = sbuf_new_auto(); 1099d027ed2eSAlan Cox blist_stats(bl, s); 1100d027ed2eSAlan Cox sbuf_finish(s); 1101d027ed2eSAlan Cox printf("%s", sbuf_data(s)); 1102d027ed2eSAlan Cox sbuf_delete(s); 1103d027ed2eSAlan Cox break; 11047090df5aSMatthew Dillon case 'a': 110592da00bbSMatthew Dillon if (sscanf(buf + 1, "%lld", &count) == 1) { 11067090df5aSMatthew Dillon daddr_t blk = blist_alloc(bl, count); 110792da00bbSMatthew Dillon printf(" R=%08llx\n", (long long)blk); 11087090df5aSMatthew Dillon } else { 11097090df5aSMatthew Dillon printf("?\n"); 11107090df5aSMatthew Dillon } 11117090df5aSMatthew Dillon break; 11127090df5aSMatthew Dillon case 'f': 1113d90bf7d8SAlan Cox if (sscanf(buf + 1, "%llx %lld", &da, &count) == 2) { 11147090df5aSMatthew Dillon blist_free(bl, da, count); 11157090df5aSMatthew Dillon } else { 11167090df5aSMatthew Dillon printf("?\n"); 11177090df5aSMatthew Dillon } 11187090df5aSMatthew Dillon break; 111992da00bbSMatthew Dillon case 'l': 1120d90bf7d8SAlan Cox if (sscanf(buf + 1, "%llx %lld", &da, &count) == 2) { 1121a7249a6cSAlan Cox printf(" n=%jd\n", 1122a7249a6cSAlan Cox (intmax_t)blist_fill(bl, da, count)); 112392da00bbSMatthew Dillon } else { 112492da00bbSMatthew Dillon printf("?\n"); 
112592da00bbSMatthew Dillon } 112692da00bbSMatthew Dillon break; 11277090df5aSMatthew Dillon case '?': 11287090df5aSMatthew Dillon case 'h': 11297090df5aSMatthew Dillon puts( 11307090df5aSMatthew Dillon "p -print\n" 1131d027ed2eSAlan Cox "s -stats\n" 11327090df5aSMatthew Dillon "a %d -allocate\n" 11337090df5aSMatthew Dillon "f %x %d -free\n" 113492da00bbSMatthew Dillon "l %x %d -fill\n" 11357090df5aSMatthew Dillon "r %d -resize\n" 11367090df5aSMatthew Dillon "h/? -help" 11377090df5aSMatthew Dillon ); 11387090df5aSMatthew Dillon break; 11397090df5aSMatthew Dillon default: 11407090df5aSMatthew Dillon printf("?\n"); 11417090df5aSMatthew Dillon break; 11427090df5aSMatthew Dillon } 11437090df5aSMatthew Dillon } 11447090df5aSMatthew Dillon return(0); 11457090df5aSMatthew Dillon } 11467090df5aSMatthew Dillon 11477090df5aSMatthew Dillon void 11487090df5aSMatthew Dillon panic(const char *ctl, ...) 11497090df5aSMatthew Dillon { 11507090df5aSMatthew Dillon va_list va; 11517090df5aSMatthew Dillon 11527090df5aSMatthew Dillon va_start(va, ctl); 11537090df5aSMatthew Dillon vfprintf(stderr, ctl, va); 11547090df5aSMatthew Dillon fprintf(stderr, "\n"); 11557090df5aSMatthew Dillon va_end(va); 11567090df5aSMatthew Dillon exit(1); 11577090df5aSMatthew Dillon } 11587090df5aSMatthew Dillon 11597090df5aSMatthew Dillon #endif 1160
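
/*
 * Example of typical use of the public interface, as an illustrative
 * sketch only (it is not part of this file and the sizes are arbitrary).
 * A freshly created blist starts fully allocated, so the caller frees the
 * usable range first:
 *
 *	blist_t bl;
 *	daddr_t blk;
 *
 *	bl = blist_create(8192, M_WAITOK);
 *	blist_free(bl, 0, 8192);
 *	blk = blist_alloc(bl, 16);
 *	if (blk == SWAPBLK_NONE) {
 *		(no run of 16 contiguous free blocks was available)
 *	} else {
 *		(use blocks [blk, blk + 16), then release them)
 *		blist_free(bl, blk, 16);
 *	}
 *	blist_destroy(bl);
 */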