/*
 * ALIST.C -	Bitmap allocator/deallocator, using a radix tree with hinting.
 *		Unlimited-size allocations, power-of-2 only, power-of-2
 *		aligned results only.
 *
 * Copyright (c) 2007 The DragonFly Project.  All rights reserved.
 *
 * This code is derived from software contributed to The DragonFly Project
 * by Matthew Dillon <dillon@backplane.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name of The DragonFly Project nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific, prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
/*
 * This module has been adapted from the BLIST module, which was written
 * by Matthew Dillon many years ago.
 *
 * This module implements a general power-of-2 bitmap allocator/deallocator.
 * All allocations must be in powers of 2 and will return similarly aligned
 * results.  The module does not try to interpret the meaning of a 'block'
 * other than to return ALIST_BLOCK_NONE on an allocation failure.
 *
 * A maximum of 2 billion blocks is supported so, for example, if one block
 * represented 64 bytes a maximally sized ALIST would represent
 * 128 gigabytes.
 *
 * A radix tree is used to maintain the bitmap and is laid out in a manner
 * similar to the blist code.  Meta nodes use a radix of 16 and 2 bits per
 * block while leaf nodes use a radix of 32 and 1 bit per block (stored in
 * a 32 bit bitmap field).  Both meta and leaf nodes have a hint field.
 * This field gives us a hint as to the largest free contiguous range of
 * blocks under the node.  It may contain a value that is too high, but
 * will never contain a value that is too low.  When the radix tree is
 * searched, allocation failures in subtrees update the hint.
 *
 * The radix tree is laid out recursively using a linear array.  Each meta
 * node is immediately followed (laid out sequentially in memory) by
 * ALIST_META_RADIX lower-level nodes.  This is a recursive structure but one
 * that can be easily scanned through a very simple 'skip' calculation.  In
 * order to support large radixes, portions of the tree may reside outside our
 * memory allocation.  We handle this with an early-terminate optimization
 * in the meta-node.  The memory allocation is only large enough to cover
 * the number of blocks requested at creation time even if it must be
 * encompassed in a larger root-node radix.
 *
 * This code can be compiled stand-alone for debugging.
 */
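/*
 * To make the 'skip' layout concrete, an illustrative example assuming
 * ALIST_BMAP_RADIX == 32 and ALIST_META_RADIX == 16 as described above:
 *
 *	radix 512 tree (one meta level): bl_skip == 16, next_skip == 1.
 *	The root meta node sits at index 0 of the almeta_t array and its
 *	16 leaf children occupy indices 1..16.
 *
 *	radix 8192 tree (two meta levels): bl_skip == (16+1)*16 == 272,
 *	next_skip == 17.  Child subtree j of the root starts at index
 *	1 + 17*j, i.e. each child's meta node is immediately followed by
 *	its own 16 leaves before the next child begins.
 */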

#ifdef _KERNEL

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/lock.h>
#include <sys/kernel.h>
#include <sys/alist.h>
#include <sys/malloc.h>
#include <vm/vm.h>
#include <vm/vm_object.h>
#include <vm/vm_kern.h>
#include <vm/vm_extern.h>
#include <vm/vm_page.h>

#else

#ifndef ALIST_NO_DEBUG
#define ALIST_DEBUG
#endif

#include <sys/types.h>
#include <stdio.h>
#include <assert.h>
#include <string.h>
#include <stdlib.h>
#include <stdarg.h>

#define kmalloc(a,b,c)	malloc(a)
#define kfree(a,b)	free(a)
#define kprintf		printf
#define KKASSERT(exp)	assert(exp)
struct malloc_type;

#include <sys/alist.h>

void panic(const char *ctl, ...);

#endif

/*
 * static support functions
 */

static alist_blk_t alst_leaf_alloc(almeta_t *scan, alist_blk_t blk,
				alist_blk_t start, alist_blk_t count);
static alist_blk_t alst_meta_alloc(almeta_t *scan, alist_blk_t blk,
				alist_blk_t start, alist_blk_t count,
				alist_blk_t radix, alist_blk_t skip);
static void alst_leaf_free(almeta_t *scan, alist_blk_t relblk,
				alist_blk_t count);
static void alst_meta_free(almeta_t *scan, alist_blk_t freeBlk,
				alist_blk_t count, alist_blk_t radix,
				alist_blk_t skip, alist_blk_t blk);
static alist_blk_t alst_radix_init(almeta_t *scan, alist_blk_t blk,
				alist_blk_t radix, alist_blk_t skip,
				alist_blk_t count);
#ifndef _KERNEL
static void alst_radix_print(almeta_t *scan, alist_blk_t blk,
				alist_blk_t radix, alist_blk_t skip,
				int tab);
#endif

/*
 * Create an alist capable of handling up to the specified number of blocks.
 * The number of blocks must be greater than 0 but does not have to be a
 * power of 2.
 *
 * The smallest alist consists of a single leaf node capable of
 * managing ALIST_BMAP_RADIX blocks.
 */
alist_t
alist_create(alist_blk_t blocks, struct malloc_type *mtype)
{
	alist_t bl;
	alist_blk_t radix;
	alist_blk_t skip = 0;

	/*
	 * Calculate radix and skip field used for scanning.
	 */
	radix = ALIST_BMAP_RADIX;

	while (radix < blocks) {
		radix *= ALIST_META_RADIX;
		skip = (skip + 1) * ALIST_META_RADIX;
	}
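
	/*
	 * For example (illustrative only): with ALIST_BMAP_RADIX == 32 and
	 * ALIST_META_RADIX == 16, blocks == 1000 takes two iterations,
	 * leaving radix 32 -> 512 -> 8192 and skip 0 -> 16 -> 272, i.e.
	 * two meta levels above the leaves.
	 */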

	bl = kmalloc(sizeof(struct alist), mtype, M_WAITOK | M_ZERO);

	bl->bl_blocks = blocks;
	bl->bl_radix = radix;
	bl->bl_skip = skip;
	bl->bl_rootblks = 1 + alst_radix_init(NULL, 0, bl->bl_radix,
					      bl->bl_skip, blocks);
	bl->bl_root = kmalloc(sizeof(almeta_t) * bl->bl_rootblks,
			      mtype, M_WAITOK);

#if defined(ALIST_DEBUG)
	kprintf(
		"ALIST representing %d blocks (%d MB of swap)"
		", requiring %dK (%d bytes) of ram\n",
		bl->bl_blocks,
		bl->bl_blocks * 4 / 1024,
		(bl->bl_rootblks * sizeof(almeta_t) + 1023) / 1024,
		(bl->bl_rootblks * sizeof(almeta_t))
	);
	kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
	alst_radix_init(bl->bl_root, 0, bl->bl_radix, bl->bl_skip, blocks);

	return(bl);
}

void
alist_init(alist_t bl, alist_blk_t blocks,
	   almeta_t *records, alist_blk_t nrecords)
{
	alist_blk_t radix;
	alist_blk_t skip = 0;

	/*
	 * Calculate radix and skip field used for scanning.
	 */
	radix = ALIST_BMAP_RADIX;

	while (radix < blocks) {
		radix *= ALIST_META_RADIX;
		skip = (skip + 1) * ALIST_META_RADIX;
	}
	bzero(bl, sizeof(*bl));

	bl->bl_blocks = blocks;
	bl->bl_radix = radix;
	bl->bl_skip = skip;
	bl->bl_rootblks = 1 + alst_radix_init(NULL, 0, bl->bl_radix,
					      bl->bl_skip, blocks);
	KKASSERT(bl->bl_rootblks <= nrecords);
	bl->bl_root = records;

#if defined(ALIST_DEBUG)
	kprintf(
		"ALIST representing %d blocks (%d MB of swap)"
		", requiring %dK (%d bytes) of ram\n",
		bl->bl_blocks,
		bl->bl_blocks * 4 / 1024,
		(bl->bl_rootblks * sizeof(almeta_t) + 1023) / 1024,
		(bl->bl_rootblks * sizeof(almeta_t))
	);
	kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
	alst_radix_init(bl->bl_root, 0, bl->bl_radix, bl->bl_skip, blocks);
}

void
alist_destroy(alist_t bl, struct malloc_type *mtype)
{
	kfree(bl->bl_root, mtype);
	kfree(bl, mtype);
}

/*
 * Reserve space in the block bitmap.  Return the base of a contiguous
 * region or ALIST_BLOCK_NONE if space could not be allocated.
 *
 * This nominally allocates a power-of-2 number of blocks.  However,
 * non-powers of 2 are accepted and implemented by first allocating
 * the nearest power of 2 and then freeing the remainder.
 */
alist_blk_t
alist_alloc(alist_t bl, alist_blk_t start, alist_blk_t count)
{
	alist_blk_t blk = ALIST_BLOCK_NONE;

	/*
	 * Check non power-of-2
	 */
	KKASSERT(count);
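	/*
	 * Illustrative note on the test below: for a power of 2 such as
	 * count == 8, count | (count - 1) == 8 | 7 == 15 == (count << 1) - 1,
	 * so the test is false.  For a non-power of 2 such as count == 6,
	 * 6 | 5 == 7 while (6 << 1) - 1 == 11, so the rounding path runs.
	 */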
	if ((count | (count - 1)) != (count << 1) - 1) {
		alist_blk_t ncount = (count < 256) ? 1 : 256;
		while (ncount < count)
			ncount <<= 1;
		blk = alist_alloc(bl, start, ncount);
		if (blk != ALIST_BLOCK_NONE)
			alist_free(bl, blk + count, ncount - count);
		return (blk);
	}
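
	/*
	 * Example of the rounding above (illustrative): count == 100 rounds
	 * ncount up to 128.  If the 128-block allocation succeeds at 'blk',
	 * blocks blk+100 through blk+127 are immediately freed again,
	 * leaving a 100-block allocation at blk.
	 */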

	/*
	 * Power of 2
	 */
	if (bl && count < bl->bl_radix) {
		if (bl->bl_radix == ALIST_BMAP_RADIX) {
			blk = alst_leaf_alloc(bl->bl_root, 0, start, count);
		} else {
			blk = alst_meta_alloc(bl->bl_root, 0, start, count,
					      bl->bl_radix, bl->bl_skip);
		}
		if (blk != ALIST_BLOCK_NONE)
			bl->bl_free -= count;
	}
	return(blk);
}

/*
 * Free up space in the block bitmap.  The starting block and count do not
 * need to be power-of-2 aligned.  The related blocks must be in an allocated
 * state.
 */
void
alist_free(alist_t bl, alist_blk_t blkno, alist_blk_t count)
{
	if (bl) {
		KKASSERT(blkno + count <= bl->bl_blocks);
		if (bl->bl_radix == ALIST_BMAP_RADIX) {
			alst_leaf_free(bl->bl_root, blkno, count);
		} else {
			alst_meta_free(bl->bl_root, blkno, count,
				       bl->bl_radix, bl->bl_skip, 0);
		}
		bl->bl_free += count;
	}
}

/*
 * Returns the current total number of free blocks and the
 * approximate trailing largest contiguous free block available.
 */
alist_blk_t
alist_free_info(alist_t bl, alist_blk_t *startp, alist_blk_t *countp)
{
	alist_blk_t radix = bl->bl_radix;
	alist_blk_t skip = bl->bl_skip;
	alist_blk_t next_skip;
	alist_blk_t i;
	alist_bmap_t mask;
	almeta_t *scan = bl->bl_root;

	*startp = 0;
	*countp = 0;

	while (radix != ALIST_BMAP_RADIX) {
		radix /= ALIST_META_RADIX;
		next_skip = skip / ALIST_META_RADIX;

		/*
		 * Scan the children from the top down for the trailing
		 * chunk with free space, skipping fully allocated chunks.
		 */
		for (i = ALIST_META_RADIX - 1; i != ALIST_BLOCK_NONE; --i) {
			mask = (scan->bm_bitmap >> (i * 2)) & 3;
			if (mask == 0) {
				/*
				 * All allocated, continue the loop
				 */
				continue;
			}
			if (mask == 1) {
				/*
				 * Partially allocated, push into this guy
				 */
				break;
			}
			if (mask == 2) {
				/*
				 * Unknown state
				 */
				return(bl->bl_free);
			}
			/*
			 * All free, we can return the chunk.
			 */
			*startp += i * radix;
			*countp = radix;
			return(bl->bl_free);
		}

		/*
		 * If we failed to find anything stop here, otherwise push
		 * in.
		 */
		if (i == ALIST_BLOCK_NONE)
			return(bl->bl_free);
		*startp += i * radix;
		scan += 1 + next_skip * i;
		skip = next_skip - 1;
	}

	/*
	 * If we got all the way down to a leaf node, locate the last block
	 * that is power-of-2 aligned and power-of-2 sized.  Well, the
	 * easiest way to deal with this is to just return 1 block.
	 */
	if (radix == ALIST_BMAP_RADIX) {
		mask = scan->bm_bitmap;
		for (i = ALIST_BMAP_RADIX - 1; i != ALIST_BLOCK_NONE; --i) {
			if ((mask & ((alist_bmap_t)1U << i)))
				break;
		}

		/*
		 * did not find free entry
		 */
		if (i == ALIST_BLOCK_NONE)
			return(bl->bl_free);

		/*
		 * Return one block.
		 */
		*startp += i;
		*countp = 1;
		return(bl->bl_free);
	}
	return(bl->bl_free);
}
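
/*
 * A typical caller might look like this (hypothetical, illustration only):
 *
 *	alist_blk_t start, count, nfree;
 *
 *	nfree = alist_free_info(bl, &start, &count);
 *
 * On return nfree holds the total number of free blocks and (start, count)
 * approximates the trailing largest contiguous free region.
 */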

#ifdef ALIST_DEBUG

/*
 * alist_print() - dump radix tree
 */

void
alist_print(alist_t bl)
{
	kprintf("ALIST {\n");
	alst_radix_print(bl->bl_root, 0, bl->bl_radix, bl->bl_skip, 4);
	kprintf("}\n");
}

#endif

/************************************************************************
 *			ALLOCATION SUPPORT FUNCTIONS			*
 ************************************************************************
 *
 *	These support functions do all the actual work.  They may seem
 *	rather longish, but that's because I've commented them up.  The
 *	actual code is straightforward.
 *
 */

/*
 * alst_leaf_alloc() - allocate at a leaf in the radix tree (a bitmap).
 *
 *	This is the core of the allocator and is optimized for the 1 block
 *	and the ALIST_BMAP_RADIX block allocation cases.  Other cases are
 *	somewhat slower.  The 1 block allocation case is log2 and extremely
 *	quick.
 *
 *	mask bit is 0	allocated, not available
 *	mask bit is 1	free, available for allocation
 */

static alist_blk_t
alst_leaf_alloc(almeta_t *scan, alist_blk_t blk, alist_blk_t start,
		alist_blk_t count)
{
	alist_bmap_t orig = scan->bm_bitmap;

	/*
	 * Allocate only beyond the start point.  Mask to 0 the low bits
	 * below start.  If start == blk no bits get cleared so don't
	 * bother.
	 */
	if (start >= blk + ALIST_BMAP_RADIX)
		return(ALIST_BLOCK_NONE);

	if (start > blk && start < blk + ALIST_BMAP_RADIX)
		orig &= ~(((alist_bmap_t)1U << (start - blk)) - 1);

	start &= ALIST_BMAP_RADIX - 1;

	/*
	 * Optimize bitmap all-allocated case.  Also, the count = 1
	 * case assumes at least 1 bit is free in the bitmap, so
	 * we have to take care of this case here.
	 */
	if (orig == 0) {
		if (start <= blk)
			scan->bm_bighint = 0;
		return(ALIST_BLOCK_NONE);
	}

	/*
	 * Optimized code to allocate one bit out of the bitmap
	 */
	if (count == 1) {
		alist_bmap_t mask;
		alist_blk_t j = ALIST_BMAP_RADIX/2;
		alist_blk_t r = 0;

		mask = (alist_bmap_t)-1 >> (ALIST_BMAP_RADIX/2);

		while (j) {
			if ((orig & mask) == 0) {
				r += j;
				orig >>= j;
			}
			j >>= 1;
			mask >>= j;
		}
		scan->bm_bitmap &= ~(1 << r);
		return(blk + r);
	}
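
	/*
	 * Illustrative trace of the search above: it homes in on the lowest
	 * free (set) bit in log2(ALIST_BMAP_RADIX) steps.  With only bits 3
	 * and 20 free (orig == 0x00100008), the low-half tests stay non-zero
	 * until j == 2, at which point r becomes 2, then 3, and blk + 3 is
	 * returned with that bit cleared in bm_bitmap.
	 */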

	/*
	 * non-optimized code to allocate N bits out of the bitmap.
	 * The more bits, the faster the code runs.  It will run
	 * the slowest allocating 2 bits, but since there aren't any
	 * memory ops in the core loop (or shouldn't be, anyway),
	 * you probably won't notice the difference.
	 *
	 * Similar to the blist case, the alist code also requires
	 * allocations to be power-of-2 sized and aligned to the
	 * size of the allocation, which simplifies the algorithm.
	 */
	{
		alist_blk_t j;
		alist_blk_t n = ALIST_BMAP_RADIX - count;
		alist_bmap_t mask;

		mask = (alist_bmap_t)-1 >> n;

		for (j = 0; j <= n; j += count) {
			if ((orig & mask) == mask) {
				scan->bm_bitmap &= ~mask;
				return(blk + j);
			}
			mask = mask << count;
		}
	}
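
	/*
	 * For example (illustrative): with count == 4 the initial mask is
	 * 0x0000000F and the loop above tests the aligned 4-bit fields at
	 * bit offsets 0, 4, 8, ... 28, claiming the first one that is
	 * entirely free.
	 */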

	/*
	 * We couldn't allocate count in this subtree, update bighint
	 * if we were able to check the entire node.
	 */
	if (start <= blk)
		scan->bm_bighint = count - 1;
	return(ALIST_BLOCK_NONE);
}

/*
 * Attempt to allocate at a meta node.  If we can't, we update
 * bighint and return a failure.  Updating bighint optimizes future
 * calls that hit this node.  We have to check for our collapse cases
 * and we have a few optimizations strewn in as well.
 */
static alist_blk_t
alst_meta_alloc(almeta_t *scan, alist_blk_t blk, alist_blk_t start,
		alist_blk_t count, alist_blk_t radix, alist_blk_t skip)
{
	alist_blk_t i;
	alist_bmap_t mask;
	alist_bmap_t pmask;
	alist_blk_t next_skip = ((u_int)skip / ALIST_META_RADIX);
	alist_blk_t orig_blk;

	/*
	 * ALL-ALLOCATED special case
	 */
	if (scan->bm_bitmap == 0) {
		scan->bm_bighint = 0;
		return(ALIST_BLOCK_NONE);
	}

	radix /= ALIST_META_RADIX;

	/*
	 * Radix now represents each bitmap entry for this meta node.  If
	 * the number of blocks being allocated can be fully represented,
	 * we allocate directly out of this meta node.
	 *
	 * Meta node bitmaps use 2 bits per block.
	 *
	 *	00	ALL-ALLOCATED
	 *	01	PARTIALLY-FREE/PARTIALLY-ALLOCATED
	 *	10	(RESERVED)
	 *	11	ALL-FREE
	 */
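	/*
	 * Illustrative example of the direct-allocation case below: if each
	 * child covers radix == 512 blocks and count == 1024, then n == 4
	 * bits (two 2-bit fields), mask starts at 0x0000000F, and the loop
	 * steps two children at a time looking for a pair marked ALL-FREE
	 * (11 11) at or beyond the start point.
	 */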
	if (count >= radix) {
		alist_blk_t n = count / radix * 2;	/* number of bits */
		alist_blk_t j;

		mask = (alist_bmap_t)-1 >> (ALIST_BMAP_RADIX - n);
		for (j = 0; j < ALIST_META_RADIX; j += n / 2) {
			if ((scan->bm_bitmap & mask) == mask &&
			    blk + j * radix >= start) {
				scan->bm_bitmap &= ~mask;
				return(blk + j * radix);
			}
			mask <<= n;
		}
		if (scan->bm_bighint >= count && start <= blk)
			scan->bm_bighint = count >> 1;
		return(ALIST_BLOCK_NONE);
	}

	/*
	 * If not we have to recurse.
	 */
	mask = 0x00000003;
	pmask = 0x00000001;
	orig_blk = blk;
	for (i = 1; i <= skip; i += next_skip) {
		if (scan[i].bm_bighint == (alist_blk_t)-1) {
			/*
			 * Terminator
			 */
			break;
		}

		/*
		 * If the element is marked completely free (11), initialize
		 * the recursion.
		 */
		if ((scan->bm_bitmap & mask) == mask) {
			scan[i].bm_bitmap = (alist_bmap_t)-1;
			scan[i].bm_bighint = radix;
		}

		if ((scan->bm_bitmap & mask) == 0) {
			/*
			 * Object marked completely allocated, recursion
			 * contains garbage.
			 */
			/* Skip it */
		} else if (blk + radix <= start) {
			/*
			 * Object lies entirely before our start point.
			 */
			/* Skip it */
		} else if (count <= scan[i].bm_bighint) {
			/*
			 * count fits in object.  If successful and the
			 * deeper level becomes all allocated, mark our
			 * level as all-allocated.
			 */
			alist_blk_t r;
			if (next_skip == 1) {
				r = alst_leaf_alloc(&scan[i], blk, start,
						    count);
			} else {
				r = alst_meta_alloc(&scan[i], blk, start,
						    count,
						    radix, next_skip - 1);
			}
			if (r != ALIST_BLOCK_NONE) {
				if (scan[i].bm_bitmap == 0) {
					scan->bm_bitmap &= ~mask;
				} else {
					scan->bm_bitmap &= ~mask;
					scan->bm_bitmap |= pmask;
				}
				return(r);
			}
		}
		blk += radix;
		mask <<= 2;
		pmask <<= 2;
	}

	/*
	 * We couldn't allocate count in this subtree, update bighint
	 * if we were able to check the entire node.
	 */
	if (scan->bm_bighint >= count && start <= orig_blk)
		scan->bm_bighint = count >> 1;
	return(ALIST_BLOCK_NONE);
}

/*
 * Free allocated block from leaf bitmap
 */
static void
alst_leaf_free(almeta_t *scan, alist_blk_t blk, alist_blk_t count)
{
	/*
	 * free some data in this bitmap
	 *
	 * e.g.
	 *	0000111111111110000
	 *	    \_________/\__/
	 *		v	 n
	 */
	alist_blk_t n = blk & (ALIST_BMAP_RADIX - 1);
	alist_bmap_t mask;

	mask = ((alist_bmap_t)-1 << n) &
	       ((alist_bmap_t)-1 >> (ALIST_BMAP_RADIX - count - n));
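
	/*
	 * Example of the mask construction above (illustrative): freeing
	 * count == 4 blocks at relative block 5 gives n == 5 and
	 * mask == 0xFFFFFFE0 & 0x000001FF == 0x000001E0, i.e. bits 5..8.
	 */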

	if (scan->bm_bitmap & mask)
		panic("%s: freeing free block", __func__);
	scan->bm_bitmap |= mask;

	/*
	 * We could probably do a better job here.  We are required to make
	 * bighint at least as large as the biggest contiguous block of
	 * data.  If we just shoehorn it, a little extra overhead will
	 * be incurred on the next allocation (but only that one typically).
	 */
	scan->bm_bighint = ALIST_BMAP_RADIX;
}

/*
 * Free allocated blocks from radix tree meta info.  This support routine
 * frees a range of blocks from the bitmap.  The range must be entirely
 * enclosed by this radix node.  If a meta node, we break the range down
 * recursively to free blocks in subnodes (which means that this code
 * can free an arbitrary range whereas the allocation code cannot allocate
 * an arbitrary range).
 */
static void
alst_meta_free(almeta_t *scan, alist_blk_t freeBlk, alist_blk_t count,
	       alist_blk_t radix, alist_blk_t skip, alist_blk_t blk)
{
	alist_blk_t next_skip = ((u_int)skip / ALIST_META_RADIX);
	alist_bmap_t mask;
	alist_bmap_t pmask;
	alist_blk_t i;

	/*
	 * Break the free down into its components.  Because it is so easy
	 * to implement, frees are not limited to power-of-2 sizes.
	 *
	 * Each block in a meta-node bitmap takes two bits.
	 */
	radix /= ALIST_META_RADIX;

	i = (freeBlk - blk) / radix;
	blk += i * radix;
	mask = 0x00000003 << (i * 2);
	pmask = 0x00000001 << (i * 2);

	i = i * next_skip + 1;
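
	/*
	 * Index arithmetic example (illustrative): with a per-child radix
	 * of 32, next_skip == 1, node base blk == 0 and freeBlk == 70, the
	 * child index is 70 / 32 == 2, blk becomes 64, mask/pmask cover that
	 * child's 2-bit field (bits 4-5), and the array offset of the child
	 * record is 2 * 1 + 1 == 3.
	 */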

	while (i <= skip && blk < freeBlk + count) {
		alist_blk_t v;

		v = blk + radix - freeBlk;
		if (v > count)
			v = count;

		if (scan->bm_bighint == (alist_blk_t)-1)
			panic("%s: freeing unexpected range", __func__);

		/*
		 * WARNING on bighint updates.  When we free an element in
		 * a chunk, if the chunk becomes wholly free it is possible
		 * that the whole node is now free, so bighint must be set
		 * to cover the whole node.  Otherwise address-specific
		 * allocations may fail.
		 *
		 * We don't bother figuring out how much of the node is
		 * actually free in this case.
		 */
		if (freeBlk == blk && count >= radix) {
			/*
			 * The area being freed covers the entire block,
			 * assert that we are marked all-allocated and
			 * then mark it all-free.
			 */
			KKASSERT((scan->bm_bitmap & mask) == 0);
			scan->bm_bitmap |= mask;
			scan->bm_bighint = radix * ALIST_META_RADIX;
		} else {
			/*
			 * If we were previously marked all-allocated, fix-up
			 * the next layer so we can recurse down into it.
			 */
			if ((scan->bm_bitmap & mask) == 0) {
				scan[i].bm_bitmap = (alist_bmap_t)0;
				scan[i].bm_bighint = 0;
			}

			/*
			 * Recursion case, then either mark all-free or
			 * partially free.
			 */
			if (next_skip == 1) {
				alst_leaf_free(&scan[i], freeBlk, v);
			} else {
				alst_meta_free(&scan[i], freeBlk, v,
					       radix, next_skip - 1, blk);
			}
			if (scan[i].bm_bitmap == (alist_bmap_t)-1) {
				scan->bm_bitmap |= mask;
				scan->bm_bighint = radix * ALIST_META_RADIX;
			} else {
				scan->bm_bitmap &= ~mask;
				scan->bm_bitmap |= pmask;
				if (scan->bm_bighint < scan[i].bm_bighint)
					scan->bm_bighint = scan[i].bm_bighint;
			}
		}
		mask <<= 2;
		pmask <<= 2;
		count -= v;
		freeBlk += v;
		blk += radix;
		i += next_skip;
	}
}

/*
 * Initialize our meta structures and bitmaps and calculate the exact
 * amount of space required to manage 'count' blocks - this space may
 * be considerably less than the calculated radix due to the large
 * RADIX values we use.
 */
static alist_blk_t
alst_radix_init(almeta_t *scan, alist_blk_t blk, alist_blk_t radix,
		alist_blk_t skip, alist_blk_t count)
{
	alist_blk_t i;
	alist_blk_t next_skip;
	alist_bmap_t mask;
	alist_bmap_t pmask;
	alist_blk_t memindex;

	/*
	 * Leaf node
	 */
	if (radix == ALIST_BMAP_RADIX) {
		if (scan) {
			scan->bm_bighint = 0;
			scan->bm_bitmap = 0;
		}
		return(0);
	}

	/*
	 * Meta node.  If allocating the entire object we can special
	 * case it.  However, we need to figure out how much memory
	 * is required to manage 'count' blocks, so we continue on anyway.
	 */
	if (scan) {
		scan->bm_bighint = 0;
		scan->bm_bitmap = 0;
	}
	memindex = 0;

	radix /= ALIST_META_RADIX;
	next_skip = skip / ALIST_META_RADIX;
	mask = 0x00000003;
	pmask = 0x00000001;

	for (i = 1; i <= skip; i += next_skip) {
		if (count >= blk + radix) {
			/*
			 * Allocate the entire object
			 */
			memindex += alst_radix_init(((scan) ? &scan[i] : NULL),
						    blk, radix,
						    next_skip - 1, count);
			/* already marked as wholly allocated */
		} else if (count > blk) {
			/*
			 * Allocate a partial object.  It is really
			 * all-allocated; we just aren't allowed to
			 * free the whole thing.
			 */
			memindex += alst_radix_init(((scan) ? &scan[i] : NULL),
						    blk, radix,
						    next_skip - 1, count);
			/* already marked as wholly allocated */
		} else {
			/*
			 * Add terminator but continue the loop.  Populate
			 * all terminators.
			 */
			if (scan) {
				scan[i].bm_bighint = (alist_blk_t)-1;
				scan[i].bm_bitmap = 0;
			}
			/* already marked as wholly allocated */
		}
		mask <<= 2;
		pmask <<= 2;
		blk += radix;
	}
	memindex += ALIST_META_RADIX;
	return(memindex);
}

#ifdef ALIST_DEBUG

static void
alst_radix_print(almeta_t *scan, alist_blk_t blk, alist_blk_t radix,
		 alist_blk_t skip, int tab)
{
	alist_blk_t i;
	alist_blk_t next_skip;
	alist_bmap_t mask;

	if (radix == ALIST_BMAP_RADIX) {
		kprintf(
		    "%*.*s(%04x,%d): bitmap %08x big=%d\n",
		    tab, tab, "",
		    blk, radix,
		    scan->bm_bitmap,
		    scan->bm_bighint
		);
		return;
	}

	if (scan->bm_bitmap == 0) {
		kprintf(
		    "%*.*s(%04x,%d) ALL ALLOCATED\n",
		    tab, tab, "",
		    blk,
		    radix
		);
		return;
	}
	if (scan->bm_bitmap == (alist_bmap_t)-1) {
		kprintf(
		    "%*.*s(%04x,%d) ALL FREE\n",
		    tab, tab, "",
		    blk,
		    radix
		);
		return;
	}

	kprintf(
	    "%*.*s(%04x,%d): subtree (%d) bitmap=%08x big=%d {\n",
	    tab, tab, "",
	    blk, radix,
	    radix,
	    scan->bm_bitmap,
	    scan->bm_bighint
	);

	radix /= ALIST_META_RADIX;
	next_skip = skip / ALIST_META_RADIX;
	tab += 4;
	mask = 0x00000003;

	for (i = 1; i <= skip; i += next_skip) {
		if (scan[i].bm_bighint == (alist_blk_t)-1) {
			kprintf(
			    "%*.*s(%04x,%d): Terminator\n",
			    tab, tab, "",
			    blk, radix
			);
			break;
		}
		if ((scan->bm_bitmap & mask) == mask) {
			kprintf(
			    "%*.*s(%04x,%d): ALL FREE\n",
			    tab, tab, "",
			    blk, radix
			);
		} else if ((scan->bm_bitmap & mask) == 0) {
			kprintf(
			    "%*.*s(%04x,%d): ALL ALLOCATED\n",
			    tab, tab, "",
			    blk, radix
			);
		} else {
			alst_radix_print(
			    &scan[i],
			    blk,
			    radix,
			    next_skip - 1,
			    tab
			);
		}
		blk += radix;
		mask <<= 2;
	}
	tab -= 4;

	kprintf("%*.*s}\n", tab, tab, "");
}

#endif

#ifdef ALIST_DEBUG

int
main(int ac, char **av)
{
	alist_blk_t size = 1024;
	alist_blk_t da = 0;
	alist_blk_t count = 0;
	alist_t bl;
	int i;

	for (i = 1; i < ac; ++i) {
		const char *ptr = av[i];
		if (*ptr != '-') {
			size = strtol(ptr, NULL, 0);
			continue;
		}
		ptr += 2;
		fprintf(stderr, "Bad option: %s\n", ptr - 2);
		exit(1);
	}
	bl = alist_create(size, NULL);
	alist_free(bl, 0, size);

	for (;;) {
		char buf[1024];
		alist_blk_t bfree;

		kprintf("%d/%d/%d> ", bl->bl_free, size, bl->bl_radix);
		fflush(stdout);
		if (fgets(buf, sizeof(buf), stdin) == NULL)
			break;
		switch(buf[0]) {
		case 'p':
			alist_print(bl);
			break;
		case 'a':
			if (sscanf(buf + 1, "%x %d", &da, &count) == 2) {
				da = alist_alloc(bl, da, count);
				kprintf(" R=%04x\n", da);
			} else if (sscanf(buf + 1, "%d", &count) == 1) {
				da = alist_alloc(bl, 0, count);
				kprintf(" R=%04x\n", da);
			} else if (count) {
				kprintf("alloc 0x%04x/%d\n", da, count);
				alist_blk_t blk = alist_alloc(bl, da, count);
				kprintf(" R=%04x\n", blk);
			} else {
				kprintf("?\n");
			}
			break;
		case 'f':
			if (sscanf(buf + 1, "%x %d", &da, &count) == 2) {
				alist_free(bl, da, count);
			} else if (count) {
				kprintf("free 0x%04x/%d\n", da, count);
				alist_free(bl, da, count);
			} else {
				kprintf("?\n");
			}
			break;
		case '?':
		case 'h':
			puts("p -print\n"
			     "a %d -allocate\n"
			     "f %x %d -free\n"
			     "h/? -help");
			break;
		case 'i':
			bfree = alist_free_info(bl, &da, &count);
			kprintf("info: %d free trailing: 0x%04x/%d\n",
				bfree, da, count);
			break;
		default:
			kprintf("?\n");
			break;
		}
	}
	return(0);
}

void
panic(const char *ctl, ...)
{
	__va_list va;

	__va_start(va, ctl);
	vfprintf(stderr, ctl, va);
	fprintf(stderr, "\n");
	__va_end(va);
	exit(1);
}

#endif