/*
 * ALIST.C -	Bitmap allocator/deallocator, using a radix tree with hinting.
 *		Unlimited-size allocations, power-of-2 only, power-of-2
 *		aligned results only.
 *
 * Copyright (c) 2007 The DragonFly Project.  All rights reserved.
 *
 * This code is derived from software contributed to The DragonFly Project
 * by Matthew Dillon <dillon@backplane.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name of The DragonFly Project nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific, prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
/*
 * This module has been adapted from the BLIST module, which was written
 * by Matthew Dillon many years ago.
 *
 * This module implements a general power-of-2 bitmap allocator/deallocator.
 * All allocations must be in powers of 2 and will return similarly aligned
 * results.  The module does not try to interpret the meaning of a 'block'
 * other than to return ALIST_BLOCK_NONE on an allocation failure.
 *
 * A maximum of 2 billion blocks is supported so, for example, if one block
 * represented 64 bytes a maximally sized ALIST would represent
 * 128 gigabytes.
 *
 * A radix tree is used to maintain the bitmap and is laid out in a manner
 * similar to the blist code.  Meta nodes use a radix of 16 and 2 bits per
 * block while leaf nodes use a radix of 32 and 1 bit per block (stored in
 * a 32 bit bitmap field).  Both meta and leaf nodes have a hint field.
 * This field gives us a hint as to the largest free contiguous range of
 * blocks under the node.  It may contain a value that is too high, but
 * will never contain a value that is too low.  When the radix tree is
 * searched, allocation failures in subtrees update the hint.
 *
 * The radix tree is laid out recursively using a linear array.  Each meta
 * node is immediately followed (laid out sequentially in memory) by
 * ALIST_META_RADIX lower level nodes.  This is a recursive structure but one
 * that can be easily scanned through a very simple 'skip' calculation.  In
 * order to support large radixes, portions of the tree may reside outside our
 * memory allocation.  We handle this with an early-terminate optimization
 * in the meta-node.  The memory allocation is only large enough to cover
 * the number of blocks requested at creation time even if it must be
 * encompassed in a larger root-node radix.
 *
 * This code can be compiled stand-alone for debugging.
 */
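
/*
 * Illustrative layout (values follow from ALIST_BMAP_RADIX == 32 and
 * ALIST_META_RADIX == 16): a two-level tree managing up to 512 blocks
 * has radix 512 and skip 16 and is stored as a linear array:
 *
 *	node[0]		meta node, radix 512, covers blocks 0-511
 *	node[1]		leaf node, radix 32,  covers blocks 0-31
 *	node[2]		leaf node, radix 32,  covers blocks 32-63
 *	...
 *	node[16]	leaf node, radix 32,  covers blocks 480-511
 *
 * With deeper trees each child subtree occupies next_skip = skip /
 * ALIST_META_RADIX consecutive array slots, so sibling subtrees sit a
 * constant 'next_skip' apart and can be stepped over without pointers.
 */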

#ifdef _KERNEL

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/lock.h>
#include <sys/kernel.h>
#include <sys/alist.h>
#include <sys/malloc.h>
#include <vm/vm.h>
#include <vm/vm_object.h>
#include <vm/vm_kern.h>
#include <vm/vm_extern.h>
#include <vm/vm_page.h>

#else

#ifndef ALIST_NO_DEBUG
#define ALIST_DEBUG
#endif

#include <sys/types.h>
#include <stdio.h>
#include <assert.h>
#include <string.h>
#include <stdlib.h>
#include <stdarg.h>

#define kmalloc(a,b,c)	malloc(a)
#define kfree(a,b)	free(a)
#define kprintf		printf
#define KKASSERT(exp)	assert(exp)
struct malloc_type;

typedef unsigned int u_daddr_t;

#include <sys/alist.h>

void panic(const char *ctl, ...);

#endif

/*
 * static support functions
 */

static daddr_t alst_leaf_alloc(almeta_t *scan, daddr_t blk, int count);
static daddr_t alst_meta_alloc(almeta_t *scan, daddr_t blk,
				daddr_t count, daddr_t radix, int skip);
static void alst_leaf_free(almeta_t *scan, daddr_t relblk, int count);
static void alst_meta_free(almeta_t *scan, daddr_t freeBlk, daddr_t count,
					daddr_t radix, int skip, daddr_t blk);
static daddr_t	alst_radix_init(almeta_t *scan, daddr_t radix,
						int skip, daddr_t count);
#ifndef _KERNEL
static void	alst_radix_print(almeta_t *scan, daddr_t blk,
					daddr_t radix, int skip, int tab);
#endif

/*
 * alist_create() - create an alist capable of handling up to the specified
 *		    number of blocks
 *
 *	blocks must be greater than 0
 *
 *	The smallest alist consists of a single leaf node capable of
 *	managing ALIST_BMAP_RADIX blocks.
 */

alist_t
alist_create(daddr_t blocks, struct malloc_type *mtype)
{
	alist_t bl;
	int radix;
	int skip = 0;

	/*
	 * Calculate radix and skip field used for scanning.
	 */
	radix = ALIST_BMAP_RADIX;

	while (radix < blocks) {
		radix *= ALIST_META_RADIX;
		skip = (skip + 1) * ALIST_META_RADIX;
	}
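
	/*
	 * Worked example (ALIST_BMAP_RADIX == 32, ALIST_META_RADIX == 16):
	 * for blocks = 1024 the loop runs twice, leaving radix = 8192 and
	 * skip = ((0 + 1) * 16 + 1) * 16 = 272.  The root radix (8192)
	 * covers far more than 1024 blocks; alst_radix_init() below sizes
	 * the array for just the blocks actually managed and plants a
	 * terminator after them.
	 */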

	bl = kmalloc(sizeof(struct alist), mtype, M_WAITOK | M_ZERO);

	bl->bl_blocks = blocks;
	bl->bl_radix = radix;
	bl->bl_skip = skip;
	bl->bl_rootblks = 1 +
	    alst_radix_init(NULL, bl->bl_radix, bl->bl_skip, blocks);
	bl->bl_root = kmalloc(sizeof(almeta_t) * bl->bl_rootblks,
			      mtype, M_WAITOK);

#if defined(ALIST_DEBUG)
	kprintf(
		"ALIST representing %d blocks (%d MB of swap)"
		", requiring %dK (%d bytes) of ram\n",
		bl->bl_blocks,
		bl->bl_blocks * 4 / 1024,
		(int)((bl->bl_rootblks * sizeof(almeta_t) + 1023) / 1024),
		(int)(bl->bl_rootblks * sizeof(almeta_t))
	);
	kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
	alst_radix_init(bl->bl_root, bl->bl_radix, bl->bl_skip, blocks);

	return(bl);
}

void
alist_destroy(alist_t bl, struct malloc_type *mtype)
{
	kfree(bl->bl_root, mtype);
	kfree(bl, mtype);
}

/*
 * alist_alloc() - reserve space in the block bitmap.  Return the base
 *		   of a contiguous region or ALIST_BLOCK_NONE if space
 *		   could not be allocated.
 */

daddr_t
alist_alloc(alist_t bl, daddr_t count)
{
	daddr_t blk = ALIST_BLOCK_NONE;

	/*
	 * count must be a power of 2
	 */
	KKASSERT((count | (count - 1)) == (count << 1) - 1);

	if (bl && count < bl->bl_radix) {
		if (bl->bl_radix == ALIST_BMAP_RADIX)
			blk = alst_leaf_alloc(bl->bl_root, 0, count);
		else
			blk = alst_meta_alloc(bl->bl_root, 0, count,
					      bl->bl_radix, bl->bl_skip);
		if (blk != ALIST_BLOCK_NONE)
			bl->bl_free -= count;
	}
	return(blk);
}
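
/*
 * Usage sketch (illustrative only; M_TEMP stands in for whatever malloc
 * type the caller uses).  Note that a freshly created alist starts out
 * all-allocated, so the caller typically frees the managed range first,
 * as the standalone main() below also does:
 *
 *	alist_t bl = alist_create(1024, M_TEMP);
 *	alist_free(bl, 0, 1024);
 *	daddr_t blk = alist_alloc(bl, 16);
 *	if (blk != ALIST_BLOCK_NONE)
 *		alist_free(bl, blk, 16);
 *	alist_destroy(bl, M_TEMP);
 */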

/*
 * alist_free() -	free up space in the block bitmap.  The freed range
 *			must have been previously allocated.  Panic if an
 *			inconsistency is found.
 */

void
alist_free(alist_t bl, daddr_t blkno, daddr_t count)
{
	if (bl) {
		KKASSERT(blkno + count <= bl->bl_blocks);
		if (bl->bl_radix == ALIST_BMAP_RADIX)
			alst_leaf_free(bl->bl_root, blkno, count);
		else
			alst_meta_free(bl->bl_root, blkno, count,
				       bl->bl_radix, bl->bl_skip, 0);
		bl->bl_free += count;
	}
}

#ifdef ALIST_DEBUG

/*
 * alist_print()    - dump radix tree
 */

void
alist_print(alist_t bl)
{
	kprintf("ALIST {\n");
	alst_radix_print(bl->bl_root, 0, bl->bl_radix, bl->bl_skip, 4);
	kprintf("}\n");
}

#endif

/************************************************************************
 *			  ALLOCATION SUPPORT FUNCTIONS			*
 ************************************************************************
 *
 *	These support functions do all the actual work.  They may seem
 *	rather longish, but that's because I've commented them up.  The
 *	actual code is straightforward.
 *
 */

/*
 * alst_leaf_alloc() -	allocate at a leaf in the radix tree (a bitmap).
 *
 *	This is the core of the allocator and is optimized for the 1 block
 *	and the ALIST_BMAP_RADIX block allocation cases.  Other cases are
 *	somewhat slower.  The 1 block allocation case is log2 and extremely
 *	quick.
 */

static daddr_t
alst_leaf_alloc(
	almeta_t *scan,
	daddr_t blk,
	int count
) {
	u_daddr_t orig = scan->bm_bitmap;

	/*
	 * Optimize bitmap all-allocated case.  Also, count = 1
	 * case assumes at least 1 bit is free in the bitmap, so
	 * we have to take care of this case here.
	 */
	if (orig == 0) {
		scan->bm_bighint = 0;
		return(ALIST_BLOCK_NONE);
	}

	/*
	 * Optimized code to allocate one bit out of the bitmap.
	 * This is a binary search for the lowest set bit: if the
	 * low half of the remaining window is all-allocated we
	 * step over it, halving the window each iteration.
	 */
	if (count == 1) {
		u_daddr_t mask;
		int j = ALIST_BMAP_RADIX/2;
		int r = 0;

		mask = (u_daddr_t)-1 >> (ALIST_BMAP_RADIX/2);

		while (j) {
			if ((orig & mask) == 0) {
				r += j;
				orig >>= j;
			}
			j >>= 1;
			mask >>= j;
		}
		scan->bm_bitmap &= ~((u_daddr_t)1 << r);
		return(blk + r);
	}

	/*
	 * non-optimized code to allocate N bits out of the bitmap.
	 * The more bits, the faster the code runs.  It will run
	 * the slowest allocating 2 bits, but since there aren't any
	 * memory ops in the core loop (or shouldn't be, anyway),
	 * you probably won't notice the difference.
	 *
	 * Similar to the blist case, the alist code also requires
	 * allocations to be power-of-2 sized and aligned to the
	 * size of the allocation, which simplifies the algorithm.
	 */
	{
		int j;
		int n = ALIST_BMAP_RADIX - count;
		u_daddr_t mask;

		mask = (u_daddr_t)-1 >> n;

		for (j = 0; j <= n; j += count) {
			if ((orig & mask) == mask) {
				scan->bm_bitmap &= ~mask;
				return(blk + j);
			}
			mask = mask << count;
		}
	}

	/*
	 * We couldn't allocate count in this subtree, update bighint.
	 */
	scan->bm_bighint = count - 1;
	return(ALIST_BLOCK_NONE);
}
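
/*
 * Worked example for the N-bit scan above: with count = 4 the initial
 * mask is 0x0000000F and j steps 0, 4, 8, ..., 28, so only naturally
 * aligned 4-bit windows are tested.  That is what guarantees the
 * power-of-2 alignment of the returned block.
 */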

/*
 * alst_meta_alloc() -	allocate at a meta node in the radix tree.
 *
 *	Attempt to allocate at a meta node.  If we can't, we update
 *	bighint and return a failure.  Updating bighint optimizes future
 *	calls that hit this node.  We have to check for our collapse cases
 *	and we have a few optimizations strewn in as well.
 */

static daddr_t
alst_meta_alloc(
	almeta_t *scan,
	daddr_t blk,
	daddr_t count,
	daddr_t radix,
	int skip
) {
	int i;
	u_daddr_t mask;
	u_daddr_t pmask;
	int next_skip = ((u_int)skip / ALIST_META_RADIX);

	/*
	 * ALL-ALLOCATED special case
	 */
	if (scan->bm_bitmap == 0)  {
		scan->bm_bighint = 0;
		return(ALIST_BLOCK_NONE);
	}

	radix /= ALIST_META_RADIX;

	/*
	 * Radix now represents each bitmap entry for this meta node.  If
	 * the number of blocks being allocated can be fully represented,
	 * we allocate directly out of this meta node.
	 *
	 * Meta node bitmaps use 2 bits per block.
	 *
	 *	00	ALL-ALLOCATED
	 *	01	PARTIALLY-FREE/PARTIALLY-ALLOCATED
	 *	10	(RESERVED)
	 *	11	ALL-FREE
	 */
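
	/*
	 * Example (illustrative): with a child radix of 32, a count of 64
	 * needs n = 64 / 32 * 2 = 4 mask bits, i.e. two adjacent 11
	 * fields.  The scan below then tests children {0,1}, {2,3}, ...,
	 * keeping the result aligned to the allocation size.
	 */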
	if (count >= radix) {
		int n = count / radix * 2;	/* number of bits */
		int j;

		mask = (u_daddr_t)-1 >> (ALIST_BMAP_RADIX - n);
		for (j = 0; j < ALIST_META_RADIX; j += n / 2) {
			if ((scan->bm_bitmap & mask) == mask) {
				scan->bm_bitmap &= ~mask;
				return(blk + j * radix);
			}
			mask <<= n;
		}
		if (scan->bm_bighint >= count)
			scan->bm_bighint = count >> 1;
		return(ALIST_BLOCK_NONE);
	}

	/*
	 * If not, we have to recurse.
	 */
	mask = 0x00000003;
	pmask = 0x00000001;
	for (i = 1; i <= skip; i += next_skip) {
		if (scan[i].bm_bighint == (daddr_t)-1) {
			/*
			 * Terminator
			 */
			break;
		}

		/*
		 * If the element is marked completely free (11), initialize
		 * the recursion.
		 */
		if ((scan->bm_bitmap & mask) == mask) {
			scan[i].bm_bitmap = (u_daddr_t)-1;
			scan[i].bm_bighint = radix;
		}

		if ((scan->bm_bitmap & mask) == 0) {
			/*
			 * Object marked completely allocated, recursion
			 * contains garbage.
			 */
			/* Skip it */
		} else if (count <= scan[i].bm_bighint) {
			/*
			 * count fits in object
			 */
			daddr_t r;
			if (next_skip == 1) {
				r = alst_leaf_alloc(&scan[i], blk, count);
			} else {
				r = alst_meta_alloc(&scan[i], blk, count,
						    radix, next_skip - 1);
			}
			if (r != ALIST_BLOCK_NONE) {
				if (scan[i].bm_bitmap == 0) {
					/* child now all-allocated (00) */
					scan->bm_bitmap &= ~mask;
				} else {
					/* child now partially free (01) */
					scan->bm_bitmap &= ~mask;
					scan->bm_bitmap |= pmask;
				}
				return(r);
			}
		}
		blk += radix;
		mask <<= 2;
		pmask <<= 2;
	}

	/*
	 * We couldn't allocate count in this subtree, update bighint.
	 */
	if (scan->bm_bighint >= count)
		scan->bm_bighint = count >> 1;
	return(ALIST_BLOCK_NONE);
}

/*
 * ALST_LEAF_FREE() -	free allocated block from leaf bitmap
 */
static void
alst_leaf_free(
	almeta_t *scan,
	daddr_t blk,
	int count
) {
	/*
	 * free some data in this bitmap
	 *
	 * e.g.
	 *	0000111111111110000
	 *	    \_________/\__/
	 *	       count     n
	 */
	int n = blk & (ALIST_BMAP_RADIX - 1);
	u_daddr_t mask;

	mask = ((u_daddr_t)-1 << n) &
	    ((u_daddr_t)-1 >> (ALIST_BMAP_RADIX - count - n));

	if (scan->bm_bitmap & mask)
		panic("alst_leaf_free: freeing free block");
	scan->bm_bitmap |= mask;

	/*
	 * We could probably do a better job here.  We are required to make
	 * bighint at least as large as the biggest contiguous block of
	 * data.  If we just shoehorn it, a little extra overhead will
	 * be incurred on the next allocation (but only that one typically).
	 */
	scan->bm_bighint = ALIST_BMAP_RADIX;
}
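
/*
 * Worked example for the mask above: freeing blk = 5, count = 3 gives
 * n = 5 and mask = (~0 << 5) & (~0 >> (32 - 3 - 5)) = 0x000000e0,
 * i.e. exactly bits 5-7.  Unlike allocation, the count here need not
 * be a power of 2.
 */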

/*
 * ALST_META_FREE() - free allocated blocks from radix tree meta info
 *
 *	This support routine frees a range of blocks from the bitmap.
 *	The range must be entirely enclosed by this radix node.  If a
 *	meta node, we break the range down recursively to free blocks
 *	in subnodes (which means that this code can free an arbitrary
 *	range whereas the allocation code cannot allocate an arbitrary
 *	range).
 */

static void
alst_meta_free(
	almeta_t *scan,
	daddr_t freeBlk,
	daddr_t count,
	daddr_t radix,
	int skip,
	daddr_t blk
) {
	int next_skip = ((u_int)skip / ALIST_META_RADIX);
	u_daddr_t mask;
	u_daddr_t pmask;
	int i;

	/*
	 * Break the free down into its components.  Because it is so easy
	 * to implement, frees are not limited to power-of-2 sizes.
	 *
	 * Each block in a meta-node bitmap takes two bits.
	 */
	radix /= ALIST_META_RADIX;

	i = (freeBlk - blk) / radix;
	blk += i * radix;
	mask = 0x00000003 << (i * 2);
	pmask = 0x00000001 << (i * 2);

	i = i * next_skip + 1;

	while (i <= skip && blk < freeBlk + count) {
		daddr_t v;

		v = blk + radix - freeBlk;
		if (v > count)
			v = count;

		if (scan->bm_bighint == (daddr_t)-1)
			panic("alst_meta_free: freeing unexpected range");

		if (freeBlk == blk && count >= radix) {
			/*
			 * All-free case, no need to update sub-tree
			 */
			scan->bm_bitmap |= mask;
			scan->bm_bighint = radix * ALIST_META_RADIX;/*XXX*/
		} else {
			/*
			 * If we were previously marked all-allocated, fix-up
			 * the next layer so we can recurse down into it.
			 */
			if ((scan->bm_bitmap & mask) == 0) {
				scan[i].bm_bitmap = (u_daddr_t)0;
				scan[i].bm_bighint = 0;
			}

			/*
			 * Recursion case
			 */
			if (next_skip == 1)
				alst_leaf_free(&scan[i], freeBlk, v);
			else
				alst_meta_free(&scan[i], freeBlk, v,
					       radix, next_skip - 1, blk);
			if (scan[i].bm_bitmap == (u_daddr_t)-1)
				scan->bm_bitmap |= mask;
			else
				scan->bm_bitmap |= pmask;
			if (scan->bm_bighint < scan[i].bm_bighint)
				scan->bm_bighint = scan[i].bm_bighint;
		}
		mask <<= 2;
		pmask <<= 2;
		count -= v;
		freeBlk += v;
		blk += radix;
		i += next_skip;
	}
}
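
/*
 * Worked example for the loop above (child radix 32): freeing
 * freeBlk = 40, count = 32 clips v to 24 on the first pass (blocks
 * 40-63 in child 1), then recurses into child 2 with the remaining
 * 8 blocks (64-71).  This is how an arbitrary range gets broken down
 * across children.
 */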

/*
 * ALST_RADIX_INIT() - initialize radix tree
 *
 *	Initialize our meta structures and bitmaps and calculate the exact
 *	amount of space required to manage 'count' blocks - this space may
 *	be considerably less than the calculated radix due to the large
 *	RADIX values we use.
 */

static daddr_t
alst_radix_init(almeta_t *scan, daddr_t radix, int skip, daddr_t count)
{
	int i;
	int next_skip;
	daddr_t memindex = 0;
	u_daddr_t mask;
	u_daddr_t pmask;

	/*
	 * Leaf node
	 */
	if (radix == ALIST_BMAP_RADIX) {
		if (scan) {
			scan->bm_bighint = 0;
			scan->bm_bitmap = 0;
		}
		return(memindex);
	}

	/*
	 * Meta node.  If allocating the entire object we can special
	 * case it.  However, we need to figure out how much memory
	 * is required to manage 'count' blocks, so we continue on anyway.
	 */

	if (scan) {
		scan->bm_bighint = 0;
		scan->bm_bitmap = 0;
	}

	radix /= ALIST_META_RADIX;
	next_skip = ((u_int)skip / ALIST_META_RADIX);
	mask = 0x00000003;
	pmask = 0x00000001;

	for (i = 1; i <= skip; i += next_skip) {
		if (count >= radix) {
			/*
			 * Allocate the entire object
			 */
			memindex = i + alst_radix_init(
			    ((scan) ? &scan[i] : NULL),
			    radix,
			    next_skip - 1,
			    radix
			);
			count -= radix;
			/* already marked as wholly allocated */
		} else if (count > 0) {
			/*
			 * Allocate a partial object
			 */
			memindex = i + alst_radix_init(
			    ((scan) ? &scan[i] : NULL),
			    radix,
			    next_skip - 1,
			    count
			);
			count = 0;

			/*
			 * Mark as partially allocated
			 */
			if (scan)
				scan->bm_bitmap |= pmask;
		} else {
			/*
			 * Add terminator and break out
			 */
			if (scan)
				scan[i].bm_bighint = (daddr_t)-1;
			/* already marked as wholly allocated */
			break;
		}
		mask <<= 2;
		pmask <<= 2;
	}
	if (memindex < i)
		memindex = i;
	return(memindex);
}
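
/*
 * Sizing example, continuing the alist_create() numbers: for 1024
 * blocks with radix = 8192 and skip = 272 the init pass consumes two
 * child subtrees of 17 slots each and plants a terminator, returning
 * memindex = 35, so alist_create() allocates 36 almeta_t records in
 * total rather than the full 273 the root radix could describe.
 */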

#ifdef ALIST_DEBUG

static void
alst_radix_print(almeta_t *scan, daddr_t blk, daddr_t radix, int skip, int tab)
{
	int i;
	int next_skip;
	int lastState = 0;
	u_daddr_t mask;

	if (radix == ALIST_BMAP_RADIX) {
		kprintf(
		    "%*.*s(%04x,%d): bitmap %08x big=%d\n",
		    tab, tab, "",
		    blk, radix,
		    scan->bm_bitmap,
		    scan->bm_bighint
		);
		return;
	}

	if (scan->bm_bitmap == 0) {
		kprintf(
		    "%*.*s(%04x,%d) ALL ALLOCATED\n",
		    tab, tab, "",
		    blk,
		    radix
		);
		return;
	}
	if (scan->bm_bitmap == (u_daddr_t)-1) {
		kprintf(
		    "%*.*s(%04x,%d) ALL FREE\n",
		    tab, tab, "",
		    blk,
		    radix
		);
		return;
	}

	kprintf(
	    "%*.*s(%04x,%d): subtree (%d) bitmap=%08x big=%d {\n",
	    tab, tab, "",
	    blk, radix,
	    radix,
	    scan->bm_bitmap,
	    scan->bm_bighint
	);

	radix /= ALIST_META_RADIX;
	next_skip = ((u_int)skip / ALIST_META_RADIX);
	tab += 4;
	mask = 0x00000003;

	for (i = 1; i <= skip; i += next_skip) {
		if (scan[i].bm_bighint == (daddr_t)-1) {
			kprintf(
			    "%*.*s(%04x,%d): Terminator\n",
			    tab, tab, "",
			    blk, radix
			);
			lastState = 0;
			break;
		}
		if ((scan->bm_bitmap & mask) == mask) {
			kprintf(
			    "%*.*s(%04x,%d): ALL FREE\n",
			    tab, tab, "",
			    blk, radix
			);
		} else if ((scan->bm_bitmap & mask) == 0) {
			kprintf(
			    "%*.*s(%04x,%d): ALL ALLOCATED\n",
			    tab, tab, "",
			    blk, radix
			);
		} else {
			alst_radix_print(
			    &scan[i],
			    blk,
			    radix,
			    next_skip - 1,
			    tab
			);
		}
		blk += radix;
		mask <<= 2;
	}
	tab -= 4;

	kprintf(
	    "%*.*s}\n",
	    tab, tab, ""
	);
}

#endif

#ifdef ALIST_DEBUG

int
main(int ac, char **av)
{
	int size = 1024;
	int i;
	alist_t bl;

	for (i = 1; i < ac; ++i) {
		const char *ptr = av[i];
		if (*ptr != '-') {
			size = strtol(ptr, NULL, 0);
			continue;
		}
		ptr += 2;
		fprintf(stderr, "Bad option: %s\n", ptr - 2);
		exit(1);
	}
	bl = alist_create(size, NULL);
	alist_free(bl, 0, size);

	for (;;) {
		char buf[1024];
		daddr_t da = 0;
		daddr_t count = 0;

		kprintf("%d/%d/%d> ", bl->bl_free, size, bl->bl_radix);
		fflush(stdout);
		if (fgets(buf, sizeof(buf), stdin) == NULL)
			break;
		switch(buf[0]) {
		case 'p':
			alist_print(bl);
			break;
		case 'a':
			if (sscanf(buf + 1, "%d", &count) == 1) {
				daddr_t blk = alist_alloc(bl, count);
				kprintf("    R=%04x\n", blk);
			} else {
				kprintf("?\n");
			}
			break;
		case 'f':
			if (sscanf(buf + 1, "%x %d", &da, &count) == 2) {
				alist_free(bl, da, count);
			} else {
				kprintf("?\n");
			}
			break;
		case '?':
		case 'h':
			puts(
			    "p          -print\n"
			    "a %d       -allocate\n"
			    "f %x %d    -free\n"
			    "h/?        -help"
			);
			break;
		default:
			kprintf("?\n");
			break;
		}
	}
	return(0);
}

void
panic(const char *ctl, ...)
{
	__va_list va;

	__va_start(va, ctl);
	vfprintf(stderr, ctl, va);
	fprintf(stderr, "\n");
	__va_end(va);
	exit(1);
}

#endif