/*
 * ALIST.C -	Bitmap allocator/deallocator, using a radix tree with hinting.
 *		Unlimited-size allocations, power-of-2 only, power-of-2
 *		aligned results only.
 *
 * Copyright (c) 2007 The DragonFly Project.  All rights reserved.
 *
 * This code is derived from software contributed to The DragonFly Project
 * by Matthew Dillon <dillon@backplane.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name of The DragonFly Project nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific, prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $DragonFly: src/sys/kern/subr_alist.c,v 1.4 2008/04/23 17:21:08 dillon Exp $
 */
/*
 * This module has been adapted from the BLIST module, which was written
 * by Matthew Dillon many years ago.
 *
 * This module implements a general power-of-2 bitmap allocator/deallocator.
 * All allocations must be in powers of 2 and will return similarly aligned
 * results.  The module does not try to interpret the meaning of a 'block'
 * other than to return ALIST_BLOCK_NONE on an allocation failure.
 *
 * A maximum of 2 billion blocks is supported so, for example, if one block
 * represented 64 bytes a maximally sized ALIST would represent
 * 128 gigabytes.
 *
 * A radix tree is used to maintain the bitmap and is laid out in a manner
 * similar to the blist code.  Meta nodes use a radix of 16 and 2 bits per
 * block while leaf nodes use a radix of 32 and 1 bit per block (stored in
 * a 32 bit bitmap field).  Both meta and leaf nodes have a hint field.
 * This field gives us a hint as to the largest free contiguous range of
 * blocks under the node.  It may contain a value that is too high, but
 * will never contain a value that is too low.  When the radix tree is
 * searched, allocation failures in subtrees update the hint.
 *
 * The radix tree is laid out recursively using a linear array.  Each meta
 * node is immediately followed (laid out sequentially in memory) by
 * ALIST_META_RADIX lower-level nodes.  This is a recursive structure but
 * one that can be easily scanned through a very simple 'skip' calculation.
 * In order to support large radixes, portions of the tree may reside
 * outside our memory allocation.  We handle this with an early-terminate
 * optimization in the meta-node.  The memory allocation is only large
 * enough to cover the number of blocks requested at creation time even if
 * it must be encompassed in a larger root-node radix.
 *
 * This code can be compiled stand-alone for debugging.
 */
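
/*
 * Illustrative sketch of the sizing above (not part of the original code):
 * with ALIST_BMAP_RADIX = 32 and ALIST_META_RADIX = 16, an alist created
 * for up to 512 blocks has one meta root over up to 16 leaves.  The
 * radix/skip computation in alist_create() runs as follows:
 *
 *	radix = 32;  skip = 0;
 *	while (radix < blocks) {		// 32 < 512
 *		radix *= 16;			// radix = 512
 *		skip = (skip + 1) * 16;		// skip = 16
 *	}
 *
 * The root node is scan[0]; its children sit at scan[1] through scan[16],
 * spaced next_skip (= skip / 16 = 1) array entries apart.  Children that
 * would fall past the requested block count are replaced by a terminator
 * entry so the backing array never has to cover the full root radix.
 */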
#ifdef _KERNEL

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/kernel.h>
#include <sys/alist.h>
#include <sys/malloc.h>
#include <vm/vm_object.h>
#include <vm/vm_kern.h>
#include <vm/vm_extern.h>
#include <vm/vm_page.h>

#else

#ifndef ALIST_NO_DEBUG
#define ALIST_DEBUG
#endif

#include <sys/types.h>
#include <stdio.h>
#include <assert.h>
#include <string.h>
#include <stdlib.h>
#include <stdarg.h>

#define kmalloc(a,b,c)	malloc(a)
#define kfree(a,b)	free(a)
#define kprintf		printf
#define KKASSERT(exp)	assert(exp)
struct malloc_type;

typedef unsigned int u_daddr_t;

#include <sys/alist.h>

void panic(const char *ctl, ...);

#endif
/*
 * static support functions
 */

static daddr_t alst_leaf_alloc(almeta_t *scan, daddr_t blk, int count);
static daddr_t alst_meta_alloc(almeta_t *scan, daddr_t blk, daddr_t count,
				daddr_t radix, int skip);
static void alst_leaf_free(almeta_t *scan, daddr_t relblk, int count);
static void alst_meta_free(almeta_t *scan, daddr_t freeBlk, daddr_t count,
				daddr_t radix, int skip, daddr_t blk);
static daddr_t alst_radix_init(almeta_t *scan, daddr_t radix,
				int skip, daddr_t count);
static void alst_radix_print(almeta_t *scan, daddr_t blk,
				daddr_t radix, int skip, int tab);
/*
 * alist_create() -	create an alist capable of handling up to the
 *			specified number of blocks
 *
 *	blocks must be greater than 0
 *
 *	The smallest alist consists of a single leaf node capable of
 *	managing ALIST_BMAP_RADIX blocks.
 */
alist_t
alist_create(daddr_t blocks, struct malloc_type *mtype)
{
	alist_t bl;
	int radix;
	int skip = 0;

	/*
	 * Calculate radix and skip field used for scanning.
	 */
	radix = ALIST_BMAP_RADIX;

	while (radix < blocks) {
		radix *= ALIST_META_RADIX;
		skip = (skip + 1) * ALIST_META_RADIX;
	}

	bl = kmalloc(sizeof(struct alist), mtype, M_WAITOK);

	bzero(bl, sizeof(*bl));

	bl->bl_blocks = blocks;
	bl->bl_radix = radix;
	bl->bl_skip = skip;
	bl->bl_rootblks = 1 +
	    alst_radix_init(NULL, bl->bl_radix, bl->bl_skip, blocks);
	bl->bl_root = kmalloc(sizeof(almeta_t) * bl->bl_rootblks,
			      mtype, M_WAITOK);

#if defined(ALIST_DEBUG)
	kprintf(
		"ALIST representing %d blocks (%d MB of swap)"
		", requiring %dK (%d bytes) of ram\n",
		bl->bl_blocks,
		bl->bl_blocks * 4 / 1024,
		(bl->bl_rootblks * sizeof(almeta_t) + 1023) / 1024,
		(bl->bl_rootblks * sizeof(almeta_t))
	);
	kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
	alst_radix_init(bl->bl_root, bl->bl_radix, bl->bl_skip, blocks);

	return(bl);
}
void
alist_destroy(alist_t bl, struct malloc_type *mtype)
{
	kfree(bl->bl_root, mtype);
	kfree(bl, mtype);
}
/*
 * alist_alloc() -	reserve space in the block bitmap.  Return the base
 *			of a contiguous region or ALIST_BLOCK_NONE if space
 *			could not be allocated.
 */
daddr_t
alist_alloc(alist_t bl, daddr_t count)
{
	daddr_t blk = ALIST_BLOCK_NONE;

	/*
	 * count must be a power of 2
	 */
	KKASSERT((count | (count - 1)) == (count << 1) - 1);

	if (bl && count < bl->bl_radix) {
		if (bl->bl_radix == ALIST_BMAP_RADIX)
			blk = alst_leaf_alloc(bl->bl_root, 0, count);
		else
			blk = alst_meta_alloc(bl->bl_root, 0, count,
					      bl->bl_radix, bl->bl_skip);
		if (blk != ALIST_BLOCK_NONE)
			bl->bl_free -= count;
	}
	return(blk);
}
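
/*
 * Usage sketch (illustrative only, not part of the original code): a
 * freshly created alist starts out fully allocated, so a caller first
 * frees the range it wants to manage and then carves power-of-2,
 * naturally aligned chunks out of it.  M_TEMP below is just a stand-in
 * for whatever malloc type the caller uses:
 *
 *	alist_t bl = alist_create(1024, M_TEMP);
 *	alist_free(bl, 0, 1024);		// make all 1024 blocks available
 *	daddr_t blk = alist_alloc(bl, 16);	// 16 blocks, 16-block aligned
 *	if (blk != ALIST_BLOCK_NONE)
 *		alist_free(bl, blk, 16);	// return them
 *	alist_destroy(bl, M_TEMP);
 */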
/*
 * alist_free() -	free up space in the block bitmap.  Return the base
 *			of a contiguous region.  Panic if an inconsistency is
 *			found.
 */
void
alist_free(alist_t bl, daddr_t blkno, daddr_t count)
{
	if (bl) {
		KKASSERT(blkno + count <= bl->bl_blocks);
		if (bl->bl_radix == ALIST_BMAP_RADIX)
			alst_leaf_free(bl->bl_root, blkno, count);
		else
			alst_meta_free(bl->bl_root, blkno, count,
				       bl->bl_radix, bl->bl_skip, 0);
		bl->bl_free += count;
	}
}
/*
 * alist_print() -	dump radix tree
 */
void
alist_print(alist_t bl)
{
	kprintf("ALIST {\n");
	alst_radix_print(bl->bl_root, 0, bl->bl_radix, bl->bl_skip, 4);
	kprintf("}\n");
}
/************************************************************************
 *			  ALLOCATION SUPPORT FUNCTIONS			*
 ************************************************************************
 *
 *	These support functions do all the actual work.  They may seem
 *	rather longish, but that's because I've commented them up.  The
 *	actual code is straightforward.
 */

/*
 * alst_leaf_alloc() -	allocate at a leaf in the radix tree (a bitmap).
 *
 *	This is the core of the allocator and is optimized for the 1 block
 *	and the ALIST_BMAP_RADIX block allocation cases.  Other cases are
 *	somewhat slower.  The 1 block allocation case is log2 and extremely
 *	quick.
 */
static daddr_t
alst_leaf_alloc(almeta_t *scan, daddr_t blk, int count)
{
	u_daddr_t orig = scan->bm_bitmap;

	/*
	 * Optimize bitmap all-allocated case.  Also, count = 1
	 * case assumes at least 1 bit is free in the bitmap, so
	 * we have to take care of this case here.
	 */
	if (orig == 0) {
		scan->bm_bighint = 0;
		return(ALIST_BLOCK_NONE);
	}

	/*
	 * Optimized code to allocate one bit out of the bitmap
	 */
	if (count == 1) {
		u_daddr_t mask;
		int j = ALIST_BMAP_RADIX/2;
		int r = 0;

		mask = (u_daddr_t)-1 >> (ALIST_BMAP_RADIX/2);

		while (j) {
			if ((orig & mask) == 0) {
				/* no free bit in the low half, look higher */
				r += j;
				orig >>= j;
			}
			j >>= 1;
			mask >>= j;
		}
		scan->bm_bitmap &= ~(1 << r);
		return(blk + r);
	}

	/*
	 * non-optimized code to allocate N bits out of the bitmap.
	 * The more bits, the faster the code runs.  It will run
	 * the slowest allocating 2 bits, but since there aren't any
	 * memory ops in the core loop (or shouldn't be, anyway),
	 * you probably won't notice the difference.
	 *
	 * Similar to the blist case, the alist code also requires
	 * allocations to be power-of-2 sized and aligned to the
	 * size of the allocation, which simplifies the algorithm.
	 */
	{
		int j;
		int n = ALIST_BMAP_RADIX - count;
		u_daddr_t mask;

		mask = (u_daddr_t)-1 >> n;

		for (j = 0; j <= n; j += count) {
			if ((orig & mask) == mask) {
				scan->bm_bitmap &= ~mask;
				return(blk + j);
			}
			mask = mask << count;
		}
	}

	/*
	 * We couldn't allocate count in this subtree, update bighint.
	 */
	scan->bm_bighint = count - 1;
	return(ALIST_BLOCK_NONE);
}
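
/*
 * Worked example of the N-bit scan above (illustrative numbers): for
 * count = 4 on a 32-bit leaf, n = 28 and the initial mask is 0x0000000f.
 * The loop tests the bitmap at j = 0, 4, 8, ... 28, shifting the mask
 * left by 4 each time, so any hit is automatically aligned to the
 * allocation size.
 */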
/*
 * alst_meta_alloc() -	allocate at a meta node in the radix tree.
 *
 *	Attempt to allocate at a meta node.  If we can't, we update
 *	bighint and return a failure.  Updating bighint optimizes future
 *	calls that hit this node.  We have to check for our collapse cases
 *	and we have a few optimizations strewn in as well.
 */
static daddr_t
alst_meta_alloc(almeta_t *scan, daddr_t blk, daddr_t count,
		daddr_t radix, int skip)
{
	int i;
	u_daddr_t mask;
	u_daddr_t pmask;
	int next_skip = ((u_int)skip / ALIST_META_RADIX);

	/*
	 * ALL-ALLOCATED special case
	 */
	if (scan->bm_bitmap == 0)  {
		scan->bm_bighint = 0;
		return(ALIST_BLOCK_NONE);
	}

	radix /= ALIST_META_RADIX;

	/*
	 * Radix now represents each bitmap entry for this meta node.  If
	 * the number of blocks being allocated can be fully represented,
	 * we allocate directly out of this meta node.
	 *
	 * Meta node bitmaps use 2 bits per block.
	 *
	 *	00	ALL-ALLOCATED
	 *	01	PARTIALLY-FREE/PARTIALLY-ALLOCATED
	 *	10	(not used)
	 *	11	ALL-FREE
	 */
	if (count >= radix) {
		int n = count / radix * 2;	/* number of bits */
		int j;

		mask = (u_daddr_t)-1 >> (ALIST_BMAP_RADIX - n);
		for (j = 0; j < ALIST_META_RADIX; j += n / 2) {
			if ((scan->bm_bitmap & mask) == mask) {
				scan->bm_bitmap &= ~mask;
				return(blk + j * radix);
			}
			mask <<= n;
		}
		if (scan->bm_bighint >= count)
			scan->bm_bighint = count >> 1;
		return(ALIST_BLOCK_NONE);
	}

	/*
	 * If not we have to recurse.
	 */
	mask = 0x00000003;
	pmask = 0x00000001;
	for (i = 1; i <= skip; i += next_skip) {
		if (scan[i].bm_bighint == (daddr_t)-1) {
			/*
			 * Terminator
			 */
			break;
		}

		/*
		 * If the element is marked completely free (11), initialize
		 * the recursion.
		 */
		if ((scan->bm_bitmap & mask) == mask) {
			scan[i].bm_bitmap = (u_daddr_t)-1;
			scan[i].bm_bighint = radix;
		}

		if ((scan->bm_bitmap & mask) == 0) {
			/*
			 * Object marked completely allocated, recursion
			 * contains nothing to allocate, skip it.
			 */
			;
		} else if (count <= scan[i].bm_bighint) {
			/*
			 * count fits in object
			 */
			daddr_t r;

			if (next_skip == 1) {
				r = alst_leaf_alloc(&scan[i], blk, count);
			} else {
				r = alst_meta_alloc(&scan[i], blk, count,
						    radix, next_skip - 1);
			}
			if (r != ALIST_BLOCK_NONE) {
				if (scan[i].bm_bitmap == 0) {
					scan->bm_bitmap &= ~mask;
				} else {
					scan->bm_bitmap &= ~mask;
					scan->bm_bitmap |= pmask;
				}
				return(r);
			}
		}
		blk += radix;
		mask <<= 2;
		pmask <<= 2;
	}

	/*
	 * We couldn't allocate count in this subtree, update bighint.
	 */
	if (scan->bm_bighint >= count)
		scan->bm_bighint = count >> 1;
	return(ALIST_BLOCK_NONE);
}
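
/*
 * Illustrative decode of a meta-node bitmap (example value only): with
 * two bits per child, read from the low bits up, bm_bitmap == 0x0000003d
 * means child 0 is 01 (partially free), children 1 and 2 are 11 (all
 * free), and children 3 through 15 are 00 (all allocated).  The mask and
 * pmask values in the loop above select the 11 and 01 patterns for the
 * child currently being scanned.
 */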
/*
 * alst_leaf_free() -	free allocated blocks from leaf bitmap
 */
static void
alst_leaf_free(almeta_t *scan, daddr_t blk, int count)
{
	/*
	 * free some data in this bitmap
	 *
	 * e.g.
	 *	0000111111111110000
	 *          \_________/\__/
	 *		count   n
	 */
	int n = blk & (ALIST_BMAP_RADIX - 1);
	u_daddr_t mask;

	mask = ((u_daddr_t)-1 << n) &
	    ((u_daddr_t)-1 >> (ALIST_BMAP_RADIX - count - n));

	if (scan->bm_bitmap & mask)
		panic("alst_radix_free: freeing free block");
	scan->bm_bitmap |= mask;

	/*
	 * We could probably do a better job here.  We are required to make
	 * bighint at least as large as the biggest contiguous block of
	 * data.  If we just shoehorn it, a little extra overhead will
	 * be incurred on the next allocation (but only that one typically).
	 */
	scan->bm_bighint = ALIST_BMAP_RADIX;
}
/*
 * alst_meta_free() -	free allocated blocks from radix tree meta info
 *
 *	This support routine frees a range of blocks from the bitmap.
 *	The range must be entirely enclosed by this radix node.  If a
 *	meta node, we break the range down recursively to free blocks
 *	in subnodes (which means that this code can free an arbitrary
 *	range whereas the allocation code cannot allocate an arbitrary
 *	range).
 */
static void
alst_meta_free(almeta_t *scan, daddr_t freeBlk, daddr_t count,
	       daddr_t radix, int skip, daddr_t blk)
{
	int next_skip = ((u_int)skip / ALIST_META_RADIX);
	u_daddr_t mask;
	u_daddr_t pmask;
	int i;

	/*
	 * Break the free down into its components.  Because it is so easy
	 * to implement, frees are not limited to power-of-2 sizes.
	 *
	 * Each block in a meta-node bitmap takes two bits.
	 */
	radix /= ALIST_META_RADIX;

	i = (freeBlk - blk) / radix;
	blk += i * radix;
	mask = 0x00000003 << (i * 2);
	pmask = 0x00000001 << (i * 2);

	i = i * next_skip + 1;

	while (i <= skip && blk < freeBlk + count) {
		daddr_t v;

		v = blk + radix - freeBlk;
		if (v > count)
			v = count;

		if (scan->bm_bighint == (daddr_t)-1)
			panic("alst_meta_free: freeing unexpected range");

		if (freeBlk == blk && count >= radix) {
			/*
			 * All-free case, no need to update sub-tree
			 */
			scan->bm_bitmap |= mask;
			scan->bm_bighint = radix * ALIST_META_RADIX;/*XXX*/
		} else {
			/*
			 * If we were previously marked all-allocated, fix-up
			 * the next layer so we can recurse down into it.
			 */
			if ((scan->bm_bitmap & mask) == 0) {
				scan[i].bm_bitmap = (u_daddr_t)0;
				scan[i].bm_bighint = 0;
			}

			/*
			 * Recursion case
			 */
			if (next_skip == 1) {
				alst_leaf_free(&scan[i], freeBlk, v);
			} else {
				alst_meta_free(&scan[i], freeBlk, v,
					       radix, next_skip - 1, blk);
			}
			if (scan[i].bm_bitmap == (u_daddr_t)-1)
				scan->bm_bitmap |= mask;
			else
				scan->bm_bitmap |= pmask;
			if (scan->bm_bighint < scan[i].bm_bighint)
				scan->bm_bighint = scan[i].bm_bighint;
		}
		mask <<= 2;
		pmask <<= 2;
		count -= v;
		freeBlk += v;
		blk += radix;
		i += next_skip;
	}
}
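
/*
 * Worked example of the decomposition above (illustrative numbers): in a
 * root meta node covering blocks 0-511 (child radix 32), alist_free(bl,
 * 40, 64) arrives here with freeBlk = 40 and count = 64.  The first pass
 * frees the 24-block tail of the child covering blocks 32-63 (v = 24),
 * the second pass marks the child covering 64-95 all-free without
 * recursing (freeBlk == blk and count >= radix), and the third pass frees
 * the first 8 blocks of the child covering 96-127 (v clipped to 8).
 */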
/*
 * alst_radix_init() -	initialize radix tree
 *
 *	Initialize our meta structures and bitmaps and calculate the exact
 *	amount of space required to manage 'count' blocks - this space may
 *	be considerably less than the calculated radix due to the large
 *	RADIX values we use.
 */
static daddr_t
alst_radix_init(almeta_t *scan, daddr_t radix, int skip, daddr_t count)
{
	int i;
	int next_skip;
	daddr_t memindex = 0;
	u_daddr_t pmask;

	/*
	 * Leaf node
	 */
	if (radix == ALIST_BMAP_RADIX) {
		if (scan) {
			scan->bm_bighint = 0;
			scan->bm_bitmap = 0;
		}
		return(memindex);
	}

	/*
	 * Meta node.  If allocating the entire object we can special
	 * case it.  However, we need to figure out how much memory
	 * is required to manage 'count' blocks, so we continue on anyway.
	 */
	if (scan) {
		scan->bm_bighint = 0;
		scan->bm_bitmap = 0;
	}

	radix /= ALIST_META_RADIX;
	next_skip = ((u_int)skip / ALIST_META_RADIX);
	pmask = 0x00000001;

	for (i = 1; i <= skip; i += next_skip) {
		if (count >= radix) {
			/*
			 * Allocate the entire object
			 */
			memindex = i + alst_radix_init(
			    ((scan) ? &scan[i] : NULL),
			    radix,
			    next_skip - 1,
			    radix
			);
			count -= radix;
			/* already marked as wholly allocated */
		} else if (count > 0) {
			/*
			 * Allocate a partial object
			 */
			memindex = i + alst_radix_init(
			    ((scan) ? &scan[i] : NULL),
			    radix,
			    next_skip - 1,
			    count
			);
			count = 0;

			/*
			 * Mark as partially allocated
			 */
			if (scan)
				scan->bm_bitmap |= pmask;
		} else {
			/*
			 * Add terminator and break out
			 */
			if (scan)
				scan[i].bm_bighint = (daddr_t)-1;
			/* already marked as wholly allocated */
			break;
		}
		pmask <<= 2;
	}
	if (memindex < i)
		memindex = i;
	return(memindex);
}
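
/*
 * Sizing note (follows from the code above, stated here for clarity):
 * when blocks <= ALIST_BMAP_RADIX the radix computed in alist_create()
 * stays at ALIST_BMAP_RADIX, the leaf test above returns memindex = 0,
 * and bl_rootblks becomes 1 + 0 = 1, i.e. the smallest alist really is a
 * single leaf node as promised in the header comment.
 */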
/*
 * alst_radix_print() -	dump the radix tree in human-readable form
 */
static void
alst_radix_print(almeta_t *scan, daddr_t blk, daddr_t radix, int skip, int tab)
{
	int i;
	int next_skip;
	u_daddr_t mask;

	if (radix == ALIST_BMAP_RADIX) {
		kprintf("%*.*s(%04x,%d): bitmap %08x big=%d\n",
		    tab, tab, "", blk, radix,
		    scan->bm_bitmap, scan->bm_bighint);
		return;
	}

	if (scan->bm_bitmap == 0) {
		kprintf("%*.*s(%04x,%d) ALL ALLOCATED\n",
		    tab, tab, "", blk, radix);
		return;
	}
	if (scan->bm_bitmap == (u_daddr_t)-1) {
		kprintf("%*.*s(%04x,%d) ALL FREE\n",
		    tab, tab, "", blk, radix);
		return;
	}

	kprintf("%*.*s(%04x,%d): subtree (%d) bitmap=%08x big=%d {\n",
	    tab, tab, "", blk, radix, radix,
	    scan->bm_bitmap, scan->bm_bighint);

	radix /= ALIST_META_RADIX;
	next_skip = ((u_int)skip / ALIST_META_RADIX);
	tab += 4;
	mask = 0x00000003;

	for (i = 1; i <= skip; i += next_skip) {
		if (scan[i].bm_bighint == (daddr_t)-1) {
			kprintf("%*.*s(%04x,%d): Terminator\n",
			    tab, tab, "", blk, radix);
			break;
		}
		if ((scan->bm_bitmap & mask) == mask) {
			kprintf("%*.*s(%04x,%d): ALL FREE\n",
			    tab, tab, "", blk, radix);
		} else if ((scan->bm_bitmap & mask) == 0) {
			kprintf("%*.*s(%04x,%d): ALL ALLOCATED\n",
			    tab, tab, "", blk, radix);
		} else {
			alst_radix_print(&scan[i], blk, radix,
			    next_skip - 1, tab);
		}
		blk += radix;
		mask <<= 2;
	}
	tab -= 4;

	kprintf("%*.*s}\n", tab, tab, "");
}
#ifdef ALIST_DEBUG

int
main(int ac, char **av)
{
	int size = 1024;
	int i;
	alist_t bl;

	for (i = 1; i < ac; ++i) {
		const char *ptr = av[i];
		if (*ptr != '-') {
			size = strtol(ptr, NULL, 0);
			continue;
		}
		ptr += 2;
		fprintf(stderr, "Bad option: %s\n", ptr - 2);
		exit(1);
	}
	bl = alist_create(size, NULL);
	alist_free(bl, 0, size);

	for (;;) {
		char buf[1024];
		daddr_t da = 0;
		daddr_t count = 0;

		kprintf("%d/%d/%d> ", bl->bl_free, size, bl->bl_radix);
		fflush(stdout);
		if (fgets(buf, sizeof(buf), stdin) == NULL)
			break;
		switch(buf[0]) {
		case 'p':
			alist_print(bl);
			break;
		case 'a':
			if (sscanf(buf + 1, "%d", &count) == 1) {
				daddr_t blk = alist_alloc(bl, count);
				kprintf(" R=%04x\n", blk);
			} else {
				kprintf("?\n");
			}
			break;
		case 'f':
			if (sscanf(buf + 1, "%x %d", &da, &count) == 2) {
				alist_free(bl, da, count);
			} else {
				kprintf("?\n");
			}
			break;
		default:
			kprintf("?\n");
			break;
		}
	}
	return(0);
}
/*
 * panic() -	stand-alone substitute for the kernel panic()
 */
void
panic(const char *ctl, ...)
{
	va_list va;

	va_start(va, ctl);
	vfprintf(stderr, ctl, va);
	fprintf(stderr, "\n");
	va_end(va);
	exit(1);
}

#endif	/* ALIST_DEBUG */