/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter <clameter@sgi.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/gfp.h>
#include <linux/string.h>
#include <linux/bitops.h>

#ifdef __KERNEL__
#define RADIX_TREE_MAP_SHIFT    6
#else
#define RADIX_TREE_MAP_SHIFT    3       /* For more stressful testing */
#endif
#define RADIX_TREE_TAGS         2

#define RADIX_TREE_MAP_SIZE     (1UL << RADIX_TREE_MAP_SHIFT)
#define RADIX_TREE_MAP_MASK     (RADIX_TREE_MAP_SIZE-1)

#define RADIX_TREE_TAG_LONGS    \
        ((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG)

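/*
 * Worked numbers for the geometry above, using the in-kernel value
 * RADIX_TREE_MAP_SHIFT == 6 (illustrative only):
 *
 *      RADIX_TREE_MAP_SIZE  = 1UL << 6 = 64 slots per node
 *      RADIX_TREE_MAP_MASK  = 0x3f
 *      RADIX_TREE_TAG_LONGS = (64 + BITS_PER_LONG - 1) / BITS_PER_LONG
 *                           = 2 longs on 32-bit, 1 long on 64-bit
 *
 * A tree of height h therefore covers indices 0 .. 2^(6*h) - 1:
 * height 1 spans 0..63, height 2 spans 0..4095, height 3 spans 0..262143.
 */
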
struct radix_tree_node {
        unsigned int    count;
        void            *slots[RADIX_TREE_MAP_SIZE];
        unsigned long   tags[RADIX_TREE_TAGS][RADIX_TREE_TAG_LONGS];
};

struct radix_tree_path {
        struct radix_tree_node *node;
        int offset;
};

#define RADIX_TREE_INDEX_BITS  (8 /* CHAR_BIT */ * sizeof(unsigned long))
#define RADIX_TREE_MAX_PATH (RADIX_TREE_INDEX_BITS/RADIX_TREE_MAP_SHIFT + 2)

static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH] __read_mostly;

/*
 * Radix tree node cache.
 */
static kmem_cache_t *radix_tree_node_cachep;

/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
        int nr;
        struct radix_tree_node *nodes[RADIX_TREE_MAX_PATH];
};
DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };

/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
        struct radix_tree_node *ret;

        ret = kmem_cache_alloc(radix_tree_node_cachep, root->gfp_mask);
        if (ret == NULL && !(root->gfp_mask & __GFP_WAIT)) {
                struct radix_tree_preload *rtp;

                rtp = &__get_cpu_var(radix_tree_preloads);
                if (rtp->nr) {
                        ret = rtp->nodes[rtp->nr - 1];
                        rtp->nodes[rtp->nr - 1] = NULL;
                        rtp->nr--;
                }
        }
        return ret;
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
        kmem_cache_free(radix_tree_node_cachep, node);
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 */
int radix_tree_preload(int gfp_mask)
{
        struct radix_tree_preload *rtp;
        struct radix_tree_node *node;
        int ret = -ENOMEM;

        preempt_disable();
        rtp = &__get_cpu_var(radix_tree_preloads);
        while (rtp->nr < ARRAY_SIZE(rtp->nodes)) {
                preempt_enable();
                node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
                if (node == NULL)
                        goto out;
                preempt_disable();
                rtp = &__get_cpu_var(radix_tree_preloads);
                if (rtp->nr < ARRAY_SIZE(rtp->nodes))
                        rtp->nodes[rtp->nr++] = node;
                else
                        kmem_cache_free(radix_tree_node_cachep, node);
        }
        ret = 0;
out:
        return ret;
}

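/*
 * Minimal usage sketch for the preload API (illustrative only: the tree,
 * lock and item names are hypothetical).  It assumes the tree was
 * initialised with a non-sleeping mask, e.g. RADIX_TREE(my_tree, GFP_ATOMIC),
 * so that radix_tree_node_alloc() may dip into the preloaded pool, and that
 * radix_tree_preload_end() is the preemption-enabling helper from
 * <linux/radix-tree.h>:
 *
 *      if (radix_tree_preload(GFP_KERNEL))
 *              return -ENOMEM;
 *      spin_lock(&my_lock);
 *      error = radix_tree_insert(&my_tree, index, item);
 *      spin_unlock(&my_lock);
 *      radix_tree_preload_end();
 *
 * The sleeping allocations happen before the lock is taken; under the lock
 * the insert can then consume the per-cpu nodes instead of failing.
 */
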
static inline void tag_set(struct radix_tree_node *node, int tag, int offset)
{
        if (!test_bit(offset, &node->tags[tag][0]))
                __set_bit(offset, &node->tags[tag][0]);
}

static inline void tag_clear(struct radix_tree_node *node, int tag, int offset)
{
        __clear_bit(offset, &node->tags[tag][0]);
}

static inline int tag_get(struct radix_tree_node *node, int tag, int offset)
{
        return test_bit(offset, &node->tags[tag][0]);
}

/*
 *      Return the maximum key which can be stored in a
 *      radix tree with height HEIGHT.
 */
static inline unsigned long radix_tree_maxindex(unsigned int height)
{
        return height_to_maxindex[height];
}

/*
 *      Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root, unsigned long index)
{
        struct radix_tree_node *node;
        unsigned int height;
        char tags[RADIX_TREE_TAGS];
        int tag;

        /* Figure out what the height should be. */
        height = root->height + 1;
        while (index > radix_tree_maxindex(height))
                height++;

        if (root->rnode == NULL) {
                root->height = height;
                goto out;
        }

        /*
         * Prepare the tag status of the top-level node for propagation
         * into the newly-pushed top-level node(s)
         */
        for (tag = 0; tag < RADIX_TREE_TAGS; tag++) {
                int idx;

                tags[tag] = 0;
                for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
                        if (root->rnode->tags[tag][idx]) {
                                tags[tag] = 1;
                                break;
                        }
                }
        }

        do {
                if (!(node = radix_tree_node_alloc(root)))
                        return -ENOMEM;

                /* Increase the height. */
                node->slots[0] = root->rnode;

                /* Propagate the aggregated tag info into the new root */
                for (tag = 0; tag < RADIX_TREE_TAGS; tag++) {
                        if (tags[tag])
                                tag_set(node, tag, 0);
                }

                node->count = 1;
                root->rnode = node;
                root->height++;
        } while (height > root->height);
out:
        return 0;
}

/**
 *      radix_tree_insert - insert into a radix tree
 *      @root:          radix tree root
 *      @index:         index key
 *      @item:          item to insert
 *
 *      Insert an item into the radix tree at position @index.
 */
int radix_tree_insert(struct radix_tree_root *root,
                        unsigned long index, void *item)
{
        struct radix_tree_node *node = NULL, *slot;
        unsigned int height, shift;
        int offset;
        int error;

        /* Make sure the tree is high enough. */
        if ((!index && !root->rnode) ||
                        index > radix_tree_maxindex(root->height)) {
                error = radix_tree_extend(root, index);
                if (error)
                        return error;
        }

        slot = root->rnode;
        height = root->height;
        shift = (height-1) * RADIX_TREE_MAP_SHIFT;

        offset = 0;                     /* uninitialised var warning */
        while (height > 0) {
                if (slot == NULL) {
                        /* Have to add a child node. */
                        if (!(slot = radix_tree_node_alloc(root)))
                                return -ENOMEM;
                        if (node) {
                                node->slots[offset] = slot;
                                node->count++;
                        } else
                                root->rnode = slot;
                }

                /* Go a level down */
                offset = (index >> shift) & RADIX_TREE_MAP_MASK;
                node = slot;
                slot = node->slots[offset];
                shift -= RADIX_TREE_MAP_SHIFT;
                height--;
        }

        if (slot != NULL)
                return -EEXIST;

        if (node) {
                node->count++;
                node->slots[offset] = item;
                BUG_ON(tag_get(node, 0, offset));
                BUG_ON(tag_get(node, 1, offset));
        } else {
                root->rnode = item;
        }
        return 0;
}
EXPORT_SYMBOL(radix_tree_insert);

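/*
 * Simplest insertion sketch (hypothetical names; RADIX_TREE() is the static
 * initialiser from <linux/radix-tree.h>).  Updates must be serialised by the
 * caller; with a GFP_KERNEL mask the insert may sleep, so no spinlock is
 * held here:
 *
 *      static RADIX_TREE(my_tree, GFP_KERNEL);
 *
 *      error = radix_tree_insert(&my_tree, key, item);
 *
 * A return of -EEXIST means the slot at key is already occupied, -ENOMEM
 * that a node allocation failed, and 0 that item is now reachable via key.
 */
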
/**
 *      radix_tree_lookup - perform lookup operation on a radix tree
 *      @root:          radix tree root
 *      @index:         index key
 *
 *      Lookup the item at the position @index in the radix tree @root.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
        unsigned int height, shift;
        struct radix_tree_node *slot;

        height = root->height;
        if (index > radix_tree_maxindex(height))
                return NULL;

        shift = (height-1) * RADIX_TREE_MAP_SHIFT;
        slot = root->rnode;

        while (height > 0) {
                if (slot == NULL)
                        return NULL;

                slot = slot->slots[(index >> shift) & RADIX_TREE_MAP_MASK];
                shift -= RADIX_TREE_MAP_SHIFT;
                height--;
        }

        return (void *) slot;
}
EXPORT_SYMBOL(radix_tree_lookup);

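/*
 * Lookup sketch (hypothetical names).  This function takes no locks of its
 * own, so the sketch assumes readers and writers share one spinlock:
 *
 *      spin_lock(&my_lock);
 *      item = radix_tree_lookup(&my_tree, key);
 *      spin_unlock(&my_lock);
 *
 * A NULL return means nothing is stored at key.
 */
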
/**
 *      radix_tree_tag_set - set a tag on a radix tree node
 *      @root:          radix tree root
 *      @index:         index key
 *      @tag:           tag index
 *
 *      Set the search tag corresponding to @index in the radix tree, from
 *      the root all the way down to the leaf node.
 *
 *      Returns the address of the tagged item.  Setting a tag on a
 *      not-present item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
                        unsigned long index, int tag)
{
        unsigned int height, shift;
        struct radix_tree_node *slot;

        height = root->height;
        if (index > radix_tree_maxindex(height))
                return NULL;

        shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
        slot = root->rnode;

        while (height > 0) {
                int offset;

                offset = (index >> shift) & RADIX_TREE_MAP_MASK;
                tag_set(slot, tag, offset);
                slot = slot->slots[offset];
                BUG_ON(slot == NULL);
                shift -= RADIX_TREE_MAP_SHIFT;
                height--;
        }

        return slot;
}
EXPORT_SYMBOL(radix_tree_tag_set);

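/*
 * Tagging sketch (hypothetical names).  Tags are small integers below
 * RADIX_TREE_TAGS; the page cache, for example, uses them for per-page
 * dirty and writeback state.  The item must already be present:
 *
 *      #define MY_TAG_DIRTY    0
 *
 *      spin_lock(&my_lock);
 *      if (radix_tree_lookup(&my_tree, key))
 *              radix_tree_tag_set(&my_tree, key, MY_TAG_DIRTY);
 *      spin_unlock(&my_lock);
 */
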
/**
 *      radix_tree_tag_clear - clear a tag on a radix tree node
 *      @root:          radix tree root
 *      @index:         index key
 *      @tag:           tag index
 *
 *      Clear the search tag corresponding to @index in the radix tree.  If
 *      this causes the leaf node to have no tags set then clear the tag in the
 *      next-to-leaf node, etc.
 *
 *      Returns the address of the tagged item on success, else NULL.  i.e. it
 *      has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
                        unsigned long index, int tag)
{
        struct radix_tree_path path[RADIX_TREE_MAX_PATH], *pathp = path;
        struct radix_tree_node *slot;
        unsigned int height, shift;
        void *ret = NULL;

        height = root->height;
        if (index > radix_tree_maxindex(height))
                goto out;

        shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
        pathp->node = NULL;
        slot = root->rnode;

        while (height > 0) {
                int offset;

                if (slot == NULL)
                        goto out;

                offset = (index >> shift) & RADIX_TREE_MAP_MASK;
                pathp[1].offset = offset;
                pathp[1].node = slot;
                slot = slot->slots[offset];
                pathp++;
                shift -= RADIX_TREE_MAP_SHIFT;
                height--;
        }

        ret = slot;
        if (ret == NULL)
                goto out;

        do {
                int idx;

                tag_clear(pathp->node, tag, pathp->offset);
                for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
                        if (pathp->node->tags[tag][idx])
                                goto out;
                }
                pathp--;
        } while (pathp->node);
out:
        return ret;
}
EXPORT_SYMBOL(radix_tree_tag_clear);

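/*
 * Clearing sketch (hypothetical names), paired with radix_tree_tagged()
 * below to show that a leaf-level clear propagates towards the root once no
 * sibling in a node carries the tag any more:
 *
 *      spin_lock(&my_lock);
 *      radix_tree_tag_clear(&my_tree, key, MY_TAG_DIRTY);
 *      if (!radix_tree_tagged(&my_tree, MY_TAG_DIRTY))
 *              my_tree_is_clean = 1;
 *      spin_unlock(&my_lock);
 */
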
#ifndef __KERNEL__      /* Only the test harness uses this at present */
/**
 *      radix_tree_tag_get - get a tag on a radix tree node
 *      @root:          radix tree root
 *      @index:         index key
 *      @tag:           tag index
 *
 *      Return values:
 *
 *      0: tag not present
 *      1: tag present, set
 *      -1: tag present, unset
 */
int radix_tree_tag_get(struct radix_tree_root *root,
                        unsigned long index, int tag)
{
        unsigned int height, shift;
        struct radix_tree_node *slot;
        int saw_unset_tag = 0;

        height = root->height;
        if (index > radix_tree_maxindex(height))
                return 0;

        shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
        slot = root->rnode;

        for ( ; ; ) {
                int offset;

                if (slot == NULL)
                        return 0;

                offset = (index >> shift) & RADIX_TREE_MAP_MASK;

                /*
                 * This is just a debug check.  Later, we can bail as soon as
                 * we see an unset tag.
                 */
                if (!tag_get(slot, tag, offset))
                        saw_unset_tag = 1;
                if (height == 1) {
                        int ret = tag_get(slot, tag, offset);

                        BUG_ON(ret && saw_unset_tag);
                        return ret ? 1 : -1;
                }
                slot = slot->slots[offset];
                shift -= RADIX_TREE_MAP_SHIFT;
                height--;
        }
}
EXPORT_SYMBOL(radix_tree_tag_get);
#endif

static unsigned int
__lookup(struct radix_tree_root *root, void **results, unsigned long index,
        unsigned int max_items, unsigned long *next_index)
{
        unsigned int nr_found = 0;
        unsigned int shift, height;
        struct radix_tree_node *slot;
        unsigned long i;

        height = root->height;
        if (height == 0)
                goto out;

        shift = (height-1) * RADIX_TREE_MAP_SHIFT;
        slot = root->rnode;

        for ( ; height > 1; height--) {

                for (i = (index >> shift) & RADIX_TREE_MAP_MASK ;
                                i < RADIX_TREE_MAP_SIZE; i++) {
                        if (slot->slots[i] != NULL)
                                break;
                        index &= ~((1UL << shift) - 1);
                        index += 1UL << shift;
                        if (index == 0)
                                goto out;       /* 32-bit wraparound */
                }
                if (i == RADIX_TREE_MAP_SIZE)
                        goto out;

                shift -= RADIX_TREE_MAP_SHIFT;
                slot = slot->slots[i];
        }

        /* Bottom level: grab some items */
        for (i = index & RADIX_TREE_MAP_MASK; i < RADIX_TREE_MAP_SIZE; i++) {
                index++;
                if (slot->slots[i]) {
                        results[nr_found++] = slot->slots[i];
                        if (nr_found == max_items)
                                goto out;
                }
        }
out:
        *next_index = index;
        return nr_found;
}

/**
 *      radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *      @root:          radix tree root
 *      @results:       where the results of the lookup are placed
 *      @first_index:   start the lookup from this key
 *      @max_items:     place up to this many items at *results
 *
 *      Performs an index-ascending scan of the tree for present items.  Places
 *      them at *@results and returns the number of items which were placed at
 *      *@results.
 *
 *      The implementation is naive.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
                        unsigned long first_index, unsigned int max_items)
{
        const unsigned long max_index = radix_tree_maxindex(root->height);
        unsigned long cur_index = first_index;
        unsigned int ret = 0;

        while (ret < max_items) {
                unsigned int nr_found;
                unsigned long next_index;       /* Index of next search */

                if (cur_index > max_index)
                        break;
                nr_found = __lookup(root, results + ret, cur_index,
                                        max_items - ret, &next_index);
                ret += nr_found;
                if (next_index == 0)
                        break;
                cur_index = next_index;
        }
        return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);

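/*
 * Batched-scan sketch (hypothetical names): visit every item in ascending
 * index order, a chunk at a time.  The items are assumed to be structures
 * that record their own index (as struct page does), so the scan can resume
 * after the last item found:
 *
 *      struct my_item *batch[16];
 *      unsigned long next = 0;
 *      unsigned int i, nr;
 *
 *      do {
 *              spin_lock(&my_lock);
 *              nr = radix_tree_gang_lookup(&my_tree, (void **)batch, next, 16);
 *              spin_unlock(&my_lock);
 *              for (i = 0; i < nr; i++)
 *                      my_process(batch[i]);
 *              if (nr)
 *                      next = batch[nr - 1]->index + 1;
 *      } while (nr == 16);
 */
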
/*
 * FIXME: the two tag_get()s here should use find_next_bit() instead of
 * open-coding the search.
 */
static unsigned int
__lookup_tag(struct radix_tree_root *root, void **results, unsigned long index,
        unsigned int max_items, unsigned long *next_index, int tag)
{
        unsigned int nr_found = 0;
        unsigned int shift;
        unsigned int height = root->height;
        struct radix_tree_node *slot;

        shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
        slot = root->rnode;

        while (height > 0) {
                unsigned long i = (index >> shift) & RADIX_TREE_MAP_MASK;

                for ( ; i < RADIX_TREE_MAP_SIZE; i++) {
                        if (tag_get(slot, tag, i)) {
                                BUG_ON(slot->slots[i] == NULL);
                                break;
                        }
                        index &= ~((1UL << shift) - 1);
                        index += 1UL << shift;
                        if (index == 0)
                                goto out;       /* 32-bit wraparound */
                }
                if (i == RADIX_TREE_MAP_SIZE)
                        goto out;
                height--;
                if (height == 0) {      /* Bottom level: grab some items */
                        unsigned long j = index & RADIX_TREE_MAP_MASK;

                        for ( ; j < RADIX_TREE_MAP_SIZE; j++) {
                                index++;
                                if (tag_get(slot, tag, j)) {
                                        BUG_ON(slot->slots[j] == NULL);
                                        results[nr_found++] = slot->slots[j];
                                        if (nr_found == max_items)
                                                goto out;
                                }
                        }
                }
                shift -= RADIX_TREE_MAP_SHIFT;
                slot = slot->slots[i];
        }
out:
        *next_index = index;
        return nr_found;
}

/**
 *      radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *                                   based on a tag
 *      @root:          radix tree root
 *      @results:       where the results of the lookup are placed
 *      @first_index:   start the lookup from this key
 *      @max_items:     place up to this many items at *results
 *      @tag:           the tag index
 *
 *      Performs an index-ascending scan of the tree for present items which
 *      have the tag indexed by @tag set.  Places the items at *@results and
 *      returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
                unsigned long first_index, unsigned int max_items, int tag)
{
        const unsigned long max_index = radix_tree_maxindex(root->height);
        unsigned long cur_index = first_index;
        unsigned int ret = 0;

        while (ret < max_items) {
                unsigned int nr_found;
                unsigned long next_index;       /* Index of next search */

                if (cur_index > max_index)
                        break;
                nr_found = __lookup_tag(root, results + ret, cur_index,
                                        max_items - ret, &next_index, tag);
                ret += nr_found;
                if (next_index == 0)
                        break;
                cur_index = next_index;
        }
        return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);

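/*
 * Tagged-scan sketch (hypothetical names): fetch up to 16 items that
 * currently carry MY_TAG_DIRTY, starting at index 0.  A full scan uses the
 * same resume-after-last-index loop as the radix_tree_gang_lookup() sketch
 * above:
 *
 *      nr = radix_tree_gang_lookup_tag(&my_tree, (void **)batch, 0, 16,
 *                                      MY_TAG_DIRTY);
 */
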
/**
 *      radix_tree_delete - delete an item from a radix tree
 *      @root:          radix tree root
 *      @index:         index key
 *
 *      Remove the item at @index from the radix tree rooted at @root.
 *
 *      Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
        struct radix_tree_path path[RADIX_TREE_MAX_PATH], *pathp = path;
        struct radix_tree_path *orig_pathp;
        struct radix_tree_node *slot;
        unsigned int height, shift;
        void *ret = NULL;
        char tags[RADIX_TREE_TAGS];
        int nr_cleared_tags;

        height = root->height;
        if (index > radix_tree_maxindex(height))
                goto out;

        shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
        pathp->node = NULL;
        slot = root->rnode;

        for ( ; height > 0; height--) {
                int offset;

                if (slot == NULL)
                        goto out;

                offset = (index >> shift) & RADIX_TREE_MAP_MASK;
                pathp[1].offset = offset;
                pathp[1].node = slot;
                slot = slot->slots[offset];
                pathp++;
                shift -= RADIX_TREE_MAP_SHIFT;
        }

        ret = slot;
        if (ret == NULL)
                goto out;

        orig_pathp = pathp;

        /*
         * Clear all tags associated with the just-deleted item
         */
        memset(tags, 0, sizeof(tags));
        do {
                int tag;

                nr_cleared_tags = RADIX_TREE_TAGS;
                for (tag = 0; tag < RADIX_TREE_TAGS; tag++) {
                        int idx;

                        if (tags[tag])
                                continue;

                        tag_clear(pathp->node, tag, pathp->offset);

                        for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
                                if (pathp->node->tags[tag][idx]) {
                                        tags[tag] = 1;
                                        nr_cleared_tags--;
                                        break;
                                }
                        }
                }
                pathp--;
        } while (pathp->node && nr_cleared_tags);

        /* Now free the nodes we do not need anymore */
        for (pathp = orig_pathp; pathp->node; pathp--) {
                pathp->node->slots[pathp->offset] = NULL;
                if (--pathp->node->count)
                        goto out;

                /* Node with zero slots in use so free it */
                radix_tree_node_free(pathp->node);
        }
        root->rnode = NULL;
        root->height = 0;
out:
        return ret;
}
EXPORT_SYMBOL(radix_tree_delete);

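/*
 * Deletion sketch (hypothetical names): the return value is the item that
 * was stored, so the caller can release it after dropping the lock:
 *
 *      spin_lock(&my_lock);
 *      item = radix_tree_delete(&my_tree, key);
 *      spin_unlock(&my_lock);
 *      kfree(item);
 *
 * kfree(NULL) is a no-op, so deleting an index that was never populated is
 * harmless here.
 */
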
/**
 *      radix_tree_tagged - test whether any items in the tree are tagged
 *      @root:          radix tree root
 *      @tag:           tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, int tag)
{
        int idx;

        if (!root->rnode)
                return 0;
        for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
                if (root->rnode->tags[tag][idx])
                        return 1;
        }
        return 0;
}
EXPORT_SYMBOL(radix_tree_tagged);

static void
radix_tree_node_ctor(void *node, kmem_cache_t *cachep, unsigned long flags)
{
        memset(node, 0, sizeof(struct radix_tree_node));
}

static __init unsigned long __maxindex(unsigned int height)
{
        unsigned int tmp = height * RADIX_TREE_MAP_SHIFT;
        unsigned long index = (~0UL >> (RADIX_TREE_INDEX_BITS - tmp - 1)) >> 1;

        if (tmp >= RADIX_TREE_INDEX_BITS)
                index = ~0UL;
        return index;
}

static __init void radix_tree_init_maxindex(void)
{
        unsigned int i;

        for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
                height_to_maxindex[i] = __maxindex(i);
}

#ifdef CONFIG_HOTPLUG_CPU
static int radix_tree_callback(struct notifier_block *nfb,
                                unsigned long action,
                                void *hcpu)
{
        int cpu = (long)hcpu;
        struct radix_tree_preload *rtp;

        /* Free per-cpu pool of preloaded nodes */
        if (action == CPU_DEAD) {
                rtp = &per_cpu(radix_tree_preloads, cpu);
                while (rtp->nr) {
                        kmem_cache_free(radix_tree_node_cachep,
                                        rtp->nodes[rtp->nr-1]);
                        rtp->nodes[rtp->nr-1] = NULL;
                        rtp->nr--;
                }
        }
        return NOTIFY_OK;
}
#endif /* CONFIG_HOTPLUG_CPU */

void __init radix_tree_init(void)
{
        radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
                        sizeof(struct radix_tree_node), 0,
                        SLAB_PANIC, radix_tree_node_ctor, NULL);
        radix_tree_init_maxindex();
        hotcpu_notifier(radix_tree_callback, 0);
}